From 734e35f322968eeadc93effcd852111c979317e7 Mon Sep 17 00:00:00 2001 From: Diego Hurtado Date: Fri, 12 Jan 2024 17:12:26 -0600 Subject: [PATCH 01/18] Add configuration prototype Fixes #3631 --- _configuration/LICENSE | 201 ++++ _configuration/README.rst | 42 + _configuration/noxfile.py | 12 + _configuration/pyproject.toml | 53 + _configuration/requirements.txt | 3 + .../opentelemetry/configuration/__init__.py | 39 + .../configuration/_internal/__init__.py | 452 +++++++ .../configuration/_internal/path_function.py | 1066 +++++++++++++++++ .../_internal/templates/template.jinja2 | 70 ++ .../src/opentelemetry/configuration/py.typed | 0 .../opentelemetry/configuration/version.py | 15 + _configuration/tests/__init__.py | 13 + _configuration/tests/data/common.json | 58 + .../tests/data/configuration_0.yaml | 380 ++++++ .../tests/data/configuration_1.yaml | 380 ++++++ .../tests/data/logger_provider.json | 109 ++ _configuration/tests/data/meter_provider.json | 264 ++++ .../data/opentelemetry_configuration.json | 50 + _configuration/tests/data/propagator.json | 17 + _configuration/tests/data/resource.json | 27 + .../tests/data/tracer_provider.json | 220 ++++ _configuration/tests/test_configuration.py | 164 +++ 22 files changed, 3635 insertions(+) create mode 100644 _configuration/LICENSE create mode 100644 _configuration/README.rst create mode 100644 _configuration/noxfile.py create mode 100644 _configuration/pyproject.toml create mode 100644 _configuration/requirements.txt create mode 100644 _configuration/src/opentelemetry/configuration/__init__.py create mode 100644 _configuration/src/opentelemetry/configuration/_internal/__init__.py create mode 100644 _configuration/src/opentelemetry/configuration/_internal/path_function.py create mode 100644 _configuration/src/opentelemetry/configuration/_internal/templates/template.jinja2 create mode 100644 _configuration/src/opentelemetry/configuration/py.typed create mode 100644 _configuration/src/opentelemetry/configuration/version.py create mode 100644 _configuration/tests/__init__.py create mode 100644 _configuration/tests/data/common.json create mode 100644 _configuration/tests/data/configuration_0.yaml create mode 100644 _configuration/tests/data/configuration_1.yaml create mode 100644 _configuration/tests/data/logger_provider.json create mode 100644 _configuration/tests/data/meter_provider.json create mode 100644 _configuration/tests/data/opentelemetry_configuration.json create mode 100644 _configuration/tests/data/propagator.json create mode 100644 _configuration/tests/data/resource.json create mode 100644 _configuration/tests/data/tracer_provider.json create mode 100644 _configuration/tests/test_configuration.py diff --git a/_configuration/LICENSE b/_configuration/LICENSE new file mode 100644 index 00000000000..1ef7dad2c5c --- /dev/null +++ b/_configuration/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright The OpenTelemetry Authors + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/_configuration/README.rst b/_configuration/README.rst
new file mode 100644
index 00000000000..d24dc67dac0
--- /dev/null
+++ b/_configuration/README.rst
@@ -0,0 +1,42 @@
+OpenTelemetry Python Configuration Prototype
+============================================
+
+This component is EXPERIMENTAL and subject to any kind of change at any moment.
+
+This prototype first needs the ``src/opentelemetry/configuration/_internal/path_function.py``
+to be generated with the ``opentelemetry.configuration.render_schema`` function.
+
+Once this file is generated, implement the functions defined there.
+
+To create any provider object, first create a ``Resource`` object:
+
+.. code-block:: python
+
+    from opentelemetry.configuration._internal.path_function import set_resource
+    from opentelemetry.configuration import (
+        resolve_schema,
+        process_schema,
+        create_object,
+        validate_configuration,
+    )
+    from pathlib import Path
+
+    data_path = Path(__file__).parent.joinpath("data")
+
+    configuration = validate_configuration(
+        data_path.joinpath("kitchen-sink.yaml")
+    )
+
+    processed_schema = process_schema(
+        resolve_schema(
+            data_path.joinpath("opentelemetry_configuration.json")
+        )
+    )
+
+    set_resource(
+        create_object(configuration, processed_schema, "resource")
+    )
+
+    tracer_provider = create_object(
+        configuration, processed_schema, "tracer_provider"
+    )
diff --git a/_configuration/noxfile.py b/_configuration/noxfile.py
new file mode 100644
index 00000000000..614a99fd136
--- /dev/null
+++ b/_configuration/noxfile.py
@@ -0,0 +1,12 @@
+from nox import session
+
+
+@session(python=["3.11"], reuse_venv=True)
+def test(session):
+    session.install(".")
+    session.install("-r", "requirements.txt")
+
+    if session.posargs:
+        session.run("pytest", *session.posargs)
+    else:
+        session.run("pytest", "tests/test_configuration.py")
diff --git a/_configuration/pyproject.toml b/_configuration/pyproject.toml
new file mode 100644
index 00000000000..3f4d97ee97e
--- /dev/null
+++ b/_configuration/pyproject.toml
@@ -0,0 +1,53 @@
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[project]
+name = "opentelemetry-configuration"
+dynamic = ["version"]
+description = "OpenTelemetry Python Configuration"
+readme = "README.rst"
+license = "Apache-2.0"
+requires-python = ">=3.7"
+authors = [
+  { name = "OpenTelemetry Authors", email = "cncf-opentelemetry-contributors@lists.cncf.io" },
+]
+classifiers = [
+  "Development Status :: 5 - Production/Stable",
+  "Intended Audience :: Developers",
+  "License :: OSI Approved :: Apache Software License",
+  "Programming Language :: Python",
+  "Programming Language :: Python :: 3",
+  "Programming Language :: Python :: 3.7",
+  "Programming Language :: Python :: 3.8",
+  "Programming Language :: Python :: 3.9",
+  "Programming Language :: Python :: 3.10",
+  "Programming Language :: Python :: 3.11",
+  "Typing :: Typed",
+]
+dependencies = [
+  "opentelemetry-api",
+  "opentelemetry-sdk",
+  "opentelemetry-exporter-otlp",
+  "opentelemetry-exporter-jaeger",
+  "opentelemetry-exporter-zipkin",
+  "jsonschema",
+  "pyyaml",
+  "jsonref",
+  "jinja2"
+]
+
+[project.urls]
+Homepage = "https://github.com/open-telemetry/opentelemetry-python/tree/main/opentelemetry-configuration"
+
+[tool.hatch.version]
+path = "src/opentelemetry/configuration/version.py"
+
+[tool.hatch.build.targets.sdist]
+include = [
+  "/src",
+  "/tests",
+]
+
+[tool.hatch.build.targets.wheel]
+packages = ["src/opentelemetry"]
diff --git a/_configuration/requirements.txt
b/_configuration/requirements.txt new file mode 100644 index 00000000000..d0f5095c8c9 --- /dev/null +++ b/_configuration/requirements.txt @@ -0,0 +1,3 @@ +pytest +pdbpp +ipdb diff --git a/_configuration/src/opentelemetry/configuration/__init__.py b/_configuration/src/opentelemetry/configuration/__init__.py new file mode 100644 index 00000000000..e2dbadcfd1d --- /dev/null +++ b/_configuration/src/opentelemetry/configuration/__init__.py @@ -0,0 +1,39 @@ +# Copyright The OpenTelemetry Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +The OpenTelemetry Configuration package is an implementation of the +OpenTelemetry Configuration Specification +""" + + +from opentelemetry.configuration._internal import ( + resolve_schema, + validate_configuration, + process_schema, + render_schema, + create_object, + load_configuration, + substitute_environment_variables, +) + +__all__ = [ + "resolve_schema", + "validate_configuration", + "process_schema", + "render_schema", + "create_object", + "load_configuration", + "substitute_environment_variables", +] diff --git a/_configuration/src/opentelemetry/configuration/_internal/__init__.py b/_configuration/src/opentelemetry/configuration/_internal/__init__.py new file mode 100644 index 00000000000..b8903cb2c42 --- /dev/null +++ b/_configuration/src/opentelemetry/configuration/_internal/__init__.py @@ -0,0 +1,452 @@ +# Copyright The OpenTelemetry Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from ipdb import set_trace +from os import environ +from yaml import safe_load +from re import compile as re_compile +from jsonref import JsonRef +from os.path import exists +from pathlib import Path +from os import getcwd +from collections import OrderedDict +from json import loads as json_loads +from jsonref import loads as jsonref_loads +from jsonschema.validators import Draft202012Validator +from referencing import Registry, Resource +from opentelemetry.configuration._internal.path_function import path_function +from jinja2 import Environment, FileSystemLoader + +set_trace + +_environment_variable_regex = re_compile(r"\$\{([a-zA-Z]\w*)\}") +_type_type = { + "integer": int, + "boolean": bool, + "string": str, + "array": list, + "object": object, + "number": float +} + + +def resolve_schema(json_file_path) -> dict: + + root_path = json_file_path.absolute() + + with open(json_file_path, "r") as json_file: + dictionary = jsonref_loads( + json_file.read(), base_uri=root_path.as_uri() + ) + + return dictionary + + +def load_configuration(configuration_file_path: str) -> dict: + + with open(configuration_file_path, "r") as configuration_file: + + return safe_load(configuration_file) + + +def validate_configuration(configuration: dict): + + root_path = Path(getcwd()).parent.parent + + schema_path = str( + root_path. + joinpath("schema"). + joinpath("opentelemetry_configuration.json") + ) + + if not exists(schema_path): + raise Exception(f"{schema_path} does not exist") + + def retrieve_from_path(path: str): + return Resource.from_contents(json_loads(Path(path).read_text())) + + Draft202012Validator( + {"$ref": schema_path}, + registry=Registry(retrieve=retrieve_from_path) + ).validate(configuration) + + +def process_schema(schema: dict) -> dict: + + def traverse( + schema: dict, + schema_value_id_stack: list, + schema_key_stack: list, + recursive_path: list, + processed_schema: dict, + ): + schema_value_type = schema.get("type") + + if schema_value_type == "array": + traverse( + schema["items"], + schema_value_id_stack, + schema_key_stack, + recursive_path, + processed_schema, + ) + + elif schema_value_type == "object": + + schema_properties = schema.get("properties", {}) + + all_attributes = set(schema_properties.keys()) + + positional_attributes = set(schema.get("required", [])) + + optional_attributes = ( + all_attributes.difference(positional_attributes) + ) + + positional_attributes = sorted(list(positional_attributes)) + optional_attributes = sorted(list(optional_attributes)) + + result_positional_attributes = OrderedDict() + result_optional_attributes = OrderedDict() + + for positional_attribute in positional_attributes: + + result_positional_attributes[positional_attribute] = ( + str( + _type_type[ + schema_properties[positional_attribute]["type"] + ].__name__ + ) + ) + + for optional_attribute in optional_attributes: + + result_optional_attributes[optional_attribute] = ( + str( + _type_type[ + schema_properties[optional_attribute]["type"] + ].__name__ + ) + ) + + children = {} + + children.update(result_positional_attributes) + children.update(result_optional_attributes) + + processed_schema[schema_key_stack[-1]] = { + "function_name": "_".join(schema_key_stack[1:]), + "positional_attributes": result_positional_attributes, + "optional_attributes": result_optional_attributes, + "additional_properties": ( + schema.get("additionalProperties", False) + or "patternProperties" in schema.keys() + ), + "recursive_path": recursive_path, + "children": children + } + + if recursive_path: + 
return + + for ( + schema_properties_key, + schema_properties_value + ) in schema_properties.items(): + + schema_properties_value_type = ( + schema_properties_value.get("type") + ) + + if ( + schema_properties_value_type != "object" + and schema_properties_value_type != "array" + ): + continue + + if isinstance(schema_properties_value, JsonRef): + schema_properties_value_id = ( + id(schema_properties_value.__subject__) + ) + + else: + schema_properties_value_id = id(schema_properties_value) + + is_recursive = ( + schema_properties_value_id in schema_value_id_stack + ) + + schema_value_id_stack.append(schema_properties_value_id) + schema_key_stack.append(schema_properties_key) + + recursive_path = [] + + if is_recursive: + + for ( + current_schema_key_stack, + current_schema_value_id + ) in zip( + schema_key_stack[1:], + schema_value_id_stack + ): + recursive_path.append(current_schema_key_stack) + if ( + schema_properties_value_id + == current_schema_value_id + ): + break + + traverse( + schema_properties_value, + schema_value_id_stack, + schema_key_stack, + recursive_path, + children, + ) + + schema_value_id_stack.pop() + schema_key_stack.pop() + + processed_schema = {} + + traverse(schema, [], [""], [], processed_schema) + + return processed_schema[""]["children"] + + +def render_schema(processed_schema: dict, path_function_path: Path): + + def traverse( + processed_schema: dict, + schema_function: dict, + function_arguments: dict, + ): + + for ( + processed_schema_key, + processed_schema_value + ) in processed_schema.items(): + + if not isinstance(processed_schema_value, dict): + continue + + function_arguments[processed_schema_value["function_name"]] = { + "optional_attributes": ( + processed_schema_value["optional_attributes"] + ), + "positional_attributes": ( + processed_schema_value["positional_attributes"] + ), + "additional_properties": ( + processed_schema_value["additional_properties"] + ), + } + + schema_function_children = {} + schema_function[processed_schema_key] = { + "function": processed_schema_value["function_name"], + "children": schema_function_children, + "recursive_path": processed_schema_value["recursive_path"] + } + + children = processed_schema_value["children"] + + if children: + traverse( + children, schema_function_children, function_arguments + ) + + schema_function = {} + function_arguments = {} + traverse(processed_schema, schema_function, function_arguments) + + current_path = Path(__file__).parent + + environment = Environment( + loader=FileSystemLoader(current_path.joinpath("templates")) + ) + + with open(path_function_path, "w") as result_py_file: + + result_py_file.write( + "\n".join( + [ + f"{line} # noqa" if len(line) > 80 else line + for line in environment.get_template("template.jinja2"). 
+ render(locals()).split("\n") + ] + ) + ) + + +def create_object( + configuration: dict, processed_schema: dict, object_name: str +) -> object: + + def create_object( + configuration: dict, + processed_schema: dict, + path_function: dict, + original_processed_schema: dict, + original_path_function: dict, + ) -> object: + + positional_arguments = [] + optional_arguments = {} + + for configuration_key, configuration_value in ( + configuration.items() + ): + + if isinstance(configuration_value, dict): + + if processed_schema["recursive_path"]: + + new_processed_schema = original_processed_schema + new_path_function = original_path_function + + for path in processed_schema["recursive_path"]: + new_processed_schema = ( + new_processed_schema[path]["children"] + ) + new_path_function = ( + new_path_function[path]["children"] + ) + + new_processed_schema = ( + new_processed_schema[configuration_key] + ) + new_path_function = ( + new_path_function[configuration_key] + ) + else: + new_processed_schema = ( + processed_schema["children"][configuration_key] + ) + new_path_function = ( + path_function["children"][configuration_key] + ) + + object_ = create_object( + configuration_value, + new_processed_schema, + new_path_function, + original_processed_schema, + original_path_function, + ) + + elif isinstance(configuration_value, list): + + object_ = [] + + for element in configuration_value: + + object_.append( + create_object( + element, + processed_schema["children"][configuration_key], + path_function["children"][configuration_key], + original_processed_schema, + original_path_function, + ) + ) + + else: + + object_ = configuration_value + + if configuration_key in ( + processed_schema["positional_attributes"].keys() + ): + positional_arguments.append(object_) + + else: + optional_arguments[configuration_key] = object_ + + return path_function["function"]( + *positional_arguments, **optional_arguments + ) + + return create_object( + configuration[object_name], + processed_schema[object_name], + path_function[object_name], + processed_schema, + path_function, + ) + + +def substitute_environment_variables( + configuration: dict, + processed_schema: dict +) -> dict: + + def traverse( + configuration: dict, + processed_schema: dict, + original_processed_schema: dict + ): + + for configuration_key, configuration_value in configuration.items(): + + if configuration_key not in processed_schema.keys(): + continue + + if isinstance(configuration_value, dict): + + recursive_paths = ( + processed_schema[configuration_key]["recursive_path"] + ) + + if recursive_paths: + + children = original_processed_schema + + for recursive_path in recursive_paths: + children = children[recursive_path]["children"] + + else: + children = processed_schema[configuration_key]["children"] + + traverse( + configuration_value, + children, + original_processed_schema + ) + + elif isinstance(configuration_value, list): + + for element in configuration_value: + if isinstance(element, dict): + traverse( + element, + processed_schema[configuration_key]["children"], + original_processed_schema + ) + + elif isinstance(configuration_value, str): + + match = _environment_variable_regex.match(configuration_value) + + if match is not None: + + configuration[configuration_key] = ( + __builtins__[processed_schema[configuration_key]] + (environ.get(match.group(1))) + ) + + traverse(configuration, processed_schema, processed_schema) + + return configuration diff --git a/_configuration/src/opentelemetry/configuration/_internal/path_function.py 
b/_configuration/src/opentelemetry/configuration/_internal/path_function.py new file mode 100644 index 00000000000..37830985558 --- /dev/null +++ b/_configuration/src/opentelemetry/configuration/_internal/path_function.py @@ -0,0 +1,1066 @@ +# Copyright The OpenTelemetry Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from opentelemetry.sdk.resources import Resource +from opentelemetry.sdk.trace import ( + TracerProvider, SynchronousMultiSpanProcessor, SpanLimits +) +from opentelemetry.sdk.trace.export import ( + BatchSpanProcessor, ConsoleSpanExporter, SimpleSpanProcessor +) +from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import ( + OTLPSpanExporter as GRPCOTLPSpanExporter +) +from opentelemetry.exporter.otlp.proto.http.trace_exporter import ( + OTLPSpanExporter as HTTPOTLPSpanExporter +) +from opentelemetry.exporter.zipkin.proto.http import ZipkinExporter +from opentelemetry.sdk.trace.sampling import ( + ParentBasedTraceIdRatio, + ALWAYS_OFF, + ALWAYS_ON, +) +from urllib.parse import urlparse +from unittest.mock import Mock + +_resource = None + + +def set_resource(resource): + global _resource + _resource = resource + + +def attribute_limits( + attribute_count_limit: int = None, + attribute_value_length_limit: int = None, + **kwargs +): + pass + + +def logger_provider( + limits: object = None, + processors: list = None +): + pass + + +def logger_provider_processors( + batch: object = None, + simple: object = None, + **kwargs +): + pass + + +def logger_provider_processors_batch( + exporter: object, + export_timeout: int = None, + max_export_batch_size: int = None, + max_queue_size: int = None, + schedule_delay: int = None +): + pass + + +def logger_provider_processors_batch_exporter( + otlp: object = None, + **kwargs +): + pass + + +def logger_provider_processors_batch_exporter_otlp( + endpoint: str, + protocol: str, + certificate: str = None, + client_certificate: str = None, + client_key: str = None, + compression: str = None, + headers: object = None, + timeout: int = None +): + pass + + +def logger_provider_processors_batch_exporter_otlp_headers( + **kwargs +): + pass + + +def logger_provider_processors_simple( + exporter: object +): + pass + + +def logger_provider_processors_simple_exporter( + otlp: object = None, + **kwargs +): + pass + + +def logger_provider_processors_simple_exporter_otlp( + endpoint: str, + protocol: str, + certificate: str = None, + client_certificate: str = None, + client_key: str = None, + compression: str = None, + headers: object = None, + timeout: int = None +): + pass + + +def logger_provider_processors_simple_exporter_otlp_headers( + **kwargs +): + pass + + +def logger_provider_limits( + attribute_count_limit: int = None, + attribute_value_length_limit: int = None +): + pass + + +def meter_provider( + readers: list = None, + views: list = None +): + pass + + +def meter_provider_readers( + periodic: object = None, + pull: object = None +): + pass + + +def meter_provider_readers_periodic( + exporter: object, + interval: int = None, + 
timeout: int = None +): + pass + + +def meter_provider_readers_periodic_exporter( + console: object = None, + otlp: object = None, + prometheus: object = None, + **kwargs +): + pass + + +def meter_provider_readers_periodic_exporter_otlp( + endpoint: str, + protocol: str, + certificate: str = None, + client_certificate: str = None, + client_key: str = None, + compression: str = None, + default_histogram_aggregation: str = None, + headers: object = None, + temporality_preference: str = None, + timeout: int = None +): + pass + + +def meter_provider_readers_periodic_exporter_otlp_headers( + **kwargs +): + pass + + +def meter_provider_readers_periodic_exporter_console(): + pass + + +def meter_provider_readers_periodic_exporter_prometheus( + host: str = None, + port: int = None +): + pass + + +def meter_provider_readers_pull( + exporter: object +): + pass + + +def meter_provider_readers_pull_exporter( + console: object = None, + otlp: object = None, + prometheus: object = None, + **kwargs +): + pass + + +def meter_provider_readers_pull_exporter_otlp( + endpoint: str, + protocol: str, + certificate: str = None, + client_certificate: str = None, + client_key: str = None, + compression: str = None, + default_histogram_aggregation: str = None, + headers: object = None, + temporality_preference: str = None, + timeout: int = None +): + pass + + +def meter_provider_readers_pull_exporter_otlp_headers( + **kwargs +): + pass + + +def meter_provider_readers_pull_exporter_console(): + pass + + +def meter_provider_readers_pull_exporter_prometheus( + host: str = None, + port: int = None +): + pass + + +def meter_provider_views( + selector: object = None, + stream: object = None +): + pass + + +def meter_provider_views_selector( + instrument_name: str = None, + instrument_type: str = None, + meter_name: str = None, + meter_schema_url: str = None, + meter_version: str = None, + unit: str = None +): + pass + + +def meter_provider_views_stream( + aggregation: object = None, + attribute_keys: list = None, + description: str = None, + name: str = None +): + pass + + +def meter_provider_views_stream_aggregation( + base2_exponential_bucket_histogram: object = None, + default: object = None, + drop: object = None, + explicit_bucket_histogram: object = None, + last_value: object = None, + sum: object = None +): + pass + + +def meter_provider_views_stream_aggregation_default(): + pass + + +def meter_provider_views_stream_aggregation_drop(): + pass + + +def meter_provider_views_stream_aggregation_explicit_bucket_histogram( + boundaries: list = None, + record_min_max: bool = None +): + pass + + +def meter_provider_views_stream_aggregation_base2_exponential_bucket_histogram( + max_scale: int = None, + max_size: int = None, + record_min_max: bool = None +): + pass + + +def meter_provider_views_stream_aggregation_last_value(): + pass + + +def meter_provider_views_stream_aggregation_sum(): + pass + + +def propagator( + composite: list = None, + **kwargs +): + pass + + +def tracer_provider( + limits: object = None, + processors: list = None, + sampler: object = None +): + # FIXME how to define shutdown_on_exit? + # FIXME how to define id_generator? + # FIXME how to define if the span processors should be synchronous or not? 
+ + synchronous_multi_span_processor = SynchronousMultiSpanProcessor() + + if processors is not None: + for processor in processors: + synchronous_multi_span_processor.add_span_processor(processor) + + return TracerProvider( + sampler=sampler, + resource=_resource, + active_span_processor=synchronous_multi_span_processor, + span_limits=limits + ) + + +def tracer_provider_processors( + batch: object = None, + simple: object = None, + **kwargs +): + return batch or simple + + +def tracer_provider_processors_batch( + exporter: object, + export_timeout: int = None, + max_export_batch_size: int = None, + max_queue_size: int = None, + schedule_delay: int = None +): + return BatchSpanProcessor( + exporter, + max_queue_size=max_queue_size, + schedule_delay_millis=schedule_delay, + max_export_batch_size=max_export_batch_size, + export_timeout_millis=export_timeout + ) + + +def tracer_provider_processors_batch_exporter( + console: object = None, + otlp: object = None, + zipkin: object = None, + **kwargs +): + return console or otlp or zipkin + + +def tracer_provider_processors_batch_exporter_otlp( + endpoint: str, + protocol: str, + certificate: str = None, + client_certificate: str = None, + client_key: str = None, + compression: str = None, + headers: object = None, + timeout: int = None +): + protocol = urlparse(protocol).scheme + + if protocol.startswith("http"): + exporter_class = HTTPOTLPSpanExporter + + else: + exporter_class = GRPCOTLPSpanExporter + + return exporter_class( + endpoint=endpoint, + # insecure=None, + # FIXME somehow create credentials here + # from grpc.credentials import create_credentials + # credentials=create_credentials() + headers=headers, + timeout=timeout, + # compression=compression + ) + + +def tracer_provider_processors_batch_exporter_otlp_headers( + **kwargs +): + return kwargs + + +def tracer_provider_processors_batch_exporter_console(): + return ConsoleSpanExporter + pass + + +def tracer_provider_processors_batch_exporter_zipkin( + endpoint: str, + timeout: int = None +): + return ZipkinExporter(endpoint, timeout=timeout) + + +def tracer_provider_processors_simple( + exporter: object +): + return SimpleSpanProcessor(exporter) + + +def tracer_provider_processors_simple_exporter( + console: object = None, + otlp: object = None, + zipkin: object = None, + **kwargs +): + return console or otlp or zipkin + + +def tracer_provider_processors_simple_exporter_otlp( + endpoint: str, + protocol: str, + certificate: str = None, + client_certificate: str = None, + client_key: str = None, + compression: str = None, + headers: object = None, + timeout: int = None +): + protocol = urlparse(protocol).scheme + + if protocol.startswith("http"): + exporter_class = HTTPOTLPSpanExporter + + else: + exporter_class = GRPCOTLPSpanExporter + + return exporter_class( + endpoint=endpoint, + # insecure=None, + # FIXME somehow create credentials here + # from grpc.credentials import create_credentials + # credentials=create_credentials() + headers=headers, + timeout=timeout, + # compression=compression + ) + + +def tracer_provider_processors_simple_exporter_otlp_headers( + **kwargs +): + return kwargs + + +def tracer_provider_processors_simple_exporter_console(): + return ConsoleSpanExporter() + + +def tracer_provider_processors_simple_exporter_zipkin( + endpoint: str, + timeout: int = None +): + return ZipkinExporter(endpoint, timeout=timeout) + + +def tracer_provider_limits( + attribute_count_limit: int = None, + attribute_value_length_limit: int = None, + event_attribute_count_limit: int = 
None, + event_count_limit: int = None, + link_attribute_count_limit: int = None, + link_count_limit: int = None +): + return SpanLimits( + max_span_attributes=attribute_count_limit, + max_span_attribute_length=attribute_value_length_limit, + max_event_attributes=event_count_limit, + max_events=event_count_limit, + max_link_attributes=link_attribute_count_limit, + max_links=link_count_limit, + ) + + +def tracer_provider_sampler( + always_off: object = None, + always_on: object = None, + jaeger_remote: object = None, + parent_based: object = None, + trace_id_ratio_based: object = None, + **kwargs +): + return Mock( + type="sampler", + always_off=always_off, + always_on=always_on, + jaeger_remote=jaeger_remote, + parent_based=parent_based, + trace_id_ratio_based=trace_id_ratio_based, + **kwargs + ) + + +def tracer_provider_sampler_always_off(): + return ALWAYS_OFF + + +def tracer_provider_sampler_always_on(): + return ALWAYS_ON + + +def tracer_provider_sampler_jaeger_remote( + endpoint: str = None, + initial_sampler: object = None, + interval: int = None +): + return Mock( + type="jaeger_remote", + endpoint=endpoint, + initial_sampler=initial_sampler, + interval=interval + ) + + +def tracer_provider_sampler_jaeger_remote_initial_sampler( + always_off: object = None, + always_on: object = None, + jaeger_remote: object = None, + parent_based: object = None, + trace_id_ratio_based: object = None, + **kwargs +): + return Mock( + type="initial_sampler", + always_off=always_off, + always_on=always_on, + jaeger_remote=jaeger_remote, + parent_based=parent_based, + trace_id_ratio_based=trace_id_ratio_based, + **kwargs + ) + + +def tracer_provider_sampler_parent_based( + local_parent_not_sampled: object = None, + local_parent_sampled: object = None, + remote_parent_not_sampled: object = None, + remote_parent_sampled: object = None, + root: object = None +): + return Mock( + type="parent_based", + local_parent_not_sampled=local_parent_not_sampled, + local_parent_sampled=local_parent_sampled, + remote_parent_not_sampled=remote_parent_not_sampled, + remote_parent_sampled=remote_parent_sampled, + root=root, + ) + + +def tracer_provider_sampler_parent_based_root( + always_off: object = None, + always_on: object = None, + jaeger_remote: object = None, + parent_based: object = None, + trace_id_ratio_based: object = None, + **kwargs +): + return Mock( + type="root", + always_off=always_off, + always_on=always_on, + jaeger_remote=jaeger_remote, + parent_based=parent_based, + trace_id_ratio_based=trace_id_ratio_based, + **kwargs + ) + + +def tracer_provider_sampler_parent_based_remote_parent_sampled( + always_off: object = None, + always_on: object = None, + jaeger_remote: object = None, + parent_based: object = None, + trace_id_ratio_based: object = None, + **kwargs +): + return Mock( + type="remote_parent_sampled", + always_off=always_off, + always_on=always_on, + jaeger_remote=jaeger_remote, + parent_based=parent_based, + trace_id_ratio_based=trace_id_ratio_based, + **kwargs + ) + + +def tracer_provider_sampler_parent_based_remote_parent_not_sampled( + always_off: object = None, + always_on: object = None, + jaeger_remote: object = None, + parent_based: object = None, + trace_id_ratio_based: object = None, + **kwargs +): + return Mock( + type="remote_parent_not_sampled", + always_off=always_off, + always_on=always_on, + jaeger_remote=jaeger_remote, + parent_based=parent_based, + trace_id_ratio_based=trace_id_ratio_based, + **kwargs + ) + + +def tracer_provider_sampler_parent_based_local_parent_sampled( + 
always_off: object = None, + always_on: object = None, + jaeger_remote: object = None, + parent_based: object = None, + trace_id_ratio_based: object = None, + **kwargs +): + return Mock( + type="local_parent_sampled", + always_off=always_off, + always_on=always_on, + jaeger_remote=jaeger_remote, + parent_based=parent_based, + trace_id_ratio_based=trace_id_ratio_based, + **kwargs + ) + + +def tracer_provider_sampler_parent_based_local_parent_not_sampled( + always_off: object = None, + always_on: object = None, + jaeger_remote: object = None, + parent_based: object = None, + trace_id_ratio_based: object = None, + **kwargs +): + return Mock( + type="local_parent_not_sampled", + always_off=always_off, + always_on=always_on, + jaeger_remote=jaeger_remote, + parent_based=parent_based, + trace_id_ratio_based=trace_id_ratio_based, + **kwargs + ) + + +def tracer_provider_sampler_trace_id_ratio_based( + ratio: float = None +): + return ParentBasedTraceIdRatio(ratio) + + +def resource( + attributes: object = None, + schema_url: str = None +): + return Resource.create(attributes=attributes, schema_url=schema_url) + + +def resource_attributes( + service_name: str = None, + **kwargs +): + return {"service.name": service_name, **kwargs} + + +path_function = { + "attribute_limits": { + "function": attribute_limits, + "children": {}, + "recursive_path": [], + }, + "logger_provider": { + "function": logger_provider, + "children": { + "processors": { + "function": logger_provider_processors, + "children": { + "batch": { + "function": logger_provider_processors_batch, + "children": { + "exporter": { + "function": logger_provider_processors_batch_exporter, # noqa + "children": { + "otlp": { + "function": logger_provider_processors_batch_exporter_otlp, # noqa + "children": { + "headers": { + "function": logger_provider_processors_batch_exporter_otlp_headers, # noqa + "children": {}, + "recursive_path": [], + }, + }, + "recursive_path": [], + }, + }, + "recursive_path": [], + }, + }, + "recursive_path": [], + }, + "simple": { + "function": logger_provider_processors_simple, + "children": { + "exporter": { + "function": logger_provider_processors_simple_exporter, # noqa + "children": { + "otlp": { + "function": logger_provider_processors_simple_exporter_otlp, # noqa + "children": { + "headers": { + "function": logger_provider_processors_simple_exporter_otlp_headers, # noqa + "children": {}, + "recursive_path": [], + }, + }, + "recursive_path": [], + }, + }, + "recursive_path": [], + }, + }, + "recursive_path": [], + }, + }, + "recursive_path": [], + }, + "limits": { + "function": logger_provider_limits, + "children": {}, + "recursive_path": [], + }, + }, + "recursive_path": [], + }, + "meter_provider": { + "function": meter_provider, + "children": { + "readers": { + "function": meter_provider_readers, + "children": { + "periodic": { + "function": meter_provider_readers_periodic, + "children": { + "exporter": { + "function": meter_provider_readers_periodic_exporter, # noqa + "children": { + "otlp": { + "function": meter_provider_readers_periodic_exporter_otlp, # noqa + "children": { + "headers": { + "function": meter_provider_readers_periodic_exporter_otlp_headers, # noqa + "children": {}, + "recursive_path": [], + }, + }, + "recursive_path": [], + }, + "console": { + "function": meter_provider_readers_periodic_exporter_console, # noqa + "children": {}, + "recursive_path": [], + }, + "prometheus": { + "function": meter_provider_readers_periodic_exporter_prometheus, # noqa + "children": {}, + "recursive_path": [], 
+ }, + }, + "recursive_path": [], + }, + }, + "recursive_path": [], + }, + "pull": { + "function": meter_provider_readers_pull, + "children": { + "exporter": { + "function": meter_provider_readers_pull_exporter, # noqa + "children": { + "otlp": { + "function": meter_provider_readers_pull_exporter_otlp, # noqa + "children": { + "headers": { + "function": meter_provider_readers_pull_exporter_otlp_headers, # noqa + "children": {}, + "recursive_path": [], + }, + }, + "recursive_path": [], + }, + "console": { + "function": meter_provider_readers_pull_exporter_console, # noqa + "children": {}, + "recursive_path": [], + }, + "prometheus": { + "function": meter_provider_readers_pull_exporter_prometheus, # noqa + "children": {}, + "recursive_path": [], + }, + }, + "recursive_path": [], + }, + }, + "recursive_path": [], + }, + }, + "recursive_path": [], + }, + "views": { + "function": meter_provider_views, + "children": { + "selector": { + "function": meter_provider_views_selector, + "children": {}, + "recursive_path": [], + }, + "stream": { + "function": meter_provider_views_stream, + "children": { + "aggregation": { + "function": meter_provider_views_stream_aggregation, # noqa + "children": { + "default": { + "function": meter_provider_views_stream_aggregation_default, # noqa + "children": {}, + "recursive_path": [], + }, + "drop": { + "function": meter_provider_views_stream_aggregation_drop, # noqa + "children": {}, + "recursive_path": [], + }, + "explicit_bucket_histogram": { + "function": meter_provider_views_stream_aggregation_explicit_bucket_histogram, # noqa + "children": {}, + "recursive_path": [], + }, + "base2_exponential_bucket_histogram": { + "function": meter_provider_views_stream_aggregation_base2_exponential_bucket_histogram, # noqa + "children": {}, + "recursive_path": [], + }, + "last_value": { + "function": meter_provider_views_stream_aggregation_last_value, # noqa + "children": {}, + "recursive_path": [], + }, + "sum": { + "function": meter_provider_views_stream_aggregation_sum, # noqa + "children": {}, + "recursive_path": [], + }, + }, + "recursive_path": [], + }, + }, + "recursive_path": [], + }, + }, + "recursive_path": [], + }, + }, + "recursive_path": [], + }, + "propagator": { + "function": propagator, + "children": {}, + "recursive_path": [], + }, + "tracer_provider": { + "function": tracer_provider, + "children": { + "processors": { + "function": tracer_provider_processors, + "children": { + "batch": { + "function": tracer_provider_processors_batch, + "children": { + "exporter": { + "function": tracer_provider_processors_batch_exporter, # noqa + "children": { + "otlp": { + "function": tracer_provider_processors_batch_exporter_otlp, # noqa + "children": { + "headers": { + "function": tracer_provider_processors_batch_exporter_otlp_headers, # noqa + "children": {}, + "recursive_path": [], + }, + }, + "recursive_path": [], + }, + "console": { + "function": tracer_provider_processors_batch_exporter_console, # noqa + "children": {}, + "recursive_path": [], + }, + "zipkin": { + "function": tracer_provider_processors_batch_exporter_zipkin, # noqa + "children": {}, + "recursive_path": [], + }, + }, + "recursive_path": [], + }, + }, + "recursive_path": [], + }, + "simple": { + "function": tracer_provider_processors_simple, + "children": { + "exporter": { + "function": tracer_provider_processors_simple_exporter, # noqa + "children": { + "otlp": { + "function": tracer_provider_processors_simple_exporter_otlp, # noqa + "children": { + "headers": { + "function": 
tracer_provider_processors_simple_exporter_otlp_headers, # noqa + "children": {}, + "recursive_path": [], + }, + }, + "recursive_path": [], + }, + "console": { + "function": tracer_provider_processors_simple_exporter_console, # noqa + "children": {}, + "recursive_path": [], + }, + "zipkin": { + "function": tracer_provider_processors_simple_exporter_zipkin, # noqa + "children": {}, + "recursive_path": [], + }, + }, + "recursive_path": [], + }, + }, + "recursive_path": [], + }, + }, + "recursive_path": [], + }, + "limits": { + "function": tracer_provider_limits, + "children": {}, + "recursive_path": [], + }, + "sampler": { + "function": tracer_provider_sampler, + "children": { + "always_off": { + "function": tracer_provider_sampler_always_off, + "children": {}, + "recursive_path": [], + }, + "always_on": { + "function": tracer_provider_sampler_always_on, + "children": {}, + "recursive_path": [], + }, + "jaeger_remote": { + "function": tracer_provider_sampler_jaeger_remote, + "children": { + "initial_sampler": { + "function": tracer_provider_sampler_jaeger_remote_initial_sampler, # noqa + "children": {}, + "recursive_path": ['tracer_provider', 'sampler'], # noqa + }, + }, + "recursive_path": [], + }, + "parent_based": { + "function": tracer_provider_sampler_parent_based, + "children": { + "root": { + "function": tracer_provider_sampler_parent_based_root, # noqa + "children": {}, + "recursive_path": ['tracer_provider', 'sampler'], # noqa + }, + "remote_parent_sampled": { + "function": tracer_provider_sampler_parent_based_remote_parent_sampled, # noqa + "children": {}, + "recursive_path": ['tracer_provider', 'sampler'], # noqa + }, + "remote_parent_not_sampled": { + "function": tracer_provider_sampler_parent_based_remote_parent_not_sampled, # noqa + "children": {}, + "recursive_path": ['tracer_provider', 'sampler'], # noqa + }, + "local_parent_sampled": { + "function": tracer_provider_sampler_parent_based_local_parent_sampled, # noqa + "children": {}, + "recursive_path": ['tracer_provider', 'sampler'], # noqa + }, + "local_parent_not_sampled": { + "function": tracer_provider_sampler_parent_based_local_parent_not_sampled, # noqa + "children": {}, + "recursive_path": ['tracer_provider', 'sampler'], # noqa + }, + }, + "recursive_path": [], + }, + "trace_id_ratio_based": { + "function": tracer_provider_sampler_trace_id_ratio_based, # noqa + "children": {}, + "recursive_path": [], + }, + }, + "recursive_path": [], + }, + }, + "recursive_path": [], + }, + "resource": { + "function": resource, + "children": { + "attributes": { + "function": resource_attributes, + "children": {}, + "recursive_path": [], + }, + }, + "recursive_path": [], + }, +} diff --git a/_configuration/src/opentelemetry/configuration/_internal/templates/template.jinja2 b/_configuration/src/opentelemetry/configuration/_internal/templates/template.jinja2 new file mode 100644 index 00000000000..7226d6ea683 --- /dev/null +++ b/_configuration/src/opentelemetry/configuration/_internal/templates/template.jinja2 @@ -0,0 +1,70 @@ +# Copyright The OpenTelemetry Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + + +_resource = None + + +def set_resource(resource): + global _resource + _resource = resource + + +{% for function_argument_key, function_argument_value in function_arguments.items() -%} + def {{ function_argument_key }}( + {%- if function_argument_value["positional_attributes"]|length == 0 and function_argument_value["optional_attributes"]|length == 0 and not function_argument_value["additional_properties"] -%} + ): + {%- else -%} + {{- "\n" -}} + {%- for positional_attribute_key, positional_attribute_value in function_argument_value["positional_attributes"].items() -%} + {{- " " }}{{ positional_attribute_key }}: {{ positional_attribute_value -}} + {%- if not loop.last or function_argument_value["optional_attributes"]|length > 0 or function_argument_value["additional_properties"]-%} + {{ ",\n" }} + {%- endif -%} + {%- endfor -%} + {%- for optional_attribute_key, optional_attribute_value in function_argument_value["optional_attributes"].items() -%} + {{- " " }}{{ optional_attribute_key }}: {{ optional_attribute_value }} = None + {%- if not loop.last or function_argument_value["additional_properties"]-%} + {{ ",\n" }} + {%- endif -%} + {%- endfor -%} + {%- if function_argument_value["additional_properties"] -%} + {{ " **kwargs" }} + {%- endif -%} + {{- "\n" -}} + ): + {%- endif -%} + {{- "\n pass\n\n\n" -}} +{%- endfor -%} + +{%- macro render_dict(schema_function, indentation) -%} + {%- for key, value in schema_function.items() -%} + {{- " " * indentation * 4 }}"{{ key }}":{{ " " -}} + {%- if value is mapping -%} + {%- if value|length == 0 -%} + {{- "{},\n" -}} + {%- else -%} + {{- "{\n" -}} + {{- render_dict(value, indentation + 1) -}} + {{- " " * indentation * 4 }}{{ "},\n" -}} + {%- endif -%} + {%- else -%} + {{- value }}{{ ",\n" -}} + {%- endif -%} + {%- endfor -%} +{%- endmacro -%} + +path_function = {{ "{\n" -}} +{{- render_dict(schema_function, 1) -}} +{{- "}" -}} diff --git a/_configuration/src/opentelemetry/configuration/py.typed b/_configuration/src/opentelemetry/configuration/py.typed new file mode 100644 index 00000000000..e69de29bb2d diff --git a/_configuration/src/opentelemetry/configuration/version.py b/_configuration/src/opentelemetry/configuration/version.py new file mode 100644 index 00000000000..4effd145cba --- /dev/null +++ b/_configuration/src/opentelemetry/configuration/version.py @@ -0,0 +1,15 @@ +# Copyright The OpenTelemetry Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +__version__ = "0.1.0.dev" diff --git a/_configuration/tests/__init__.py b/_configuration/tests/__init__.py new file mode 100644 index 00000000000..b0a6f428417 --- /dev/null +++ b/_configuration/tests/__init__.py @@ -0,0 +1,13 @@ +# Copyright The OpenTelemetry Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/_configuration/tests/data/common.json b/_configuration/tests/data/common.json new file mode 100644 index 00000000000..55b14f8363e --- /dev/null +++ b/_configuration/tests/data/common.json @@ -0,0 +1,58 @@ +{ + "$id": "https://opentelemetry.io/otelconfig/common.json", + "$schema": "https://json-schema.org/draft/2020-12/schema", + "title": "Common", + "type": "object", + "$defs": { + "Headers": { + "type": "object", + "title": "Headers", + "patternProperties": { + ".*": { + "type": "string" + } + } + }, + "Otlp": { + "type": "object", + "additionalProperties": false, + "properties": { + "protocol": { + "type": "string", + "pattern": "^(http|grpc)\\/(protobuf|json)" + }, + "endpoint": { + "type": "string" + }, + "certificate": { + "type": "string" + }, + "client_key": { + "type": "string" + }, + "client_certificate": { + "type": "string" + }, + "headers": { + "$ref": "#/$defs/Headers" + }, + "compression": { + "type": "string" + }, + "timeout": { + "type": "integer", + "minimum": 0 + } + }, + "required": [ + "endpoint", + "protocol" + ], + "title": "Otlp" + }, + "Console": { + "type": "object", + "additionalProperties": false + } + } +} \ No newline at end of file diff --git a/_configuration/tests/data/configuration_0.yaml b/_configuration/tests/data/configuration_0.yaml new file mode 100644 index 00000000000..11ea8830778 --- /dev/null +++ b/_configuration/tests/data/configuration_0.yaml @@ -0,0 +1,380 @@ +# kitchen-sink.yaml demonstrates all configurable surface area, including explanatory comments. +# +# It DOES NOT represent expected real world configuration, as it makes strange configuration +# choices in an effort to exercise the full surface area. +# +# Configuration values are set to their defaults when default values are defined. + +# The file format version +file_format: "0.1" + +# Configure if the SDK is disabled or not. This is not required to be provided +# to ensure the SDK isn't disabled, the default value when this is not provided +# is for the SDK to be enabled. +# +# Environment variable: OTEL_SDK_DISABLED +disabled: false + +# Configure general attribute limits. See also tracer_provider.limits, logger_provider.limits. +attribute_limits: + # Configure max attribute value size. + # + # Environment variable: OTEL_ATTRIBUTE_VALUE_LENGTH_LIMIT + attribute_value_length_limit: 4096 + # Configure max attribute count. + # + # Environment variable: OTEL_ATTRIBUTE_COUNT_LIMIT + attribute_count_limit: 128 + +# Configure logger provider. +logger_provider: + # Configure log record processors. + processors: + # Configure a batch log record processor. + - batch: + # Configure delay interval (in milliseconds) between two consecutive exports. + # + # Environment variable: OTEL_BLRP_SCHEDULE_DELAY + schedule_delay: 5000 + # Configure maximum allowed time (in milliseconds) to export data. + # + # Environment variable: OTEL_BLRP_EXPORT_TIMEOUT + export_timeout: 30000 + # Configure maximum queue size. + # + # Environment variable: OTEL_BLRP_MAX_QUEUE_SIZE + max_queue_size: 2048 + # Configure maximum batch size. 
+ # + # Environment variable: OTEL_BLRP_MAX_EXPORT_BATCH_SIZE + max_export_batch_size: 512 + # Configure exporter. + # + # Environment variable: OTEL_LOGS_EXPORTER + exporter: + # Configure exporter to be OTLP. + otlp: + # Configure protocol. + # + # Environment variable: OTEL_EXPORTER_OTLP_PROTOCOL, OTEL_EXPORTER_OTLP_LOGS_PROTOCOL + protocol: http/protobuf + # Configure endpoint. + # + # Environment variable: OTEL_EXPORTER_OTLP_ENDPOINT, OTEL_EXPORTER_OTLP_LOGS_ENDPOINT + endpoint: http://localhost:4318 + # Configure certificate. + # + # Environment variable: OTEL_EXPORTER_OTLP_CERTIFICATE, OTEL_EXPORTER_OTLP_LOGS_CERTIFICATE + certificate: /app/cert.pem + # Configure mTLS private client key. + # + # Environment variable: OTEL_EXPORTER_OTLP_CLIENT_KEY, OTEL_EXPORTER_OTLP_LOGS_CLIENT_KEY + client_key: /app/cert.pem + # Configure mTLS client certificate. + # + # Environment variable: OTEL_EXPORTER_OTLP_CLIENT_CERTIFICATE, OTEL_EXPORTER_OTLP_LOGS_CLIENT_CERTIFICATE + client_certificate: /app/cert.pem + # Configure headers. + # + # Environment variable: OTEL_EXPORTER_OTLP_HEADERS, OTEL_EXPORTER_OTLP_LOGS_HEADERS + headers: + api-key: "1234" + # Configure compression. + # + # Environment variable: OTEL_EXPORTER_OTLP_COMPRESSION, OTEL_EXPORTER_OTLP_LOGS_COMPRESSION + compression: gzip + # Configure max time (in milliseconds) to wait for each export. + # + # Environment variable: OTEL_EXPORTER_OTLP_TIMEOUT, OTEL_EXPORTER_OTLP_LOGS_TIMEOUT + timeout: 10000 + # Configure log record limits. See also attribute_limits. + limits: + # Configure max log record attribute value size. Overrides attribute_limits.attribute_value_length_limit. + # + # Environment variable: OTEL_LOGRECORD_ATTRIBUTE_VALUE_LENGTH_LIMIT + attribute_value_length_limit: 4096 + # Configure max log record attribute count. Overrides attribute_limits.attribute_count_limit. + # + # Environment variable: OTEL_LOGRECORD_ATTRIBUTE_COUNT_LIMIT + attribute_count_limit: 128 + +# Configure meter provider. +meter_provider: + # Configure metric readers. + readers: + # Configure a pull-based metric reader. + - pull: + # Configure exporter. + # + # Environment variable: OTEL_METRICS_EXPORTER + exporter: + # Configure exporter to be prometheus. + prometheus: + # Configure host. + # + # Environment variable: OTEL_EXPORTER_PROMETHEUS_HOST + host: localhost + # Configure port. + # + # Environment variable: OTEL_EXPORTER_PROMETHEUS_PORT + port: 9464 + # Configure a periodic metric reader. + - periodic: + # Configure delay interval (in milliseconds) between start of two consecutive exports. + # + # Environment variable: OTEL_METRIC_EXPORT_INTERVAL + interval: 5000 + # Configure maximum allowed time (in milliseconds) to export data. + # + # Environment variable: OTEL_METRIC_EXPORT_TIMEOUT + timeout: 30000 + # Configure exporter. + # + # Environment variable: OTEL_METRICS_EXPORTER + exporter: + # Configure exporter to be OTLP. + otlp: + # Configure protocol. + # + # Environment variable: OTEL_EXPORTER_OTLP_PROTOCOL, OTEL_EXPORTER_OTLP_METRICS_PROTOCOL + protocol: http/protobuf + # Configure endpoint. + # + # Environment variable: OTEL_EXPORTER_OTLP_ENDPOINT, OTEL_EXPORTER_OTLP_METRICS_ENDPOINT + endpoint: http://localhost:4318 + # Configure certificate. + # + # Environment variable: OTEL_EXPORTER_OTLP_CERTIFICATE, OTEL_EXPORTER_OTLP_METRICS_CERTIFICATE + certificate: /app/cert.pem + # Configure mTLS private client key. 
+ # + # Environment variable: OTEL_EXPORTER_OTLP_CLIENT_KEY, OTEL_EXPORTER_OTLP_METRICS_CLIENT_KEY + client_key: /app/cert.pem + # Configure mTLS client certificate. + # + # Environment variable: OTEL_EXPORTER_OTLP_CLIENT_CERTIFICATE, OTEL_EXPORTER_OTLP_METRICS_CLIENT_CERTIFICATE + client_certificate: /app/cert.pem + # Configure headers. + # + # Environment variable: OTEL_EXPORTER_OTLP_HEADERS, OTEL_EXPORTER_OTLP_METRICS_HEADERS + headers: + api-key: !!str 1234 + # Configure compression. + # + # Environment variable: OTEL_EXPORTER_OTLP_COMPRESSION, OTEL_EXPORTER_OTLP_METRICS_COMPRESSION + compression: gzip + # Configure max time (in milliseconds) to wait for each export. + # + # Environment variable: OTEL_EXPORTER_OTLP_TIMEOUT, OTEL_EXPORTER_OTLP_METRICS_TIMEOUT + timeout: 10000 + # Configure temporality preference. + # + # Environment variable: OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE + temporality_preference: delta + # Configure default histogram aggregation. + # + # Environment variable: OTEL_EXPORTER_OTLP_METRICS_DEFAULT_HISTOGRAM_AGGREGATION + default_histogram_aggregation: base2_exponential_bucket_histogram + # Configure a periodic metric reader. + - periodic: + # Configure exporter. + exporter: + # Configure exporter to be console. + console: {} + # Configure views. Each view has a selector which determines the instrument(s) it applies to, and a configuration for the resulting stream(s). + views: + # Configure a view. + - selector: + # Configure instrument name selection criteria. + instrument_name: my-instrument + # Configure instrument type selection criteria. + instrument_type: histogram + # Configure the instrument unit selection criteria. + unit: ms + # Configure meter name selection criteria. + meter_name: my-meter + # Configure meter version selection criteria. + meter_version: 1.0.0 + # Configure meter schema url selection criteria. + meter_schema_url: https://opentelemetry.io/schemas/1.16.0 + # Configure stream. + stream: + # Configure metric name of the resulting stream(s). + name: new_instrument_name + # Configure metric description of the resulting stream(s). + description: new_description + # Configure aggregation of the resulting stream(s). Known values include: default, drop, explicit_bucket_histogram, base2_exponential_bucket_histogram, last_value, sum. + aggregation: + # Configure aggregation to be explicit_bucket_histogram. + explicit_bucket_histogram: + # Configure bucket boundaries. + boundaries: [ 0.0, 5.0, 10.0, 25.0, 50.0, 75.0, 100.0, 250.0, 500.0, 750.0, 1000.0, 2500.0, 5000.0, 7500.0, 10000.0 ] + # Configure record min and max. + record_min_max: true + # Configure attribute keys retained in the resulting stream(s). + attribute_keys: + - key1 + - key2 + +# Configure text map context propagators. +# +# Environment variable: OTEL_PROPAGATORS +propagator: + composite: [tracecontext, baggage, b3, b3multi, jaeger, xray, ottrace] + +# Configure tracer provider. +tracer_provider: + # Configure span processors. + processors: + # Configure a batch span processor. + - batch: + # Configure delay interval (in milliseconds) between two consecutive exports. + # + # Environment variable: OTEL_BSP_SCHEDULE_DELAY + schedule_delay: 5000 + # Configure maximum allowed time (in milliseconds) to export data. + # + # Environment variable: OTEL_BSP_EXPORT_TIMEOUT + export_timeout: 30000 + # Configure maximum queue size. + # + # Environment variable: OTEL_BSP_MAX_QUEUE_SIZE + max_queue_size: 2048 + # Configure maximum batch size. 
+ # + # Environment variable: OTEL_BSP_MAX_EXPORT_BATCH_SIZE + max_export_batch_size: 512 + # Configure exporter. + # + # Environment variable: OTEL_TRACES_EXPORTER + exporter: + # Configure exporter to be OTLP. + otlp: + # Configure protocol. + # + # Environment variable: OTEL_EXPORTER_OTLP_PROTOCOL, OTEL_EXPORTER_OTLP_TRACES_PROTOCOL + protocol: http/protobuf + # Configure endpoint. + # + # Environment variable: OTEL_EXPORTER_OTLP_ENDPOINT, OTEL_EXPORTER_OTLP_TRACES_ENDPOINT + endpoint: http://localhost:4318 + # Configure certificate. + # + # Environment variable: OTEL_EXPORTER_OTLP_CERTIFICATE, OTEL_EXPORTER_OTLP_TRACES_CERTIFICATE + certificate: /app/cert.pem + # Configure mTLS private client key. + # + # Environment variable: OTEL_EXPORTER_OTLP_CLIENT_KEY, OTEL_EXPORTER_OTLP_TRACES_CLIENT_KEY + client_key: /app/cert.pem + # Configure mTLS client certificate. + # + # Environment variable: OTEL_EXPORTER_OTLP_CLIENT_CERTIFICATE, OTEL_EXPORTER_OTLP_TRACES_CLIENT_CERTIFICATE + client_certificate: /app/cert.pem + # Configure headers. + # + # Environment variable: OTEL_EXPORTER_OTLP_HEADERS, OTEL_EXPORTER_OTLP_TRACES_HEADERS + headers: + api-key: !!str 1234 + # Configure compression. + # + # Environment variable: OTEL_EXPORTER_OTLP_COMPRESSION, OTEL_EXPORTER_OTLP_TRACES_COMPRESSION + compression: gzip + # Configure max time (in milliseconds) to wait for each export. + # + # Environment variable: OTEL_EXPORTER_OTLP_TIMEOUT, OTEL_EXPORTER_OTLP_TRACES_TIMEOUT + timeout: 10000 + # Configure a batch span processor. + - batch: + # Configure exporter. + # + # Environment variable: OTEL_TRACES_EXPORTER + exporter: + # Configure exporter to be zipkin. + zipkin: + # Configure endpoint. + # + # Environment variable: OTEL_EXPORTER_ZIPKIN_ENDPOINT + endpoint: http://localhost:9411/api/v2/spans + # Configure max time (in milliseconds) to wait for each export. + # + # Environment variable: OTEL_EXPORTER_ZIPKIN_TIMEOUT + timeout: 10000 + # Configure a simple span processor. + - simple: + # Configure exporter. + exporter: + # Configure exporter to be console. + console: {} + # Configure span limits. See also attribute_limits. + limits: + # Configure max span attribute value size. Overrides attribute_limits.attribute_value_length_limit. + # + # Environment variable: OTEL_SPAN_ATTRIBUTE_VALUE_LENGTH_LIMIT + attribute_value_length_limit: 4096 + # Configure max span attribute count. Overrides attribute_limits.attribute_count_limit. + # + # Environment variable: OTEL_SPAN_ATTRIBUTE_COUNT_LIMIT + attribute_count_limit: 128 + # Configure max span event count. + # + # Environment variable: OTEL_SPAN_EVENT_COUNT_LIMIT + event_count_limit: 128 + # Configure max span link count. + # + # Environment variable: OTEL_SPAN_LINK_COUNT_LIMIT + link_count_limit: 128 + # Configure max attributes per span event. + # + # Environment variable: OTEL_EVENT_ATTRIBUTE_COUNT_LIMIT + event_attribute_count_limit: 128 + # Configure max attributes per span link. + # + # Environment variable: OTEL_LINK_ATTRIBUTE_COUNT_LIMIT + link_attribute_count_limit: 128 + # Configure the sampler. + sampler: + # Configure sampler to be parent_based. Known values include: always_off, always_on, jaeger_remote, parent_based, trace_id_ratio_based. + # + # Environment variable: OTEL_TRACES_SAMPLER=parentbased_* + parent_based: + # Configure root sampler. + # + # Environment variable: OTEL_TRACES_SAMPLER=parentbased_traceidratio + root: + # Configure sampler to be trace_id_ratio_based. + trace_id_ratio_based: + # Configure trace_id_ratio. 
+ # + # Environment variable: OTEL_TRACES_SAMPLER_ARG=traceidratio=0.0001 + ratio: 0.0001 + # Configure remote_parent_sampled sampler. + remote_parent_sampled: + # Configure sampler to be always_on. + always_on: {} + # Configure remote_parent_not_sampled sampler. + remote_parent_not_sampled: + # Configure sampler to be always_off. + always_off: {} + # Configure local_parent_sampled sampler. + local_parent_sampled: + # Configure sampler to be always_on. + always_on: {} + # Configure local_parent_not_sampled sampler. + local_parent_not_sampled: + parent_based: + remote_parent_not_sampled: + trace_id_ratio_based: + ratio: 0.0001 + +# Configure resource for all signals. +resource: + # Configure resource attributes. + # + # Environment variable: OTEL_RESOURCE_ATTRIBUTES + attributes: + # Configure `service.name` resource attribute + # + # Environment variable: OTEL_SERVICE_NAME + service.name: !!str "unknown_service" + # Configure the resource schema URL. + schema_url: https://opentelemetry.io/schemas/1.16.0 diff --git a/_configuration/tests/data/configuration_1.yaml b/_configuration/tests/data/configuration_1.yaml new file mode 100644 index 00000000000..83f4d974d7d --- /dev/null +++ b/_configuration/tests/data/configuration_1.yaml @@ -0,0 +1,380 @@ +# kitchen-sink.yaml demonstrates all configurable surface area, including explanatory comments. +# +# It DOES NOT represent expected real world configuration, as it makes strange configuration +# choices in an effort to exercise the full surface area. +# +# Configuration values are set to their defaults when default values are defined. + +# The file format version +file_format: "0.1" + +# Configure if the SDK is disabled or not. This is not required to be provided +# to ensure the SDK isn't disabled, the default value when this is not provided +# is for the SDK to be enabled. +# +# Environment variable: OTEL_SDK_DISABLED +disabled: false + +# Configure general attribute limits. See also tracer_provider.limits, logger_provider.limits. +attribute_limits: + # Configure max attribute value size. + # + # Environment variable: OTEL_ATTRIBUTE_VALUE_LENGTH_LIMIT + attribute_value_length_limit: 4096 + # Configure max attribute count. + # + # Environment variable: OTEL_ATTRIBUTE_COUNT_LIMIT + attribute_count_limit: 128 + +# Configure logger provider. +logger_provider: + # Configure log record processors. + processors: + # Configure a batch log record processor. + - batch: + # Configure delay interval (in milliseconds) between two consecutive exports. + # + # Environment variable: OTEL_BLRP_SCHEDULE_DELAY + schedule_delay: 5000 + # Configure maximum allowed time (in milliseconds) to export data. + # + # Environment variable: OTEL_BLRP_EXPORT_TIMEOUT + export_timeout: ${OTEL_BLRB_EXPORT_TIMEOUT} + # Configure maximum queue size. + # + # Environment variable: OTEL_BLRP_MAX_QUEUE_SIZE + max_queue_size: 2048 + # Configure maximum batch size. + # + # Environment variable: OTEL_BLRP_MAX_EXPORT_BATCH_SIZE + max_export_batch_size: 512 + # Configure exporter. + # + # Environment variable: OTEL_LOGS_EXPORTER + exporter: + # Configure exporter to be OTLP. + otlp: + # Configure protocol. + # + # Environment variable: OTEL_EXPORTER_OTLP_PROTOCOL, OTEL_EXPORTER_OTLP_LOGS_PROTOCOL + protocol: http/protobuf + # Configure endpoint. + # + # Environment variable: OTEL_EXPORTER_OTLP_ENDPOINT, OTEL_EXPORTER_OTLP_LOGS_ENDPOINT + endpoint: http://localhost:4318 + # Configure certificate. 
+ # + # Environment variable: OTEL_EXPORTER_OTLP_CERTIFICATE, OTEL_EXPORTER_OTLP_LOGS_CERTIFICATE + certificate: /app/cert.pem + # Configure mTLS private client key. + # + # Environment variable: OTEL_EXPORTER_OTLP_CLIENT_KEY, OTEL_EXPORTER_OTLP_LOGS_CLIENT_KEY + client_key: /app/cert.pem + # Configure mTLS client certificate. + # + # Environment variable: OTEL_EXPORTER_OTLP_CLIENT_CERTIFICATE, OTEL_EXPORTER_OTLP_LOGS_CLIENT_CERTIFICATE + client_certificate: /app/cert.pem + # Configure headers. + # + # Environment variable: OTEL_EXPORTER_OTLP_HEADERS, OTEL_EXPORTER_OTLP_LOGS_HEADERS + headers: + api-key: "1234" + # Configure compression. + # + # Environment variable: OTEL_EXPORTER_OTLP_COMPRESSION, OTEL_EXPORTER_OTLP_LOGS_COMPRESSION + compression: gzip + # Configure max time (in milliseconds) to wait for each export. + # + # Environment variable: OTEL_EXPORTER_OTLP_TIMEOUT, OTEL_EXPORTER_OTLP_LOGS_TIMEOUT + timeout: 10000 + # Configure log record limits. See also attribute_limits. + limits: + # Configure max log record attribute value size. Overrides attribute_limits.attribute_value_length_limit. + # + # Environment variable: OTEL_LOGRECORD_ATTRIBUTE_VALUE_LENGTH_LIMIT + attribute_value_length_limit: 4096 + # Configure max log record attribute count. Overrides attribute_limits.attribute_count_limit. + # + # Environment variable: OTEL_LOGRECORD_ATTRIBUTE_COUNT_LIMIT + attribute_count_limit: 128 + +# Configure meter provider. +meter_provider: + # Configure metric readers. + readers: + # Configure a pull-based metric reader. + - pull: + # Configure exporter. + # + # Environment variable: OTEL_METRICS_EXPORTER + exporter: + # Configure exporter to be prometheus. + prometheus: + # Configure host. + # + # Environment variable: OTEL_EXPORTER_PROMETHEUS_HOST + host: localhost + # Configure port. + # + # Environment variable: OTEL_EXPORTER_PROMETHEUS_PORT + port: 9464 + # Configure a periodic metric reader. + - periodic: + # Configure delay interval (in milliseconds) between start of two consecutive exports. + # + # Environment variable: OTEL_METRIC_EXPORT_INTERVAL + interval: 5000 + # Configure maximum allowed time (in milliseconds) to export data. + # + # Environment variable: OTEL_METRIC_EXPORT_TIMEOUT + timeout: 30000 + # Configure exporter. + # + # Environment variable: OTEL_METRICS_EXPORTER + exporter: + # Configure exporter to be OTLP. + otlp: + # Configure protocol. + # + # Environment variable: OTEL_EXPORTER_OTLP_PROTOCOL, OTEL_EXPORTER_OTLP_METRICS_PROTOCOL + protocol: http/protobuf + # Configure endpoint. + # + # Environment variable: OTEL_EXPORTER_OTLP_ENDPOINT, OTEL_EXPORTER_OTLP_METRICS_ENDPOINT + endpoint: http://localhost:4318 + # Configure certificate. + # + # Environment variable: OTEL_EXPORTER_OTLP_CERTIFICATE, OTEL_EXPORTER_OTLP_METRICS_CERTIFICATE + certificate: /app/cert.pem + # Configure mTLS private client key. + # + # Environment variable: OTEL_EXPORTER_OTLP_CLIENT_KEY, OTEL_EXPORTER_OTLP_METRICS_CLIENT_KEY + client_key: /app/cert.pem + # Configure mTLS client certificate. + # + # Environment variable: OTEL_EXPORTER_OTLP_CLIENT_CERTIFICATE, OTEL_EXPORTER_OTLP_METRICS_CLIENT_CERTIFICATE + client_certificate: /app/cert.pem + # Configure headers. + # + # Environment variable: OTEL_EXPORTER_OTLP_HEADERS, OTEL_EXPORTER_OTLP_METRICS_HEADERS + headers: + api-key: !!str 1234 + # Configure compression. 
+ # + # Environment variable: OTEL_EXPORTER_OTLP_COMPRESSION, OTEL_EXPORTER_OTLP_METRICS_COMPRESSION + compression: gzip + # Configure max time (in milliseconds) to wait for each export. + # + # Environment variable: OTEL_EXPORTER_OTLP_TIMEOUT, OTEL_EXPORTER_OTLP_METRICS_TIMEOUT + timeout: 10000 + # Configure temporality preference. + # + # Environment variable: OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE + temporality_preference: delta + # Configure default histogram aggregation. + # + # Environment variable: OTEL_EXPORTER_OTLP_METRICS_DEFAULT_HISTOGRAM_AGGREGATION + default_histogram_aggregation: base2_exponential_bucket_histogram + # Configure a periodic metric reader. + - periodic: + # Configure exporter. + exporter: + # Configure exporter to be console. + console: {} + # Configure views. Each view has a selector which determines the instrument(s) it applies to, and a configuration for the resulting stream(s). + views: + # Configure a view. + - selector: + # Configure instrument name selection criteria. + instrument_name: my-instrument + # Configure instrument type selection criteria. + instrument_type: histogram + # Configure the instrument unit selection criteria. + unit: ms + # Configure meter name selection criteria. + meter_name: my-meter + # Configure meter version selection criteria. + meter_version: 1.0.0 + # Configure meter schema url selection criteria. + meter_schema_url: https://opentelemetry.io/schemas/1.16.0 + # Configure stream. + stream: + # Configure metric name of the resulting stream(s). + name: new_instrument_name + # Configure metric description of the resulting stream(s). + description: new_description + # Configure aggregation of the resulting stream(s). Known values include: default, drop, explicit_bucket_histogram, base2_exponential_bucket_histogram, last_value, sum. + aggregation: + # Configure aggregation to be explicit_bucket_histogram. + explicit_bucket_histogram: + # Configure bucket boundaries. + boundaries: [ 0.0, 5.0, 10.0, 25.0, 50.0, 75.0, 100.0, 250.0, 500.0, 750.0, 1000.0, 2500.0, 5000.0, 7500.0, 10000.0 ] + # Configure record min and max. + record_min_max: true + # Configure attribute keys retained in the resulting stream(s). + attribute_keys: + - key1 + - key2 + +# Configure text map context propagators. +# +# Environment variable: OTEL_PROPAGATORS +propagator: + composite: [tracecontext, baggage, b3, b3multi, jaeger, xray, ottrace] + +# Configure tracer provider. +tracer_provider: + # Configure span processors. + processors: + # Configure a batch span processor. + - batch: + # Configure delay interval (in milliseconds) between two consecutive exports. + # + # Environment variable: OTEL_BSP_SCHEDULE_DELAY + schedule_delay: 5000 + # Configure maximum allowed time (in milliseconds) to export data. + # + # Environment variable: OTEL_BSP_EXPORT_TIMEOUT + export_timeout: 30000 + # Configure maximum queue size. + # + # Environment variable: OTEL_BSP_MAX_QUEUE_SIZE + max_queue_size: 2048 + # Configure maximum batch size. + # + # Environment variable: OTEL_BSP_MAX_EXPORT_BATCH_SIZE + max_export_batch_size: 512 + # Configure exporter. + # + # Environment variable: OTEL_TRACES_EXPORTER + exporter: + # Configure exporter to be OTLP. + otlp: + # Configure protocol. + # + # Environment variable: OTEL_EXPORTER_OTLP_PROTOCOL, OTEL_EXPORTER_OTLP_TRACES_PROTOCOL + protocol: http/protobuf + # Configure endpoint. 
+ # + # Environment variable: OTEL_EXPORTER_OTLP_ENDPOINT, OTEL_EXPORTER_OTLP_TRACES_ENDPOINT + endpoint: http://localhost:4318 + # Configure certificate. + # + # Environment variable: OTEL_EXPORTER_OTLP_CERTIFICATE, OTEL_EXPORTER_OTLP_TRACES_CERTIFICATE + certificate: /app/cert.pem + # Configure mTLS private client key. + # + # Environment variable: OTEL_EXPORTER_OTLP_CLIENT_KEY, OTEL_EXPORTER_OTLP_TRACES_CLIENT_KEY + client_key: /app/cert.pem + # Configure mTLS client certificate. + # + # Environment variable: OTEL_EXPORTER_OTLP_CLIENT_CERTIFICATE, OTEL_EXPORTER_OTLP_TRACES_CLIENT_CERTIFICATE + client_certificate: /app/cert.pem + # Configure headers. + # + # Environment variable: OTEL_EXPORTER_OTLP_HEADERS, OTEL_EXPORTER_OTLP_TRACES_HEADERS + headers: + api-key: !!str 1234 + # Configure compression. + # + # Environment variable: OTEL_EXPORTER_OTLP_COMPRESSION, OTEL_EXPORTER_OTLP_TRACES_COMPRESSION + compression: gzip + # Configure max time (in milliseconds) to wait for each export. + # + # Environment variable: OTEL_EXPORTER_OTLP_TIMEOUT, OTEL_EXPORTER_OTLP_TRACES_TIMEOUT + timeout: 10000 + # Configure a batch span processor. + - batch: + # Configure exporter. + # + # Environment variable: OTEL_TRACES_EXPORTER + exporter: + # Configure exporter to be zipkin. + zipkin: + # Configure endpoint. + # + # Environment variable: OTEL_EXPORTER_ZIPKIN_ENDPOINT + endpoint: http://localhost:9411/api/v2/spans + # Configure max time (in milliseconds) to wait for each export. + # + # Environment variable: OTEL_EXPORTER_ZIPKIN_TIMEOUT + timeout: 10000 + # Configure a simple span processor. + - simple: + # Configure exporter. + exporter: + # Configure exporter to be console. + console: {} + # Configure span limits. See also attribute_limits. + limits: + # Configure max span attribute value size. Overrides attribute_limits.attribute_value_length_limit. + # + # Environment variable: OTEL_SPAN_ATTRIBUTE_VALUE_LENGTH_LIMIT + attribute_value_length_limit: 4096 + # Configure max span attribute count. Overrides attribute_limits.attribute_count_limit. + # + # Environment variable: OTEL_SPAN_ATTRIBUTE_COUNT_LIMIT + attribute_count_limit: 128 + # Configure max span event count. + # + # Environment variable: OTEL_SPAN_EVENT_COUNT_LIMIT + event_count_limit: 128 + # Configure max span link count. + # + # Environment variable: OTEL_SPAN_LINK_COUNT_LIMIT + link_count_limit: 128 + # Configure max attributes per span event. + # + # Environment variable: OTEL_EVENT_ATTRIBUTE_COUNT_LIMIT + event_attribute_count_limit: 128 + # Configure max attributes per span link. + # + # Environment variable: OTEL_LINK_ATTRIBUTE_COUNT_LIMIT + link_attribute_count_limit: 128 + # Configure the sampler. + sampler: + # Configure sampler to be parent_based. Known values include: always_off, always_on, jaeger_remote, parent_based, trace_id_ratio_based. + # + # Environment variable: OTEL_TRACES_SAMPLER=parentbased_* + parent_based: + # Configure root sampler. + # + # Environment variable: OTEL_TRACES_SAMPLER=parentbased_traceidratio + root: + # Configure sampler to be trace_id_ratio_based. + trace_id_ratio_based: + # Configure trace_id_ratio. + # + # Environment variable: OTEL_TRACES_SAMPLER_ARG=traceidratio=0.0001 + ratio: 0.0001 + # Configure remote_parent_sampled sampler. + remote_parent_sampled: + # Configure sampler to be always_on. + always_on: {} + # Configure remote_parent_not_sampled sampler. + remote_parent_not_sampled: + # Configure sampler to be always_off. + always_off: {} + # Configure local_parent_sampled sampler. 
+ local_parent_sampled: + # Configure sampler to be always_on. + always_on: {} + # Configure local_parent_not_sampled sampler. + local_parent_not_sampled: + parent_based: + remote_parent_not_sampled: + trace_id_ratio_based: + ratio: 0.0001 + +# Configure resource for all signals. +resource: + # Configure resource attributes. + # + # Environment variable: OTEL_RESOURCE_ATTRIBUTES + attributes: + # Configure `service.name` resource attribute + # + # Environment variable: OTEL_SERVICE_NAME + service.name: !!str "unknown_service" + # Configure the resource schema URL. + schema_url: https://opentelemetry.io/schemas/1.16.0 diff --git a/_configuration/tests/data/logger_provider.json b/_configuration/tests/data/logger_provider.json new file mode 100644 index 00000000000..e41568af9b3 --- /dev/null +++ b/_configuration/tests/data/logger_provider.json @@ -0,0 +1,109 @@ +{ + "$id": "https://opentelemetry.io/otelconfig/logger_provider.json", + "$schema": "https://json-schema.org/draft/2020-12/schema", + "title": "LoggerProvider", + "type": "object", + "additionalProperties": false, + "properties": { + "processors": { + "type": "array", + "items": { + "$ref": "#/$defs/LogRecordProcessor" + } + }, + "limits": { + "$ref": "#/$defs/LogRecordLimits" + } + }, + "$defs": { + "SimpleLogRecordProcessor": { + "type": "object", + "additionalProperties": false, + "properties": { + "exporter": { + "$ref": "#/$defs/LogRecordExporter" + } + }, + "required": [ + "exporter" + ] + }, + "BatchLogRecordProcessor": { + "type": "object", + "additionalProperties": false, + "properties": { + "schedule_delay": { + "type": "integer", + "minimum": 0 + }, + "export_timeout": { + "type": "integer", + "minimum": 0 + }, + "max_queue_size": { + "type": "integer", + "minimum": 0 + }, + "max_export_batch_size": { + "type": "integer", + "minimum": 0 + }, + "exporter": { + "$ref": "#/$defs/LogRecordExporter" + } + }, + "required": [ + "exporter" + ] + }, + "LogRecordExporter": { + "type": "object", + "additionalProperties": true, + "minProperties": 1, + "maxProperties": 1, + "properties": { + "otlp": { + "$ref": "common.json#/$defs/Otlp" + } + }, + "patternProperties": { + ".*": { + "type": "object" + } + } + }, + "LogRecordLimits": { + "type": "object", + "additionalProperties": false, + "properties": { + "attribute_value_length_limit": { + "type": "integer", + "minimum": 0 + }, + "attribute_count_limit": { + "type": "integer", + "minimum": 0 + } + } + }, + "LogRecordProcessor": { + "type": "object", + "additionalProperties": true, + "minProperties": 1, + "maxProperties": 1, + "properties": { + "batch": { + "$ref": "#/$defs/BatchLogRecordProcessor" + }, + "simple": { + "$ref": "#/$defs/SimpleLogRecordProcessor" + } + }, + "patternProperties": { + ".*": { + "type": "object" + } + } + } + } +} diff --git a/_configuration/tests/data/meter_provider.json b/_configuration/tests/data/meter_provider.json new file mode 100644 index 00000000000..9c6dc7921c4 --- /dev/null +++ b/_configuration/tests/data/meter_provider.json @@ -0,0 +1,264 @@ +{ + "$id": "https://opentelemetry.io/otelconfig/meter_provider.json", + "$schema": "https://json-schema.org/draft/2020-12/schema", + "title": "MeterProvider", + "type": "object", + "additionalProperties": false, + "properties": { + "readers": { + "type": "array", + "items": { + "$ref": "#/$defs/MetricReader" + } + }, + "views": { + "type": "array", + "items": { + "$ref": "#/$defs/View" + } + } + }, + "$defs": { + "PeriodicMetricReader": { + "type": "object", + "additionalProperties": false, + "properties": { + 
"interval": { + "type": "integer", + "minimum": 0 + }, + "timeout": { + "type": "integer", + "minimum": 0 + }, + "exporter": { + "$ref": "#/$defs/MetricExporter" + } + }, + "required": [ + "exporter" + ], + "title": "PeriodicMetricReader" + }, + "PullMetricReader": { + "type": "object", + "additionalProperties": false, + "properties": { + "exporter": { + "$ref": "#/$defs/MetricExporter" + } + }, + "required": [ + "exporter" + ], + "title": "PullMetricReader" + }, + "MetricExporter": { + "type": "object", + "additionalProperties": true, + "minProperties": 1, + "maxProperties": 1, + "properties": { + "otlp": { + "$ref": "#/$defs/OtlpMetric" + }, + "console": { + "$ref": "common.json#/$defs/Console" + }, + "prometheus": { + "$ref": "#/$defs/Prometheus" + } + }, + "patternProperties": { + ".*": { + "type": "object" + } + } + }, + "Prometheus": { + "type": "object", + "additionalProperties": false, + "properties": { + "host": { + "type": "string" + }, + "port": { + "type": "integer" + } + } + }, + "MetricReader": { + "type": "object", + "additionalProperties": false, + "minProperties": 1, + "maxProperties": 1, + "properties": { + "periodic": { + "$ref": "#/$defs/PeriodicMetricReader" + }, + "pull": { + "$ref": "#/$defs/PullMetricReader" + } + } + }, + "OtlpMetric": { + "type": "object", + "additionalProperties": false, + "properties": { + "protocol": { + "type": "string", + "pattern": "^(http|grpc)\\/(protobuf|json)" + }, + "endpoint": { + "type": "string" + }, + "certificate": { + "type": "string" + }, + "client_key": { + "type": "string" + }, + "client_certificate": { + "type": "string" + }, + "headers": { + "$ref": "common.json#/$defs/Headers" + }, + "compression": { + "type": "string" + }, + "timeout": { + "type": "integer", + "minimum": 0 + }, + "temporality_preference": { + "type": "string" + }, + "default_histogram_aggregation": { + "type": "string", + "enum": [ + "explicit_bucket_histogram", + "base2_exponential_bucket_histogram" + ] + } + }, + "required": [ + "endpoint", + "protocol" + ], + "title": "OtlpMetric" + }, + "View": { + "type": "object", + "additionalProperties": false, + "properties": { + "selector": { + "title": "Selector", + "type": "object", + "additionalProperties": false, + "properties": { + "instrument_name": { + "type": "string" + }, + "instrument_type": { + "type": "string", + "enum": [ + "counter", + "histogram", + "observable_counter", + "observable_gauge", + "observable_up_down_counter", + "up_down_counter" + ] + }, + "unit": { + "type": "string" + }, + "meter_name": { + "type": "string" + }, + "meter_version": { + "type": "string" + }, + "meter_schema_url": { + "type": "string" + } + } + }, + "stream": { + "title": "Stream", + "type": "object", + "additionalProperties": false, + "properties": { + "name": { + "type": "string" + }, + "description": { + "type": "string" + }, + "aggregation": { + "type": "object", + "additionalProperties": false, + "minProperties": 1, + "maxProperties": 1, + "properties": { + "default": { + "type": "object", + "additionalProperties": false + }, + "drop": { + "type": "object", + "additionalProperties": false + }, + "explicit_bucket_histogram": { + "type": "object", + "additionalProperties": false, + "properties": { + "boundaries": { + "type": "array", + "items": { + "type": "number" + } + }, + "record_min_max": { + "type": "boolean" + } + } + }, + "base2_exponential_bucket_histogram": { + "type": "object", + "additionalProperties": false, + "properties": { + "max_scale": { + "type": "integer" + }, + "max_size": { + "type": "integer" 
+ }, + "record_min_max": { + "type": "boolean" + } + } + }, + "last_value": { + "type": "object", + "additionalProperties": false + }, + "sum": { + "type": "object", + "additionalProperties": false + } + } + }, + "attribute_keys": { + "type": "array", + "items": { + "type": "string" + } + } + } + } + } + } + } +} diff --git a/_configuration/tests/data/opentelemetry_configuration.json b/_configuration/tests/data/opentelemetry_configuration.json new file mode 100644 index 00000000000..d606299373f --- /dev/null +++ b/_configuration/tests/data/opentelemetry_configuration.json @@ -0,0 +1,50 @@ +{ + "$id": "https://opentelemetry.io/otelconfig/opentelemetry_configuration.json", + "$schema": "https://json-schema.org/draft/2020-12/schema", + "title": "OpenTelemetryConfiguration", + "type": "object", + "additionalProperties": true, + "properties": { + "file_format": { + "type": "string" + }, + "disabled": { + "type": "boolean" + }, + "attribute_limits": { + "$ref": "#/$defs/AttributeLimits" + }, + "logger_provider": { + "$ref": "logger_provider.json" + }, + "meter_provider": { + "$ref": "meter_provider.json" + }, + "propagator": { + "$ref": "propagator.json" + }, + "tracer_provider": { + "$ref": "tracer_provider.json" + }, + "resource": { + "$ref": "resource.json" + } + }, + "required": [ + "file_format" + ], + "$defs": { + "AttributeLimits": { + "type": "object", + "additionalProperties": true, + "properties": { + "attribute_value_length_limit": { + "type": "integer" + }, + "attribute_count_limit": { + "type": "integer" + } + } + } + } +} diff --git a/_configuration/tests/data/propagator.json b/_configuration/tests/data/propagator.json new file mode 100644 index 00000000000..1b29ec273d0 --- /dev/null +++ b/_configuration/tests/data/propagator.json @@ -0,0 +1,17 @@ +{ + "$id": "https://opentelemetry.io/otelconfig/propagator.json", + "$schema": "https://json-schema.org/draft/2020-12/schema", + "title": "Propagator", + "type": "object", + "minProperties": 1, + "maxProperties": 1, + "additionalProperties": true, + "properties": { + "composite": { + "type": "array", + "items": { + "type": "string" + } + } + } +} diff --git a/_configuration/tests/data/resource.json b/_configuration/tests/data/resource.json new file mode 100644 index 00000000000..d18267a82f2 --- /dev/null +++ b/_configuration/tests/data/resource.json @@ -0,0 +1,27 @@ +{ + "$id": "https://opentelemetry.io/otelconfig/resource.json", + "$schema": "https://json-schema.org/draft/2020-12/schema", + "title": "Resource", + "type": "object", + "additionalProperties": false, + "properties": { + "attributes": { + "$ref": "#/$defs/Attributes" + }, + "schema_url": { + "type": "string" + } + }, + "$defs": { + "Attributes": { + "title": "Attributes", + "type": "object", + "additionalProperties": true, + "properties": { + "service.name": { + "type": "string" + } + } + } + } +} diff --git a/_configuration/tests/data/tracer_provider.json b/_configuration/tests/data/tracer_provider.json new file mode 100644 index 00000000000..96dfa5c9b51 --- /dev/null +++ b/_configuration/tests/data/tracer_provider.json @@ -0,0 +1,220 @@ +{ + "$id": "https://opentelemetry.io/otelconfig/tracer_provider.json", + "$schema": "https://json-schema.org/draft/2020-12/schema", + "title": "TracerProvider", + "type": "object", + "additionalProperties": false, + "properties": { + "processors": { + "type": "array", + "items": { + "$ref": "#/$defs/SpanProcessor" + } + }, + "limits": { + "$ref": "#/$defs/SpanLimits" + }, + "sampler": { + "$ref": "#/$defs/Sampler" + } + }, + "$defs": { + 
"BatchSpanProcessor": { + "type": "object", + "additionalProperties": false, + "title": "BatchSpanProcessor", + "properties": { + "schedule_delay": { + "type": "integer", + "minimum": 0 + }, + "export_timeout": { + "type": "integer", + "minimum": 0 + }, + "max_queue_size": { + "type": "integer", + "minimum": 0 + }, + "max_export_batch_size": { + "type": "integer", + "minimum": 0 + }, + "exporter": { + "$ref": "#/$defs/SpanExporter" + } + }, + "required": [ + "exporter" + ] + }, + "Sampler": { + "type": "object", + "additionalProperties": true, + "minProperties": 1, + "maxProperties": 1, + "properties": { + "always_off": { + "type": "object", + "additionalProperties": false + }, + "always_on": { + "type": "object", + "additionalProperties": false + }, + "jaeger_remote": { + "type": "object", + "additionalProperties": false, + "properties": { + "endpoint": { + "type": "string" + }, + "interval": { + "type": "integer", + "minimum": 0 + }, + "initial_sampler": { + "$ref": "#/$defs/Sampler" + } + } + }, + "parent_based": { + "type": "object", + "additionalProperties": false, + "properties": { + "root": { + "$ref": "#/$defs/Sampler" + }, + "remote_parent_sampled": { + "$ref": "#/$defs/Sampler" + }, + "remote_parent_not_sampled": { + "$ref": "#/$defs/Sampler" + }, + "local_parent_sampled": { + "$ref": "#/$defs/Sampler" + }, + "local_parent_not_sampled": { + "$ref": "#/$defs/Sampler" + } + } + }, + "trace_id_ratio_based": { + "type": "object", + "additionalProperties": false, + "properties": { + "ratio": { + "type": "number" + } + } + } + }, + "patternProperties": { + ".*": { + "type": "object" + } + } + }, + "SimpleSpanProcessor": { + "type": "object", + "additionalProperties": false, + "title": "SimpleSpanProcessor", + "properties": { + "exporter": { + "$ref": "#/$defs/SpanExporter" + } + }, + "required": [ + "exporter" + ] + }, + "SpanExporter": { + "type": "object", + "additionalProperties": true, + "minProperties": 1, + "maxProperties": 1, + "properties": { + "otlp": { + "$ref": "common.json#/$defs/Otlp" + }, + "console": { + "$ref": "common.json#/$defs/Console" + }, + "zipkin": { + "$ref": "#/$defs/Zipkin" + } + }, + "patternProperties": { + ".*": { + "type": "object" + } + } + }, + "SpanLimits": { + "type": "object", + "additionalProperties": false, + "properties": { + "attribute_value_length_limit": { + "type": "integer", + "minimum": 0 + }, + "attribute_count_limit": { + "type": "integer", + "minimum": 0 + }, + "event_count_limit": { + "type": "integer", + "minimum": 0 + }, + "link_count_limit": { + "type": "integer", + "minimum": 0 + }, + "event_attribute_count_limit": { + "type": "integer", + "minimum": 0 + }, + "link_attribute_count_limit": { + "type": "integer", + "minimum": 0 + } + } + }, + "SpanProcessor": { + "type": "object", + "additionalProperties": true, + "minProperties": 1, + "maxProperties": 1, + "properties": { + "batch": { + "$ref": "#/$defs/BatchSpanProcessor" + }, + "simple": { + "$ref": "#/$defs/SimpleSpanProcessor" + } + }, + "patternProperties": { + ".*": { + "type": "object" + } + } + }, + "Zipkin": { + "type": "object", + "additionalProperties": false, + "properties": { + "endpoint": { + "type": "string" + }, + "timeout": { + "type": "integer", + "minimum": 0 + } + }, + "required": [ + "endpoint" + ], + "title": "Zipkin" + } + } +} diff --git a/_configuration/tests/test_configuration.py b/_configuration/tests/test_configuration.py new file mode 100644 index 00000000000..dbbb390be1b --- /dev/null +++ b/_configuration/tests/test_configuration.py @@ -0,0 +1,164 @@ +# 
Copyright The OpenTelemetry Authors + +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from opentelemetry.configuration._internal.path_function import set_resource +from opentelemetry.configuration import ( + resolve_schema, + process_schema, + create_object, + validate_configuration, + load_configuration, + substitute_environment_variables, + render_schema, +) +from unittest.mock import patch +from os import environ +from pathlib import Path +from pytest import fail + +data_path = Path(__file__).parent.joinpath("data") + + +def test_create_object(): + + configuration = load_configuration( + data_path.joinpath("configuration_0.yaml") + ) + + try: + validate_configuration(configuration) + except Exception as error: + fail(f"Unexpected exception raised: {error}") + + processed_schema = process_schema( + resolve_schema( + data_path.joinpath("opentelemetry_configuration.json") + ) + ) + + set_resource( + create_object(configuration, processed_schema, "resource") + ) + + tracer_provider = create_object( + configuration, processed_schema, "tracer_provider" + ) + + assert ( + tracer_provider. + sampler. + parent_based. + root. + trace_id_ratio_based. + _root. + _rate + ) == 0.0001 + + assert ( + tracer_provider. + sampler. + parent_based. + local_parent_not_sampled. + parent_based. + remote_parent_not_sampled. + trace_id_ratio_based. + _root. + _rate + ) == 0.0001 + + assert ( + tracer_provider. + _span_limits. + max_events + ) == 128 + + assert ( + tracer_provider. + _active_span_processor. + _span_processors[0]. + max_queue_size + ) == 2048 + + assert ( + tracer_provider. + _active_span_processor. + _span_processors[0]. + span_exporter. + _headers["api-key"] + ) == "1234" + + assert ( + tracer_provider. + _active_span_processor. + _span_processors[1]. + span_exporter. + endpoint + ) == "http://localhost:9411/api/v2/spans" + + assert ( + tracer_provider. + _active_span_processor. + _span_processors[2]. + span_exporter. + __class__. + __name__ + ) == "ConsoleSpanExporter" + + assert ( + tracer_provider. + _resource. 
+ _schema_url + ) == "https://opentelemetry.io/schemas/1.16.0" + + +@patch.dict(environ, {"OTEL_BLRB_EXPORT_TIMEOUT": "943"}, clear=True) +def test_substitute_environment_variables(): + configuration = load_configuration( + data_path.joinpath("configuration_1.yaml") + ) + + processed_schema = process_schema( + resolve_schema( + data_path.joinpath("opentelemetry_configuration.json") + ) + ) + configuration = substitute_environment_variables( + configuration, processed_schema + ) + + assert ( + configuration + ["logger_provider"] + ["processors"] + [0] + ["batch"] + ["export_timeout"] + ) == 943 + try: + validate_configuration(configuration) + except Exception as error: + fail(f"Unexpected exception raised: {error}") + + +def test_render(tmpdir): + + render_schema( + process_schema( + resolve_schema( + data_path.joinpath("opentelemetry_configuration.json") + ) + ), + tmpdir.join("path_function.py") + ) From 01c6247b2c95503edddc18b64e6509b869137cb5 Mon Sep 17 00:00:00 2001 From: Diego Hurtado Date: Thu, 18 Jan 2024 14:32:26 -0600 Subject: [PATCH 02/18] Relocate schema and configuration files --- .../configuration/_internal/__init__.py | 11 +--- .../{ => configuration}/configuration_0.yaml | 0 .../{ => configuration}/configuration_1.yaml | 0 .../tests/data/{ => schema}/common.json | 0 .../data/{ => schema}/logger_provider.json | 0 .../data/{ => schema}/meter_provider.json | 0 .../opentelemetry_configuration.json | 0 .../tests/data/{ => schema}/propagator.json | 0 .../tests/data/{ => schema}/resource.json | 0 .../data/{ => schema}/tracer_provider.json | 0 _configuration/tests/test_configuration.py | 51 +++++++++++-------- 11 files changed, 32 insertions(+), 30 deletions(-) rename _configuration/tests/data/{ => configuration}/configuration_0.yaml (100%) rename _configuration/tests/data/{ => configuration}/configuration_1.yaml (100%) rename _configuration/tests/data/{ => schema}/common.json (100%) rename _configuration/tests/data/{ => schema}/logger_provider.json (100%) rename _configuration/tests/data/{ => schema}/meter_provider.json (100%) rename _configuration/tests/data/{ => schema}/opentelemetry_configuration.json (100%) rename _configuration/tests/data/{ => schema}/propagator.json (100%) rename _configuration/tests/data/{ => schema}/resource.json (100%) rename _configuration/tests/data/{ => schema}/tracer_provider.json (100%) diff --git a/_configuration/src/opentelemetry/configuration/_internal/__init__.py b/_configuration/src/opentelemetry/configuration/_internal/__init__.py index b8903cb2c42..8f7f9d69cd6 100644 --- a/_configuration/src/opentelemetry/configuration/_internal/__init__.py +++ b/_configuration/src/opentelemetry/configuration/_internal/__init__.py @@ -19,7 +19,6 @@ from jsonref import JsonRef from os.path import exists from pathlib import Path -from os import getcwd from collections import OrderedDict from json import loads as json_loads from jsonref import loads as jsonref_loads @@ -60,15 +59,9 @@ def load_configuration(configuration_file_path: str) -> dict: return safe_load(configuration_file) -def validate_configuration(configuration: dict): +def validate_configuration(schema_path: Path, configuration: dict): - root_path = Path(getcwd()).parent.parent - - schema_path = str( - root_path. - joinpath("schema"). 
- joinpath("opentelemetry_configuration.json") - ) + schema_path = str(schema_path) if not exists(schema_path): raise Exception(f"{schema_path} does not exist") diff --git a/_configuration/tests/data/configuration_0.yaml b/_configuration/tests/data/configuration/configuration_0.yaml similarity index 100% rename from _configuration/tests/data/configuration_0.yaml rename to _configuration/tests/data/configuration/configuration_0.yaml diff --git a/_configuration/tests/data/configuration_1.yaml b/_configuration/tests/data/configuration/configuration_1.yaml similarity index 100% rename from _configuration/tests/data/configuration_1.yaml rename to _configuration/tests/data/configuration/configuration_1.yaml diff --git a/_configuration/tests/data/common.json b/_configuration/tests/data/schema/common.json similarity index 100% rename from _configuration/tests/data/common.json rename to _configuration/tests/data/schema/common.json diff --git a/_configuration/tests/data/logger_provider.json b/_configuration/tests/data/schema/logger_provider.json similarity index 100% rename from _configuration/tests/data/logger_provider.json rename to _configuration/tests/data/schema/logger_provider.json diff --git a/_configuration/tests/data/meter_provider.json b/_configuration/tests/data/schema/meter_provider.json similarity index 100% rename from _configuration/tests/data/meter_provider.json rename to _configuration/tests/data/schema/meter_provider.json diff --git a/_configuration/tests/data/opentelemetry_configuration.json b/_configuration/tests/data/schema/opentelemetry_configuration.json similarity index 100% rename from _configuration/tests/data/opentelemetry_configuration.json rename to _configuration/tests/data/schema/opentelemetry_configuration.json diff --git a/_configuration/tests/data/propagator.json b/_configuration/tests/data/schema/propagator.json similarity index 100% rename from _configuration/tests/data/propagator.json rename to _configuration/tests/data/schema/propagator.json diff --git a/_configuration/tests/data/resource.json b/_configuration/tests/data/schema/resource.json similarity index 100% rename from _configuration/tests/data/resource.json rename to _configuration/tests/data/schema/resource.json diff --git a/_configuration/tests/data/tracer_provider.json b/_configuration/tests/data/schema/tracer_provider.json similarity index 100% rename from _configuration/tests/data/tracer_provider.json rename to _configuration/tests/data/schema/tracer_provider.json diff --git a/_configuration/tests/test_configuration.py b/_configuration/tests/test_configuration.py index dbbb390be1b..31908321a4a 100644 --- a/_configuration/tests/test_configuration.py +++ b/_configuration/tests/test_configuration.py @@ -34,19 +34,21 @@ def test_create_object(): configuration = load_configuration( - data_path.joinpath("configuration_0.yaml") + data_path.joinpath("configuration").joinpath("configuration_0.yaml") + ) + + schema_path = ( + data_path. + joinpath("schema"). 
+ joinpath("opentelemetry_configuration.json") ) try: - validate_configuration(configuration) + validate_configuration(schema_path, configuration) except Exception as error: fail(f"Unexpected exception raised: {error}") - processed_schema = process_schema( - resolve_schema( - data_path.joinpath("opentelemetry_configuration.json") - ) - ) + processed_schema = process_schema(resolve_schema(schema_path)) set_resource( create_object(configuration, processed_schema, "resource") @@ -126,14 +128,16 @@ def test_create_object(): @patch.dict(environ, {"OTEL_BLRB_EXPORT_TIMEOUT": "943"}, clear=True) def test_substitute_environment_variables(): configuration = load_configuration( - data_path.joinpath("configuration_1.yaml") + data_path.joinpath("configuration").joinpath("configuration_1.yaml") ) - processed_schema = process_schema( - resolve_schema( - data_path.joinpath("opentelemetry_configuration.json") - ) + schema_path = ( + data_path. + joinpath("schema"). + joinpath("opentelemetry_configuration.json") ) + + processed_schema = process_schema(resolve_schema(schema_path)) configuration = substitute_environment_variables( configuration, processed_schema ) @@ -147,18 +151,23 @@ def test_substitute_environment_variables(): ["export_timeout"] ) == 943 try: - validate_configuration(configuration) + validate_configuration(schema_path, configuration) except Exception as error: fail(f"Unexpected exception raised: {error}") def test_render(tmpdir): - render_schema( - process_schema( - resolve_schema( - data_path.joinpath("opentelemetry_configuration.json") - ) - ), - tmpdir.join("path_function.py") - ) + try: + render_schema( + process_schema( + resolve_schema( + data_path. + joinpath("schema"). + joinpath("opentelemetry_configuration.json") + ) + ), + tmpdir.join("path_function.py") + ) + except Exception as error: + fail(f"Unexpected exception raised: {error}") From fca32f45262ecd3f08a19c8160797d26d2f8002b Mon Sep 17 00:00:00 2001 From: Diego Hurtado Date: Thu, 18 Jan 2024 14:34:44 -0600 Subject: [PATCH 03/18] Add instructions to run the test cases --- _configuration/README.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/_configuration/README.rst b/_configuration/README.rst index d24dc67dac0..a2ae7064a44 100644 --- a/_configuration/README.rst +++ b/_configuration/README.rst @@ -40,3 +40,5 @@ To create any provider object first create a ``Resource`` object: tracer_provider = create_object( configuration, processed_schema, "tracer_provider" ) + +To run the tests, just run ``nox`` from the directory where ``noxfile.py`` is. 
From f631f48c3372b3b92309d482d1036c9886643e5a Mon Sep 17 00:00:00 2001 From: Diego Hurtado Date: Mon, 22 Jan 2024 13:19:43 -0600 Subject: [PATCH 04/18] WIP --- .../configuration/_internal/__init__.py | 1 + _configuration/tests/test_configuration.py | 27 +++++++++++++++++++ 2 files changed, 28 insertions(+) diff --git a/_configuration/src/opentelemetry/configuration/_internal/__init__.py b/_configuration/src/opentelemetry/configuration/_internal/__init__.py index 8f7f9d69cd6..beaa8b62a0b 100644 --- a/_configuration/src/opentelemetry/configuration/_internal/__init__.py +++ b/_configuration/src/opentelemetry/configuration/_internal/__init__.py @@ -67,6 +67,7 @@ def validate_configuration(schema_path: Path, configuration: dict): raise Exception(f"{schema_path} does not exist") def retrieve_from_path(path: str): + set_trace() return Resource.from_contents(json_loads(Path(path).read_text())) Draft202012Validator( diff --git a/_configuration/tests/test_configuration.py b/_configuration/tests/test_configuration.py index 31908321a4a..593c1762375 100644 --- a/_configuration/tests/test_configuration.py +++ b/_configuration/tests/test_configuration.py @@ -27,6 +27,8 @@ from os import environ from pathlib import Path from pytest import fail +from jsonschema.validators import Draft202012Validator +from ipdb import set_trace data_path = Path(__file__).parent.joinpath("data") @@ -171,3 +173,28 @@ def test_render(tmpdir): ) except Exception as error: fail(f"Unexpected exception raised: {error}") + + +def test_subschemas(): + + schema_path = ( + data_path. + joinpath("schema"). + joinpath("opentelemetry_configuration.json") + ) + resolved_schema = resolve_schema(schema_path) + resolved_schema + + # FIXME once the schema has been resolved, we get a dictionary. Add to this + # dictionary the schema components of each plugin component sub schema then + # use the resulting schema dictionary to do the validation. 
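+    # One possible direction for this FIXME (untested sketch, names are
+    # hypothetical): for each plugin component that exposes its sub schema as a
+    # dict `plugin_sub_schema`, merge its definitions into the resolved schema
+    # before validating, e.g.:
+    #     resolved_schema.setdefault("$defs", {}).update(
+    #         plugin_sub_schema.get("$defs", {})
+    #     )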
+ + set_trace() + + configuration = load_configuration( + data_path.joinpath("configuration").joinpath("configuration_0.yaml") + ) + + # FIXME do the same for configuration components + + Draft202012Validator(resolved_schema).validate(configuration) From 2111c71b9b1967a2e2a57e83fcd3156e576f99f7 Mon Sep 17 00:00:00 2001 From: Diego Hurtado Date: Mon, 22 Jan 2024 18:51:32 -0600 Subject: [PATCH 05/18] Fix lint --- .gitignore | 1 + .../opentelemetry/configuration/__init__.py | 8 +- .../configuration/_internal/__init__.py | 139 +++++------ .../configuration/_internal/path_function.py | 218 +++++++----------- _configuration/tests/test_configuration.py | 123 ++++------ 5 files changed, 196 insertions(+), 293 deletions(-) diff --git a/.gitignore b/.gitignore index 07c7b9aa6e4..5812d8ea4cd 100644 --- a/.gitignore +++ b/.gitignore @@ -33,6 +33,7 @@ coverage.xml .coverage .nox .tox +.nox .cache htmlcov diff --git a/_configuration/src/opentelemetry/configuration/__init__.py b/_configuration/src/opentelemetry/configuration/__init__.py index e2dbadcfd1d..173a4b6466e 100644 --- a/_configuration/src/opentelemetry/configuration/__init__.py +++ b/_configuration/src/opentelemetry/configuration/__init__.py @@ -19,13 +19,13 @@ from opentelemetry.configuration._internal import ( - resolve_schema, - validate_configuration, - process_schema, - render_schema, create_object, load_configuration, + process_schema, + render_schema, + resolve_schema, substitute_environment_variables, + validate_configuration, ) __all__ = [ diff --git a/_configuration/src/opentelemetry/configuration/_internal/__init__.py b/_configuration/src/opentelemetry/configuration/_internal/__init__.py index beaa8b62a0b..891fbd10599 100644 --- a/_configuration/src/opentelemetry/configuration/_internal/__init__.py +++ b/_configuration/src/opentelemetry/configuration/_internal/__init__.py @@ -12,20 +12,22 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from ipdb import set_trace +from collections import OrderedDict +from json import loads as json_loads from os import environ -from yaml import safe_load -from re import compile as re_compile -from jsonref import JsonRef from os.path import exists from pathlib import Path -from collections import OrderedDict -from json import loads as json_loads +from re import compile as re_compile + +from ipdb import set_trace +from jinja2 import Environment, FileSystemLoader +from jsonref import JsonRef from jsonref import loads as jsonref_loads from jsonschema.validators import Draft202012Validator from referencing import Registry, Resource +from yaml import safe_load + from opentelemetry.configuration._internal.path_function import path_function -from jinja2 import Environment, FileSystemLoader set_trace @@ -36,7 +38,7 @@ "string": str, "array": list, "object": object, - "number": float + "number": float, } @@ -71,13 +73,11 @@ def retrieve_from_path(path: str): return Resource.from_contents(json_loads(Path(path).read_text())) Draft202012Validator( - {"$ref": schema_path}, - registry=Registry(retrieve=retrieve_from_path) + {"$ref": schema_path}, registry=Registry(retrieve=retrieve_from_path) ).validate(configuration) def process_schema(schema: dict) -> dict: - def traverse( schema: dict, schema_value_id_stack: list, @@ -104,8 +104,8 @@ def traverse( positional_attributes = set(schema.get("required", [])) - optional_attributes = ( - all_attributes.difference(positional_attributes) + optional_attributes = all_attributes.difference( + positional_attributes ) positional_attributes = sorted(list(positional_attributes)) @@ -116,22 +116,18 @@ def traverse( for positional_attribute in positional_attributes: - result_positional_attributes[positional_attribute] = ( - str( - _type_type[ - schema_properties[positional_attribute]["type"] - ].__name__ - ) + result_positional_attributes[positional_attribute] = str( + _type_type[ + schema_properties[positional_attribute]["type"] + ].__name__ ) for optional_attribute in optional_attributes: - result_optional_attributes[optional_attribute] = ( - str( - _type_type[ - schema_properties[optional_attribute]["type"] - ].__name__ - ) + result_optional_attributes[optional_attribute] = str( + _type_type[ + schema_properties[optional_attribute]["type"] + ].__name__ ) children = {} @@ -148,7 +144,7 @@ def traverse( or "patternProperties" in schema.keys() ), "recursive_path": recursive_path, - "children": children + "children": children, } if recursive_path: @@ -156,11 +152,11 @@ def traverse( for ( schema_properties_key, - schema_properties_value + schema_properties_value, ) in schema_properties.items(): - schema_properties_value_type = ( - schema_properties_value.get("type") + schema_properties_value_type = schema_properties_value.get( + "type" ) if ( @@ -170,8 +166,8 @@ def traverse( continue if isinstance(schema_properties_value, JsonRef): - schema_properties_value_id = ( - id(schema_properties_value.__subject__) + schema_properties_value_id = id( + schema_properties_value.__subject__ ) else: @@ -190,11 +186,8 @@ def traverse( for ( current_schema_key_stack, - current_schema_value_id - ) in zip( - schema_key_stack[1:], - schema_value_id_stack - ): + current_schema_value_id, + ) in zip(schema_key_stack[1:], schema_value_id_stack): recursive_path.append(current_schema_key_stack) if ( schema_properties_value_id @@ -221,7 +214,6 @@ def traverse( def render_schema(processed_schema: dict, path_function_path: Path): - def traverse( processed_schema: dict, schema_function: dict, @@ 
-230,7 +222,7 @@ def traverse( for ( processed_schema_key, - processed_schema_value + processed_schema_value, ) in processed_schema.items(): if not isinstance(processed_schema_value, dict): @@ -252,7 +244,7 @@ def traverse( schema_function[processed_schema_key] = { "function": processed_schema_value["function_name"], "children": schema_function_children, - "recursive_path": processed_schema_value["recursive_path"] + "recursive_path": processed_schema_value["recursive_path"], } children = processed_schema_value["children"] @@ -278,8 +270,9 @@ def traverse( "\n".join( [ f"{line} # noqa" if len(line) > 80 else line - for line in environment.get_template("template.jinja2"). - render(locals()).split("\n") + for line in environment.get_template("template.jinja2") + .render(locals()) + .split("\n") ] ) ) @@ -288,7 +281,6 @@ def traverse( def create_object( configuration: dict, processed_schema: dict, object_name: str ) -> object: - def create_object( configuration: dict, processed_schema: dict, @@ -300,9 +292,7 @@ def create_object( positional_arguments = [] optional_arguments = {} - for configuration_key, configuration_value in ( - configuration.items() - ): + for configuration_key, configuration_value in configuration.items(): if isinstance(configuration_value, dict): @@ -312,26 +302,22 @@ def create_object( new_path_function = original_path_function for path in processed_schema["recursive_path"]: - new_processed_schema = ( - new_processed_schema[path]["children"] - ) - new_path_function = ( - new_path_function[path]["children"] - ) - - new_processed_schema = ( - new_processed_schema[configuration_key] - ) - new_path_function = ( - new_path_function[configuration_key] - ) + new_processed_schema = new_processed_schema[path][ + "children" + ] + new_path_function = new_path_function[path]["children"] + + new_processed_schema = new_processed_schema[ + configuration_key + ] + new_path_function = new_path_function[configuration_key] else: - new_processed_schema = ( - processed_schema["children"][configuration_key] - ) - new_path_function = ( - path_function["children"][configuration_key] - ) + new_processed_schema = processed_schema["children"][ + configuration_key + ] + new_path_function = path_function["children"][ + configuration_key + ] object_ = create_object( configuration_value, @@ -383,14 +369,12 @@ def create_object( def substitute_environment_variables( - configuration: dict, - processed_schema: dict + configuration: dict, processed_schema: dict ) -> dict: - def traverse( configuration: dict, processed_schema: dict, - original_processed_schema: dict + original_processed_schema: dict, ): for configuration_key, configuration_value in configuration.items(): @@ -400,9 +384,9 @@ def traverse( if isinstance(configuration_value, dict): - recursive_paths = ( - processed_schema[configuration_key]["recursive_path"] - ) + recursive_paths = processed_schema[configuration_key][ + "recursive_path" + ] if recursive_paths: @@ -415,9 +399,7 @@ def traverse( children = processed_schema[configuration_key]["children"] traverse( - configuration_value, - children, - original_processed_schema + configuration_value, children, original_processed_schema ) elif isinstance(configuration_value, list): @@ -427,7 +409,7 @@ def traverse( traverse( element, processed_schema[configuration_key]["children"], - original_processed_schema + original_processed_schema, ) elif isinstance(configuration_value, str): @@ -436,10 +418,9 @@ def traverse( if match is not None: - configuration[configuration_key] = ( - 
__builtins__[processed_schema[configuration_key]] - (environ.get(match.group(1))) - ) + configuration[configuration_key] = __builtins__[ + processed_schema[configuration_key] + ](environ.get(match.group(1))) traverse(configuration, processed_schema, processed_schema) diff --git a/_configuration/src/opentelemetry/configuration/_internal/path_function.py b/_configuration/src/opentelemetry/configuration/_internal/path_function.py index 37830985558..ca9da03f549 100644 --- a/_configuration/src/opentelemetry/configuration/_internal/path_function.py +++ b/_configuration/src/opentelemetry/configuration/_internal/path_function.py @@ -12,27 +12,32 @@ # See the License for the specific language governing permissions and # limitations under the License. -from opentelemetry.sdk.resources import Resource -from opentelemetry.sdk.trace import ( - TracerProvider, SynchronousMultiSpanProcessor, SpanLimits -) -from opentelemetry.sdk.trace.export import ( - BatchSpanProcessor, ConsoleSpanExporter, SimpleSpanProcessor -) +from unittest.mock import Mock +from urllib.parse import urlparse + from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import ( - OTLPSpanExporter as GRPCOTLPSpanExporter + OTLPSpanExporter as GRPCOTLPSpanExporter, ) from opentelemetry.exporter.otlp.proto.http.trace_exporter import ( - OTLPSpanExporter as HTTPOTLPSpanExporter + OTLPSpanExporter as HTTPOTLPSpanExporter, ) from opentelemetry.exporter.zipkin.proto.http import ZipkinExporter +from opentelemetry.sdk.resources import Resource +from opentelemetry.sdk.trace import ( + SpanLimits, + SynchronousMultiSpanProcessor, + TracerProvider, +) +from opentelemetry.sdk.trace.export import ( + BatchSpanProcessor, + ConsoleSpanExporter, + SimpleSpanProcessor, +) from opentelemetry.sdk.trace.sampling import ( - ParentBasedTraceIdRatio, ALWAYS_OFF, ALWAYS_ON, + ParentBasedTraceIdRatio, ) -from urllib.parse import urlparse -from unittest.mock import Mock _resource = None @@ -50,17 +55,12 @@ def attribute_limits( pass -def logger_provider( - limits: object = None, - processors: list = None -): +def logger_provider(limits: object = None, processors: list = None): pass def logger_provider_processors( - batch: object = None, - simple: object = None, - **kwargs + batch: object = None, simple: object = None, **kwargs ): pass @@ -70,15 +70,12 @@ def logger_provider_processors_batch( export_timeout: int = None, max_export_batch_size: int = None, max_queue_size: int = None, - schedule_delay: int = None + schedule_delay: int = None, ): pass -def logger_provider_processors_batch_exporter( - otlp: object = None, - **kwargs -): +def logger_provider_processors_batch_exporter(otlp: object = None, **kwargs): pass @@ -90,27 +87,20 @@ def logger_provider_processors_batch_exporter_otlp( client_key: str = None, compression: str = None, headers: object = None, - timeout: int = None + timeout: int = None, ): pass -def logger_provider_processors_batch_exporter_otlp_headers( - **kwargs -): +def logger_provider_processors_batch_exporter_otlp_headers(**kwargs): pass -def logger_provider_processors_simple( - exporter: object -): +def logger_provider_processors_simple(exporter: object): pass -def logger_provider_processors_simple_exporter( - otlp: object = None, - **kwargs -): +def logger_provider_processors_simple_exporter(otlp: object = None, **kwargs): pass @@ -122,42 +112,31 @@ def logger_provider_processors_simple_exporter_otlp( client_key: str = None, compression: str = None, headers: object = None, - timeout: int = None + timeout: int = None, ): pass -def 
logger_provider_processors_simple_exporter_otlp_headers( - **kwargs -): +def logger_provider_processors_simple_exporter_otlp_headers(**kwargs): pass def logger_provider_limits( - attribute_count_limit: int = None, - attribute_value_length_limit: int = None + attribute_count_limit: int = None, attribute_value_length_limit: int = None ): pass -def meter_provider( - readers: list = None, - views: list = None -): +def meter_provider(readers: list = None, views: list = None): pass -def meter_provider_readers( - periodic: object = None, - pull: object = None -): +def meter_provider_readers(periodic: object = None, pull: object = None): pass def meter_provider_readers_periodic( - exporter: object, - interval: int = None, - timeout: int = None + exporter: object, interval: int = None, timeout: int = None ): pass @@ -181,14 +160,12 @@ def meter_provider_readers_periodic_exporter_otlp( default_histogram_aggregation: str = None, headers: object = None, temporality_preference: str = None, - timeout: int = None + timeout: int = None, ): pass -def meter_provider_readers_periodic_exporter_otlp_headers( - **kwargs -): +def meter_provider_readers_periodic_exporter_otlp_headers(**kwargs): pass @@ -197,15 +174,12 @@ def meter_provider_readers_periodic_exporter_console(): def meter_provider_readers_periodic_exporter_prometheus( - host: str = None, - port: int = None + host: str = None, port: int = None ): pass -def meter_provider_readers_pull( - exporter: object -): +def meter_provider_readers_pull(exporter: object): pass @@ -228,14 +202,12 @@ def meter_provider_readers_pull_exporter_otlp( default_histogram_aggregation: str = None, headers: object = None, temporality_preference: str = None, - timeout: int = None + timeout: int = None, ): pass -def meter_provider_readers_pull_exporter_otlp_headers( - **kwargs -): +def meter_provider_readers_pull_exporter_otlp_headers(**kwargs): pass @@ -244,16 +216,12 @@ def meter_provider_readers_pull_exporter_console(): def meter_provider_readers_pull_exporter_prometheus( - host: str = None, - port: int = None + host: str = None, port: int = None ): pass -def meter_provider_views( - selector: object = None, - stream: object = None -): +def meter_provider_views(selector: object = None, stream: object = None): pass @@ -263,7 +231,7 @@ def meter_provider_views_selector( meter_name: str = None, meter_schema_url: str = None, meter_version: str = None, - unit: str = None + unit: str = None, ): pass @@ -272,7 +240,7 @@ def meter_provider_views_stream( aggregation: object = None, attribute_keys: list = None, description: str = None, - name: str = None + name: str = None, ): pass @@ -283,7 +251,7 @@ def meter_provider_views_stream_aggregation( drop: object = None, explicit_bucket_histogram: object = None, last_value: object = None, - sum: object = None + sum: object = None, ): pass @@ -297,16 +265,13 @@ def meter_provider_views_stream_aggregation_drop(): def meter_provider_views_stream_aggregation_explicit_bucket_histogram( - boundaries: list = None, - record_min_max: bool = None + boundaries: list = None, record_min_max: bool = None ): pass def meter_provider_views_stream_aggregation_base2_exponential_bucket_histogram( - max_scale: int = None, - max_size: int = None, - record_min_max: bool = None + max_scale: int = None, max_size: int = None, record_min_max: bool = None ): pass @@ -319,17 +284,12 @@ def meter_provider_views_stream_aggregation_sum(): pass -def propagator( - composite: list = None, - **kwargs -): +def propagator(composite: list = None, **kwargs): pass def 
tracer_provider( - limits: object = None, - processors: list = None, - sampler: object = None + limits: object = None, processors: list = None, sampler: object = None ): # FIXME how to define shutdown_on_exit? # FIXME how to define id_generator? @@ -345,14 +305,12 @@ def tracer_provider( sampler=sampler, resource=_resource, active_span_processor=synchronous_multi_span_processor, - span_limits=limits + span_limits=limits, ) def tracer_provider_processors( - batch: object = None, - simple: object = None, - **kwargs + batch: object = None, simple: object = None, **kwargs ): return batch or simple @@ -362,14 +320,14 @@ def tracer_provider_processors_batch( export_timeout: int = None, max_export_batch_size: int = None, max_queue_size: int = None, - schedule_delay: int = None + schedule_delay: int = None, ): return BatchSpanProcessor( exporter, max_queue_size=max_queue_size, schedule_delay_millis=schedule_delay, max_export_batch_size=max_export_batch_size, - export_timeout_millis=export_timeout + export_timeout_millis=export_timeout, ) @@ -390,7 +348,7 @@ def tracer_provider_processors_batch_exporter_otlp( client_key: str = None, compression: str = None, headers: object = None, - timeout: int = None + timeout: int = None, ): protocol = urlparse(protocol).scheme @@ -412,9 +370,7 @@ def tracer_provider_processors_batch_exporter_otlp( ) -def tracer_provider_processors_batch_exporter_otlp_headers( - **kwargs -): +def tracer_provider_processors_batch_exporter_otlp_headers(**kwargs): return kwargs @@ -424,15 +380,12 @@ def tracer_provider_processors_batch_exporter_console(): def tracer_provider_processors_batch_exporter_zipkin( - endpoint: str, - timeout: int = None + endpoint: str, timeout: int = None ): return ZipkinExporter(endpoint, timeout=timeout) -def tracer_provider_processors_simple( - exporter: object -): +def tracer_provider_processors_simple(exporter: object): return SimpleSpanProcessor(exporter) @@ -453,7 +406,7 @@ def tracer_provider_processors_simple_exporter_otlp( client_key: str = None, compression: str = None, headers: object = None, - timeout: int = None + timeout: int = None, ): protocol = urlparse(protocol).scheme @@ -475,9 +428,7 @@ def tracer_provider_processors_simple_exporter_otlp( ) -def tracer_provider_processors_simple_exporter_otlp_headers( - **kwargs -): +def tracer_provider_processors_simple_exporter_otlp_headers(**kwargs): return kwargs @@ -486,8 +437,7 @@ def tracer_provider_processors_simple_exporter_console(): def tracer_provider_processors_simple_exporter_zipkin( - endpoint: str, - timeout: int = None + endpoint: str, timeout: int = None ): return ZipkinExporter(endpoint, timeout=timeout) @@ -498,7 +448,7 @@ def tracer_provider_limits( event_attribute_count_limit: int = None, event_count_limit: int = None, link_attribute_count_limit: int = None, - link_count_limit: int = None + link_count_limit: int = None, ): return SpanLimits( max_span_attributes=attribute_count_limit, @@ -538,15 +488,13 @@ def tracer_provider_sampler_always_on(): def tracer_provider_sampler_jaeger_remote( - endpoint: str = None, - initial_sampler: object = None, - interval: int = None + endpoint: str = None, initial_sampler: object = None, interval: int = None ): return Mock( type="jaeger_remote", endpoint=endpoint, initial_sampler=initial_sampler, - interval=interval + interval=interval, ) @@ -574,7 +522,7 @@ def tracer_provider_sampler_parent_based( local_parent_sampled: object = None, remote_parent_not_sampled: object = None, remote_parent_sampled: object = None, - root: object = None + root: 
object = None, ): return Mock( type="parent_based", @@ -681,23 +629,15 @@ def tracer_provider_sampler_parent_based_local_parent_not_sampled( ) -def tracer_provider_sampler_trace_id_ratio_based( - ratio: float = None -): +def tracer_provider_sampler_trace_id_ratio_based(ratio: float = None): return ParentBasedTraceIdRatio(ratio) -def resource( - attributes: object = None, - schema_url: str = None -): +def resource(attributes: object = None, schema_url: str = None): return Resource.create(attributes=attributes, schema_url=schema_url) -def resource_attributes( - service_name: str = None, - **kwargs -): +def resource_attributes(service_name: str = None, **kwargs): return {"service.name": service_name, **kwargs} @@ -1005,7 +945,10 @@ def resource_attributes( "initial_sampler": { "function": tracer_provider_sampler_jaeger_remote_initial_sampler, # noqa "children": {}, - "recursive_path": ['tracer_provider', 'sampler'], # noqa + "recursive_path": [ + "tracer_provider", + "sampler", + ], # noqa }, }, "recursive_path": [], @@ -1016,27 +959,42 @@ def resource_attributes( "root": { "function": tracer_provider_sampler_parent_based_root, # noqa "children": {}, - "recursive_path": ['tracer_provider', 'sampler'], # noqa + "recursive_path": [ + "tracer_provider", + "sampler", + ], # noqa }, "remote_parent_sampled": { "function": tracer_provider_sampler_parent_based_remote_parent_sampled, # noqa "children": {}, - "recursive_path": ['tracer_provider', 'sampler'], # noqa + "recursive_path": [ + "tracer_provider", + "sampler", + ], # noqa }, "remote_parent_not_sampled": { "function": tracer_provider_sampler_parent_based_remote_parent_not_sampled, # noqa "children": {}, - "recursive_path": ['tracer_provider', 'sampler'], # noqa + "recursive_path": [ + "tracer_provider", + "sampler", + ], # noqa }, "local_parent_sampled": { "function": tracer_provider_sampler_parent_based_local_parent_sampled, # noqa "children": {}, - "recursive_path": ['tracer_provider', 'sampler'], # noqa + "recursive_path": [ + "tracer_provider", + "sampler", + ], # noqa }, "local_parent_not_sampled": { "function": tracer_provider_sampler_parent_based_local_parent_not_sampled, # noqa "children": {}, - "recursive_path": ['tracer_provider', 'sampler'], # noqa + "recursive_path": [ + "tracer_provider", + "sampler", + ], # noqa }, }, "recursive_path": [], diff --git a/_configuration/tests/test_configuration.py b/_configuration/tests/test_configuration.py index 593c1762375..95c53c18b83 100644 --- a/_configuration/tests/test_configuration.py +++ b/_configuration/tests/test_configuration.py @@ -13,22 +13,24 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from opentelemetry.configuration._internal.path_function import set_resource +from os import environ +from pathlib import Path +from unittest.mock import patch + +from ipdb import set_trace +from jsonschema.validators import Draft202012Validator +from pytest import fail + from opentelemetry.configuration import ( - resolve_schema, - process_schema, create_object, - validate_configuration, load_configuration, - substitute_environment_variables, + process_schema, render_schema, + resolve_schema, + substitute_environment_variables, + validate_configuration, ) -from unittest.mock import patch -from os import environ -from pathlib import Path -from pytest import fail -from jsonschema.validators import Draft202012Validator -from ipdb import set_trace +from opentelemetry.configuration._internal.path_function import set_resource data_path = Path(__file__).parent.joinpath("data") @@ -39,10 +41,8 @@ def test_create_object(): data_path.joinpath("configuration").joinpath("configuration_0.yaml") ) - schema_path = ( - data_path. - joinpath("schema"). - joinpath("opentelemetry_configuration.json") + schema_path = data_path.joinpath("schema").joinpath( + "opentelemetry_configuration.json" ) try: @@ -52,78 +52,48 @@ def test_create_object(): processed_schema = process_schema(resolve_schema(schema_path)) - set_resource( - create_object(configuration, processed_schema, "resource") - ) + set_resource(create_object(configuration, processed_schema, "resource")) tracer_provider = create_object( configuration, processed_schema, "tracer_provider" ) assert ( - tracer_provider. - sampler. - parent_based. - root. - trace_id_ratio_based. - _root. - _rate + tracer_provider.sampler.parent_based.root.trace_id_ratio_based._root._rate ) == 0.0001 assert ( - tracer_provider. - sampler. - parent_based. - local_parent_not_sampled. - parent_based. - remote_parent_not_sampled. - trace_id_ratio_based. - _root. - _rate + tracer_provider.sampler.parent_based.local_parent_not_sampled.parent_based.remote_parent_not_sampled.trace_id_ratio_based._root._rate ) == 0.0001 - assert ( - tracer_provider. - _span_limits. - max_events - ) == 128 + assert (tracer_provider._span_limits.max_events) == 128 assert ( - tracer_provider. - _active_span_processor. - _span_processors[0]. - max_queue_size + tracer_provider._active_span_processor._span_processors[ + 0 + ].max_queue_size ) == 2048 assert ( - tracer_provider. - _active_span_processor. - _span_processors[0]. - span_exporter. - _headers["api-key"] + tracer_provider._active_span_processor._span_processors[ + 0 + ].span_exporter._headers["api-key"] ) == "1234" assert ( - tracer_provider. - _active_span_processor. - _span_processors[1]. - span_exporter. - endpoint + tracer_provider._active_span_processor._span_processors[ + 1 + ].span_exporter.endpoint ) == "http://localhost:9411/api/v2/spans" assert ( - tracer_provider. - _active_span_processor. - _span_processors[2]. - span_exporter. - __class__. - __name__ + tracer_provider._active_span_processor._span_processors[ + 2 + ].span_exporter.__class__.__name__ ) == "ConsoleSpanExporter" assert ( - tracer_provider. - _resource. - _schema_url + tracer_provider._resource._schema_url ) == "https://opentelemetry.io/schemas/1.16.0" @@ -133,10 +103,8 @@ def test_substitute_environment_variables(): data_path.joinpath("configuration").joinpath("configuration_1.yaml") ) - schema_path = ( - data_path. - joinpath("schema"). 
- joinpath("opentelemetry_configuration.json") + schema_path = data_path.joinpath("schema").joinpath( + "opentelemetry_configuration.json" ) processed_schema = process_schema(resolve_schema(schema_path)) @@ -145,12 +113,9 @@ def test_substitute_environment_variables(): ) assert ( - configuration - ["logger_provider"] - ["processors"] - [0] - ["batch"] - ["export_timeout"] + configuration["logger_provider"]["processors"][0]["batch"][ + "export_timeout" + ] ) == 943 try: validate_configuration(schema_path, configuration) @@ -164,12 +129,12 @@ def test_render(tmpdir): render_schema( process_schema( resolve_schema( - data_path. - joinpath("schema"). - joinpath("opentelemetry_configuration.json") + data_path.joinpath("schema").joinpath( + "opentelemetry_configuration.json" + ) ) ), - tmpdir.join("path_function.py") + tmpdir.join("path_function.py"), ) except Exception as error: fail(f"Unexpected exception raised: {error}") @@ -177,10 +142,8 @@ def test_render(tmpdir): def test_subschemas(): - schema_path = ( - data_path. - joinpath("schema"). - joinpath("opentelemetry_configuration.json") + schema_path = data_path.joinpath("schema").joinpath( + "opentelemetry_configuration.json" ) resolved_schema = resolve_schema(schema_path) resolved_schema From 1961f9d63f9d7742cb22c727f07418fb85c755f1 Mon Sep 17 00:00:00 2001 From: Diego Hurtado Date: Mon, 22 Jan 2024 22:37:11 -0600 Subject: [PATCH 06/18] Add dry_run feature --- .../configuration/_internal/__init__.py | 25 +++++++++++++---- .../configuration/_internal/path_function.py | 17 +++++++++-- _configuration/tests/test_configuration.py | 28 +++++++++++++++++-- 3 files changed, 60 insertions(+), 10 deletions(-) diff --git a/_configuration/src/opentelemetry/configuration/_internal/__init__.py b/_configuration/src/opentelemetry/configuration/_internal/__init__.py index 891fbd10599..f082fc549ad 100644 --- a/_configuration/src/opentelemetry/configuration/_internal/__init__.py +++ b/_configuration/src/opentelemetry/configuration/_internal/__init__.py @@ -26,6 +26,7 @@ from jsonschema.validators import Draft202012Validator from referencing import Registry, Resource from yaml import safe_load +from black import Mode, format_str from opentelemetry.configuration._internal.path_function import path_function @@ -69,7 +70,6 @@ def validate_configuration(schema_path: Path, configuration: dict): raise Exception(f"{schema_path} does not exist") def retrieve_from_path(path: str): - set_trace() return Resource.from_contents(json_loads(Path(path).read_text())) Draft202012Validator( @@ -279,7 +279,10 @@ def traverse( def create_object( - configuration: dict, processed_schema: dict, object_name: str + configuration: dict, + processed_schema: dict, + object_name: str, + dry_run=False ) -> object: def create_object( configuration: dict, @@ -287,6 +290,7 @@ def create_object( path_function: dict, original_processed_schema: dict, original_path_function: dict, + dry_run=False ) -> object: positional_arguments = [] @@ -355,18 +359,29 @@ def create_object( else: optional_arguments[configuration_key] = object_ - return path_function["function"]( + result = path_function["function"]( *positional_arguments, **optional_arguments ) - - return create_object( + if dry_run: + return result[1] + elif isinstance(result, tuple): + return result[0] + else: + return result + + result = create_object( configuration[object_name], processed_schema[object_name], path_function[object_name], processed_schema, path_function, + dry_run=dry_run, ) + if isinstance(result, str): + return 
format_str(result, mode=Mode(line_length=1)) + return result + def substitute_environment_variables( configuration: dict, processed_schema: dict diff --git a/_configuration/src/opentelemetry/configuration/_internal/path_function.py b/_configuration/src/opentelemetry/configuration/_internal/path_function.py index ca9da03f549..bccf9231f2f 100644 --- a/_configuration/src/opentelemetry/configuration/_internal/path_function.py +++ b/_configuration/src/opentelemetry/configuration/_internal/path_function.py @@ -22,7 +22,7 @@ OTLPSpanExporter as HTTPOTLPSpanExporter, ) from opentelemetry.exporter.zipkin.proto.http import ZipkinExporter -from opentelemetry.sdk.resources import Resource +from opentelemetry.sdk.resources import Resource # noqa from opentelemetry.sdk.trace import ( SpanLimits, SynchronousMultiSpanProcessor, @@ -38,6 +38,9 @@ ALWAYS_ON, ParentBasedTraceIdRatio, ) +from ipdb import set_trace + +set_trace _resource = None @@ -634,11 +637,19 @@ def tracer_provider_sampler_trace_id_ratio_based(ratio: float = None): def resource(attributes: object = None, schema_url: str = None): - return Resource.create(attributes=attributes, schema_url=schema_url) + command = ( + f'resource = Resource.create(attributes={attributes}, ' + f'schema_url="{schema_url}")' + ) + exec(command) + return locals()["resource"], command def resource_attributes(service_name: str = None, **kwargs): - return {"service.name": service_name, **kwargs} + command = str({"service.name": service_name, **kwargs}) + command = f'resource_attributes = {command}' + exec(command) + return locals()["resource_attributes"], command path_function = { diff --git a/_configuration/tests/test_configuration.py b/_configuration/tests/test_configuration.py index 95c53c18b83..253cae28bd4 100644 --- a/_configuration/tests/test_configuration.py +++ b/_configuration/tests/test_configuration.py @@ -32,6 +32,8 @@ ) from opentelemetry.configuration._internal.path_function import set_resource +set_trace + data_path = Path(__file__).parent.joinpath("data") @@ -152,8 +154,6 @@ def test_subschemas(): # dictionary the schema components of each plugin component sub schema then # use the resulting schema dictionary to do the validation. - set_trace() - configuration = load_configuration( data_path.joinpath("configuration").joinpath("configuration_0.yaml") ) @@ -161,3 +161,27 @@ def test_subschemas(): # FIXME do the same for configuration components Draft202012Validator(resolved_schema).validate(configuration) + + +def test_dry_run(): + + configuration = load_configuration( + data_path.joinpath("configuration").joinpath("configuration_0.yaml") + ) + + schema_path = data_path.joinpath("schema").joinpath( + "opentelemetry_configuration.json" + ) + + try: + validate_configuration(schema_path, configuration) + except Exception as error: + fail(f"Unexpected exception raised: {error}") + + processed_schema = process_schema(resolve_schema(schema_path)) + + result = create_object( + configuration, processed_schema, "resource", dry_run=True + ) + print() + print(result) From 449531f8029426a100ad734f75cd07b9ad04a61c Mon Sep 17 00:00:00 2001 From: Diego Hurtado Date: Mon, 22 Jan 2024 23:10:18 -0600 Subject: [PATCH 07/18] Revert "Add dry_run feature" This reverts commit 98f7474fa7d1d8df9f06e2c60969c014d0f16bc0. 
--- .../configuration/_internal/__init__.py | 25 ++++------------- .../configuration/_internal/path_function.py | 17 ++--------- _configuration/tests/test_configuration.py | 28 ++----------------- 3 files changed, 10 insertions(+), 60 deletions(-) diff --git a/_configuration/src/opentelemetry/configuration/_internal/__init__.py b/_configuration/src/opentelemetry/configuration/_internal/__init__.py index f082fc549ad..891fbd10599 100644 --- a/_configuration/src/opentelemetry/configuration/_internal/__init__.py +++ b/_configuration/src/opentelemetry/configuration/_internal/__init__.py @@ -26,7 +26,6 @@ from jsonschema.validators import Draft202012Validator from referencing import Registry, Resource from yaml import safe_load -from black import Mode, format_str from opentelemetry.configuration._internal.path_function import path_function @@ -70,6 +69,7 @@ def validate_configuration(schema_path: Path, configuration: dict): raise Exception(f"{schema_path} does not exist") def retrieve_from_path(path: str): + set_trace() return Resource.from_contents(json_loads(Path(path).read_text())) Draft202012Validator( @@ -279,10 +279,7 @@ def traverse( def create_object( - configuration: dict, - processed_schema: dict, - object_name: str, - dry_run=False + configuration: dict, processed_schema: dict, object_name: str ) -> object: def create_object( configuration: dict, @@ -290,7 +287,6 @@ def create_object( path_function: dict, original_processed_schema: dict, original_path_function: dict, - dry_run=False ) -> object: positional_arguments = [] @@ -359,29 +355,18 @@ def create_object( else: optional_arguments[configuration_key] = object_ - result = path_function["function"]( + return path_function["function"]( *positional_arguments, **optional_arguments ) - if dry_run: - return result[1] - elif isinstance(result, tuple): - return result[0] - else: - return result - - result = create_object( + + return create_object( configuration[object_name], processed_schema[object_name], path_function[object_name], processed_schema, path_function, - dry_run=dry_run, ) - if isinstance(result, str): - return format_str(result, mode=Mode(line_length=1)) - return result - def substitute_environment_variables( configuration: dict, processed_schema: dict diff --git a/_configuration/src/opentelemetry/configuration/_internal/path_function.py b/_configuration/src/opentelemetry/configuration/_internal/path_function.py index bccf9231f2f..ca9da03f549 100644 --- a/_configuration/src/opentelemetry/configuration/_internal/path_function.py +++ b/_configuration/src/opentelemetry/configuration/_internal/path_function.py @@ -22,7 +22,7 @@ OTLPSpanExporter as HTTPOTLPSpanExporter, ) from opentelemetry.exporter.zipkin.proto.http import ZipkinExporter -from opentelemetry.sdk.resources import Resource # noqa +from opentelemetry.sdk.resources import Resource from opentelemetry.sdk.trace import ( SpanLimits, SynchronousMultiSpanProcessor, @@ -38,9 +38,6 @@ ALWAYS_ON, ParentBasedTraceIdRatio, ) -from ipdb import set_trace - -set_trace _resource = None @@ -637,19 +634,11 @@ def tracer_provider_sampler_trace_id_ratio_based(ratio: float = None): def resource(attributes: object = None, schema_url: str = None): - command = ( - f'resource = Resource.create(attributes={attributes}, ' - f'schema_url="{schema_url}")' - ) - exec(command) - return locals()["resource"], command + return Resource.create(attributes=attributes, schema_url=schema_url) def resource_attributes(service_name: str = None, **kwargs): - command = str({"service.name": service_name, 
**kwargs}) - command = f'resource_attributes = {command}' - exec(command) - return locals()["resource_attributes"], command + return {"service.name": service_name, **kwargs} path_function = { diff --git a/_configuration/tests/test_configuration.py b/_configuration/tests/test_configuration.py index 253cae28bd4..95c53c18b83 100644 --- a/_configuration/tests/test_configuration.py +++ b/_configuration/tests/test_configuration.py @@ -32,8 +32,6 @@ ) from opentelemetry.configuration._internal.path_function import set_resource -set_trace - data_path = Path(__file__).parent.joinpath("data") @@ -154,6 +152,8 @@ def test_subschemas(): # dictionary the schema components of each plugin component sub schema then # use the resulting schema dictionary to do the validation. + set_trace() + configuration = load_configuration( data_path.joinpath("configuration").joinpath("configuration_0.yaml") ) @@ -161,27 +161,3 @@ def test_subschemas(): # FIXME do the same for configuration components Draft202012Validator(resolved_schema).validate(configuration) - - -def test_dry_run(): - - configuration = load_configuration( - data_path.joinpath("configuration").joinpath("configuration_0.yaml") - ) - - schema_path = data_path.joinpath("schema").joinpath( - "opentelemetry_configuration.json" - ) - - try: - validate_configuration(schema_path, configuration) - except Exception as error: - fail(f"Unexpected exception raised: {error}") - - processed_schema = process_schema(resolve_schema(schema_path)) - - result = create_object( - configuration, processed_schema, "resource", dry_run=True - ) - print() - print(result) From 24822573c853e4db7eaf81d8e0da55e0848a6785 Mon Sep 17 00:00:00 2001 From: Diego Hurtado Date: Tue, 23 Jan 2024 00:42:59 -0600 Subject: [PATCH 08/18] Refactor dry run feature --- _configuration/noxfile.py | 3 ++ _configuration/pyproject.toml | 3 +- .../configuration/_internal/__init__.py | 12 +++-- .../configuration/_internal/path_function.py | 53 ++++++++++++------- _configuration/tests/test_configuration.py | 31 ++++++++++- .../opentelemetry/sdk/resources/__init__.py | 17 ++++++ .../src/opentelemetry/sdk/trace/__init__.py | 8 +++ .../src/opentelemetry/sdk/trace/sampling.py | 24 +++++++++ 8 files changed, 127 insertions(+), 24 deletions(-) diff --git a/_configuration/noxfile.py b/_configuration/noxfile.py index 614a99fd136..0a84d2099c0 100644 --- a/_configuration/noxfile.py +++ b/_configuration/noxfile.py @@ -5,6 +5,9 @@ def test(session): session.install(".") session.install("-r", "requirements.txt") + session.install("../opentelemetry-api") + session.install("../opentelemetry-semantic-conventions") + session.install("../opentelemetry-sdk") if session.posargs: session.run("pytest", *session.posargs) diff --git a/_configuration/pyproject.toml b/_configuration/pyproject.toml index 3f4d97ee97e..0f0cd497845 100644 --- a/_configuration/pyproject.toml +++ b/_configuration/pyproject.toml @@ -34,7 +34,8 @@ dependencies = [ "jsonschema", "pyyaml", "jsonref", - "jinja2" + "jinja2", + "black" ] [project.urls] diff --git a/_configuration/src/opentelemetry/configuration/_internal/__init__.py b/_configuration/src/opentelemetry/configuration/_internal/__init__.py index 891fbd10599..3e71ef7996e 100644 --- a/_configuration/src/opentelemetry/configuration/_internal/__init__.py +++ b/_configuration/src/opentelemetry/configuration/_internal/__init__.py @@ -26,6 +26,7 @@ from jsonschema.validators import Draft202012Validator from referencing import Registry, Resource from yaml import safe_load +from black import format_str, Mode 
from opentelemetry.configuration._internal.path_function import path_function @@ -69,7 +70,6 @@ def validate_configuration(schema_path: Path, configuration: dict): raise Exception(f"{schema_path} does not exist") def retrieve_from_path(path: str): - set_trace() return Resource.from_contents(json_loads(Path(path).read_text())) Draft202012Validator( @@ -279,7 +279,10 @@ def traverse( def create_object( - configuration: dict, processed_schema: dict, object_name: str + configuration: dict, + processed_schema: dict, + object_name: str, + dry_run=False ) -> object: def create_object( configuration: dict, @@ -359,13 +362,16 @@ def create_object( *positional_arguments, **optional_arguments ) - return create_object( + result = create_object( configuration[object_name], processed_schema[object_name], path_function[object_name], processed_schema, path_function, ) + if dry_run: + return format_str(repr(result), mode=Mode(line_length=1)) + return result def substitute_environment_variables( diff --git a/_configuration/src/opentelemetry/configuration/_internal/path_function.py b/_configuration/src/opentelemetry/configuration/_internal/path_function.py index ca9da03f549..5a08aa5dd04 100644 --- a/_configuration/src/opentelemetry/configuration/_internal/path_function.py +++ b/_configuration/src/opentelemetry/configuration/_internal/path_function.py @@ -42,6 +42,23 @@ _resource = None +class MockSampler(Mock): + + def __init__(self, *args, **kwargs) -> None: + super().__init__(*args, **kwargs) + self._args = args + self._kwargs = kwargs + + def __repr__(self) -> str: + args = list(self._args).copy() + kwargs = self._kwargs.copy() + + kwargs.pop("type") + args.extend([f"{key}={value}" for key, value in kwargs.items()]) + + return f'{self.type}({", ".join(args)})' + + def set_resource(resource): global _resource _resource = resource @@ -468,8 +485,8 @@ def tracer_provider_sampler( trace_id_ratio_based: object = None, **kwargs ): - return Mock( - type="sampler", + return MockSampler( + type="Sampler", always_off=always_off, always_on=always_on, jaeger_remote=jaeger_remote, @@ -490,8 +507,8 @@ def tracer_provider_sampler_always_on(): def tracer_provider_sampler_jaeger_remote( endpoint: str = None, initial_sampler: object = None, interval: int = None ): - return Mock( - type="jaeger_remote", + return MockSampler( + type="JaegerRemoteSampler", endpoint=endpoint, initial_sampler=initial_sampler, interval=interval, @@ -506,8 +523,8 @@ def tracer_provider_sampler_jaeger_remote_initial_sampler( trace_id_ratio_based: object = None, **kwargs ): - return Mock( - type="initial_sampler", + return MockSampler( + type="InitialSamplerSampler", always_off=always_off, always_on=always_on, jaeger_remote=jaeger_remote, @@ -524,8 +541,8 @@ def tracer_provider_sampler_parent_based( remote_parent_sampled: object = None, root: object = None, ): - return Mock( - type="parent_based", + return MockSampler( + type="ParentBasedSampler", local_parent_not_sampled=local_parent_not_sampled, local_parent_sampled=local_parent_sampled, remote_parent_not_sampled=remote_parent_not_sampled, @@ -542,8 +559,8 @@ def tracer_provider_sampler_parent_based_root( trace_id_ratio_based: object = None, **kwargs ): - return Mock( - type="root", + return MockSampler( + type="RootSampler", always_off=always_off, always_on=always_on, jaeger_remote=jaeger_remote, @@ -561,8 +578,8 @@ def tracer_provider_sampler_parent_based_remote_parent_sampled( trace_id_ratio_based: object = None, **kwargs ): - return Mock( - type="remote_parent_sampled", + return MockSampler( 
+ type="RemoteParentSampledSampler", always_off=always_off, always_on=always_on, jaeger_remote=jaeger_remote, @@ -580,8 +597,8 @@ def tracer_provider_sampler_parent_based_remote_parent_not_sampled( trace_id_ratio_based: object = None, **kwargs ): - return Mock( - type="remote_parent_not_sampled", + return MockSampler( + type="RemoteParentNotSampledSampler", always_off=always_off, always_on=always_on, jaeger_remote=jaeger_remote, @@ -599,8 +616,8 @@ def tracer_provider_sampler_parent_based_local_parent_sampled( trace_id_ratio_based: object = None, **kwargs ): - return Mock( - type="local_parent_sampled", + return MockSampler( + type="LocalParentSampledSampler", always_off=always_off, always_on=always_on, jaeger_remote=jaeger_remote, @@ -618,8 +635,8 @@ def tracer_provider_sampler_parent_based_local_parent_not_sampled( trace_id_ratio_based: object = None, **kwargs ): - return Mock( - type="local_parent_not_sampled", + return MockSampler( + type="LocalParentNotSampledSampler", always_off=always_off, always_on=always_on, jaeger_remote=jaeger_remote, diff --git a/_configuration/tests/test_configuration.py b/_configuration/tests/test_configuration.py index 95c53c18b83..bb1244e2287 100644 --- a/_configuration/tests/test_configuration.py +++ b/_configuration/tests/test_configuration.py @@ -32,6 +32,8 @@ ) from opentelemetry.configuration._internal.path_function import set_resource +set_trace + data_path = Path(__file__).parent.joinpath("data") @@ -152,8 +154,6 @@ def test_subschemas(): # dictionary the schema components of each plugin component sub schema then # use the resulting schema dictionary to do the validation. - set_trace() - configuration = load_configuration( data_path.joinpath("configuration").joinpath("configuration_0.yaml") ) @@ -161,3 +161,30 @@ def test_subschemas(): # FIXME do the same for configuration components Draft202012Validator(resolved_schema).validate(configuration) + + +def test_dry_run(): + + configuration = load_configuration( + data_path.joinpath("configuration").joinpath("configuration_0.yaml") + ) + + schema_path = data_path.joinpath("schema").joinpath( + "opentelemetry_configuration.json" + ) + + try: + validate_configuration(schema_path, configuration) + except Exception as error: + fail(f"Unexpected exception raised: {error}") + + processed_schema = process_schema(resolve_schema(schema_path)) + + set_resource(create_object(configuration, processed_schema, "resource")) + + print() + print( + create_object( + configuration, processed_schema, "tracer_provider", dry_run=True + ) + ) diff --git a/opentelemetry-sdk/src/opentelemetry/sdk/resources/__init__.py b/opentelemetry-sdk/src/opentelemetry/sdk/resources/__init__.py index 0ebd42349c4..a19ceec569c 100644 --- a/opentelemetry-sdk/src/opentelemetry/sdk/resources/__init__.py +++ b/opentelemetry-sdk/src/opentelemetry/sdk/resources/__init__.py @@ -166,6 +166,23 @@ def __init__( schema_url = "" self._schema_url = schema_url + args = [] + kwargs = {} + + args.append(attributes) + + if schema_url is not None: + kwargs["schema_url"] = schema_url + + self._args_kwargs = [repr(arg) for arg in args] + self._args_kwargs.extend( + [f"{key}={repr(value)}" for key, value in kwargs.items()] + ) + self._args_kwargs = ", ".join(self._args_kwargs) + + def __repr__(self) -> str: + return f"{self.__class__.__name__}(repr(self._args_kwargs))" + @staticmethod def create( attributes: typing.Optional[Attributes] = None, diff --git a/opentelemetry-sdk/src/opentelemetry/sdk/trace/__init__.py 
b/opentelemetry-sdk/src/opentelemetry/sdk/trace/__init__.py index 58cbf01e08b..653c577bf09 100644 --- a/opentelemetry-sdk/src/opentelemetry/sdk/trace/__init__.py +++ b/opentelemetry-sdk/src/opentelemetry/sdk/trace/__init__.py @@ -1221,6 +1221,14 @@ def __init__( if shutdown_on_exit: self._atexit_handler = atexit.register(self.shutdown) + def __repr__(self) -> str: + return ( + f"{self.__class__.__name__}(" + f"{repr(self.sampler)}," + f"{repr(self._resource)}," + ")" + ) + @property def resource(self) -> Resource: return self._resource diff --git a/opentelemetry-sdk/src/opentelemetry/sdk/trace/sampling.py b/opentelemetry-sdk/src/opentelemetry/sdk/trace/sampling.py index f3f9bf850f1..16faee4ff82 100644 --- a/opentelemetry-sdk/src/opentelemetry/sdk/trace/sampling.py +++ b/opentelemetry-sdk/src/opentelemetry/sdk/trace/sampling.py @@ -165,6 +165,9 @@ def is_recording(self): def is_sampled(self): return self is Decision.RECORD_AND_SAMPLE + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self.value})" + class SamplingResult: """A sampling result as applied to a newly-created Span. @@ -195,6 +198,14 @@ def __init__( class Sampler(abc.ABC): + + def __init__(self, *args, **kwargs) -> None: + self._args_kwargs = [repr(arg) for arg in args] + self._args_kwargs.extend( + [f"{key}={repr(value)}" for key, value in kwargs.items()] + ) + self._args_kwargs = ", ".join(self._args_kwargs) + @abc.abstractmethod def should_sample( self, @@ -212,11 +223,15 @@ def should_sample( def get_description(self) -> str: pass + def __repr__(self) -> str: + return f'{self.__class__.__name__}({self._args_kwargs})' + class StaticSampler(Sampler): """Sampler that always returns the same decision.""" def __init__(self, decision: "Decision") -> None: + super().__init__(decision) self._decision = decision def should_sample( @@ -259,6 +274,7 @@ class TraceIdRatioBased(Sampler): """ def __init__(self, rate: float): + super().__init__(rate) if rate < 0.0 or rate > 1.0: raise ValueError("Probability must be in range [0.0, 1.0].") self._rate = rate @@ -329,6 +345,13 @@ def __init__( local_parent_sampled: Sampler = ALWAYS_ON, local_parent_not_sampled: Sampler = ALWAYS_OFF, ): + super().__init__( + root, + remote_parent_sampled, + remote_parent_not_sampled, + local_parent_sampled, + local_parent_not_sampled + ) self._root = root self._remote_parent_sampled = remote_parent_sampled self._remote_parent_not_sampled = remote_parent_not_sampled @@ -390,6 +413,7 @@ class ParentBasedTraceIdRatio(ParentBased): """ def __init__(self, rate: float): + super().__init__(rate) root = TraceIdRatioBased(rate=rate) super().__init__(root=root) From d88dac020f3002fbf84a7f15271d5e054ac5e22c Mon Sep 17 00:00:00 2001 From: Diego Hurtado Date: Tue, 23 Jan 2024 00:59:07 -0600 Subject: [PATCH 09/18] Fix string formatting --- opentelemetry-sdk/src/opentelemetry/sdk/resources/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/opentelemetry-sdk/src/opentelemetry/sdk/resources/__init__.py b/opentelemetry-sdk/src/opentelemetry/sdk/resources/__init__.py index a19ceec569c..7e6c997f003 100644 --- a/opentelemetry-sdk/src/opentelemetry/sdk/resources/__init__.py +++ b/opentelemetry-sdk/src/opentelemetry/sdk/resources/__init__.py @@ -181,7 +181,7 @@ def __init__( self._args_kwargs = ", ".join(self._args_kwargs) def __repr__(self) -> str: - return f"{self.__class__.__name__}(repr(self._args_kwargs))" + return f"{self.__class__.__name__}(repr{self._args_kwargs})" @staticmethod def create( From 
8f7d7ce1cf902059b86a7d5257225f2f3b7d4a2a Mon Sep 17 00:00:00 2001 From: Diego Hurtado Date: Tue, 23 Jan 2024 23:08:46 -0600 Subject: [PATCH 10/18] Rename to file configuration --- _configuration/README.rst | 44 ---- .../LICENSE | 0 _file_configuration/README.rst | 44 ++++ .../noxfile.py | 2 +- .../pyproject.toml | 11 +- .../requirements.txt | 0 .../file_configuration}/__init__.py | 14 +- .../file_configuration}/_internal/__init__.py | 202 ++++++++++++++---- .../_internal/path_function.py | 0 .../_internal/templates/template.jinja2 | 0 .../file_configuration}/py.typed | 0 .../file_configuration}/version.py | 0 .../tests/__init__.py | 0 .../file_configuration_0.yaml | 4 +- .../file_configuration_1.yaml | 4 +- .../tests/data/schema/common.json | 0 .../tests/data/schema/logger_provider.json | 0 .../tests/data/schema/meter_provider.json | 0 .../opentelemetry_file_configuration.json | 4 +- .../tests/data/schema/propagator.json | 0 .../tests/data/schema/resource.json | 0 .../tests/data/schema/tracer_provider.json | 0 .../tests/test_file_configuration.py | 58 ++--- 23 files changed, 256 insertions(+), 131 deletions(-) delete mode 100644 _configuration/README.rst rename {_configuration => _file_configuration}/LICENSE (100%) create mode 100644 _file_configuration/README.rst rename {_configuration => _file_configuration}/noxfile.py (85%) rename {_configuration => _file_configuration}/pyproject.toml (78%) rename {_configuration => _file_configuration}/requirements.txt (100%) rename {_configuration/src/opentelemetry/configuration => _file_configuration/src/opentelemetry/file_configuration}/__init__.py (73%) rename {_configuration/src/opentelemetry/configuration => _file_configuration/src/opentelemetry/file_configuration}/_internal/__init__.py (66%) rename {_configuration/src/opentelemetry/configuration => _file_configuration/src/opentelemetry/file_configuration}/_internal/path_function.py (100%) rename {_configuration/src/opentelemetry/configuration => _file_configuration/src/opentelemetry/file_configuration}/_internal/templates/template.jinja2 (100%) rename {_configuration/src/opentelemetry/configuration => _file_configuration/src/opentelemetry/file_configuration}/py.typed (100%) rename {_configuration/src/opentelemetry/configuration => _file_configuration/src/opentelemetry/file_configuration}/version.py (100%) rename {_configuration => _file_configuration}/tests/__init__.py (100%) rename _configuration/tests/data/configuration/configuration_0.yaml => _file_configuration/tests/data/file_configuration/file_configuration_0.yaml (98%) rename _configuration/tests/data/configuration/configuration_1.yaml => _file_configuration/tests/data/file_configuration/file_configuration_1.yaml (98%) rename {_configuration => _file_configuration}/tests/data/schema/common.json (100%) rename {_configuration => _file_configuration}/tests/data/schema/logger_provider.json (100%) rename {_configuration => _file_configuration}/tests/data/schema/meter_provider.json (100%) rename _configuration/tests/data/schema/opentelemetry_configuration.json => _file_configuration/tests/data/schema/opentelemetry_file_configuration.json (89%) rename {_configuration => _file_configuration}/tests/data/schema/propagator.json (100%) rename {_configuration => _file_configuration}/tests/data/schema/resource.json (100%) rename {_configuration => _file_configuration}/tests/data/schema/tracer_provider.json (100%) rename _configuration/tests/test_configuration.py => _file_configuration/tests/test_file_configuration.py (67%) diff --git 
a/_configuration/README.rst b/_configuration/README.rst deleted file mode 100644 index a2ae7064a44..00000000000 --- a/_configuration/README.rst +++ /dev/null @@ -1,44 +0,0 @@ -OpenTelemetry Python Configuration Prototype -============================================ - -This component is EXPERIMENTAL and subject to any kind of change at any moment. - -This prototype first needs the ``src/opentelemetry/configuration/_interna/path_function.py`` -to be generated with the ``opentelemetry.configuration.render_schema`` function. - -Once this file is generated, implement the functions defined there. - -To create any provider object first create a ``Resource`` object: - -.. code-block:: python - - from opentelemetry.configuration._internal.path_function import set_resource - from opentelemetry.configuration import ( - resolve_schema, - process_schema, - create_object, - validate_configuration, - ) - from pathlib import Path - - data_path = Path(__file__).parent.joinpath("data") - - configuration = validate_configuration( - data_path.joinpath("kitchen-sink.yaml") - ) - - processed_schema = process_schema( - resolve_schema( - data_path.joinpath("opentelemetry_configuration.json") - ) - ) - - set_resource( - create_object(configuration, processed_schema, "resource") - ) - - tracer_provider = create_object( - configuration, processed_schema, "tracer_provider" - ) - -To run the tests, just run ``nox`` from the directory where ``noxfile.py`` is. diff --git a/_configuration/LICENSE b/_file_configuration/LICENSE similarity index 100% rename from _configuration/LICENSE rename to _file_configuration/LICENSE diff --git a/_file_configuration/README.rst b/_file_configuration/README.rst new file mode 100644 index 00000000000..2cdf0c4b5fa --- /dev/null +++ b/_file_configuration/README.rst @@ -0,0 +1,44 @@ +OpenTelemetry Python File Configuration Prototype +================================================= + +This component is EXPERIMENTAL and subject to any kind of change at any moment. + +This prototype first needs the ``src/opentelemetry/file_configuration/_interna/path_function.py`` +to be generated with the ``opentelemetry.file_configuration.render_schema`` function. + +Once this file is generated, implement the functions defined there. + +To create any provider object first create a ``Resource`` object: + +.. code-block:: python + + from opentelemetry.file_configuration._internal.path_function import set_resource + from opentelemetry.file_configuration import ( + resolve_schema, + process_schema, + create_object, + validate_file_configuration, + ) + from pathlib import Path + + data_path = Path(__file__).parent.joinpath("data") + + file_configuration = validate_file_configuration( + data_path.joinpath("kitchen-sink.yaml") + ) + + processed_schema = process_schema( + resolve_schema( + data_path.joinpath("opentelemetry_file_configuration.json") + ) + ) + + set_resource( + create_object(file_configuration, processed_schema, "resource") + ) + + tracer_provider = create_object( + file_configuration, processed_schema, "tracer_provider" + ) + +To run the tests, just run ``nox`` from the directory where ``noxfile.py`` is. 
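
Continuing the README example above — a minimal, illustrative sketch (it assumes the
``tracer_provider`` object created by the previous snippet and uses only the standard
``opentelemetry-api`` entry points, not anything specific to this prototype):

.. code-block:: python

    from opentelemetry import trace

    # Install the provider built from the configuration file as the global
    # tracer provider, then emit a span through it.
    trace.set_tracer_provider(tracer_provider)

    tracer = trace.get_tracer(__name__)

    with tracer.start_as_current_span("file-configuration-example"):
        pass
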
diff --git a/_configuration/noxfile.py b/_file_configuration/noxfile.py similarity index 85% rename from _configuration/noxfile.py rename to _file_configuration/noxfile.py index 0a84d2099c0..f9fe62186f7 100644 --- a/_configuration/noxfile.py +++ b/_file_configuration/noxfile.py @@ -12,4 +12,4 @@ def test(session): if session.posargs: session.run("pytest", *session.posargs) else: - session.run("pytest", "tests/test_configuration.py") + session.run("pytest", "tests/test_file_configuration.py") diff --git a/_configuration/pyproject.toml b/_file_configuration/pyproject.toml similarity index 78% rename from _configuration/pyproject.toml rename to _file_configuration/pyproject.toml index 0f0cd497845..b49fa1f455a 100644 --- a/_configuration/pyproject.toml +++ b/_file_configuration/pyproject.toml @@ -3,9 +3,9 @@ requires = ["hatchling"] build-backend = "hatchling.build" [project] -name = "opentelemetry-configuration" +name = "opentelemetry-file-configuration" dynamic = ["version"] -description = "OpenTelemetry Python Configuration" +description = "OpenTelemetry Python File Configuration" readme = "README.rst" license = "Apache-2.0" requires-python = ">=3.7" @@ -39,10 +39,13 @@ dependencies = [ ] [project.urls] -Homepage = "https://github.com/open-telemetry/opentelemetry-python/tree/main/opentelemetry-configuration" +Homepage = "https://github.com/open-telemetry/opentelemetry-python/tree/main/opentelemetry-file-configuration" + +[project.entry-points.opentelemetry_id_generator] +sometimes_monday_on_sampler = "opentelemetry.sdk.trace.id_generator:RandomIdGenerator" [tool.hatch.version] -path = "src/opentelemetry/configuration/version.py" +path = "src/opentelemetry/file_configuration/version.py" [tool.hatch.build.targets.sdist] include = [ diff --git a/_configuration/requirements.txt b/_file_configuration/requirements.txt similarity index 100% rename from _configuration/requirements.txt rename to _file_configuration/requirements.txt diff --git a/_configuration/src/opentelemetry/configuration/__init__.py b/_file_configuration/src/opentelemetry/file_configuration/__init__.py similarity index 73% rename from _configuration/src/opentelemetry/configuration/__init__.py rename to _file_configuration/src/opentelemetry/file_configuration/__init__.py index 173a4b6466e..f714661ea0b 100644 --- a/_configuration/src/opentelemetry/configuration/__init__.py +++ b/_file_configuration/src/opentelemetry/file_configuration/__init__.py @@ -13,27 +13,27 @@ # limitations under the License. 
""" -The OpenTelemetry Configuration package is an implementation of the -OpenTelemetry Configuration Specification +The OpenTelemetry File Configuration package is an implementation of the +OpenTelemetry File Configuration Specification """ -from opentelemetry.configuration._internal import ( +from opentelemetry.file_configuration._internal import ( create_object, - load_configuration, + load_file_configuration, process_schema, render_schema, resolve_schema, substitute_environment_variables, - validate_configuration, + validate_file_configuration, ) __all__ = [ "resolve_schema", - "validate_configuration", + "validate_file_configuration", "process_schema", "render_schema", "create_object", - "load_configuration", + "load_file_configuration", "substitute_environment_variables", ] diff --git a/_configuration/src/opentelemetry/configuration/_internal/__init__.py b/_file_configuration/src/opentelemetry/file_configuration/_internal/__init__.py similarity index 66% rename from _configuration/src/opentelemetry/configuration/_internal/__init__.py rename to _file_configuration/src/opentelemetry/file_configuration/_internal/__init__.py index 3e71ef7996e..60d0c13805e 100644 --- a/_configuration/src/opentelemetry/configuration/_internal/__init__.py +++ b/_file_configuration/src/opentelemetry/file_configuration/_internal/__init__.py @@ -18,6 +18,16 @@ from os.path import exists from pathlib import Path from re import compile as re_compile +from abc import ABC, abstractmethod +from logging import getLogger +from datetime import datetime +from random import random + +from typing import Optional, Sequence +from opentelemetry.trace import Link, SpanKind +from opentelemetry.trace.span import TraceState +from opentelemetry.util.types import Attributes +from opentelemetry.context import Context from ipdb import set_trace from jinja2 import Environment, FileSystemLoader @@ -27,8 +37,12 @@ from referencing import Registry, Resource from yaml import safe_load from black import format_str, Mode +from opentelemetry.util._importlib_metadata import entry_points + +from opentelemetry.file_configuration._internal.path_function import path_function +from opentelemetry.sdk.trace.sampling import Sampler, SamplingResult -from opentelemetry.configuration._internal.path_function import path_function +_logger = getLogger(__file__) set_trace @@ -43,6 +57,94 @@ } +class FileConfigurationPlugin(ABC): + + @property + @abstractmethod + def schema(self) -> dict: + """ + Returns the plugin schema. + """ + + @property + @abstractmethod + def schema_path(self) -> list: + """ + Returns the path for the plugin schema. + """ + + @staticmethod + @abstractmethod + def function(*args, **kwargs) -> object: + """ + The function that will instantiate the plugin object. + """ + + +class SometimesMondayOnSampler(Sampler): + """ + A sampler that samples only on Mondays, but sometimes. + """ + + def __init__(self, probability: float) -> None: + super().__init__(probability) + self._probability = probability + + def should_sample( + self, + parent_context: Optional["Context"], + trace_id: int, + name: str, + kind: SpanKind = None, + attributes: Attributes = None, + links: Sequence["Link"] = None, + trace_state: "TraceState" = None, + ) -> SamplingResult: + return datetime.now().weekday() == 0 and random() < self._probability + + def get_description(self) -> str: + return self.__class__.__name__ + + +class SometimesMondaysOnSamplerPlugin(FileConfigurationPlugin): + + @property + def schema(self) -> dict: + """ + Returns the plugin schema. 
+ """ + return { + "sometimes_monday_on": { + "type": "object", + "additionalProperties": False, + "properties": { + "probability": { + "type": "number" + }, + } + } + } + + @property + def schema_path(self) -> list: + """ + Returns the path for the plugin schema. + """ + return [ + "properties", + "tracer_provider", + "properties", + "sampler", + "properties" + ] + + @staticmethod + def function(probability: float) -> object: + """ + The function that will instantiate the plugin object. + """ + + def resolve_schema(json_file_path) -> dict: root_path = json_file_path.absolute() @@ -52,17 +154,37 @@ def resolve_schema(json_file_path) -> dict: json_file.read(), base_uri=root_path.as_uri() ) + for entry_point in entry_points(group="opentelemetry_file_configuration"): + + plugin = entry_point.load()() + + sub_dictionary = dictionary + + schema_path = [] + + for schema_path_part in plugin.schema_path: + schema_path.append(schema_path_part) + try: + sub_dictionary = sub_dictionary[schema_path_part] + except KeyError: + _logger.warning( + "Unable to add plugin %s to schema: wrong path %s", + entry_point.name, + ",".join(schema_path) + ) + break + return dictionary -def load_configuration(configuration_file_path: str) -> dict: +def load_file_configuration(file_configuration_file_path: str) -> dict: - with open(configuration_file_path, "r") as configuration_file: + with open(file_configuration_file_path, "r") as file_configuration_file: - return safe_load(configuration_file) + return safe_load(file_configuration_file) -def validate_configuration(schema_path: Path, configuration: dict): +def validate_file_configuration(schema_path: Path, file_configuration: dict): schema_path = str(schema_path) @@ -74,7 +196,7 @@ def retrieve_from_path(path: str): Draft202012Validator( {"$ref": schema_path}, registry=Registry(retrieve=retrieve_from_path) - ).validate(configuration) + ).validate(file_configuration) def process_schema(schema: dict) -> dict: @@ -279,13 +401,13 @@ def traverse( def create_object( - configuration: dict, + file_configuration: dict, processed_schema: dict, object_name: str, dry_run=False ) -> object: def create_object( - configuration: dict, + file_configuration: dict, processed_schema: dict, path_function: dict, original_processed_schema: dict, @@ -295,9 +417,9 @@ def create_object( positional_arguments = [] optional_arguments = {} - for configuration_key, configuration_value in configuration.items(): + for file_configuration_key, file_configuration_value in file_configuration.items(): - if isinstance(configuration_value, dict): + if isinstance(file_configuration_value, dict): if processed_schema["recursive_path"]: @@ -311,36 +433,36 @@ def create_object( new_path_function = new_path_function[path]["children"] new_processed_schema = new_processed_schema[ - configuration_key + file_configuration_key ] - new_path_function = new_path_function[configuration_key] + new_path_function = new_path_function[file_configuration_key] else: new_processed_schema = processed_schema["children"][ - configuration_key + file_configuration_key ] new_path_function = path_function["children"][ - configuration_key + file_configuration_key ] object_ = create_object( - configuration_value, + file_configuration_value, new_processed_schema, new_path_function, original_processed_schema, original_path_function, ) - elif isinstance(configuration_value, list): + elif isinstance(file_configuration_value, list): object_ = [] - for element in configuration_value: + for element in file_configuration_value: object_.append( 
create_object( element, - processed_schema["children"][configuration_key], - path_function["children"][configuration_key], + processed_schema["children"][file_configuration_key], + path_function["children"][file_configuration_key], original_processed_schema, original_path_function, ) @@ -348,22 +470,22 @@ def create_object( else: - object_ = configuration_value + object_ = file_configuration_value - if configuration_key in ( + if file_configuration_key in ( processed_schema["positional_attributes"].keys() ): positional_arguments.append(object_) else: - optional_arguments[configuration_key] = object_ + optional_arguments[file_configuration_key] = object_ return path_function["function"]( *positional_arguments, **optional_arguments ) result = create_object( - configuration[object_name], + file_configuration[object_name], processed_schema[object_name], path_function[object_name], processed_schema, @@ -375,22 +497,22 @@ def create_object( def substitute_environment_variables( - configuration: dict, processed_schema: dict + file_configuration: dict, processed_schema: dict ) -> dict: def traverse( - configuration: dict, + file_configuration: dict, processed_schema: dict, original_processed_schema: dict, ): - for configuration_key, configuration_value in configuration.items(): + for file_configuration_key, file_configuration_value in file_configuration.items(): - if configuration_key not in processed_schema.keys(): + if file_configuration_key not in processed_schema.keys(): continue - if isinstance(configuration_value, dict): + if isinstance(file_configuration_value, dict): - recursive_paths = processed_schema[configuration_key][ + recursive_paths = processed_schema[file_configuration_key][ "recursive_path" ] @@ -402,32 +524,32 @@ def traverse( children = children[recursive_path]["children"] else: - children = processed_schema[configuration_key]["children"] + children = processed_schema[file_configuration_key]["children"] traverse( - configuration_value, children, original_processed_schema + file_configuration_value, children, original_processed_schema ) - elif isinstance(configuration_value, list): + elif isinstance(file_configuration_value, list): - for element in configuration_value: + for element in file_configuration_value: if isinstance(element, dict): traverse( element, - processed_schema[configuration_key]["children"], + processed_schema[file_configuration_key]["children"], original_processed_schema, ) - elif isinstance(configuration_value, str): + elif isinstance(file_configuration_value, str): - match = _environment_variable_regex.match(configuration_value) + match = _environment_variable_regex.match(file_configuration_value) if match is not None: - configuration[configuration_key] = __builtins__[ - processed_schema[configuration_key] + file_configuration[file_configuration_key] = __builtins__[ + processed_schema[file_configuration_key] ](environ.get(match.group(1))) - traverse(configuration, processed_schema, processed_schema) + traverse(file_configuration, processed_schema, processed_schema) - return configuration + return file_configuration diff --git a/_configuration/src/opentelemetry/configuration/_internal/path_function.py b/_file_configuration/src/opentelemetry/file_configuration/_internal/path_function.py similarity index 100% rename from _configuration/src/opentelemetry/configuration/_internal/path_function.py rename to _file_configuration/src/opentelemetry/file_configuration/_internal/path_function.py diff --git 
a/_configuration/src/opentelemetry/configuration/_internal/templates/template.jinja2 b/_file_configuration/src/opentelemetry/file_configuration/_internal/templates/template.jinja2 similarity index 100% rename from _configuration/src/opentelemetry/configuration/_internal/templates/template.jinja2 rename to _file_configuration/src/opentelemetry/file_configuration/_internal/templates/template.jinja2 diff --git a/_configuration/src/opentelemetry/configuration/py.typed b/_file_configuration/src/opentelemetry/file_configuration/py.typed similarity index 100% rename from _configuration/src/opentelemetry/configuration/py.typed rename to _file_configuration/src/opentelemetry/file_configuration/py.typed diff --git a/_configuration/src/opentelemetry/configuration/version.py b/_file_configuration/src/opentelemetry/file_configuration/version.py similarity index 100% rename from _configuration/src/opentelemetry/configuration/version.py rename to _file_configuration/src/opentelemetry/file_configuration/version.py diff --git a/_configuration/tests/__init__.py b/_file_configuration/tests/__init__.py similarity index 100% rename from _configuration/tests/__init__.py rename to _file_configuration/tests/__init__.py diff --git a/_configuration/tests/data/configuration/configuration_0.yaml b/_file_configuration/tests/data/file_configuration/file_configuration_0.yaml similarity index 98% rename from _configuration/tests/data/configuration/configuration_0.yaml rename to _file_configuration/tests/data/file_configuration/file_configuration_0.yaml index 11ea8830778..c1465a87a86 100644 --- a/_configuration/tests/data/configuration/configuration_0.yaml +++ b/_file_configuration/tests/data/file_configuration/file_configuration_0.yaml @@ -1,6 +1,6 @@ # kitchen-sink.yaml demonstrates all configurable surface area, including explanatory comments. # -# It DOES NOT represent expected real world configuration, as it makes strange configuration +# It DOES NOT represent expected real world file configuration, as it makes strange file configuration # choices in an effort to exercise the full surface area. # # Configuration values are set to their defaults when default values are defined. @@ -181,7 +181,7 @@ meter_provider: exporter: # Configure exporter to be console. console: {} - # Configure views. Each view has a selector which determines the instrument(s) it applies to, and a configuration for the resulting stream(s). + # Configure views. Each view has a selector which determines the instrument(s) it applies to, and a file configuration for the resulting stream(s). views: # Configure a view. - selector: diff --git a/_configuration/tests/data/configuration/configuration_1.yaml b/_file_configuration/tests/data/file_configuration/file_configuration_1.yaml similarity index 98% rename from _configuration/tests/data/configuration/configuration_1.yaml rename to _file_configuration/tests/data/file_configuration/file_configuration_1.yaml index 83f4d974d7d..e4e4d3658f6 100644 --- a/_configuration/tests/data/configuration/configuration_1.yaml +++ b/_file_configuration/tests/data/file_configuration/file_configuration_1.yaml @@ -1,6 +1,6 @@ # kitchen-sink.yaml demonstrates all configurable surface area, including explanatory comments. # -# It DOES NOT represent expected real world configuration, as it makes strange configuration +# It DOES NOT represent expected real world file configuration, as it makes strange file configuration # choices in an effort to exercise the full surface area. 
# # Configuration values are set to their defaults when default values are defined. @@ -181,7 +181,7 @@ meter_provider: exporter: # Configure exporter to be console. console: {} - # Configure views. Each view has a selector which determines the instrument(s) it applies to, and a configuration for the resulting stream(s). + # Configure views. Each view has a selector which determines the instrument(s) it applies to, and a file configuration for the resulting stream(s). views: # Configure a view. - selector: diff --git a/_configuration/tests/data/schema/common.json b/_file_configuration/tests/data/schema/common.json similarity index 100% rename from _configuration/tests/data/schema/common.json rename to _file_configuration/tests/data/schema/common.json diff --git a/_configuration/tests/data/schema/logger_provider.json b/_file_configuration/tests/data/schema/logger_provider.json similarity index 100% rename from _configuration/tests/data/schema/logger_provider.json rename to _file_configuration/tests/data/schema/logger_provider.json diff --git a/_configuration/tests/data/schema/meter_provider.json b/_file_configuration/tests/data/schema/meter_provider.json similarity index 100% rename from _configuration/tests/data/schema/meter_provider.json rename to _file_configuration/tests/data/schema/meter_provider.json diff --git a/_configuration/tests/data/schema/opentelemetry_configuration.json b/_file_configuration/tests/data/schema/opentelemetry_file_configuration.json similarity index 89% rename from _configuration/tests/data/schema/opentelemetry_configuration.json rename to _file_configuration/tests/data/schema/opentelemetry_file_configuration.json index d606299373f..1013705252c 100644 --- a/_configuration/tests/data/schema/opentelemetry_configuration.json +++ b/_file_configuration/tests/data/schema/opentelemetry_file_configuration.json @@ -1,7 +1,7 @@ { - "$id": "https://opentelemetry.io/otelconfig/opentelemetry_configuration.json", + "$id": "https://opentelemetry.io/otelconfig/opentelemetry_file_configuration.json", "$schema": "https://json-schema.org/draft/2020-12/schema", - "title": "OpenTelemetryConfiguration", + "title": "OpenTelemetryFileConfiguration", "type": "object", "additionalProperties": true, "properties": { diff --git a/_configuration/tests/data/schema/propagator.json b/_file_configuration/tests/data/schema/propagator.json similarity index 100% rename from _configuration/tests/data/schema/propagator.json rename to _file_configuration/tests/data/schema/propagator.json diff --git a/_configuration/tests/data/schema/resource.json b/_file_configuration/tests/data/schema/resource.json similarity index 100% rename from _configuration/tests/data/schema/resource.json rename to _file_configuration/tests/data/schema/resource.json diff --git a/_configuration/tests/data/schema/tracer_provider.json b/_file_configuration/tests/data/schema/tracer_provider.json similarity index 100% rename from _configuration/tests/data/schema/tracer_provider.json rename to _file_configuration/tests/data/schema/tracer_provider.json diff --git a/_configuration/tests/test_configuration.py b/_file_configuration/tests/test_file_configuration.py similarity index 67% rename from _configuration/tests/test_configuration.py rename to _file_configuration/tests/test_file_configuration.py index bb1244e2287..c9a9f5e74eb 100644 --- a/_configuration/tests/test_configuration.py +++ b/_file_configuration/tests/test_file_configuration.py @@ -21,16 +21,16 @@ from jsonschema.validators import Draft202012Validator from pytest import 
fail -from opentelemetry.configuration import ( +from opentelemetry.file_configuration import ( create_object, - load_configuration, + load_file_configuration, process_schema, render_schema, resolve_schema, substitute_environment_variables, - validate_configuration, + validate_file_configuration, ) -from opentelemetry.configuration._internal.path_function import set_resource +from opentelemetry.file_configuration._internal.path_function import set_resource set_trace @@ -39,25 +39,25 @@ def test_create_object(): - configuration = load_configuration( - data_path.joinpath("configuration").joinpath("configuration_0.yaml") + file_configuration = load_file_configuration( + data_path.joinpath("file_configuration").joinpath("file_configuration_0.yaml") ) schema_path = data_path.joinpath("schema").joinpath( - "opentelemetry_configuration.json" + "opentelemetry_file_configuration.json" ) try: - validate_configuration(schema_path, configuration) + validate_file_configuration(schema_path, file_configuration) except Exception as error: fail(f"Unexpected exception raised: {error}") processed_schema = process_schema(resolve_schema(schema_path)) - set_resource(create_object(configuration, processed_schema, "resource")) + set_resource(create_object(file_configuration, processed_schema, "resource")) tracer_provider = create_object( - configuration, processed_schema, "tracer_provider" + file_configuration, processed_schema, "tracer_provider" ) assert ( @@ -101,26 +101,26 @@ def test_create_object(): @patch.dict(environ, {"OTEL_BLRB_EXPORT_TIMEOUT": "943"}, clear=True) def test_substitute_environment_variables(): - configuration = load_configuration( - data_path.joinpath("configuration").joinpath("configuration_1.yaml") + file_configuration = load_file_configuration( + data_path.joinpath("file_configuration").joinpath("file_configuration_1.yaml") ) schema_path = data_path.joinpath("schema").joinpath( - "opentelemetry_configuration.json" + "opentelemetry_file_configuration.json" ) processed_schema = process_schema(resolve_schema(schema_path)) - configuration = substitute_environment_variables( - configuration, processed_schema + file_configuration = substitute_environment_variables( + file_configuration, processed_schema ) assert ( - configuration["logger_provider"]["processors"][0]["batch"][ + file_configuration["logger_provider"]["processors"][0]["batch"][ "export_timeout" ] ) == 943 try: - validate_configuration(schema_path, configuration) + validate_file_configuration(schema_path, file_configuration) except Exception as error: fail(f"Unexpected exception raised: {error}") @@ -132,7 +132,7 @@ def test_render(tmpdir): process_schema( resolve_schema( data_path.joinpath("schema").joinpath( - "opentelemetry_configuration.json" + "opentelemetry_file_configuration.json" ) ) ), @@ -145,7 +145,7 @@ def test_render(tmpdir): def test_subschemas(): schema_path = data_path.joinpath("schema").joinpath( - "opentelemetry_configuration.json" + "opentelemetry_file_configuration.json" ) resolved_schema = resolve_schema(schema_path) resolved_schema @@ -154,37 +154,37 @@ def test_subschemas(): # dictionary the schema components of each plugin component sub schema then # use the resulting schema dictionary to do the validation. 
- configuration = load_configuration( - data_path.joinpath("configuration").joinpath("configuration_0.yaml") + file_configuration = load_file_configuration( + data_path.joinpath("file_configuration").joinpath("file_configuration_0.yaml") ) - # FIXME do the same for configuration components + # FIXME do the same for file_configuration components - Draft202012Validator(resolved_schema).validate(configuration) + Draft202012Validator(resolved_schema).validate(file_configuration) def test_dry_run(): - configuration = load_configuration( - data_path.joinpath("configuration").joinpath("configuration_0.yaml") + file_configuration = load_file_configuration( + data_path.joinpath("file_configuration").joinpath("file_configuration_0.yaml") ) schema_path = data_path.joinpath("schema").joinpath( - "opentelemetry_configuration.json" + "opentelemetry_file_configuration.json" ) try: - validate_configuration(schema_path, configuration) + validate_file_configuration(schema_path, file_configuration) except Exception as error: fail(f"Unexpected exception raised: {error}") processed_schema = process_schema(resolve_schema(schema_path)) - set_resource(create_object(configuration, processed_schema, "resource")) + set_resource(create_object(file_configuration, processed_schema, "resource")) print() print( create_object( - configuration, processed_schema, "tracer_provider", dry_run=True + file_configuration, processed_schema, "tracer_provider", dry_run=True ) ) From caa49c97f4edc2185dfd0888610cd09625f1dd31 Mon Sep 17 00:00:00 2001 From: Diego Hurtado Date: Wed, 24 Jan 2024 01:06:41 -0600 Subject: [PATCH 11/18] Add schema validation for plugin --- _file_configuration/pyproject.toml | 4 +- .../file_configuration/__init__.py | 2 + .../file_configuration/_internal/__init__.py | 31 ++-- .../file_configuration_2.yaml | 163 ++++++++++++++++++ .../tests/test_file_configuration.py | 32 ++++ 5 files changed, 211 insertions(+), 21 deletions(-) create mode 100644 _file_configuration/tests/data/file_configuration/file_configuration_2.yaml diff --git a/_file_configuration/pyproject.toml b/_file_configuration/pyproject.toml index b49fa1f455a..2d8839e48f0 100644 --- a/_file_configuration/pyproject.toml +++ b/_file_configuration/pyproject.toml @@ -41,8 +41,8 @@ dependencies = [ [project.urls] Homepage = "https://github.com/open-telemetry/opentelemetry-python/tree/main/opentelemetry-file-configuration" -[project.entry-points.opentelemetry_id_generator] -sometimes_monday_on_sampler = "opentelemetry.sdk.trace.id_generator:RandomIdGenerator" +[project.entry-points.opentelemetry_file_configuration] +sometimes_mondays_on_sampler = "opentelemetry.file_configuration:SometimesMondaysOnSamplerPlugin" [tool.hatch.version] path = "src/opentelemetry/file_configuration/version.py" diff --git a/_file_configuration/src/opentelemetry/file_configuration/__init__.py b/_file_configuration/src/opentelemetry/file_configuration/__init__.py index f714661ea0b..369c13db0a7 100644 --- a/_file_configuration/src/opentelemetry/file_configuration/__init__.py +++ b/_file_configuration/src/opentelemetry/file_configuration/__init__.py @@ -26,6 +26,7 @@ resolve_schema, substitute_environment_variables, validate_file_configuration, + SometimesMondaysOnSamplerPlugin ) __all__ = [ @@ -36,4 +37,5 @@ "create_object", "load_file_configuration", "substitute_environment_variables", + "SometimesMondayOnSamplerPlugin" ] diff --git a/_file_configuration/src/opentelemetry/file_configuration/_internal/__init__.py 
b/_file_configuration/src/opentelemetry/file_configuration/_internal/__init__.py index 60d0c13805e..d9eebe0b927 100644 --- a/_file_configuration/src/opentelemetry/file_configuration/_internal/__init__.py +++ b/_file_configuration/src/opentelemetry/file_configuration/_internal/__init__.py @@ -13,9 +13,7 @@ # limitations under the License. from collections import OrderedDict -from json import loads as json_loads from os import environ -from os.path import exists from pathlib import Path from re import compile as re_compile from abc import ABC, abstractmethod @@ -34,7 +32,6 @@ from jsonref import JsonRef from jsonref import loads as jsonref_loads from jsonschema.validators import Draft202012Validator -from referencing import Registry, Resource from yaml import safe_load from black import format_str, Mode from opentelemetry.util._importlib_metadata import entry_points @@ -81,7 +78,7 @@ def function(*args, **kwargs) -> object: """ -class SometimesMondayOnSampler(Sampler): +class SometimesMondaysOnSampler(Sampler): """ A sampler that samples only on Mondays, but sometimes. """ @@ -114,7 +111,7 @@ def schema(self) -> dict: Returns the plugin schema. """ return { - "sometimes_monday_on": { + "sometimes_mondays_on": { "type": "object", "additionalProperties": False, "properties": { @@ -139,10 +136,11 @@ def schema_path(self) -> list: ] @staticmethod - def function(probability: float) -> object: + def function(probability: float) -> SometimesMondaysOnSampler: """ The function that will instantiate the plugin object. """ + return SometimesMondaysOnSampler(probability) def resolve_schema(json_file_path) -> dict: @@ -173,6 +171,9 @@ def resolve_schema(json_file_path) -> dict: ",".join(schema_path) ) break + else: + for key, value in plugin.schema.items(): + sub_dictionary[key] = value return dictionary @@ -184,19 +185,11 @@ def load_file_configuration(file_configuration_file_path: str) -> dict: return safe_load(file_configuration_file) -def validate_file_configuration(schema_path: Path, file_configuration: dict): - - schema_path = str(schema_path) - - if not exists(schema_path): - raise Exception(f"{schema_path} does not exist") - - def retrieve_from_path(path: str): - return Resource.from_contents(json_loads(Path(path).read_text())) - - Draft202012Validator( - {"$ref": schema_path}, registry=Registry(retrieve=retrieve_from_path) - ).validate(file_configuration) +def validate_file_configuration( + schema: dict, + file_configuration: dict +) -> None: + Draft202012Validator(schema).validate(file_configuration) def process_schema(schema: dict) -> dict: diff --git a/_file_configuration/tests/data/file_configuration/file_configuration_2.yaml b/_file_configuration/tests/data/file_configuration/file_configuration_2.yaml new file mode 100644 index 00000000000..a448bef3944 --- /dev/null +++ b/_file_configuration/tests/data/file_configuration/file_configuration_2.yaml @@ -0,0 +1,163 @@ +# kitchen-sink.yaml demonstrates all configurable surface area, including explanatory comments. +# +# It DOES NOT represent expected real world file configuration, as it makes strange file configuration +# choices in an effort to exercise the full surface area. +# +# Configuration values are set to their defaults when default values are defined. + +# The file format version +file_format: "0.1" + +# Configure if the SDK is disabled or not. This is not required to be provided +# to ensure the SDK isn't disabled, the default value when this is not provided +# is for the SDK to be enabled. 
+# +# Environment variable: OTEL_SDK_DISABLED +disabled: false + +# Configure general attribute limits. See also tracer_provider.limits, logger_provider.limits. +attribute_limits: + # Configure max attribute value size. + # + # Environment variable: OTEL_ATTRIBUTE_VALUE_LENGTH_LIMIT + attribute_value_length_limit: 4096 + # Configure max attribute count. + # + # Environment variable: OTEL_ATTRIBUTE_COUNT_LIMIT + attribute_count_limit: 128 + +# Configure text map context propagators. +# +# Environment variable: OTEL_PROPAGATORS +propagator: + composite: [tracecontext, baggage, b3, b3multi, jaeger, xray, ottrace] + +# Configure tracer provider. +tracer_provider: + # Configure span processors. + processors: + # Configure a batch span processor. + - batch: + # Configure delay interval (in milliseconds) between two consecutive exports. + # + # Environment variable: OTEL_BSP_SCHEDULE_DELAY + schedule_delay: 5000 + # Configure maximum allowed time (in milliseconds) to export data. + # + # Environment variable: OTEL_BSP_EXPORT_TIMEOUT + export_timeout: 30000 + # Configure maximum queue size. + # + # Environment variable: OTEL_BSP_MAX_QUEUE_SIZE + max_queue_size: 2048 + # Configure maximum batch size. + # + # Environment variable: OTEL_BSP_MAX_EXPORT_BATCH_SIZE + max_export_batch_size: 512 + # Configure exporter. + # + # Environment variable: OTEL_TRACES_EXPORTER + exporter: + # Configure exporter to be OTLP. + otlp: + # Configure protocol. + # + # Environment variable: OTEL_EXPORTER_OTLP_PROTOCOL, OTEL_EXPORTER_OTLP_TRACES_PROTOCOL + protocol: http/protobuf + # Configure endpoint. + # + # Environment variable: OTEL_EXPORTER_OTLP_ENDPOINT, OTEL_EXPORTER_OTLP_TRACES_ENDPOINT + endpoint: http://localhost:4318 + # Configure certificate. + # + # Environment variable: OTEL_EXPORTER_OTLP_CERTIFICATE, OTEL_EXPORTER_OTLP_TRACES_CERTIFICATE + certificate: /app/cert.pem + # Configure mTLS private client key. + # + # Environment variable: OTEL_EXPORTER_OTLP_CLIENT_KEY, OTEL_EXPORTER_OTLP_TRACES_CLIENT_KEY + client_key: /app/cert.pem + # Configure mTLS client certificate. + # + # Environment variable: OTEL_EXPORTER_OTLP_CLIENT_CERTIFICATE, OTEL_EXPORTER_OTLP_TRACES_CLIENT_CERTIFICATE + client_certificate: /app/cert.pem + # Configure headers. + # + # Environment variable: OTEL_EXPORTER_OTLP_HEADERS, OTEL_EXPORTER_OTLP_TRACES_HEADERS + headers: + api-key: !!str 1234 + # Configure compression. + # + # Environment variable: OTEL_EXPORTER_OTLP_COMPRESSION, OTEL_EXPORTER_OTLP_TRACES_COMPRESSION + compression: gzip + # Configure max time (in milliseconds) to wait for each export. + # + # Environment variable: OTEL_EXPORTER_OTLP_TIMEOUT, OTEL_EXPORTER_OTLP_TRACES_TIMEOUT + timeout: 10000 + # Configure a batch span processor. + - batch: + # Configure exporter. + # + # Environment variable: OTEL_TRACES_EXPORTER + exporter: + # Configure exporter to be zipkin. + zipkin: + # Configure endpoint. + # + # Environment variable: OTEL_EXPORTER_ZIPKIN_ENDPOINT + endpoint: http://localhost:9411/api/v2/spans + # Configure max time (in milliseconds) to wait for each export. + # + # Environment variable: OTEL_EXPORTER_ZIPKIN_TIMEOUT + timeout: 10000 + # Configure a simple span processor. + - simple: + # Configure exporter. + exporter: + # Configure exporter to be console. + console: {} + # Configure span limits. See also attribute_limits. + limits: + # Configure max span attribute value size. Overrides attribute_limits.attribute_value_length_limit. 
+ # + # Environment variable: OTEL_SPAN_ATTRIBUTE_VALUE_LENGTH_LIMIT + attribute_value_length_limit: 4096 + # Configure max span attribute count. Overrides attribute_limits.attribute_count_limit. + # + # Environment variable: OTEL_SPAN_ATTRIBUTE_COUNT_LIMIT + attribute_count_limit: 128 + # Configure max span event count. + # + # Environment variable: OTEL_SPAN_EVENT_COUNT_LIMIT + event_count_limit: 128 + # Configure max span link count. + # + # Environment variable: OTEL_SPAN_LINK_COUNT_LIMIT + link_count_limit: 128 + # Configure max attributes per span event. + # + # Environment variable: OTEL_EVENT_ATTRIBUTE_COUNT_LIMIT + event_attribute_count_limit: 128 + # Configure max attributes per span link. + # + # Environment variable: OTEL_LINK_ATTRIBUTE_COUNT_LIMIT + link_attribute_count_limit: 128 + # Configure the sampler. + sampler: + # Configure sampler to be parent_based. Known values include: always_off, always_on, jaeger_remote, parent_based, trace_id_ratio_based. + # + # Environment variable: OTEL_TRACES_SAMPLER=parentbased_* + sometimes_mondays_on: + probability: 0.8 + +# Configure resource for all signals. +resource: + # Configure resource attributes. + # + # Environment variable: OTEL_RESOURCE_ATTRIBUTES + attributes: + # Configure `service.name` resource attribute + # + # Environment variable: OTEL_SERVICE_NAME + service.name: !!str "unknown_service" + # Configure the resource schema URL. + schema_url: https://opentelemetry.io/schemas/1.16.0 diff --git a/_file_configuration/tests/test_file_configuration.py b/_file_configuration/tests/test_file_configuration.py index c9a9f5e74eb..1cc13ef8264 100644 --- a/_file_configuration/tests/test_file_configuration.py +++ b/_file_configuration/tests/test_file_configuration.py @@ -188,3 +188,35 @@ def test_dry_run(): file_configuration, processed_schema, "tracer_provider", dry_run=True ) ) + + +def test_plugin(): + + file_configuration = load_file_configuration( + data_path.joinpath("file_configuration"). + joinpath("file_configuration_2.yaml") + ) + + schema_path = data_path.joinpath("schema").joinpath( + "opentelemetry_file_configuration.json" + ) + + resolved_schema = resolve_schema(schema_path) + + try: + validate_file_configuration(resolved_schema, file_configuration) + except Exception as error: + fail(f"Unexpected exception raised: {error}") + + assert ( + resolved_schema + ["properties"] + ["tracer_provider"] + ["properties"] + ["sampler"] + ["properties"] + ["sometimes_mondays_on"] + ["properties"] + ["probability"] + ["type"] + ) == "number" From 9a366fe7c1a5d9e5d60081f02af309f9137756f5 Mon Sep 17 00:00:00 2001 From: Diego Hurtado Date: Wed, 24 Jan 2024 01:17:02 -0600 Subject: [PATCH 12/18] Fix test cases --- .../tests/test_file_configuration.py | 58 ++++++++----------- 1 file changed, 24 insertions(+), 34 deletions(-) diff --git a/_file_configuration/tests/test_file_configuration.py b/_file_configuration/tests/test_file_configuration.py index 1cc13ef8264..75842cad60d 100644 --- a/_file_configuration/tests/test_file_configuration.py +++ b/_file_configuration/tests/test_file_configuration.py @@ -18,7 +18,6 @@ from unittest.mock import patch from ipdb import set_trace -from jsonschema.validators import Draft202012Validator from pytest import fail from opentelemetry.file_configuration import ( @@ -40,19 +39,22 @@ def test_create_object(): file_configuration = load_file_configuration( - data_path.joinpath("file_configuration").joinpath("file_configuration_0.yaml") + data_path.joinpath("file_configuration"). 
+ joinpath("file_configuration_0.yaml") ) schema_path = data_path.joinpath("schema").joinpath( "opentelemetry_file_configuration.json" ) + resolved_schema = resolve_schema(schema_path) + try: - validate_file_configuration(schema_path, file_configuration) + validate_file_configuration(resolved_schema, file_configuration) except Exception as error: fail(f"Unexpected exception raised: {error}") - processed_schema = process_schema(resolve_schema(schema_path)) + processed_schema = process_schema(resolved_schema) set_resource(create_object(file_configuration, processed_schema, "resource")) @@ -101,28 +103,34 @@ def test_create_object(): @patch.dict(environ, {"OTEL_BLRB_EXPORT_TIMEOUT": "943"}, clear=True) def test_substitute_environment_variables(): + file_configuration = load_file_configuration( - data_path.joinpath("file_configuration").joinpath("file_configuration_1.yaml") + data_path.joinpath("file_configuration"). + joinpath("file_configuration_1.yaml") ) schema_path = data_path.joinpath("schema").joinpath( "opentelemetry_file_configuration.json" ) - processed_schema = process_schema(resolve_schema(schema_path)) + resolved_schema = resolve_schema(schema_path) + + processed_schema = process_schema(resolved_schema) + file_configuration = substitute_environment_variables( file_configuration, processed_schema ) + try: + validate_file_configuration(resolved_schema, file_configuration) + except Exception as error: + fail(f"Unexpected exception raised: {error}") + assert ( file_configuration["logger_provider"]["processors"][0]["batch"][ "export_timeout" ] ) == 943 - try: - validate_file_configuration(schema_path, file_configuration) - except Exception as error: - fail(f"Unexpected exception raised: {error}") def test_render(tmpdir): @@ -142,43 +150,25 @@ def test_render(tmpdir): fail(f"Unexpected exception raised: {error}") -def test_subschemas(): - - schema_path = data_path.joinpath("schema").joinpath( - "opentelemetry_file_configuration.json" - ) - resolved_schema = resolve_schema(schema_path) - resolved_schema - - # FIXME once the schema has been resolved, we get a dictionary. Add to this - # dictionary the schema components of each plugin component sub schema then - # use the resulting schema dictionary to do the validation. - - file_configuration = load_file_configuration( - data_path.joinpath("file_configuration").joinpath("file_configuration_0.yaml") - ) - - # FIXME do the same for file_configuration components - - Draft202012Validator(resolved_schema).validate(file_configuration) - - def test_dry_run(): file_configuration = load_file_configuration( - data_path.joinpath("file_configuration").joinpath("file_configuration_0.yaml") + data_path.joinpath("file_configuration"). 
+ joinpath("file_configuration_0.yaml") ) schema_path = data_path.joinpath("schema").joinpath( "opentelemetry_file_configuration.json" ) + resolved_schema = resolve_schema(schema_path) + try: - validate_file_configuration(schema_path, file_configuration) + validate_file_configuration(resolved_schema, file_configuration) except Exception as error: fail(f"Unexpected exception raised: {error}") - processed_schema = process_schema(resolve_schema(schema_path)) + processed_schema = process_schema(resolved_schema) set_resource(create_object(file_configuration, processed_schema, "resource")) From bd20e969e923cc699e750fd1896a43fe4d17504a Mon Sep 17 00:00:00 2001 From: Diego Hurtado Date: Wed, 24 Jan 2024 19:35:51 -0600 Subject: [PATCH 13/18] Add setting of path function for plugins --- .../file_configuration/_internal/__init__.py | 67 +++++++++++++++---- .../tests/test_file_configuration.py | 15 ++++- 2 files changed, 68 insertions(+), 14 deletions(-) diff --git a/_file_configuration/src/opentelemetry/file_configuration/_internal/__init__.py b/_file_configuration/src/opentelemetry/file_configuration/_internal/__init__.py index d9eebe0b927..ef5ea077bef 100644 --- a/_file_configuration/src/opentelemetry/file_configuration/_internal/__init__.py +++ b/_file_configuration/src/opentelemetry/file_configuration/_internal/__init__.py @@ -53,6 +53,17 @@ "number": float, } +_path_function = path_function + + +def get_path_function() -> dict: + return _path_function + + +def set_path_function(path_function: dict) -> None: + global _path_function + _path_function = path_function + class FileConfigurationPlugin(ABC): @@ -77,6 +88,13 @@ def function(*args, **kwargs) -> object: The function that will instantiate the plugin object. """ + @property + def recursive_path(self) -> list: + """ + The recursive path for the plugin object if any. + """ + return [] + class SometimesMondaysOnSampler(Sampler): """ @@ -106,12 +124,13 @@ def get_description(self) -> str: class SometimesMondaysOnSamplerPlugin(FileConfigurationPlugin): @property - def schema(self) -> dict: + def schema(self) -> tuple: """ Returns the plugin schema. 
""" - return { - "sometimes_mondays_on": { + return ( + "sometimes_mondays_on", + { "type": "object", "additionalProperties": False, "properties": { @@ -120,7 +139,7 @@ def schema(self) -> dict: }, } } - } + ) @property def schema_path(self) -> list: @@ -148,7 +167,7 @@ def resolve_schema(json_file_path) -> dict: root_path = json_file_path.absolute() with open(json_file_path, "r") as json_file: - dictionary = jsonref_loads( + resolved_schema = jsonref_loads( json_file.read(), base_uri=root_path.as_uri() ) @@ -156,26 +175,46 @@ def resolve_schema(json_file_path) -> dict: plugin = entry_point.load()() - sub_dictionary = dictionary + sub_resolved_schema = resolved_schema schema_path = [] for schema_path_part in plugin.schema_path: schema_path.append(schema_path_part) try: - sub_dictionary = sub_dictionary[schema_path_part] + sub_resolved_schema = sub_resolved_schema[schema_path_part] except KeyError: _logger.warning( - "Unable to add plugin %s to schema: wrong path %s", + "Unable to add plugin %s to schema: wrong schema path %s", entry_point.name, ",".join(schema_path) ) break else: - for key, value in plugin.schema.items(): - sub_dictionary[key] = value + sub_resolved_schema[plugin.schema[0]] = plugin.schema[1] - return dictionary + original_path_function = get_path_function() + sub_path_function = original_path_function + + for schema_path_part in plugin.schema_path: + + if schema_path_part == "properties": + continue + + sub_path_function = ( + sub_path_function[schema_path_part]["children"] + ) + + sub_path_function[plugin.schema[0]] = {} + sub_path_function[plugin.schema[0]]["function"] = plugin.function + sub_path_function[plugin.schema[0]]["children"] = {} + sub_path_function[plugin.schema[0]]["recursive_path"] = ( + plugin.recursive_path + ) + + set_path_function(original_path_function) + + return resolved_schema def load_file_configuration(file_configuration_file_path: str) -> dict: @@ -231,7 +270,7 @@ def traverse( for positional_attribute in positional_attributes: - result_positional_attributes[positional_attribute] = str( + result_positional_attributes[positional_attribute] = ( _type_type[ schema_properties[positional_attribute]["type"] ].__name__ @@ -239,7 +278,7 @@ def traverse( for optional_attribute in optional_attributes: - result_optional_attributes[optional_attribute] = str( + result_optional_attributes[optional_attribute] = ( _type_type[ schema_properties[optional_attribute]["type"] ].__name__ @@ -477,6 +516,8 @@ def create_object( *positional_arguments, **optional_arguments ) + path_function = get_path_function() + result = create_object( file_configuration[object_name], processed_schema[object_name], diff --git a/_file_configuration/tests/test_file_configuration.py b/_file_configuration/tests/test_file_configuration.py index 75842cad60d..250d8c2a017 100644 --- a/_file_configuration/tests/test_file_configuration.py +++ b/_file_configuration/tests/test_file_configuration.py @@ -29,7 +29,9 @@ substitute_environment_variables, validate_file_configuration, ) -from opentelemetry.file_configuration._internal.path_function import set_resource +from opentelemetry.file_configuration._internal.path_function import ( + set_resource, +) set_trace @@ -210,3 +212,14 @@ def test_plugin(): ["probability"] ["type"] ) == "number" + + processed_schema = process_schema(resolved_schema) + + set_resource( + create_object(file_configuration, processed_schema, "resource") + ) + + tracer_provider = create_object( + file_configuration, processed_schema, "tracer_provider" + ) + assert 
tracer_provider.sampler.sometimes_mondays_on._probability == 0.8 From 3dba5fe1905f1aca384db06c5b35db89177a8711 Mon Sep 17 00:00:00 2001 From: Diego Hurtado Date: Wed, 24 Jan 2024 19:50:30 -0600 Subject: [PATCH 14/18] Fix lint --- .codespellrc | 3 +- .flake8 | 1 + .isort.cfg | 2 +- .pylintrc | 2 +- .../file_configuration/__init__.py | 4 +- .../file_configuration/_internal/__init__.py | 106 ++++++++++-------- .../_internal/path_function.py | 39 ++++--- .../tests/test_file_configuration.py | 48 ++++---- .../src/opentelemetry/sdk/trace/sampling.py | 5 +- pyproject.toml | 1 + 10 files changed, 114 insertions(+), 97 deletions(-) diff --git a/.codespellrc b/.codespellrc index b82bff46711..bdeaf7bb27a 100644 --- a/.codespellrc +++ b/.codespellrc @@ -1,4 +1,5 @@ [codespell] # skipping auto generated folders -skip = ./.tox,./.mypy_cache,./docs/_build,./target,*/LICENSE,./venv,.git,./opentelemetry-semantic-conventions,*-requirements*.txt +skip = ./.tox,./.mypy_cache,./docs/_build,./target,*/LICENSE,./venv,.git,./opentelemetry-semantic-conventions,*-requirements*.txt,./_file_configuration/.nox + ignore-words-list = ans,ue,ot,hist,ro diff --git a/.flake8 b/.flake8 index 8e17b0ab21a..c9912b62768 100644 --- a/.flake8 +++ b/.flake8 @@ -22,6 +22,7 @@ exclude = .hg .svn .tox + .nox CVS .venv*/ venv*/ diff --git a/.isort.cfg b/.isort.cfg index ab1ab74ee59..2bf4f988989 100644 --- a/.isort.cfg +++ b/.isort.cfg @@ -14,6 +14,6 @@ profile=black ; docs: https://github.com/timothycrosley/isort#multi-line-output-modes multi_line_output=3 skip=target -skip_glob=**/gen/*,.venv*/*,venv*/*,**/proto/*,.tox/* +skip_glob=**/gen/*,.venv*/*,venv*/*,**/proto/*,.tox/*,_file_configuration/.nox/* known_first_party=opentelemetry,opentelemetry_example_app known_third_party=psutil,pytest,redis,redis_opentracing diff --git a/.pylintrc b/.pylintrc index de94393031d..0eaaad66270 100644 --- a/.pylintrc +++ b/.pylintrc @@ -7,7 +7,7 @@ extension-pkg-whitelist= # Add list of files or directories to be excluded. They should be base names, not # paths. -ignore=CVS,gen,proto +ignore=CVS,gen,proto,_file_configuration # Add files or directories matching the regex patterns to be excluded. The # regex matches against base names, not paths. diff --git a/_file_configuration/src/opentelemetry/file_configuration/__init__.py b/_file_configuration/src/opentelemetry/file_configuration/__init__.py index 369c13db0a7..0d2b17adb8c 100644 --- a/_file_configuration/src/opentelemetry/file_configuration/__init__.py +++ b/_file_configuration/src/opentelemetry/file_configuration/__init__.py @@ -19,6 +19,7 @@ from opentelemetry.file_configuration._internal import ( + SometimesMondaysOnSamplerPlugin, create_object, load_file_configuration, process_schema, @@ -26,7 +27,6 @@ resolve_schema, substitute_environment_variables, validate_file_configuration, - SometimesMondaysOnSamplerPlugin ) __all__ = [ @@ -37,5 +37,5 @@ "create_object", "load_file_configuration", "substitute_environment_variables", - "SometimesMondayOnSamplerPlugin" + "SometimesMondaysOnSamplerPlugin", ] diff --git a/_file_configuration/src/opentelemetry/file_configuration/_internal/__init__.py b/_file_configuration/src/opentelemetry/file_configuration/_internal/__init__.py index ef5ea077bef..8460ca72ac0 100644 --- a/_file_configuration/src/opentelemetry/file_configuration/_internal/__init__.py +++ b/_file_configuration/src/opentelemetry/file_configuration/_internal/__init__.py @@ -12,32 +12,33 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from abc import ABC, abstractmethod from collections import OrderedDict +from datetime import datetime +from logging import getLogger from os import environ from pathlib import Path -from re import compile as re_compile -from abc import ABC, abstractmethod -from logging import getLogger -from datetime import datetime from random import random - +from re import compile as re_compile from typing import Optional, Sequence -from opentelemetry.trace import Link, SpanKind -from opentelemetry.trace.span import TraceState -from opentelemetry.util.types import Attributes -from opentelemetry.context import Context +from black import Mode, format_str from ipdb import set_trace from jinja2 import Environment, FileSystemLoader from jsonref import JsonRef from jsonref import loads as jsonref_loads from jsonschema.validators import Draft202012Validator from yaml import safe_load -from black import format_str, Mode -from opentelemetry.util._importlib_metadata import entry_points -from opentelemetry.file_configuration._internal.path_function import path_function +from opentelemetry.context import Context +from opentelemetry.file_configuration._internal.path_function import ( + path_function, +) from opentelemetry.sdk.trace.sampling import Sampler, SamplingResult +from opentelemetry.trace import Link, SpanKind +from opentelemetry.trace.span import TraceState +from opentelemetry.util._importlib_metadata import entry_points +from opentelemetry.util.types import Attributes _logger = getLogger(__file__) @@ -66,7 +67,6 @@ def set_path_function(path_function: dict) -> None: class FileConfigurationPlugin(ABC): - @property @abstractmethod def schema(self) -> dict: @@ -122,7 +122,6 @@ def get_description(self) -> str: class SometimesMondaysOnSamplerPlugin(FileConfigurationPlugin): - @property def schema(self) -> tuple: """ @@ -134,11 +133,9 @@ def schema(self) -> tuple: "type": "object", "additionalProperties": False, "properties": { - "probability": { - "type": "number" - }, - } - } + "probability": {"type": "number"}, + }, + }, ) @property @@ -151,7 +148,7 @@ def schema_path(self) -> list: "tracer_provider", "properties", "sampler", - "properties" + "properties", ] @staticmethod @@ -187,7 +184,7 @@ def resolve_schema(json_file_path) -> dict: _logger.warning( "Unable to add plugin %s to schema: wrong schema path %s", entry_point.name, - ",".join(schema_path) + ",".join(schema_path), ) break else: @@ -201,16 +198,14 @@ def resolve_schema(json_file_path) -> dict: if schema_path_part == "properties": continue - sub_path_function = ( - sub_path_function[schema_path_part]["children"] - ) + sub_path_function = sub_path_function[schema_path_part]["children"] sub_path_function[plugin.schema[0]] = {} sub_path_function[plugin.schema[0]]["function"] = plugin.function sub_path_function[plugin.schema[0]]["children"] = {} - sub_path_function[plugin.schema[0]]["recursive_path"] = ( - plugin.recursive_path - ) + sub_path_function[plugin.schema[0]][ + "recursive_path" + ] = plugin.recursive_path set_path_function(original_path_function) @@ -225,8 +220,7 @@ def load_file_configuration(file_configuration_file_path: str) -> dict: def validate_file_configuration( - schema: dict, - file_configuration: dict + schema: dict, file_configuration: dict ) -> None: Draft202012Validator(schema).validate(file_configuration) @@ -270,19 +264,17 @@ def traverse( for positional_attribute in positional_attributes: - result_positional_attributes[positional_attribute] = ( - _type_type[ - schema_properties[positional_attribute]["type"] - ].__name__ - ) 
+ result_positional_attributes[ + positional_attribute + ] = _type_type[ + schema_properties[positional_attribute]["type"] + ].__name__ for optional_attribute in optional_attributes: - result_optional_attributes[optional_attribute] = ( - _type_type[ - schema_properties[optional_attribute]["type"] - ].__name__ - ) + result_optional_attributes[optional_attribute] = _type_type[ + schema_properties[optional_attribute]["type"] + ].__name__ children = {} @@ -436,7 +428,7 @@ def create_object( file_configuration: dict, processed_schema: dict, object_name: str, - dry_run=False + dry_run=False, ) -> object: def create_object( file_configuration: dict, @@ -449,7 +441,10 @@ def create_object( positional_arguments = [] optional_arguments = {} - for file_configuration_key, file_configuration_value in file_configuration.items(): + for ( + file_configuration_key, + file_configuration_value, + ) in file_configuration.items(): if isinstance(file_configuration_value, dict): @@ -467,7 +462,9 @@ def create_object( new_processed_schema = new_processed_schema[ file_configuration_key ] - new_path_function = new_path_function[file_configuration_key] + new_path_function = new_path_function[ + file_configuration_key + ] else: new_processed_schema = processed_schema["children"][ file_configuration_key @@ -493,7 +490,9 @@ def create_object( object_.append( create_object( element, - processed_schema["children"][file_configuration_key], + processed_schema["children"][ + file_configuration_key + ], path_function["children"][file_configuration_key], original_processed_schema, original_path_function, @@ -539,7 +538,10 @@ def traverse( original_processed_schema: dict, ): - for file_configuration_key, file_configuration_value in file_configuration.items(): + for ( + file_configuration_key, + file_configuration_value, + ) in file_configuration.items(): if file_configuration_key not in processed_schema.keys(): continue @@ -558,10 +560,14 @@ def traverse( children = children[recursive_path]["children"] else: - children = processed_schema[file_configuration_key]["children"] + children = processed_schema[file_configuration_key][ + "children" + ] traverse( - file_configuration_value, children, original_processed_schema + file_configuration_value, + children, + original_processed_schema, ) elif isinstance(file_configuration_value, list): @@ -570,13 +576,17 @@ def traverse( if isinstance(element, dict): traverse( element, - processed_schema[file_configuration_key]["children"], + processed_schema[file_configuration_key][ + "children" + ], original_processed_schema, ) elif isinstance(file_configuration_value, str): - match = _environment_variable_regex.match(file_configuration_value) + match = _environment_variable_regex.match( + file_configuration_value + ) if match is not None: diff --git a/_file_configuration/src/opentelemetry/file_configuration/_internal/path_function.py b/_file_configuration/src/opentelemetry/file_configuration/_internal/path_function.py index 5a08aa5dd04..cf70a787aa3 100644 --- a/_file_configuration/src/opentelemetry/file_configuration/_internal/path_function.py +++ b/_file_configuration/src/opentelemetry/file_configuration/_internal/path_function.py @@ -43,7 +43,6 @@ class MockSampler(Mock): - def __init__(self, *args, **kwargs) -> None: super().__init__(*args, **kwargs) self._args = args @@ -67,7 +66,7 @@ def set_resource(resource): def attribute_limits( attribute_count_limit: int = None, attribute_value_length_limit: int = None, - **kwargs + **kwargs, ): pass @@ -162,7 +161,7 @@ def 
meter_provider_readers_periodic_exporter( console: object = None, otlp: object = None, prometheus: object = None, - **kwargs + **kwargs, ): pass @@ -204,7 +203,7 @@ def meter_provider_readers_pull_exporter( console: object = None, otlp: object = None, prometheus: object = None, - **kwargs + **kwargs, ): pass @@ -352,7 +351,7 @@ def tracer_provider_processors_batch_exporter( console: object = None, otlp: object = None, zipkin: object = None, - **kwargs + **kwargs, ): return console or otlp or zipkin @@ -410,7 +409,7 @@ def tracer_provider_processors_simple_exporter( console: object = None, otlp: object = None, zipkin: object = None, - **kwargs + **kwargs, ): return console or otlp or zipkin @@ -483,7 +482,7 @@ def tracer_provider_sampler( jaeger_remote: object = None, parent_based: object = None, trace_id_ratio_based: object = None, - **kwargs + **kwargs, ): return MockSampler( type="Sampler", @@ -492,7 +491,7 @@ def tracer_provider_sampler( jaeger_remote=jaeger_remote, parent_based=parent_based, trace_id_ratio_based=trace_id_ratio_based, - **kwargs + **kwargs, ) @@ -521,7 +520,7 @@ def tracer_provider_sampler_jaeger_remote_initial_sampler( jaeger_remote: object = None, parent_based: object = None, trace_id_ratio_based: object = None, - **kwargs + **kwargs, ): return MockSampler( type="InitialSamplerSampler", @@ -530,7 +529,7 @@ def tracer_provider_sampler_jaeger_remote_initial_sampler( jaeger_remote=jaeger_remote, parent_based=parent_based, trace_id_ratio_based=trace_id_ratio_based, - **kwargs + **kwargs, ) @@ -557,7 +556,7 @@ def tracer_provider_sampler_parent_based_root( jaeger_remote: object = None, parent_based: object = None, trace_id_ratio_based: object = None, - **kwargs + **kwargs, ): return MockSampler( type="RootSampler", @@ -566,7 +565,7 @@ def tracer_provider_sampler_parent_based_root( jaeger_remote=jaeger_remote, parent_based=parent_based, trace_id_ratio_based=trace_id_ratio_based, - **kwargs + **kwargs, ) @@ -576,7 +575,7 @@ def tracer_provider_sampler_parent_based_remote_parent_sampled( jaeger_remote: object = None, parent_based: object = None, trace_id_ratio_based: object = None, - **kwargs + **kwargs, ): return MockSampler( type="RemoteParentSampledSampler", @@ -585,7 +584,7 @@ def tracer_provider_sampler_parent_based_remote_parent_sampled( jaeger_remote=jaeger_remote, parent_based=parent_based, trace_id_ratio_based=trace_id_ratio_based, - **kwargs + **kwargs, ) @@ -595,7 +594,7 @@ def tracer_provider_sampler_parent_based_remote_parent_not_sampled( jaeger_remote: object = None, parent_based: object = None, trace_id_ratio_based: object = None, - **kwargs + **kwargs, ): return MockSampler( type="RemoteParentNotSampledSampler", @@ -604,7 +603,7 @@ def tracer_provider_sampler_parent_based_remote_parent_not_sampled( jaeger_remote=jaeger_remote, parent_based=parent_based, trace_id_ratio_based=trace_id_ratio_based, - **kwargs + **kwargs, ) @@ -614,7 +613,7 @@ def tracer_provider_sampler_parent_based_local_parent_sampled( jaeger_remote: object = None, parent_based: object = None, trace_id_ratio_based: object = None, - **kwargs + **kwargs, ): return MockSampler( type="LocalParentSampledSampler", @@ -623,7 +622,7 @@ def tracer_provider_sampler_parent_based_local_parent_sampled( jaeger_remote=jaeger_remote, parent_based=parent_based, trace_id_ratio_based=trace_id_ratio_based, - **kwargs + **kwargs, ) @@ -633,7 +632,7 @@ def tracer_provider_sampler_parent_based_local_parent_not_sampled( jaeger_remote: object = None, parent_based: object = None, trace_id_ratio_based: object = None, - 
**kwargs + **kwargs, ): return MockSampler( type="LocalParentNotSampledSampler", @@ -642,7 +641,7 @@ def tracer_provider_sampler_parent_based_local_parent_not_sampled( jaeger_remote=jaeger_remote, parent_based=parent_based, trace_id_ratio_based=trace_id_ratio_based, - **kwargs + **kwargs, ) diff --git a/_file_configuration/tests/test_file_configuration.py b/_file_configuration/tests/test_file_configuration.py index 250d8c2a017..5336c6b6b66 100644 --- a/_file_configuration/tests/test_file_configuration.py +++ b/_file_configuration/tests/test_file_configuration.py @@ -41,8 +41,9 @@ def test_create_object(): file_configuration = load_file_configuration( - data_path.joinpath("file_configuration"). - joinpath("file_configuration_0.yaml") + data_path.joinpath("file_configuration").joinpath( + "file_configuration_0.yaml" + ) ) schema_path = data_path.joinpath("schema").joinpath( @@ -58,7 +59,9 @@ def test_create_object(): processed_schema = process_schema(resolved_schema) - set_resource(create_object(file_configuration, processed_schema, "resource")) + set_resource( + create_object(file_configuration, processed_schema, "resource") + ) tracer_provider = create_object( file_configuration, processed_schema, "tracer_provider" @@ -107,8 +110,9 @@ def test_create_object(): def test_substitute_environment_variables(): file_configuration = load_file_configuration( - data_path.joinpath("file_configuration"). - joinpath("file_configuration_1.yaml") + data_path.joinpath("file_configuration").joinpath( + "file_configuration_1.yaml" + ) ) schema_path = data_path.joinpath("schema").joinpath( @@ -155,8 +159,9 @@ def test_render(tmpdir): def test_dry_run(): file_configuration = load_file_configuration( - data_path.joinpath("file_configuration"). - joinpath("file_configuration_0.yaml") + data_path.joinpath("file_configuration").joinpath( + "file_configuration_0.yaml" + ) ) schema_path = data_path.joinpath("schema").joinpath( @@ -172,12 +177,17 @@ def test_dry_run(): processed_schema = process_schema(resolved_schema) - set_resource(create_object(file_configuration, processed_schema, "resource")) + set_resource( + create_object(file_configuration, processed_schema, "resource") + ) print() print( create_object( - file_configuration, processed_schema, "tracer_provider", dry_run=True + file_configuration, + processed_schema, + "tracer_provider", + dry_run=True, ) ) @@ -185,8 +195,9 @@ def test_dry_run(): def test_plugin(): file_configuration = load_file_configuration( - data_path.joinpath("file_configuration"). 
- joinpath("file_configuration_2.yaml") + data_path.joinpath("file_configuration").joinpath( + "file_configuration_2.yaml" + ) ) schema_path = data_path.joinpath("schema").joinpath( @@ -201,16 +212,11 @@ def test_plugin(): fail(f"Unexpected exception raised: {error}") assert ( - resolved_schema - ["properties"] - ["tracer_provider"] - ["properties"] - ["sampler"] - ["properties"] - ["sometimes_mondays_on"] - ["properties"] - ["probability"] - ["type"] + resolved_schema["properties"]["tracer_provider"]["properties"][ + "sampler" + ]["properties"]["sometimes_mondays_on"]["properties"]["probability"][ + "type" + ] ) == "number" processed_schema = process_schema(resolved_schema) diff --git a/opentelemetry-sdk/src/opentelemetry/sdk/trace/sampling.py b/opentelemetry-sdk/src/opentelemetry/sdk/trace/sampling.py index 16faee4ff82..7686cc2883e 100644 --- a/opentelemetry-sdk/src/opentelemetry/sdk/trace/sampling.py +++ b/opentelemetry-sdk/src/opentelemetry/sdk/trace/sampling.py @@ -198,7 +198,6 @@ def __init__( class Sampler(abc.ABC): - def __init__(self, *args, **kwargs) -> None: self._args_kwargs = [repr(arg) for arg in args] self._args_kwargs.extend( @@ -224,7 +223,7 @@ def get_description(self) -> str: pass def __repr__(self) -> str: - return f'{self.__class__.__name__}({self._args_kwargs})' + return f"{self.__class__.__name__}({self._args_kwargs})" class StaticSampler(Sampler): @@ -350,7 +349,7 @@ def __init__( remote_parent_sampled, remote_parent_not_sampled, local_parent_sampled, - local_parent_not_sampled + local_parent_not_sampled, ) self._root = root self._remote_parent_sampled = remote_parent_sampled diff --git a/pyproject.toml b/pyproject.toml index 01ae2999afc..589cf96e3a0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,6 +4,7 @@ exclude = ''' ( /( # generated files .tox| + .nox| venv| venv.*| .venv.*| From e2479e5cde141e28b076ef9979787065b978cf94 Mon Sep 17 00:00:00 2001 From: Diego Hurtado Date: Fri, 26 Jan 2024 16:53:23 -0600 Subject: [PATCH 15/18] WIP --- .../file_configuration_3.yaml | 191 ++++++++++++++++++ .../tests/test_file_configuration.py | 36 ++++ .../src/opentelemetry/sdk/trace/__init__.py | 10 +- 3 files changed, 235 insertions(+), 2 deletions(-) create mode 100644 _file_configuration/tests/data/file_configuration/file_configuration_3.yaml diff --git a/_file_configuration/tests/data/file_configuration/file_configuration_3.yaml b/_file_configuration/tests/data/file_configuration/file_configuration_3.yaml new file mode 100644 index 00000000000..240f1b7f5e6 --- /dev/null +++ b/_file_configuration/tests/data/file_configuration/file_configuration_3.yaml @@ -0,0 +1,191 @@ +# kitchen-sink.yaml demonstrates all configurable surface area, including explanatory comments. +# +# It DOES NOT represent expected real world file configuration, as it makes strange file configuration +# choices in an effort to exercise the full surface area. +# +# Configuration values are set to their defaults when default values are defined. + +# The file format version +file_format: "0.1" + +# Configure if the SDK is disabled or not. This is not required to be provided +# to ensure the SDK isn't disabled, the default value when this is not provided +# is for the SDK to be enabled. +# +# Environment variable: OTEL_SDK_DISABLED +disabled: false + +# Configure general attribute limits. See also tracer_provider.limits, logger_provider.limits. +attribute_limits: + # Configure max attribute value size. 
+ # + # Environment variable: OTEL_ATTRIBUTE_VALUE_LENGTH_LIMIT + attribute_value_length_limit: 4096 + # Configure max attribute count. + # + # Environment variable: OTEL_ATTRIBUTE_COUNT_LIMIT + attribute_count_limit: 128 + +# Configure text map context propagators. +# +# Environment variable: OTEL_PROPAGATORS +propagator: + composite: [tracecontext, baggage, b3, b3multi, jaeger, xray, ottrace] + +# Configure tracer provider. +tracer_provider: + # Configure span processors. + processors: + # Configure a batch span processor. + - batch: + # Configure delay interval (in milliseconds) between two consecutive exports. + # + # Environment variable: OTEL_BSP_SCHEDULE_DELAY + schedule_delay: 5000 + # Configure maximum allowed time (in milliseconds) to export data. + # + # Environment variable: OTEL_BSP_EXPORT_TIMEOUT + export_timeout: 30000 + # Configure maximum queue size. + # + # Environment variable: OTEL_BSP_MAX_QUEUE_SIZE + max_queue_size: 2048 + # Configure maximum batch size. + # + # Environment variable: OTEL_BSP_MAX_EXPORT_BATCH_SIZE + max_export_batch_size: 512 + # Configure exporter. + # + # Environment variable: OTEL_TRACES_EXPORTER + exporter: + # Configure exporter to be OTLP. + otlp: + # Configure protocol. + # + # Environment variable: OTEL_EXPORTER_OTLP_PROTOCOL, OTEL_EXPORTER_OTLP_TRACES_PROTOCOL + protocol: http/protobuf + # Configure endpoint. + # + # Environment variable: OTEL_EXPORTER_OTLP_ENDPOINT, OTEL_EXPORTER_OTLP_TRACES_ENDPOINT + endpoint: http://localhost:4318 + # Configure certificate. + # + # Environment variable: OTEL_EXPORTER_OTLP_CERTIFICATE, OTEL_EXPORTER_OTLP_TRACES_CERTIFICATE + certificate: /app/cert.pem + # Configure mTLS private client key. + # + # Environment variable: OTEL_EXPORTER_OTLP_CLIENT_KEY, OTEL_EXPORTER_OTLP_TRACES_CLIENT_KEY + client_key: /app/cert.pem + # Configure mTLS client certificate. + # + # Environment variable: OTEL_EXPORTER_OTLP_CLIENT_CERTIFICATE, OTEL_EXPORTER_OTLP_TRACES_CLIENT_CERTIFICATE + client_certificate: /app/cert.pem + # Configure headers. + # + # Environment variable: OTEL_EXPORTER_OTLP_HEADERS, OTEL_EXPORTER_OTLP_TRACES_HEADERS + headers: + api-key: !!str 1234 + # Configure compression. + # + # Environment variable: OTEL_EXPORTER_OTLP_COMPRESSION, OTEL_EXPORTER_OTLP_TRACES_COMPRESSION + compression: gzip + # Configure max time (in milliseconds) to wait for each export. + # + # Environment variable: OTEL_EXPORTER_OTLP_TIMEOUT, OTEL_EXPORTER_OTLP_TRACES_TIMEOUT + timeout: 10000 + # Configure a batch span processor. + - batch: + # Configure exporter. + # + # Environment variable: OTEL_TRACES_EXPORTER + schedule_delay: 5000 + exporter: + # Configure exporter to be zipkin. + zipkin: + # Configure endpoint. + # + # Environment variable: OTEL_EXPORTER_ZIPKIN_ENDPOINT + endpoint: http://localhost:9411/api/v2/spans + # Configure max time (in milliseconds) to wait for each export. + # + # Environment variable: OTEL_EXPORTER_ZIPKIN_TIMEOUT + timeout: 10000 + # Configure a simple span processor. + - simple: + # Configure exporter. + exporter: + # Configure exporter to be console. + console: {} + # Configure span limits. See also attribute_limits. + limits: + # Configure max span attribute value size. Overrides attribute_limits.attribute_value_length_limit. + # + # Environment variable: OTEL_SPAN_ATTRIBUTE_VALUE_LENGTH_LIMIT + attribute_value_length_limit: 4096 + # Configure max span attribute count. Overrides attribute_limits.attribute_count_limit. 
+ # + # Environment variable: OTEL_SPAN_ATTRIBUTE_COUNT_LIMIT + attribute_count_limit: 128 + # Configure max span event count. + # + # Environment variable: OTEL_SPAN_EVENT_COUNT_LIMIT + event_count_limit: 128 + # Configure max span link count. + # + # Environment variable: OTEL_SPAN_LINK_COUNT_LIMIT + link_count_limit: 128 + # Configure max attributes per span event. + # + # Environment variable: OTEL_EVENT_ATTRIBUTE_COUNT_LIMIT + event_attribute_count_limit: 128 + # Configure max attributes per span link. + # + # Environment variable: OTEL_LINK_ATTRIBUTE_COUNT_LIMIT + link_attribute_count_limit: 128 + # Configure the sampler. + sampler: + # Configure sampler to be parent_based. Known values include: always_off, always_on, jaeger_remote, parent_based, trace_id_ratio_based. + # + # Environment variable: OTEL_TRACES_SAMPLER=parentbased_* + parent_based: + # Configure root sampler. + # + # Environment variable: OTEL_TRACES_SAMPLER=parentbased_traceidratio + root: + # Configure sampler to be trace_id_ratio_based. + trace_id_ratio_based: + # Configure trace_id_ratio. + # + # Environment variable: OTEL_TRACES_SAMPLER_ARG=traceidratio=0.0001 + ratio: 0.0001 + # Configure remote_parent_sampled sampler. + remote_parent_sampled: + # Configure sampler to be always_on. + always_on: {} + # Configure remote_parent_not_sampled sampler. + remote_parent_not_sampled: + # Configure sampler to be always_off. + always_off: {} + # Configure local_parent_sampled sampler. + local_parent_sampled: + # Configure sampler to be always_on. + always_on: {} + # Configure local_parent_not_sampled sampler. + local_parent_not_sampled: + parent_based: + remote_parent_not_sampled: + trace_id_ratio_based: + ratio: 0.0001 + +# Configure resource for all signals. +resource: + # Configure resource attributes. + # + # Environment variable: OTEL_RESOURCE_ATTRIBUTES + attributes: + # Configure `service.name` resource attribute + # + # Environment variable: OTEL_SERVICE_NAME + service.name: !!str "unknown_service" + # Configure the resource schema URL. 
+  schema_url: https://opentelemetry.io/schemas/1.16.0
diff --git a/_file_configuration/tests/test_file_configuration.py b/_file_configuration/tests/test_file_configuration.py
index 5336c6b6b66..f4f9d1f124f 100644
--- a/_file_configuration/tests/test_file_configuration.py
+++ b/_file_configuration/tests/test_file_configuration.py
@@ -192,6 +192,42 @@ def test_dry_run():
     )


+def test_dry_run_multiple_span_processors():
+
+    file_configuration = load_file_configuration(
+        data_path.joinpath("file_configuration").joinpath(
+            "file_configuration_3.yaml"
+        )
+    )
+
+    schema_path = data_path.joinpath("schema").joinpath(
+        "opentelemetry_file_configuration.json"
+    )
+
+    resolved_schema = resolve_schema(schema_path)
+
+    try:
+        validate_file_configuration(resolved_schema, file_configuration)
+    except Exception as error:
+        fail(f"Unexpected exception raised: {error}")
+
+    processed_schema = process_schema(resolved_schema)
+
+    set_resource(
+        create_object(file_configuration, processed_schema, "resource")
+    )
+
+    print()
+    print(
+        create_object(
+            file_configuration,
+            processed_schema,
+            "tracer_provider",
+            dry_run=True,
+        )
+    )
+
+
 def test_plugin():

     file_configuration = load_file_configuration(
diff --git a/opentelemetry-sdk/src/opentelemetry/sdk/trace/__init__.py b/opentelemetry-sdk/src/opentelemetry/sdk/trace/__init__.py
index 653c577bf09..d4715331652 100644
--- a/opentelemetry-sdk/src/opentelemetry/sdk/trace/__init__.py
+++ b/opentelemetry-sdk/src/opentelemetry/sdk/trace/__init__.py
@@ -1212,6 +1212,7 @@ def __init__(
             self._resource = resource
         if not sampler:
             sampler = sampling._get_from_env_or_default()
+        self._shutdown_on_exit = self._shutdown_on_exit
         self.sampler = sampler
         self._span_limits = span_limits or SpanLimits()
         disabled = environ.get(OTEL_SDK_DISABLED, "")
@@ -1222,10 +1223,15 @@ def __init__(
             self._atexit_handler = atexit.register(self.shutdown)

     def __repr__(self) -> str:
+        from ipdb import set_trace
+        set_trace()
         return (
             f"{self.__class__.__name__}("
-            f"{repr(self.sampler)},"
-            f"{repr(self._resource)},"
+            f"sampler={repr(self.sampler)},"
+            f"resource={repr(self._resource)},"
+            f"shutdown_on_exit={repr(self._shutdown_on_exit)},"
+            f"active_span_processor={repr(self._active_span_processor)},"
+            f"id_generator={repr(self.id_generator)},"
             ")"
         )

From 7f6b53ed29b44bead1a55845885a68403f544aa5 Mon Sep 17 00:00:00 2001
From: Diego Hurtado
Date: Tue, 11 Jun 2024 14:14:03 -0600
Subject: [PATCH 16/18] WIP

---
 opentelemetry-sdk/src/opentelemetry/sdk/trace/__init__.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/opentelemetry-sdk/src/opentelemetry/sdk/trace/__init__.py b/opentelemetry-sdk/src/opentelemetry/sdk/trace/__init__.py
index d4715331652..ae527a3d573 100644
--- a/opentelemetry-sdk/src/opentelemetry/sdk/trace/__init__.py
+++ b/opentelemetry-sdk/src/opentelemetry/sdk/trace/__init__.py
@@ -1210,8 +1210,12 @@ def __init__(
             self._resource = Resource.create({})
         else:
             self._resource = resource
+
+        1 / 0
+
         if not sampler:
-            sampler = sampling._get_from_env_or_default()
+            # sampler = sampling._get_from_env_or_default()
+            self.sampler = 1
         self._shutdown_on_exit = self._shutdown_on_exit
         self.sampler = sampler
         self._span_limits = span_limits or SpanLimits()
From a3e49fbdbe3730fb7c5112300c616c7d2e1e2166 Mon Sep 17 00:00:00 2001
From: Diego Hurtado
Date: Mon, 19 Aug 2024 11:10:35 -0600
Subject: [PATCH 17/18] WIP

---
 _file_configuration/noxfile.py | 6 +++---
 .../opentelemetry/file_configuration/_internal/__init__.py | 3 +++
opentelemetry-sdk/src/opentelemetry/sdk/trace/__init__.py | 6 +----- 3 files changed, 7 insertions(+), 8 deletions(-) diff --git a/_file_configuration/noxfile.py b/_file_configuration/noxfile.py index f9fe62186f7..261de9d5647 100644 --- a/_file_configuration/noxfile.py +++ b/_file_configuration/noxfile.py @@ -4,9 +4,9 @@ @session(python=["3.11"], reuse_venv=True) def test(session): session.install(".") - session.install("-r", "requirements.txt") - session.install("../opentelemetry-api") - session.install("../opentelemetry-semantic-conventions") + # session.install("-r", "requirements.txt") + # session.install("../opentelemetry-api") + # session.install("../opentelemetry-semantic-conventions") session.install("../opentelemetry-sdk") if session.posargs: diff --git a/_file_configuration/src/opentelemetry/file_configuration/_internal/__init__.py b/_file_configuration/src/opentelemetry/file_configuration/_internal/__init__.py index 8460ca72ac0..c12b8e99f1e 100644 --- a/_file_configuration/src/opentelemetry/file_configuration/_internal/__init__.py +++ b/_file_configuration/src/opentelemetry/file_configuration/_internal/__init__.py @@ -161,6 +161,9 @@ def function(probability: float) -> SometimesMondaysOnSampler: def resolve_schema(json_file_path) -> dict: + from ipdb import set_trace + set_trace() + root_path = json_file_path.absolute() with open(json_file_path, "r") as json_file: diff --git a/opentelemetry-sdk/src/opentelemetry/sdk/trace/__init__.py b/opentelemetry-sdk/src/opentelemetry/sdk/trace/__init__.py index ae527a3d573..5dca88d7093 100644 --- a/opentelemetry-sdk/src/opentelemetry/sdk/trace/__init__.py +++ b/opentelemetry-sdk/src/opentelemetry/sdk/trace/__init__.py @@ -1211,12 +1211,10 @@ def __init__( else: self._resource = resource - 1 / 0 - + self._shutdown_on_exit = shutdown_on_exit if not sampler: # sampler = sampling._get_from_env_or_default() self.sampler = 1 - self._shutdown_on_exit = self._shutdown_on_exit self.sampler = sampler self._span_limits = span_limits or SpanLimits() disabled = environ.get(OTEL_SDK_DISABLED, "") @@ -1227,8 +1225,6 @@ def __init__( self._atexit_handler = atexit.register(self.shutdown) def __repr__(self) -> str: - from ipdb import set_trace - set_trace() return ( f"{self.__class__.__name__}(" f"sampler={repr(self.sampler)}," From b0aa066fc29b806816b58591d636585971aae809 Mon Sep 17 00:00:00 2001 From: Diego Hurtado Date: Fri, 23 Aug 2024 17:27:38 -0600 Subject: [PATCH 18/18] Skip failing tests --- _file_configuration/noxfile.py | 6 +++--- .../opentelemetry/file_configuration/_internal/__init__.py | 2 +- _file_configuration/tests/test_file_configuration.py | 6 ++++-- 3 files changed, 8 insertions(+), 6 deletions(-) diff --git a/_file_configuration/noxfile.py b/_file_configuration/noxfile.py index 261de9d5647..f9fe62186f7 100644 --- a/_file_configuration/noxfile.py +++ b/_file_configuration/noxfile.py @@ -4,9 +4,9 @@ @session(python=["3.11"], reuse_venv=True) def test(session): session.install(".") - # session.install("-r", "requirements.txt") - # session.install("../opentelemetry-api") - # session.install("../opentelemetry-semantic-conventions") + session.install("-r", "requirements.txt") + session.install("../opentelemetry-api") + session.install("../opentelemetry-semantic-conventions") session.install("../opentelemetry-sdk") if session.posargs: diff --git a/_file_configuration/src/opentelemetry/file_configuration/_internal/__init__.py b/_file_configuration/src/opentelemetry/file_configuration/_internal/__init__.py index c12b8e99f1e..79393079e29 100644 --- 
a/_file_configuration/src/opentelemetry/file_configuration/_internal/__init__.py +++ b/_file_configuration/src/opentelemetry/file_configuration/_internal/__init__.py @@ -162,7 +162,7 @@ def function(probability: float) -> SometimesMondaysOnSampler: def resolve_schema(json_file_path) -> dict: from ipdb import set_trace - set_trace() + set_trace root_path = json_file_path.absolute() diff --git a/_file_configuration/tests/test_file_configuration.py b/_file_configuration/tests/test_file_configuration.py index f4f9d1f124f..81027217d7e 100644 --- a/_file_configuration/tests/test_file_configuration.py +++ b/_file_configuration/tests/test_file_configuration.py @@ -187,7 +187,8 @@ def test_dry_run(): file_configuration, processed_schema, "tracer_provider", - dry_run=True, + # dry_run=True, + dry_run=False, ) ) @@ -223,7 +224,8 @@ def test_dry_run_multiple_span_processors(): file_configuration, processed_schema, "tracer_provider", - dry_run=True, + # dry_run=True, + dry_run=False, ) )