diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 2f41b7c8..427a7741 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -322,6 +322,8 @@ make all ## Meta schema +> 🚧 The tooling around meta schema is under construction, and the docs below will be temporarily inaccurate. Please check back soon. 🚧 + [meta_schema_*.yaml](schema) files track schema details that don't fit neatly into the JSON schema including: * Property descriptions and semantics @@ -342,7 +344,7 @@ There are a variety of build tasks which intersect with the meta schema: ### `meta_schema_types.yaml` -[meta_schema_types.yaml](schema/meta_schema_types.yaml) contains property descriptions, semantics, enum value descriptions, and SDK extension plugin information. +`meta_schema_types.yaml` (TODO: update docs after this was merged into source schema) contains property descriptions, semantics, enum value descriptions, and SDK extension plugin information. Content looks like: @@ -421,7 +423,7 @@ Ensures that the JSON schema and the meta schema are kept in sync: * For each meta schema type: * If a property exists in the JSON schema and not the meta schema, add it. * If a property exists in the meta schema and not the JSON schema, delete it. -* If a language implementation is known (i.e. defined in constant array `KNOWN_LANGUAGES` in [meta-schema.js](./scripts/meta-schema.js)) but not in meta schema, add it. +* If a language implementation is known (i.e. defined in constant array `KNOWN_LANGUAGES` in [language-implementations.js](scripts/language-implementations.js)) but not in meta schema, add it. * If a language implementation exists in meta schema but is not known, delete it. * For each language implementation: * If a type exists in the JSON schema and not in the language implementation's type support status of the meta schema, add it. 
diff --git a/Makefile b/Makefile index bfeaa365..46f3fdc4 100644 --- a/Makefile +++ b/Makefile @@ -1,14 +1,14 @@ # Copyright The OpenTelemetry Authors # SPDX-License-Identifier: Apache-2.0 -include validator/Makefile - EXAMPLE_FILES := $(shell find . -path './examples/*.yaml' -exec basename {} \; | sort) $(shell mkdir -p out) .PHONY: all all: install-tools compile-schema validate-examples all-meta-schema +include validator/Makefile + .PHONY: validate-examples validate-examples: @if ! npm ls ajv-cli; then npm install; fi @@ -25,16 +25,16 @@ update-file-format: sed -e 's/file_format:.*/file_format: \"$(FILE_FORMAT)\"/g' -i '' ./examples/$$f; \ done -.PHONY: fix-meta-schema -fix-meta-schema: - npm run-script fix-meta-schema || exit 1; \ +.PHONY: fix-language-implementations +fix-language-implementations: + npm run-script fix-language-implementations || exit 1; \ .PHONY: generate-markdown generate-markdown: npm run-script generate-markdown || exit 1; \ .PHONY: all-meta-schema -all-meta-schema: fix-meta-schema generate-markdown +all-meta-schema: fix-language-implementations generate-markdown .PHONY: install-tools install-tools: diff --git a/Makefile.common b/Makefile.common index ee308009..e4e8dfa2 100644 --- a/Makefile.common +++ b/Makefile.common @@ -6,7 +6,7 @@ SCHEMA_FILES := $(shell find . -path './schema_out/*.json' -exec basename {} \; .PHONY: compile-schema compile-schema: @if ! npm ls minimatch yaml; then npm install; fi - npm run-script yaml-to-json || exit 1; + npm run-script compile-schema || exit 1; @if ! 
npm ls ajv-cli; then npm install; fi @for f in $(SCHEMA_FILES); do \ npx --no ajv-cli compile --spec=draft2020 --allow-matching-properties -s ./schema_out/$$f -r "./schema_out/!($$f)" \ diff --git a/package.json b/package.json index 21da570c..a6e56e8c 100644 --- a/package.json +++ b/package.json @@ -1,9 +1,9 @@ { "type": "module", "scripts": { - "yaml-to-json": "node scripts/yaml-to-json.js", + "compile-schema": "node scripts/compile-schema.js", "generate-markdown": "node scripts/generate-markdown.js", - "fix-meta-schema": "node scripts/fix-meta-schema.js" + "fix-language-implementations": "node scripts/fix-language-implementations.js" }, "devDependencies": { "ajv-cli": "5.0.0" diff --git a/schema-docs.md b/schema-docs.md index 9ddec4bc..1c27ea3d 100644 --- a/schema-docs.md +++ b/schema-docs.md @@ -1338,7 +1338,7 @@ Usages:
JSON Schema -[JSON Schema Source File](./schema/opentelemetry_configuration.yaml) +[JSON Schema Source File](./schema/logger_provider.yaml)
{
   "$id": "https://opentelemetry.io/otelconfig/logger_provider.json",
   "$schema": "https://json-schema.org/draft/2020-12/schema",
@@ -1775,7 +1775,7 @@ Usages:
 
JSON Schema -[JSON Schema Source File](./schema/opentelemetry_configuration.yaml) +[JSON Schema Source File](./schema/meter_provider.yaml)
{
   "$id": "https://opentelemetry.io/otelconfig/meter_provider.json",
   "$schema": "https://json-schema.org/draft/2020-12/schema",
@@ -3131,8 +3131,7 @@ Usages:
 | `compression` | one of:
* `string`
* `null`
| `false` | No constraints. | Configure compression.
Values include: gzip, none. Implementations may support other compression algorithms.
If omitted or null, none is used.
| | `default_histogram_aggregation` | [`ExporterDefaultHistogramAggregation`](#exporterdefaulthistogramaggregation) | `false` | No constraints. | Configure default histogram aggregation.
Values include: explicit_bucket_histogram, base2_exponential_bucket_histogram. For behavior of values, see https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/metrics/sdk_exporters/otlp.md.
If omitted or null, explicit_bucket_histogram is used.
| | `encoding` | [`OtlpHttpEncoding`](#otlphttpencoding) | `false` | No constraints. | Configure the encoding used for messages.
Values include: protobuf, json. Implementations may not support json.
If omitted or null, protobuf is used.
| -| `endpoint` | one of:
* `string`
* `null`
| `false` | No constraints. | Configure endpoint, including the signal specific path.
If omitted or null, the http://localhost:4318/v1/{signal} (where signal is 'traces', 'logs', or 'metrics') is used.
| -| `endpoint` | one of:
* `string`
* `null`
| `false` | No constraints. | Configure endpoint.
If omitted or null, http://localhost:4317 is used.
| +| `endpoint` | one of:
* `string`
* `null`
| `false` | No constraints. | Configure endpoint.
If omitted or null, http://localhost:4318/v1/metrics is used.
| | `headers` | `array` of [`NameStringValuePair`](#namestringvaluepair) | `false` | * `minItems`: `1`
| Configure headers. Entries have higher priority than entries from .headers_list.
If an entry's .value is null, the entry is ignored.
| | `headers_list` | one of:
* `string`
* `null`
| `false` | No constraints. | Configure headers. Entries have lower priority than entries from .headers.
The value is a list of comma separated key-value pairs matching the format of OTEL_EXPORTER_OTLP_HEADERS. See https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/protocol/exporter.md#configuration-options for details.
If omitted or null, no headers are added.
| | `temporality_preference` | [`ExporterTemporalityPreference`](#exportertemporalitypreference) | `false` | No constraints. | Configure temporality preference.
Values include: cumulative, delta, low_memory. For behavior of values, see https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/metrics/sdk_exporters/otlp.md.
If omitted or null, cumulative is used.
| @@ -3148,7 +3147,6 @@ Usages: | `default_histogram_aggregation` | supported | unknown | supported | unknown | | `encoding` | supported | unknown | not_implemented | unknown | | `endpoint` | supported | unknown | supported | unknown | -| `endpoint` | supported | unknown | supported | unknown | | `headers` | supported | unknown | supported | unknown | | `headers_list` | supported | unknown | supported | unknown | | `temporality_preference` | supported | unknown | supported | unknown | @@ -3383,7 +3381,7 @@ Usages:
JSON Schema -[JSON Schema Source File](./schema/opentelemetry_configuration.yaml) +[JSON Schema Source File](./schema/propagator.yaml)
{
   "$id": "https://opentelemetry.io/otelconfig/propagator.json",
   "$schema": "https://json-schema.org/draft/2020-12/schema",
@@ -3679,7 +3677,7 @@ Usages:
 
JSON Schema -[JSON Schema Source File](./schema/opentelemetry_configuration.yaml) +[JSON Schema Source File](./schema/resource.yaml)
{
   "$id": "https://opentelemetry.io/otelconfig/resource.json",
   "$schema": "https://json-schema.org/draft/2020-12/schema",
@@ -4426,7 +4424,7 @@ Usages:
 
JSON Schema -[JSON Schema Source File](./schema/opentelemetry_configuration.yaml) +[JSON Schema Source File](./schema/tracer_provider.yaml)
{
   "$id": "https://opentelemetry.io/otelconfig/tracer_provider.json",
   "$schema": "https://json-schema.org/draft/2020-12/schema",
@@ -5671,7 +5669,7 @@ Usages:
 
JSON Schema -[JSON Schema Source File](./schema/opentelemetry_configuration.yaml) +[JSON Schema Source File](./schema/instrumentation.yaml)
{
   "$id": "https://opentelemetry.io/otelconfig/instrumentation.json",
   "$schema": "https://json-schema.org/draft/2020-12/schema",
@@ -7083,7 +7081,7 @@ Latest supported file format: `1.0.0-rc.2`
 | [`OtlpGrpcMetricExporter`](#otlpgrpcmetricexporter) | supported |  | * `compression`: supported
* `default_histogram_aggregation`: supported
* `endpoint`: supported
* `headers`: supported
* `headers_list`: supported
* `temporality_preference`: supported
* `timeout`: supported
* `tls`: supported
| | [`OtlpHttpEncoding`](#otlphttpencoding) | supported | | * `json`: supported
* `protobuf`: supported
| | [`OtlpHttpExporter`](#otlphttpexporter) | supported | | * `compression`: supported
* `encoding`: supported
* `endpoint`: supported
* `headers`: supported
* `headers_list`: supported
* `timeout`: supported
* `tls`: supported
| -| [`OtlpHttpMetricExporter`](#otlphttpmetricexporter) | supported | | * `compression`: supported
* `default_histogram_aggregation`: supported
* `encoding`: supported
* `endpoint`: supported
* `endpoint`: supported
* `headers`: supported
* `headers_list`: supported
* `temporality_preference`: supported
* `timeout`: supported
* `tls`: supported
| +| [`OtlpHttpMetricExporter`](#otlphttpmetricexporter) | supported | | * `compression`: supported
* `default_histogram_aggregation`: supported
* `encoding`: supported
* `endpoint`: supported
* `headers`: supported
* `headers_list`: supported
* `temporality_preference`: supported
* `timeout`: supported
* `tls`: supported
| | [`ParentBasedSampler`](#parentbasedsampler) | supported | | * `local_parent_not_sampled`: supported
* `local_parent_sampled`: supported
* `remote_parent_not_sampled`: supported
* `remote_parent_sampled`: supported
* `root`: supported
| | [`PeriodicMetricReader`](#periodicmetricreader) | supported | | * `cardinality_limits`: supported
* `exporter`: supported
* `interval`: supported
* `producers`: supported
* `timeout`: supported
| | [`Propagator`](#propagator) | supported | | * `composite`: supported
* `composite_list`: supported
| @@ -7191,7 +7189,7 @@ Latest supported file format: `0.3.0` | [`OtlpGrpcMetricExporter`](#otlpgrpcmetricexporter) | unknown | | * `compression`: unknown
* `default_histogram_aggregation`: unknown
* `endpoint`: unknown
* `headers`: unknown
* `headers_list`: unknown
* `temporality_preference`: unknown
* `timeout`: unknown
* `tls`: unknown
| | [`OtlpHttpEncoding`](#otlphttpencoding) | unknown | | * `json`: unknown
* `protobuf`: unknown
| | [`OtlpHttpExporter`](#otlphttpexporter) | unknown | | * `compression`: unknown
* `encoding`: unknown
* `endpoint`: unknown
* `headers`: unknown
* `headers_list`: unknown
* `timeout`: unknown
* `tls`: unknown
| -| [`OtlpHttpMetricExporter`](#otlphttpmetricexporter) | unknown | | * `compression`: unknown
* `default_histogram_aggregation`: unknown
* `encoding`: unknown
* `endpoint`: unknown
* `endpoint`: unknown
* `headers`: unknown
* `headers_list`: unknown
* `temporality_preference`: unknown
* `timeout`: unknown
* `tls`: unknown
| +| [`OtlpHttpMetricExporter`](#otlphttpmetricexporter) | unknown | | * `compression`: unknown
* `default_histogram_aggregation`: unknown
* `encoding`: unknown
* `endpoint`: unknown
* `headers`: unknown
* `headers_list`: unknown
* `temporality_preference`: unknown
* `timeout`: unknown
* `tls`: unknown
| | [`ParentBasedSampler`](#parentbasedsampler) | unknown | | * `local_parent_not_sampled`: unknown
* `local_parent_sampled`: unknown
* `remote_parent_not_sampled`: unknown
* `remote_parent_sampled`: unknown
* `root`: unknown
| | [`PeriodicMetricReader`](#periodicmetricreader) | unknown | | * `cardinality_limits`: unknown
* `exporter`: unknown
* `interval`: unknown
* `producers`: unknown
* `timeout`: unknown
| | [`Propagator`](#propagator) | unknown | | * `composite`: unknown
* `composite_list`: unknown
| @@ -7299,7 +7297,7 @@ Latest supported file format: `1.0.0-rc.1` | [`OtlpGrpcMetricExporter`](#otlpgrpcmetricexporter) | supported | | * `compression`: supported
* `default_histogram_aggregation`: supported
* `endpoint`: supported
* `headers`: supported
* `headers_list`: supported
* `temporality_preference`: supported
* `timeout`: supported
* `tls`: ignored
| | [`OtlpHttpEncoding`](#otlphttpencoding) | not_implemented | | * `json`: not_implemented
* `protobuf`: not_implemented
| | [`OtlpHttpExporter`](#otlphttpexporter) | supported | | * `compression`: supported
* `encoding`: not_implemented
* `endpoint`: supported
* `headers`: supported
* `headers_list`: supported
* `timeout`: supported
* `tls`: ignored
| -| [`OtlpHttpMetricExporter`](#otlphttpmetricexporter) | supported | | * `compression`: supported
* `default_histogram_aggregation`: supported
* `encoding`: not_implemented
* `endpoint`: supported
* `endpoint`: supported
* `headers`: supported
* `headers_list`: supported
* `temporality_preference`: supported
* `timeout`: supported
* `tls`: ignored
| +| [`OtlpHttpMetricExporter`](#otlphttpmetricexporter) | supported | | * `compression`: supported
* `default_histogram_aggregation`: supported
* `encoding`: not_implemented
* `endpoint`: supported
* `headers`: supported
* `headers_list`: supported
* `temporality_preference`: supported
* `timeout`: supported
* `tls`: ignored
| | [`ParentBasedSampler`](#parentbasedsampler) | supported | | * `local_parent_not_sampled`: supported
* `local_parent_sampled`: supported
* `remote_parent_not_sampled`: supported
* `remote_parent_sampled`: supported
* `root`: supported
| | [`PeriodicMetricReader`](#periodicmetricreader) | supported | | * `cardinality_limits`: supported
* `exporter`: supported
* `interval`: supported
* `producers`: not_implemented
* `timeout`: supported
| | [`Propagator`](#propagator) | supported | | * `composite`: supported
* `composite_list`: supported
| @@ -7407,7 +7405,7 @@ Latest supported file format: `1.0.0-rc.2` | [`OtlpGrpcMetricExporter`](#otlpgrpcmetricexporter) | unknown | | * `compression`: unknown
* `default_histogram_aggregation`: unknown
* `endpoint`: unknown
* `headers`: unknown
* `headers_list`: unknown
* `temporality_preference`: unknown
* `timeout`: unknown
* `tls`: unknown
| | [`OtlpHttpEncoding`](#otlphttpencoding) | unknown | | * `json`: unknown
* `protobuf`: unknown
| | [`OtlpHttpExporter`](#otlphttpexporter) | unknown | | * `compression`: unknown
* `encoding`: unknown
* `endpoint`: unknown
* `headers`: unknown
* `headers_list`: unknown
* `timeout`: unknown
* `tls`: unknown
| -| [`OtlpHttpMetricExporter`](#otlphttpmetricexporter) | unknown | | * `compression`: unknown
* `default_histogram_aggregation`: unknown
* `encoding`: unknown
* `endpoint`: unknown
* `endpoint`: unknown
* `headers`: unknown
* `headers_list`: unknown
* `temporality_preference`: unknown
* `timeout`: unknown
* `tls`: unknown
| +| [`OtlpHttpMetricExporter`](#otlphttpmetricexporter) | unknown | | * `compression`: unknown
* `default_histogram_aggregation`: unknown
* `encoding`: unknown
* `endpoint`: unknown
* `headers`: unknown
* `headers_list`: unknown
* `temporality_preference`: unknown
* `timeout`: unknown
* `tls`: unknown
| | [`ParentBasedSampler`](#parentbasedsampler) | unknown | | * `local_parent_not_sampled`: unknown
* `local_parent_sampled`: unknown
* `remote_parent_not_sampled`: unknown
* `remote_parent_sampled`: unknown
* `root`: unknown
| | [`PeriodicMetricReader`](#periodicmetricreader) | unknown | | * `cardinality_limits`: unknown
* `exporter`: unknown
* `interval`: unknown
* `producers`: unknown
* `timeout`: unknown
| | [`Propagator`](#propagator) | unknown | | * `composite`: unknown
* `composite_list`: unknown
| diff --git a/schema/common.yaml b/schema/common.yaml index 605d644c..e7398913 100644 --- a/schema/common.yaml +++ b/schema/common.yaml @@ -8,21 +8,35 @@ $defs: minItems: 1 items: type: string + description: | + Configure list of value patterns to include. + Values are evaluated to match as follows: + * If the value exactly matches. + * If the value matches the wildcard pattern, where '?' matches any single character and '*' matches any number of characters including none. + If omitted, all values are included. excluded: type: array minItems: 1 items: type: string + description: | + Configure list of value patterns to exclude. Applies after .included (i.e. excluded has higher priority than included). + Values are evaluated to match as follows: + * If the value exactly matches. + * If the value matches the wildcard pattern, where '?' matches any single character and '*' matches any number of characters including none. + If omitted, .included attributes are included. NameStringValuePair: type: object additionalProperties: false properties: name: type: string + description: The name of the pair. value: type: - string - "null" + description: The value of the pair. required: - name - value @@ -36,28 +50,51 @@ $defs: type: - string - "null" + description: | + Configure endpoint, including the signal specific path. + If omitted or null, the http://localhost:4318/v1/{signal} (where signal is 'traces', 'logs', or 'metrics') is used. tls: $ref: "#/$defs/HttpTls" + description: Configure TLS settings for the exporter. headers: type: array minItems: 1 items: $ref: "#/$defs/NameStringValuePair" + description: | + Configure headers. Entries have higher priority than entries from .headers_list. + If an entry's .value is null, the entry is ignored. headers_list: type: - string - "null" + description: | + Configure headers. Entries have lower priority than entries from .headers. 
+ The value is a list of comma separated key-value pairs matching the format of OTEL_EXPORTER_OTLP_HEADERS. See https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/protocol/exporter.md#configuration-options for details. + If omitted or null, no headers are added. compression: type: - string - "null" + description: | + Configure compression. + Values include: gzip, none. Implementations may support other compression algorithms. + If omitted or null, none is used. timeout: type: - integer - "null" minimum: 0 + description: | + Configure max time (in milliseconds) to wait for each export. + Value must be non-negative. A value of 0 indicates no limit (infinity). + If omitted or null, 10000 is used. encoding: $ref: "#/$defs/OtlpHttpEncoding" + description: | + Configure the encoding used for messages. + Values include: protobuf, json. Implementations may not support json. + If omitted or null, protobuf is used. OtlpHttpEncoding: type: - string @@ -65,6 +102,9 @@ $defs: enum: - protobuf - json + enumDescriptions: + json: Protobuf JSON encoding. + protobuf: Protobuf binary encoding. OtlpGrpcExporter: type: - object @@ -75,26 +115,45 @@ $defs: type: - string - "null" + description: | + Configure endpoint. + If omitted or null, http://localhost:4317 is used. tls: $ref: "#/$defs/GrpcTls" + description: Configure TLS settings for the exporter. headers: type: array minItems: 1 items: $ref: "#/$defs/NameStringValuePair" + description: | + Configure headers. Entries have higher priority than entries from .headers_list. + If an entry's .value is null, the entry is ignored. headers_list: type: - string - "null" + description: | + Configure headers. Entries have lower priority than entries from .headers. + The value is a list of comma separated key-value pairs matching the format of OTEL_EXPORTER_OTLP_HEADERS. 
See https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/protocol/exporter.md#configuration-options for details. + If omitted or null, no headers are added. compression: type: - string - "null" + description: | + Configure compression. + Values include: gzip, none. Implementations may support other compression algorithms. + If omitted or null, none is used. timeout: type: - integer - "null" minimum: 0 + description: | + Configure max time (in milliseconds) to wait for each export. + Value must be non-negative. A value of 0 indicates no limit (infinity). + If omitted or null, 10000 is used. ExperimentalOtlpFileExporter: type: - object @@ -105,6 +164,10 @@ $defs: type: - string - "null" + description: | + Configure output stream. + Values include stdout, or scheme+destination. For example: file:///path/to/file.jsonl. + If omitted or null, stdout is used. ConsoleExporter: type: - object @@ -120,14 +183,26 @@ $defs: type: - string - "null" + description: | + Configure certificate used to verify a server's TLS credentials. + Absolute path to certificate file in PEM format. + If omitted or null, system default certificate verification is used for secure connections. key_file: type: - string - "null" + description: | + Configure mTLS private client key. + Absolute path to client key file in PEM format. If set, .client_certificate must also be set. + If omitted or null, mTLS is not used. cert_file: type: - string - "null" + description: | + Configure mTLS client certificate. + Absolute path to client certificate file in PEM format. If set, .client_key must also be set. + If omitted or null, mTLS is not used. GrpcTls: type: - object @@ -138,15 +213,31 @@ $defs: type: - string - "null" + description: | + Configure certificate used to verify a server's TLS credentials. + Absolute path to certificate file in PEM format. + If omitted or null, system default certificate verification is used for secure connections. 
key_file: type: - string - "null" + description: | + Configure mTLS private client key. + Absolute path to client key file in PEM format. If set, .client_certificate must also be set. + If omitted or null, mTLS is not used. cert_file: type: - string - "null" + description: | + Configure mTLS client certificate. + Absolute path to client certificate file in PEM format. If set, .client_key must also be set. + If omitted or null, mTLS is not used. insecure: type: - boolean - "null" + description: | + Configure client transport security for the exporter's connection. + Only applicable when .endpoint is provided without http or https scheme. Implementations may choose to ignore .insecure. + If omitted or null, false is used. diff --git a/schema/instrumentation.yaml b/schema/instrumentation.yaml index c9f84885..f81b3628 100644 --- a/schema/instrumentation.yaml +++ b/schema/instrumentation.yaml @@ -3,28 +3,62 @@ additionalProperties: false properties: general: $ref: "#/$defs/ExperimentalGeneralInstrumentation" + description: | + Configure general SemConv options that may apply to multiple languages and instrumentations. + Instrumentation may merge general config options with the language specific configuration at .instrumentation.. cpp: $ref: "#/$defs/ExperimentalLanguageSpecificInstrumentation" + description: Configure C++ language-specific instrumentation libraries. dotnet: $ref: "#/$defs/ExperimentalLanguageSpecificInstrumentation" + description: | + Configure .NET language-specific instrumentation libraries. + Each entry's key identifies a particular instrumentation library. The corresponding value configures it. erlang: $ref: "#/$defs/ExperimentalLanguageSpecificInstrumentation" + description: | + Configure Erlang language-specific instrumentation libraries. + Each entry's key identifies a particular instrumentation library. The corresponding value configures it. 
go: $ref: "#/$defs/ExperimentalLanguageSpecificInstrumentation" + description: | + Configure Go language-specific instrumentation libraries. + Each entry's key identifies a particular instrumentation library. The corresponding value configures it. java: $ref: "#/$defs/ExperimentalLanguageSpecificInstrumentation" + description: | + Configure Java language-specific instrumentation libraries. + Each entry's key identifies a particular instrumentation library. The corresponding value configures it. js: $ref: "#/$defs/ExperimentalLanguageSpecificInstrumentation" + description: | + Configure JavaScript language-specific instrumentation libraries. + Each entry's key identifies a particular instrumentation library. The corresponding value configures it. php: $ref: "#/$defs/ExperimentalLanguageSpecificInstrumentation" + description: | + Configure PHP language-specific instrumentation libraries. + Each entry's key identifies a particular instrumentation library. The corresponding value configures it. python: $ref: "#/$defs/ExperimentalLanguageSpecificInstrumentation" + description: | + Configure Python language-specific instrumentation libraries. + Each entry's key identifies a particular instrumentation library. The corresponding value configures it. ruby: $ref: "#/$defs/ExperimentalLanguageSpecificInstrumentation" + description: | + Configure Ruby language-specific instrumentation libraries. + Each entry's key identifies a particular instrumentation library. The corresponding value configures it. rust: $ref: "#/$defs/ExperimentalLanguageSpecificInstrumentation" + description: | + Configure Rust language-specific instrumentation libraries. + Each entry's key identifies a particular instrumentation library. The corresponding value configures it. swift: $ref: "#/$defs/ExperimentalLanguageSpecificInstrumentation" + description: | + Configure Swift language-specific instrumentation libraries. + Each entry's key identifies a particular instrumentation library. 
The corresponding value configures it. $defs: ExperimentalGeneralInstrumentation: type: object @@ -32,8 +66,14 @@ $defs: properties: peer: $ref: "#/$defs/ExperimentalPeerInstrumentation" + description: | + Configure instrumentations following the peer semantic conventions. + See peer semantic conventions: https://opentelemetry.io/docs/specs/semconv/attributes-registry/peer/ http: $ref: "#/$defs/ExperimentalHttpInstrumentation" + description: | + Configure instrumentations following the http semantic conventions. + See http semantic conventions: https://opentelemetry.io/docs/specs/semconv/http/ ExperimentalPeerInstrumentation: type: object additionalProperties: false @@ -43,14 +83,21 @@ $defs: minItems: 1 items: $ref: "#/$defs/ExperimentalPeerServiceMapping" + description: | + Configure the service mapping for instrumentations following peer.service semantic conventions. + See peer.service semantic conventions: https://opentelemetry.io/docs/specs/semconv/general/attributes/#general-remote-service-attributes ExperimentalPeerServiceMapping: type: object additionalProperties: false properties: peer: type: string + description: | + The IP address to map. service: type: string + description: | + The logical name corresponding to the IP address of .peer. required: - peer - service @@ -63,10 +110,14 @@ $defs: minItems: 1 items: type: string + description: | + Configure headers to capture for outbound http requests. response_captured_headers: type: array items: type: string + description: | + Configure headers to capture for inbound http responses. ExperimentalHttpServerInstrumentation: type: object additionalProperties: false @@ -76,19 +127,25 @@ $defs: minItems: 1 items: type: string + description: | + Configure headers to capture for inbound http requests. response_captured_headers: type: array minItems: 1 items: type: string + description: | + Configure headers to capture for outbound http responses. 
ExperimentalHttpInstrumentation: type: object additionalProperties: false properties: client: $ref: "#/$defs/ExperimentalHttpClientInstrumentation" + description: Configure instrumentations following the http client semantic conventions. server: $ref: "#/$defs/ExperimentalHttpServerInstrumentation" + description: Configure instrumentations following the http server semantic conventions. ExperimentalLanguageSpecificInstrumentation: type: object additionalProperties: diff --git a/schema/logger_provider.yaml b/schema/logger_provider.yaml index d27bfcc8..a3d17c94 100644 --- a/schema/logger_provider.yaml +++ b/schema/logger_provider.yaml @@ -6,10 +6,14 @@ properties: minItems: 1 items: $ref: "#/$defs/LogRecordProcessor" + description: Configure log record processors. limits: $ref: "#/$defs/LogRecordLimits" + description: Configure log record limits. See also attribute_limits. logger_configurator/development: $ref: "#/$defs/ExperimentalLoggerConfigurator" + description: | + Configure loggers. required: - processors $defs: @@ -19,6 +23,7 @@ $defs: properties: exporter: $ref: "#/$defs/LogRecordExporter" + description: Configure exporter. required: - exporter BatchLogRecordProcessor: @@ -30,23 +35,38 @@ $defs: - integer - "null" minimum: 0 + description: | + Configure delay interval (in milliseconds) between two consecutive exports. + Value must be non-negative. + If omitted or null, 1000 is used. export_timeout: type: - integer - "null" minimum: 0 + description: | + Configure maximum allowed time (in milliseconds) to export data. + Value must be non-negative. A value of 0 indicates no limit (infinity). + If omitted or null, 30000 is used. max_queue_size: type: - integer - "null" exclusiveMinimum: 0 + description: | + Configure maximum queue size. Value must be positive. + If omitted or null, 2048 is used. max_export_batch_size: type: - integer - "null" exclusiveMinimum: 0 + description: | + Configure maximum batch size. Value must be positive. 
+ If omitted or null, 512 is used. exporter: $ref: "#/$defs/LogRecordExporter" + description: Configure exporter. required: - exporter LogRecordExporter: @@ -60,12 +80,18 @@ $defs: properties: otlp_http: $ref: common.yaml#/$defs/OtlpHttpExporter + description: Configure exporter to be OTLP with HTTP transport. otlp_grpc: $ref: common.yaml#/$defs/OtlpGrpcExporter + description: Configure exporter to be OTLP with gRPC transport. otlp_file/development: $ref: common.yaml#/$defs/ExperimentalOtlpFileExporter + description: | + Configure exporter to be OTLP with file transport. console: $ref: common.yaml#/$defs/ConsoleExporter + description: Configure exporter to be console. + isSdkExtensionPlugin: true LogRecordLimits: type: object additionalProperties: false @@ -75,11 +101,19 @@ - integer - "null" minimum: 0 + description: | + Configure max attribute value size. Overrides .attribute_limits.attribute_value_length_limit. + Value must be non-negative. + If omitted or null, there is no limit. attribute_count_limit: type: - integer - "null" minimum: 0 + description: | + Configure max attribute count. Overrides .attribute_limits.attribute_count_limit. + Value must be non-negative. + If omitted or null, 128 is used. LogRecordProcessor: type: object additionalProperties: @@ -91,8 +125,11 @@ properties: batch: $ref: "#/$defs/BatchLogRecordProcessor" + description: Configure a batch log record processor. simple: $ref: "#/$defs/SimpleLogRecordProcessor" + description: Configure a simple log record processor. + isSdkExtensionPlugin: true ExperimentalLoggerConfigurator: type: - object @@ -100,11 +137,13 @@ properties: default_config: $ref: "#/$defs/ExperimentalLoggerConfig" + description: Configure the default logger config used when there is no matching entry in .logger_configurator/development.loggers. loggers: type: array minItems: 1 items: $ref: "#/$defs/ExperimentalLoggerMatcherAndConfig" + description: Configure loggers. 
ExperimentalLoggerMatcherAndConfig: type: - object @@ -113,8 +152,14 @@ name: type: - string + description: | + Configure logger names to match, evaluated as follows: + + * If the logger name exactly matches. + * If the logger name matches the wildcard pattern, where '?' matches any single character and '*' matches any number of characters including none. config: $ref: "#/$defs/ExperimentalLoggerConfig" + description: The logger config. required: - name - config @@ -127,12 +172,24 @@ type: - boolean - "null" + description: | + Configure if the logger is enabled or not. + If omitted or null, false is used. minimum_severity: $ref: "#/$defs/ExperimentalSeverityNumber" + description: | + Configure severity filtering. + Log records with a non-zero (i.e. unspecified) severity number which is less than minimum_severity are not processed. + Values include: TRACE, TRACE2, TRACE3, TRACE4, DEBUG, DEBUG2, DEBUG3, DEBUG4, INFO, INFO2, INFO3, INFO4, WARN, WARN2, WARN3, WARN4, ERROR, ERROR2, ERROR3, ERROR4, FATAL, FATAL2, FATAL3, FATAL4. + If omitted or null, severity filtering is not applied. trace_based: type: - boolean - "null" + description: | + Configure trace based filtering. + If true, log records associated with unsampled trace contexts are not processed. If false, or if a log record is not associated with a trace context, trace based filtering is not applied. + If omitted or null, trace based filtering is not applied. ExperimentalSeverityNumber: type: - string @@ -162,3 +219,28 @@ - FATAL2 - FATAL3 - FATAL4 + enumDescriptions: + DEBUG: DEBUG, severity number 5. + DEBUG2: DEBUG2, severity number 6. + DEBUG3: DEBUG3, severity number 7. + DEBUG4: DEBUG4, severity number 8. + ERROR: ERROR, severity number 17. + ERROR2: ERROR2, severity number 18. + ERROR3: ERROR3, severity number 19. + ERROR4: ERROR4, severity number 20. + FATAL: FATAL, severity number 21. + FATAL2: FATAL2, severity number 22. + FATAL3: FATAL3, severity number 23. 
+ FATAL4: FATAL4, severity number 24. + INFO: INFO, severity number 9. + INFO2: INFO2, severity number 10. + INFO3: INFO3, severity number 11. + INFO4: INFO4, severity number 12. + TRACE: TRACE, severity number 1. + TRACE2: TRACE2, severity number 2. + TRACE3: TRACE3, severity number 3. + TRACE4: TRACE4, severity number 4. + WARN: WARN, severity number 13. + WARN2: WARN2, severity number 14. + WARN3: WARN3, severity number 15. + WARN4: WARN4, severity number 16. diff --git a/schema/meta_schema_types.yaml b/schema/meta_schema_types.yaml deleted file mode 100644 index fa5b4b5a..00000000 --- a/schema/meta_schema_types.yaml +++ /dev/null @@ -1,1281 +0,0 @@ -- type: Aggregation - properties: - - property: base2_exponential_bucket_histogram - description: TODO - - property: default - description: TODO - - property: drop - description: TODO - - property: explicit_bucket_histogram - description: Configure aggregation to be explicit_bucket_histogram. - - property: last_value - description: TODO - - property: sum - description: TODO - isSdkExtensionPlugin: false -- type: AlwaysOffSampler - properties: [] - isSdkExtensionPlugin: false -- type: AlwaysOnSampler - properties: [] - isSdkExtensionPlugin: false -- type: AttributeLimits - properties: - - property: attribute_count_limit - description: | - Configure max attribute count. - Value must be non-negative. - If omitted or null, 128 is used. - - property: attribute_value_length_limit - description: | - Configure max attribute value size. - Value must be non-negative. - If omitted or null, there is no limit. - isSdkExtensionPlugin: false -- type: AttributeNameValue - properties: - - property: name - description: | - The attribute name. - - property: type - description: | - The attribute type. - Values include: string, bool, int, double, string_array, bool_array, int_array, double_array. - If omitted or null, string is used. - - property: value - description: | - The attribute value. - The type of value must match .type. 
- isSdkExtensionPlugin: false -- type: AttributeType - enumValues: - - enumValue: bool - description: TODO - - enumValue: bool_array - description: TODO - - enumValue: double - description: TODO - - enumValue: double_array - description: TODO - - enumValue: int - description: TODO - - enumValue: int_array - description: TODO - - enumValue: string - description: TODO - - enumValue: string_array - description: TODO - isSdkExtensionPlugin: false -- type: B3MultiPropagator - properties: [] - isSdkExtensionPlugin: false -- type: B3Propagator - properties: [] - isSdkExtensionPlugin: false -- type: BaggagePropagator - properties: [] - isSdkExtensionPlugin: false -- type: Base2ExponentialBucketHistogramAggregation - properties: - - property: max_scale - description: TODO - - property: max_size - description: TODO - - property: record_min_max - description: TODO - isSdkExtensionPlugin: false -- type: BatchLogRecordProcessor - properties: - - property: export_timeout - description: | - Configure maximum allowed time (in milliseconds) to export data. - Value must be non-negative. A value of 0 indicates no limit (infinity). - If omitted or null, 30000 is used. - - property: exporter - description: Configure exporter. - - property: max_export_batch_size - description: | - Configure maximum batch size. Value must be positive. - If omitted or null, 512 is used. - - property: max_queue_size - description: | - Configure maximum queue size. Value must be positive. - If omitted or null, 2048 is used. - - property: schedule_delay - description: | - Configure delay interval (in milliseconds) between two consecutive exports. - Value must be non-negative. - If omitted or null, 1000 is used. - isSdkExtensionPlugin: false -- type: BatchSpanProcessor - properties: - - property: export_timeout - description: | - Configure maximum allowed time (in milliseconds) to export data. - Value must be non-negative. A value of 0 indicates no limit (infinity). - If omitted or null, 30000 is used. 
- - property: exporter - description: Configure exporter. - - property: max_export_batch_size - description: | - Configure maximum batch size. Value must be positive. - If omitted or null, 512 is used. - - property: max_queue_size - description: | - Configure maximum queue size. Value must be positive. - If omitted or null, 2048 is used. - - property: schedule_delay - description: | - Configure delay interval (in milliseconds) between two consecutive exports. - Value must be non-negative. - If omitted or null, 5000 is used. - isSdkExtensionPlugin: false -- type: CardinalityLimits - properties: - - property: counter - description: | - Configure default cardinality limit for counter instruments. - If omitted or null, the value from .default is used. - - property: default - description: | - Configure default cardinality limit for all instrument types. - Instrument-specific cardinality limits take priority. - If omitted or null, 2000 is used. - - property: gauge - description: | - Configure default cardinality limit for gauge instruments. - If omitted or null, the value from .default is used. - - property: histogram - description: | - Configure default cardinality limit for histogram instruments. - If omitted or null, the value from .default is used. - - property: observable_counter - description: | - Configure default cardinality limit for observable_counter instruments. - If omitted or null, the value from .default is used. - - property: observable_gauge - description: | - Configure default cardinality limit for observable_gauge instruments. - If omitted or null, the value from .default is used. - - property: observable_up_down_counter - description: | - Configure default cardinality limit for observable_up_down_counter instruments. - If omitted or null, the value from .default is used. - - property: up_down_counter - description: | - Configure default cardinality limit for up_down_counter instruments. - If omitted or null, the value from .default is used. 
- isSdkExtensionPlugin: false -- type: ConsoleExporter - properties: [] - isSdkExtensionPlugin: false -- type: ConsoleMetricExporter - properties: - - property: default_histogram_aggregation - description: | - Configure default histogram aggregation. - Values include: explicit_bucket_histogram, base2_exponential_bucket_histogram. For behavior of values, see https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/metrics/sdk_exporters/otlp.md. - If omitted or null, explicit_bucket_histogram is used. - - property: temporality_preference - description: | - Configure temporality preference. - Values include: cumulative, delta, low_memory. For behavior of values, see https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/metrics/sdk_exporters/otlp.md. - If omitted or null, cumulative is used. - isSdkExtensionPlugin: false -- type: DefaultAggregation - properties: [] - isSdkExtensionPlugin: false -- type: DropAggregation - properties: [] - isSdkExtensionPlugin: false -- type: ExemplarFilter - enumValues: - - enumValue: always_off - description: TODO - - enumValue: always_on - description: TODO - - enumValue: trace_based - description: TODO - isSdkExtensionPlugin: false -- type: ExplicitBucketHistogramAggregation - properties: - - property: boundaries - description: | - Configure bucket boundaries. - If omitted, [0, 5, 10, 25, 50, 75, 100, 250, 500, 750, 1000, 2500, 5000, 7500, 10000] is used. - - property: record_min_max - description: | - Configure record min and max. - If omitted or null, true is used. 
- isSdkExtensionPlugin: false -- type: ExporterDefaultHistogramAggregation - enumValues: - - enumValue: base2_exponential_bucket_histogram - description: TODO - - enumValue: explicit_bucket_histogram - description: TODO - isSdkExtensionPlugin: false -- type: ExporterTemporalityPreference - enumValues: - - enumValue: cumulative - description: TODO - - enumValue: delta - description: TODO - - enumValue: low_memory - description: TODO - isSdkExtensionPlugin: false -- type: GrpcTls - properties: - - property: ca_file - description: | - Configure certificate used to verify a server's TLS credentials. - Absolute path to certificate file in PEM format. - If omitted or null, system default certificate verification is used for secure connections. - - property: cert_file - description: | - Configure mTLS client certificate. - Absolute path to client certificate file in PEM format. If set, .client_key must also be set. - If omitted or null, mTLS is not used. - - property: insecure - description: | - Configure client transport security for the exporter's connection. - Only applicable when .endpoint is provided without http or https scheme. Implementations may choose to ignore .insecure. - If omitted or null, false is used. - - property: key_file - description: | - Configure mTLS private client key. - Absolute path to client key file in PEM format. If set, .client_certificate must also be set. - If omitted or null, mTLS is not used. - isSdkExtensionPlugin: false -- type: HttpTls - properties: - - property: ca_file - description: | - Configure certificate used to verify a server's TLS credentials. - Absolute path to certificate file in PEM format. - If omitted or null, system default certificate verification is used for secure connections. - - property: cert_file - description: | - Configure mTLS client certificate. - Absolute path to client certificate file in PEM format. If set, .client_key must also be set. - If omitted or null, mTLS is not used. 
- - property: key_file - description: | - Configure mTLS private client key. - Absolute path to client key file in PEM format. If set, .client_certificate must also be set. - If omitted or null, mTLS is not used. - isSdkExtensionPlugin: false -- type: IncludeExclude - properties: - - property: excluded - description: | - Configure list of value patterns to exclude. Applies after .included (i.e. excluded has higher priority than included). - Values are evaluated to match as follows: - * If the value exactly matches. - * If the value matches the wildcard pattern, where '?' matches any single character and '*' matches any number of characters including none. - If omitted, .included attributes are included. - - property: included - description: | - Configure list of value patterns to include. - Values are evaluated to match as follows: - * If the value exactly matches. - * If the value matches the wildcard pattern, where '?' matches any single character and '*' matches any number of characters including none. - If omitted, all values are included. - isSdkExtensionPlugin: false -- type: InstrumentType - enumValues: - - enumValue: counter - description: TODO - - enumValue: gauge - description: TODO - - enumValue: histogram - description: TODO - - enumValue: observable_counter - description: TODO - - enumValue: observable_gauge - description: TODO - - enumValue: observable_up_down_counter - description: TODO - - enumValue: up_down_counter - description: TODO - isSdkExtensionPlugin: false -- type: JaegerPropagator - properties: [] - isSdkExtensionPlugin: false -- type: LastValueAggregation - properties: [] - isSdkExtensionPlugin: false -- type: LoggerProvider - properties: - - property: limits - description: Configure log record limits. See also attribute_limits. - - property: processors - description: Configure log record processors. - - property: logger_configurator/development - description: | - Configure loggers. 
- isSdkExtensionPlugin: false -- type: LogRecordExporter - properties: - - property: console - description: Configure exporter to be console. - - property: otlp_grpc - description: Configure exporter to be OTLP with gRPC transport. - - property: otlp_http - description: Configure exporter to be OTLP with HTTP transport. - - property: otlp_file/development - description: | - Configure exporter to be OTLP with file transport. - isSdkExtensionPlugin: true -- type: LogRecordLimits - properties: - - property: attribute_count_limit - description: | - Configure max attribute count. Overrides .attribute_limits.attribute_count_limit. - Value must be non-negative. - If omitted or null, 128 is used. - - property: attribute_value_length_limit - description: | - Configure max attribute value size. Overrides .attribute_limits.attribute_value_length_limit. - Value must be non-negative. - If omitted or null, there is no limit. - isSdkExtensionPlugin: false -- type: LogRecordProcessor - properties: - - property: batch - description: Configure a batch log record processor. - - property: simple - description: Configure a simple log record processor. - isSdkExtensionPlugin: true -- type: MeterProvider - properties: - - property: exemplar_filter - description: | - Configure the exemplar filter. - Values include: trace_based, always_on, always_off. For behavior of values see https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/configuration/sdk-environment-variables.md#metrics-sdk-configuration. - If omitted or null, trace_based is used. - - property: readers - description: Configure metric readers. - - property: views - description: | - Configure views. - Each view has a selector which determines the instrument(s) it applies to, and a configuration for the resulting stream(s). - - property: meter_configurator/development - description: | - Configure meters. 
- isSdkExtensionPlugin: false -- type: MetricProducer - properties: - - property: opencensus - description: Configure metric producer to be opencensus. - isSdkExtensionPlugin: true -- type: MetricReader - properties: - - property: periodic - description: Configure a periodic metric reader. - - property: pull - description: Configure a pull based metric reader. - isSdkExtensionPlugin: false -- type: NameStringValuePair - properties: - - property: name - description: The name of the pair. - - property: value - description: The value of the pair. - isSdkExtensionPlugin: false -- type: OpenCensusMetricProducer - properties: [] - isSdkExtensionPlugin: false -- type: OpenTelemetryConfiguration - properties: - - property: attribute_limits - description: | - Configure general attribute limits. See also tracer_provider.limits, logger_provider.limits. - - property: disabled - description: | - Configure if the SDK is disabled or not. - If omitted or null, false is used. - - property: file_format - description: | - The file format version. - The yaml format is documented at - https://github.com/open-telemetry/opentelemetry-configuration/tree/main/schema - - property: log_level - description: | - Configure the log level of the internal logger used by the SDK. - If omitted, info is used. - - property: logger_provider - description: | - Configure logger provider. - If omitted, a noop logger provider is used. - - property: meter_provider - description: | - Configure meter provider. - If omitted, a noop meter provider is used. - - property: propagator - description: | - Configure text map context propagators. - If omitted, a noop propagator is used. - - property: resource - description: | - Configure resource for all signals. - If omitted, the default resource is used. - - property: tracer_provider - description: | - Configure tracer provider. - If omitted, a noop tracer provider is used. - - property: instrumentation/development - description: | - Configure instrumentation. 
- isSdkExtensionPlugin: false -- type: OpenTracingPropagator - properties: [] - isSdkExtensionPlugin: false -- type: OtlpGrpcExporter - properties: - - property: compression - description: | - Configure compression. - Values include: gzip, none. Implementations may support other compression algorithms. - If omitted or null, none is used. - - property: endpoint - description: | - Configure endpoint. - If omitted or null, http://localhost:4317 is used. - - property: headers - description: | - Configure headers. Entries have higher priority than entries from .headers_list. - If an entry's .value is null, the entry is ignored. - - property: headers_list - description: | - Configure headers. Entries have lower priority than entries from .headers. - The value is a list of comma separated key-value pairs matching the format of OTEL_EXPORTER_OTLP_HEADERS. See https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/protocol/exporter.md#configuration-options for details. - If omitted or null, no headers are added. - - property: timeout - description: | - Configure max time (in milliseconds) to wait for each export. - Value must be non-negative. A value of 0 indicates no limit (infinity). - If omitted or null, 10000 is used. - - property: tls - description: Configure TLS settings for the exporter. - isSdkExtensionPlugin: false -- type: OtlpGrpcMetricExporter - properties: - - property: compression - description: | - Configure compression. - Values include: gzip, none. Implementations may support other compression algorithms. - If omitted or null, none is used. - - property: default_histogram_aggregation - description: | - Configure default histogram aggregation. - Values include: explicit_bucket_histogram, base2_exponential_bucket_histogram. For behavior of values, see https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/metrics/sdk_exporters/otlp.md. - If omitted or null, explicit_bucket_histogram is used. 
- - property: endpoint - description: | - Configure endpoint. - If omitted or null, http://localhost:4317 is used. - - property: headers - description: | - Configure headers. Entries have higher priority than entries from .headers_list. - If an entry's .value is null, the entry is ignored. - - property: headers_list - description: | - Configure headers. Entries have lower priority than entries from .headers. - The value is a list of comma separated key-value pairs matching the format of OTEL_EXPORTER_OTLP_HEADERS. See https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/protocol/exporter.md#configuration-options for details. - If omitted or null, no headers are added. - - property: temporality_preference - description: | - Configure temporality preference. - Values include: cumulative, delta, low_memory. For behavior of values, see https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/metrics/sdk_exporters/otlp.md. - If omitted or null, cumulative is used. - - property: timeout - description: | - Configure max time (in milliseconds) to wait for each export. - Value must be non-negative. A value of 0 indicates no limit (infinity). - If omitted or null, 10000 is used. - - property: tls - description: Configure TLS settings for the exporter. - isSdkExtensionPlugin: false -- type: OtlpHttpEncoding - enumValues: - - enumValue: json - description: Protobuf JSON encoding. - - enumValue: protobuf - description: Protobuf binary encoding. - isSdkExtensionPlugin: false -- type: OtlpHttpExporter - properties: - - property: compression - description: | - Configure compression. - Values include: gzip, none. Implementations may support other compression algorithms. - If omitted or null, none is used. - - property: encoding - description: | - Configure the encoding used for messages. - Values include: protobuf, json. Implementations may not support json. - If omitted or null, protobuf is used. 
- - property: endpoint - description: | - Configure endpoint, including the signal specific path. - If omitted or null, the http://localhost:4318/v1/{signal} (where signal is 'traces', 'logs', or 'metrics') is used. - - property: headers - description: | - Configure headers. Entries have higher priority than entries from .headers_list. - If an entry's .value is null, the entry is ignored. - - property: headers_list - description: | - Configure headers. Entries have lower priority than entries from .headers. - The value is a list of comma separated key-value pairs matching the format of OTEL_EXPORTER_OTLP_HEADERS. See https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/protocol/exporter.md#configuration-options for details. - If omitted or null, no headers are added. - - property: timeout - description: | - Configure max time (in milliseconds) to wait for each export. - Value must be non-negative. A value of 0 indicates no limit (infinity). - If omitted or null, 10000 is used. - - property: tls - description: Configure TLS settings for the exporter. - isSdkExtensionPlugin: false -- type: OtlpHttpMetricExporter - properties: - - property: compression - description: | - Configure compression. - Values include: gzip, none. Implementations may support other compression algorithms. - If omitted or null, none is used. - - property: default_histogram_aggregation - description: | - Configure default histogram aggregation. - Values include: explicit_bucket_histogram, base2_exponential_bucket_histogram. For behavior of values, see https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/metrics/sdk_exporters/otlp.md. - If omitted or null, explicit_bucket_histogram is used. - - property: encoding - description: | - Configure the encoding used for messages. - Values include: protobuf, json. Implementations may not support json. - If omitted or null, protobuf is used. 
- - property: endpoint - description: | - Configure endpoint, including the signal specific path. - If omitted or null, the http://localhost:4318/v1/{signal} (where signal is 'traces', 'logs', or 'metrics') is used. - - property: endpoint - description: | - Configure endpoint. - If omitted or null, http://localhost:4317 is used. - - property: headers - description: | - Configure headers. Entries have higher priority than entries from .headers_list. - If an entry's .value is null, the entry is ignored. - - property: headers_list - description: | - Configure headers. Entries have lower priority than entries from .headers. - The value is a list of comma separated key-value pairs matching the format of OTEL_EXPORTER_OTLP_HEADERS. See https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/protocol/exporter.md#configuration-options for details. - If omitted or null, no headers are added. - - property: temporality_preference - description: | - Configure temporality preference. - Values include: cumulative, delta, low_memory. For behavior of values, see https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/metrics/sdk_exporters/otlp.md. - If omitted or null, cumulative is used. - - property: timeout - description: | - Configure max time (in milliseconds) to wait for each export. - Value must be non-negative. A value of 0 indicates no limit (infinity). - If omitted or null, 10000 is used. - - property: tls - description: Configure TLS settings for the exporter. - isSdkExtensionPlugin: false -- type: ParentBasedSampler - properties: - - property: local_parent_not_sampled - description: | - Configure local_parent_not_sampled sampler. - If omitted or null, always_off is used. - - property: local_parent_sampled - description: | - Configure local_parent_sampled sampler. - If omitted or null, always_on is used. - - property: remote_parent_not_sampled - description: | - Configure remote_parent_not_sampled sampler. 
- If omitted or null, always_off is used. - - property: remote_parent_sampled - description: | - Configure remote_parent_sampled sampler. - If omitted or null, always_on is used. - - property: root - description: | - Configure root sampler. - If omitted or null, always_on is used. - isSdkExtensionPlugin: false -- type: PeriodicMetricReader - properties: - - property: cardinality_limits - description: Configure cardinality limits. - - property: exporter - description: Configure exporter. - - property: interval - description: | - Configure delay interval (in milliseconds) between start of two consecutive exports. - Value must be non-negative. - If omitted or null, 60000 is used. - - property: producers - description: Configure metric producers. - - property: timeout - description: | - Configure maximum allowed time (in milliseconds) to export data. - Value must be non-negative. A value of 0 indicates no limit (infinity). - If omitted or null, 30000 is used. - isSdkExtensionPlugin: false -- type: Propagator - properties: - - property: composite - description: | - Configure the propagators in the composite text map propagator. Entries from .composite_list are appended to the list here with duplicates filtered out. - Built-in propagator keys include: tracecontext, baggage, b3, b3multi, jaeger, ottrace. Known third party keys include: xray. - If the resolved list of propagators (from .composite and .composite_list) is empty, a noop propagator is used. - - property: composite_list - description: | - Configure the propagators in the composite text map propagator. Entries are appended to .composite with duplicates filtered out. - The value is a comma separated list of propagator identifiers matching the format of OTEL_PROPAGATORS. See https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/configuration/sdk-environment-variables.md#general-sdk-configuration for details. 
- Built-in propagator identifiers include: tracecontext, baggage, b3, b3multi, jaeger, ottrace. Known third party identifiers include: xray. - If the resolved list of propagators (from .composite and .composite_list) is empty, a noop propagator is used. - isSdkExtensionPlugin: false -- type: PullMetricExporter - properties: - - property: prometheus/development - description: | - Configure exporter to be prometheus. - isSdkExtensionPlugin: true -- type: PullMetricReader - properties: - - property: cardinality_limits - description: Configure cardinality limits. - - property: exporter - description: Configure exporter. - - property: producers - description: Configure metric producers. - isSdkExtensionPlugin: false -- type: PushMetricExporter - properties: - - property: console - description: | - Configure exporter to be console. - - property: otlp_grpc - description: | - Configure exporter to be OTLP with gRPC transport. - - property: otlp_http - description: | - Configure exporter to be OTLP with HTTP transport. - - property: otlp_file/development - description: | - Configure exporter to be OTLP with file transport. - isSdkExtensionPlugin: true -- type: Resource - properties: - - property: attributes - description: | - Configure resource attributes. Entries have higher priority than entries from .resource.attributes_list. - - property: attributes_list - description: | - Configure resource attributes. Entries have lower priority than entries from .resource.attributes. - The value is a list of comma separated key-value pairs matching the format of OTEL_RESOURCE_ATTRIBUTES. See https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/configuration/sdk-environment-variables.md#general-sdk-configuration for details. - If omitted or null, no resource attributes are added. - - property: schema_url - description: | - Configure resource schema URL. - If omitted or null, no schema URL is used. 
- - property: detection/development - description: | - Configure resource detection. - If omitted or null, resource detection is disabled. - isSdkExtensionPlugin: false -- type: Sampler - properties: - - property: always_off - description: Configure sampler to be always_off. - - property: always_on - description: Configure sampler to be always_on. - - property: parent_based - description: Configure sampler to be parent_based. - - property: trace_id_ratio_based - description: Configure sampler to be trace_id_ratio_based. - - property: composite/development - description: Configure sampler to be composite. - - property: jaeger_remote/development - description: TODO - - property: probability/development - description: Configure sampler to be probability. - isSdkExtensionPlugin: true -- type: SimpleLogRecordProcessor - properties: - - property: exporter - description: Configure exporter. - isSdkExtensionPlugin: false -- type: SimpleSpanProcessor - properties: - - property: exporter - description: Configure exporter. - isSdkExtensionPlugin: false -- type: SpanExporter - properties: - - property: console - description: Configure exporter to be console. - - property: otlp_grpc - description: Configure exporter to be OTLP with gRPC transport. - - property: otlp_http - description: Configure exporter to be OTLP with HTTP transport. - - property: zipkin - description: Configure exporter to be zipkin. - - property: otlp_file/development - description: | - Configure exporter to be OTLP with file transport. - isSdkExtensionPlugin: true -- type: SpanLimits - properties: - - property: attribute_count_limit - description: | - Configure max attribute count. Overrides .attribute_limits.attribute_count_limit. - Value must be non-negative. - If omitted or null, 128 is used. - - property: attribute_value_length_limit - description: | - Configure max attribute value size. Overrides .attribute_limits.attribute_value_length_limit. - Value must be non-negative. 
- If omitted or null, there is no limit. - - property: event_attribute_count_limit - description: | - Configure max attributes per span event. - Value must be non-negative. - If omitted or null, 128 is used. - - property: event_count_limit - description: | - Configure max span event count. - Value must be non-negative. - If omitted or null, 128 is used. - - property: link_attribute_count_limit - description: | - Configure max attributes per span link. - Value must be non-negative. - If omitted or null, 128 is used. - - property: link_count_limit - description: | - Configure max span link count. - Value must be non-negative. - If omitted or null, 128 is used. - isSdkExtensionPlugin: false -- type: SpanProcessor - properties: - - property: batch - description: Configure a batch span processor. - - property: simple - description: Configure a simple span processor. - isSdkExtensionPlugin: true -- type: SumAggregation - properties: [] - isSdkExtensionPlugin: false -- type: TextMapPropagator - properties: - - property: b3 - description: Include the zipkin b3 propagator. - - property: b3multi - description: Include the zipkin b3 multi propagator. - - property: baggage - description: Include the w3c baggage propagator. - - property: jaeger - description: Include the jaeger propagator. - - property: ottrace - description: Include the opentracing propagator. - - property: tracecontext - description: Include the w3c trace context propagator. - isSdkExtensionPlugin: true -- type: TraceContextPropagator - properties: [] - isSdkExtensionPlugin: false -- type: TraceIdRatioBasedSampler - properties: - - property: ratio - description: | - Configure trace_id_ratio. - If omitted or null, 1.0 is used. - isSdkExtensionPlugin: false -- type: TracerProvider - properties: - - property: limits - description: Configure span limits. See also attribute_limits. - - property: processors - description: Configure span processors. - - property: sampler - description: | - Configure the sampler. 
- If omitted, parent based sampler with a root of always_on is used. - - property: tracer_configurator/development - description: | - Configure tracers. - isSdkExtensionPlugin: false -- type: View - properties: - - property: selector - description: | - Configure view selector. - Selection criteria is additive as described in https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/metrics/sdk.md#instrument-selection-criteria. - - property: stream - description: Configure view stream. - isSdkExtensionPlugin: false -- type: ViewSelector - properties: - - property: instrument_name - description: | - Configure instrument name selection criteria. - If omitted or null, all instrument names match. - - property: instrument_type - description: | - Configure instrument type selection criteria. - Values include: counter, gauge, histogram, observable_counter, observable_gauge, observable_up_down_counter, up_down_counter. - If omitted or null, all instrument types match. - - property: meter_name - description: | - Configure meter name selection criteria. - If omitted or null, all meter names match. - - property: meter_schema_url - description: | - Configure meter schema url selection criteria. - If omitted or null, all meter schema URLs match. - - property: meter_version - description: | - Configure meter version selection criteria. - If omitted or null, all meter versions match. - - property: unit - description: | - Configure the instrument unit selection criteria. - If omitted or null, all instrument units match. - isSdkExtensionPlugin: false -- type: ViewStream - properties: - - property: aggregation - description: | - Configure aggregation of the resulting stream(s). - Values include: default, drop, explicit_bucket_histogram, base2_exponential_bucket_histogram, last_value, sum. For behavior of values see https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/metrics/sdk.md#aggregation. 
- If omitted, default is used. - - property: aggregation_cardinality_limit - description: | - Configure the aggregation cardinality limit. - If omitted or null, the metric reader's default cardinality limit is used. - - property: attribute_keys - description: | - Configure attribute keys retained in the resulting stream(s). - - property: description - description: | - Configure metric description of the resulting stream(s). - If omitted or null, the instrument's origin description is used. - - property: name - description: | - Configure metric name of the resulting stream(s). - If omitted or null, the instrument's original name is used. - isSdkExtensionPlugin: false -- type: ZipkinSpanExporter - properties: - - property: endpoint - description: | - Configure endpoint. - If omitted or null, http://localhost:9411/api/v2/spans is used. - - property: timeout - description: | - Configure max time (in milliseconds) to wait for each export. - Value must be non-negative. A value of 0 indicates indefinite. - If omitted or null, 10000 is used. - isSdkExtensionPlugin: false -- type: ExperimentalComposableAlwaysOffSampler - properties: [] - isSdkExtensionPlugin: false -- type: ExperimentalComposableAlwaysOnSampler - properties: [] - isSdkExtensionPlugin: false -- type: ExperimentalComposableParentBasedSampler - properties: - - property: local_parent_not_sampled - description: Configures the sampler for spans with a local parent that is not sampled. - - property: local_parent_sampled - description: Configures the sampler for spans with a local parent that is sampled. - - property: remote_parent_not_sampled - description: Configures the sampler for spans with a remote parent that is not sampled. - - property: remote_parent_sampled - description: Configures the sampler for spans with a remote parent that is sampled. - - property: root - description: Configures the sampler for spans with no parent. 
- isSdkExtensionPlugin: false -- type: ExperimentalComposableProbabilitySampler - properties: - - property: ratio - description: | - Configure ratio. - If omitted or null, 1.0 is used. - isSdkExtensionPlugin: false -- type: ExperimentalComposableSampler - properties: - - property: always_off - description: Configure sampler to be always_off. - - property: always_on - description: Configure sampler to be always_on. - - property: parent_based - description: Configure sampler to be parent_based. - - property: probability - description: Configure sampler to be probability. - isSdkExtensionPlugin: false -- type: ExperimentalContainerResourceDetector - properties: [] - isSdkExtensionPlugin: false -- type: ExperimentalGeneralInstrumentation - properties: - - property: http - description: | - Configure instrumentations following the http semantic conventions. - See http semantic conventions: https://opentelemetry.io/docs/specs/semconv/http/ - - property: peer - description: | - Configure instrumentations following the peer semantic conventions. - See peer semantic conventions: https://opentelemetry.io/docs/specs/semconv/attributes-registry/peer/ - isSdkExtensionPlugin: false -- type: ExperimentalHostResourceDetector - properties: [] - isSdkExtensionPlugin: false -- type: ExperimentalHttpClientInstrumentation - properties: - - property: request_captured_headers - description: | - Configure headers to capture for outbound http requests. - - property: response_captured_headers - description: | - Configure headers to capture for inbound http responses. - isSdkExtensionPlugin: false -- type: ExperimentalHttpInstrumentation - properties: - - property: client - description: Configure instrumentations following the http client semantic conventions. - - property: server - description: Configure instrumentations following the http server semantic conventions. 
- isSdkExtensionPlugin: false -- type: ExperimentalHttpServerInstrumentation - properties: - - property: request_captured_headers - description: | - Configure headers to capture for inbound http requests. - - property: response_captured_headers - description: | - Configure headers to capture for outbound http responses. - isSdkExtensionPlugin: false -- type: ExperimentalInstrumentation - properties: - - property: cpp - description: Configure C++ language-specific instrumentation libraries. - - property: dotnet - description: | - Configure .NET language-specific instrumentation libraries. - Each entry's key identifies a particular instrumentation library. The corresponding value configures it. - - property: erlang - description: | - Configure Erlang language-specific instrumentation libraries. - Each entry's key identifies a particular instrumentation library. The corresponding value configures it. - - property: general - description: | - Configure general SemConv options that may apply to multiple languages and instrumentations. - Instrumenation may merge general config options with the language specific configuration at .instrumentation.. - - property: go - description: | - Configure Go language-specific instrumentation libraries. - Each entry's key identifies a particular instrumentation library. The corresponding value configures it. - - property: java - description: | - Configure Java language-specific instrumentation libraries. - Each entry's key identifies a particular instrumentation library. The corresponding value configures it. - - property: js - description: | - Configure JavaScript language-specific instrumentation libraries. - Each entry's key identifies a particular instrumentation library. The corresponding value configures it. - - property: php - description: | - Configure PHP language-specific instrumentation libraries. - Each entry's key identifies a particular instrumentation library. The corresponding value configures it. 
- - property: python - description: | - Configure Python language-specific instrumentation libraries. - Each entry's key identifies a particular instrumentation library. The corresponding value configures it. - - property: ruby - description: | - Configure Ruby language-specific instrumentation libraries. - Each entry's key identifies a particular instrumentation library. The corresponding value configures it. - - property: rust - description: | - Configure Rust language-specific instrumentation libraries. - Each entry's key identifies a particular instrumentation library. The corresponding value configures it. - - property: swift - description: | - Configure Swift language-specific instrumentation libraries. - Each entry's key identifies a particular instrumentation library. The corresponding value configures it. - isSdkExtensionPlugin: false -- type: ExperimentalJaegerRemoteSampler - properties: - - property: endpoint - description: TODO - - property: initial_sampler - description: TODO - - property: interval - description: TODO - isSdkExtensionPlugin: false -- type: ExperimentalLanguageSpecificInstrumentation - properties: [] - isSdkExtensionPlugin: false -- type: ExperimentalLoggerConfig - properties: - - property: disabled - description: | - Configure if the logger is enabled or not. - If omitted or null, false is used. - - property: minimum_severity - description: | - Configure severity filtering. - Log records with an non-zero (i.e. unspecified) severity number which is less than minimum_severity are not processed. - Values include: TRACE, TRACE2, TRACE3, TRACE4, DEBUG, DEBUG2, DEBUG3, DEBUG4, INFO, INFO2, INFO3, INFO4, WARN, WARN2, WARN3, WARN4, ERROR, ERROR2, ERROR3, ERROR4, FATAL, FATAL2, FATAL3, FATAL4. - If omitted or null, severity filtering is not applied. - - property: trace_based - description: | - Configure trace based filtering. - If true, log records associated with unsampled trace contexts traces are not processed. 
If false, or if a log record is not associated with a trace context, trace based filtering is not applied. - If omitted or null, trace based filtering is not applied. - isSdkExtensionPlugin: false -- type: ExperimentalLoggerConfigurator - properties: - - property: default_config - description: Configure the default logger config used there is no matching entry in .logger_configurator/development.loggers. - - property: loggers - description: Configure loggers. - isSdkExtensionPlugin: false -- type: ExperimentalLoggerMatcherAndConfig - properties: - - property: config - description: The logger config. - - property: name - description: | - Configure logger names to match, evaluated as follows: - - * If the logger name exactly matches. - * If the logger name matches the wildcard pattern, where '?' matches any single character and '*' matches any number of characters including none. - isSdkExtensionPlugin: false -- type: ExperimentalMeterConfig - properties: - - property: disabled - description: Configure if the meter is enabled or not. - isSdkExtensionPlugin: false -- type: ExperimentalMeterConfigurator - properties: - - property: default_config - description: Configure the default meter config used there is no matching entry in .meter_configurator/development.meters. - - property: meters - description: Configure meters. - isSdkExtensionPlugin: false -- type: ExperimentalMeterMatcherAndConfig - properties: - - property: config - description: The meter config. - - property: name - description: | - Configure meter names to match, evaluated as follows: - - * If the meter name exactly matches. - * If the meter name matches the wildcard pattern, where '?' matches any single character and '*' matches any number of characters including none. - isSdkExtensionPlugin: false -- type: ExperimentalOtlpFileExporter - properties: - - property: output_stream - description: | - Configure output stream. - Values include stdout, or scheme+destination. 
For example: file:///path/to/file.jsonl. - If omitted or null, stdout is used. - isSdkExtensionPlugin: false -- type: ExperimentalOtlpFileMetricExporter - properties: - - property: default_histogram_aggregation - description: | - Configure default histogram aggregation. - Values include: explicit_bucket_histogram, base2_exponential_bucket_histogram. For behavior of values, see https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/metrics/sdk_exporters/otlp.md. - If omitted or null, explicit_bucket_histogram is used. - - property: output_stream - description: | - Configure output stream. - Values include stdout, or scheme+destination. For example: file:///path/to/file.jsonl. - If omitted or null, stdout is used. - - property: temporality_preference - description: | - Configure temporality preference. - Values include: cumulative, delta, low_memory. For behavior of values, see https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/metrics/sdk_exporters/otlp.md. - If omitted or null, cumulative is used. - isSdkExtensionPlugin: false -- type: ExperimentalPeerInstrumentation - properties: - - property: service_mapping - description: | - Configure the service mapping for instrumentations following peer.service semantic conventions. - See peer.service semantic conventions: https://opentelemetry.io/docs/specs/semconv/general/attributes/#general-remote-service-attributes - isSdkExtensionPlugin: false -- type: ExperimentalPeerServiceMapping - properties: - - property: peer - description: | - The IP address to map. - - property: service - description: | - The logical name corresponding to the IP address of .peer. - isSdkExtensionPlugin: false -- type: ExperimentalProbabilitySampler - properties: - - property: ratio - description: | - Configure ratio. - If omitted or null, 1.0 is used. 
- isSdkExtensionPlugin: false -- type: ExperimentalProcessResourceDetector - properties: [] - isSdkExtensionPlugin: false -- type: ExperimentalPrometheusMetricExporter - properties: - - property: host - description: | - Configure host. - If omitted or null, localhost is used. - - property: port - description: | - Configure port. - If omitted or null, 9464 is used. - - property: translation_strategy - description: | - Configure how Prometheus metrics are exposed. Values include: - - * UnderscoreEscapingWithSuffixes, the default. This fully escapes metric names for classic Prometheus metric name compatibility, and includes appending type and unit suffixes. - * UnderscoreEscapingWithoutSuffixes, metric names will continue to escape special characters to _, but suffixes won't be attached. - * NoUTF8EscapingWithSuffixes will disable changing special characters to _. Special suffixes like units and _total for counters will be attached. - * NoTranslation. This strategy bypasses all metric and label name translation, passing them through unaltered. - - If omitted or null, UnderscoreEscapingWithSuffixes is used. - - property: with_resource_constant_labels - description: Configure Prometheus Exporter to add resource attributes as metrics attributes, where the resource attribute keys match the patterns. - - property: without_scope_info - description: | - Configure Prometheus Exporter to produce metrics without a scope info metric. - If omitted or null, false is used. - - property: without_target_info - description: | - Configure Prometheus Exporter to produce metrics without a target info metric for the resource. - If omitted or null, false is used. - isSdkExtensionPlugin: false -- type: ExperimentalPrometheusTranslationStrategy - enumValues: - - enumValue: NoTranslation - description: Special character escaping is disabled. Type and unit suffixes are disabled. Metric names are unaltered. 
- - enumValue: NoUTF8EscapingWithSuffixes - description: Special character escaping is disabled. Type and unit suffixes are enabled. - - enumValue: UnderscoreEscapingWithoutSuffixes - description: Special character escaping is enabled. Type and unit suffixes are disabled. This represents classic Prometheus metric name compatibility. - - enumValue: UnderscoreEscapingWithSuffixes - description: Special character escaping is enabled. Type and unit suffixes are enabled. - isSdkExtensionPlugin: false -- type: ExperimentalResourceDetection - properties: - - property: attributes - description: Configure attributes provided by resource detectors. - - property: detectors - description: | - Configure resource detectors. - Resource detector names are dependent on the SDK language ecosystem. Please consult documentation for each respective language. - If omitted or null, no resource detectors are enabled. - isSdkExtensionPlugin: false -- type: ExperimentalResourceDetector - properties: - - property: container - description: | - Enable the container resource detector, which populates container.* attributes. - - property: host - description: | - Enable the host resource detector, which populates host.* and os.* attributes. - - property: process - description: | - Enable the process resource detector, which populates process.* attributes. - - property: service - description: | - Enable the service detector, which populates service.name based on the OTEL_SERVICE_NAME environment variable and service.instance.id. - isSdkExtensionPlugin: true -- type: ExperimentalServiceResourceDetector - properties: [] - isSdkExtensionPlugin: false -- type: ExperimentalSeverityNumber - enumValues: - - enumValue: DEBUG - description: DEBUG, severity number 5. - - enumValue: DEBUG2 - description: DEBUG2, severity number 6. - - enumValue: DEBUG3 - description: DEBUG3, severity number 7. - - enumValue: DEBUG4 - description: DEBUG4, severity number 8. 
- - enumValue: ERROR - description: ERROR, severity number 17. - - enumValue: ERROR2 - description: ERROR2, severity number 18. - - enumValue: ERROR3 - description: ERROR3, severity number 19. - - enumValue: ERROR4 - description: ERROR4, severity number 20. - - enumValue: FATAL - description: FATAL, severity number 21. - - enumValue: FATAL2 - description: FATAL2, severity number 22. - - enumValue: FATAL3 - description: FATAL3, severity number 23. - - enumValue: FATAL4 - description: FATAL4, severity number 24. - - enumValue: INFO - description: INFO, severity number 9. - - enumValue: INFO2 - description: INFO2, severity number 10. - - enumValue: INFO3 - description: INFO3, severity number 11. - - enumValue: INFO4 - description: INFO4, severity number 12. - - enumValue: TRACE - description: TRACE, severity number 1. - - enumValue: TRACE2 - description: TRACE2, severity number 2. - - enumValue: TRACE3 - description: TRACE3, severity number 3. - - enumValue: TRACE4 - description: TRACE4, severity number 4. - - enumValue: WARN - description: WARN, severity number 13. - - enumValue: WARN2 - description: WARN2, severity number 14. - - enumValue: WARN3 - description: WARN3, severity number 15. - - enumValue: WARN4 - description: WARN4, severity number 16. - isSdkExtensionPlugin: false -- type: ExperimentalTracerConfig - properties: - - property: disabled - description: Configure if the tracer is enabled or not. - isSdkExtensionPlugin: false -- type: ExperimentalTracerConfigurator - properties: - - property: default_config - description: Configure the default tracer config used there is no matching entry in .tracer_configurator/development.tracers. - - property: tracers - description: Configure tracers. - isSdkExtensionPlugin: false -- type: ExperimentalTracerMatcherAndConfig - properties: - - property: config - description: The tracer config. 
- - property: name - description: | - Configure tracer names to match, evaluated as follows: - - * If the tracer name exactly matches. - * If the tracer name matches the wildcard pattern, where '?' matches any single character and '*' matches any number of characters including none. - isSdkExtensionPlugin: false diff --git a/schema/meter_provider.yaml b/schema/meter_provider.yaml index cc683740..7a8f4834 100644 --- a/schema/meter_provider.yaml +++ b/schema/meter_provider.yaml @@ -6,15 +6,25 @@ properties: minItems: 1 items: $ref: "#/$defs/MetricReader" + description: Configure metric readers. views: type: array minItems: 1 items: $ref: "#/$defs/View" + description: | + Configure views. + Each view has a selector which determines the instrument(s) it applies to, and a configuration for the resulting stream(s). exemplar_filter: $ref: "#/$defs/ExemplarFilter" + description: | + Configure the exemplar filter. + Values include: trace_based, always_on, always_off. For behavior of values see https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/configuration/sdk-environment-variables.md#metrics-sdk-configuration. + If omitted or null, trace_based is used. meter_configurator/development: $ref: "#/$defs/ExperimentalMeterConfigurator" + description: | + Configure meters. required: - readers $defs: @@ -26,6 +36,10 @@ $defs: - always_on - always_off - trace_based + enumDescriptions: + always_off: TODO + always_on: TODO + trace_based: TODO PeriodicMetricReader: type: object additionalProperties: false @@ -35,20 +49,31 @@ $defs: - integer - "null" minimum: 0 + description: | + Configure delay interval (in milliseconds) between start of two consecutive exports. + Value must be non-negative. + If omitted or null, 60000 is used. timeout: type: - integer - "null" minimum: 0 + description: | + Configure maximum allowed time (in milliseconds) to export data. + Value must be non-negative. A value of 0 indicates no limit (infinity). 
+ If omitted or null, 30000 is used. exporter: $ref: "#/$defs/PushMetricExporter" + description: Configure exporter. producers: type: array minItems: 1 items: $ref: "#/$defs/MetricProducer" + description: Configure metric producers. cardinality_limits: $ref: "#/$defs/CardinalityLimits" + description: Configure cardinality limits. required: - exporter PullMetricReader: @@ -57,13 +82,16 @@ $defs: properties: exporter: $ref: "#/$defs/PullMetricExporter" + description: Configure exporter. producers: type: array minItems: 1 items: $ref: "#/$defs/MetricProducer" + description: Configure metric producers. cardinality_limits: $ref: "#/$defs/CardinalityLimits" + description: Configure cardinality limits. required: - exporter CardinalityLimits: @@ -75,41 +103,66 @@ $defs: - integer - "null" exclusiveMinimum: 0 + description: | + Configure default cardinality limit for all instrument types. + Instrument-specific cardinality limits take priority. + If omitted or null, 2000 is used. counter: type: - integer - "null" exclusiveMinimum: 0 + description: | + Configure default cardinality limit for counter instruments. + If omitted or null, the value from .default is used. gauge: type: - integer - "null" exclusiveMinimum: 0 + description: | + Configure default cardinality limit for gauge instruments. + If omitted or null, the value from .default is used. histogram: type: - integer - "null" exclusiveMinimum: 0 + description: | + Configure default cardinality limit for histogram instruments. + If omitted or null, the value from .default is used. observable_counter: type: - integer - "null" exclusiveMinimum: 0 + description: | + Configure default cardinality limit for observable_counter instruments. + If omitted or null, the value from .default is used. observable_gauge: type: - integer - "null" exclusiveMinimum: 0 + description: | + Configure default cardinality limit for observable_gauge instruments. + If omitted or null, the value from .default is used. 
observable_up_down_counter: type: - integer - "null" exclusiveMinimum: 0 + description: | + Configure default cardinality limit for observable_up_down_counter instruments. + If omitted or null, the value from .default is used. up_down_counter: type: - integer - "null" exclusiveMinimum: 0 + description: | + Configure default cardinality limit for up_down_counter instruments. + If omitted or null, the value from .default is used. PushMetricExporter: type: object additionalProperties: @@ -121,12 +174,21 @@ $defs: properties: otlp_http: $ref: "#/$defs/OtlpHttpMetricExporter" + description: | + Configure exporter to be OTLP with HTTP transport. otlp_grpc: $ref: "#/$defs/OtlpGrpcMetricExporter" + description: | + Configure exporter to be OTLP with gRPC transport. otlp_file/development: $ref: "#/$defs/ExperimentalOtlpFileMetricExporter" + description: | + Configure exporter to be OTLP with file transport. console: $ref: "#/$defs/ConsoleMetricExporter" + description: | + Configure exporter to be console. + isSdkExtensionPlugin: true PullMetricExporter: type: object additionalProperties: @@ -138,6 +200,9 @@ $defs: properties: prometheus/development: $ref: "#/$defs/ExperimentalPrometheusMetricExporter" + description: | + Configure exporter to be prometheus. + isSdkExtensionPlugin: true MetricProducer: type: object additionalProperties: @@ -149,6 +214,8 @@ $defs: properties: opencensus: $ref: "#/$defs/OpenCensusMetricProducer" + description: Configure metric producer to be opencensus. + isSdkExtensionPlugin: true OpenCensusMetricProducer: type: - object @@ -164,22 +231,44 @@ $defs: type: - string - "null" + description: | + Configure host. + If omitted or null, localhost is used. port: type: - integer - "null" + description: | + Configure port. + If omitted or null, 9464 is used. without_scope_info: type: - boolean - "null" + description: | + Configure Prometheus Exporter to produce metrics without a scope info metric. + If omitted or null, false is used. 
without_target_info: type: - boolean - "null" + description: | + Configure Prometheus Exporter to produce metrics without a target info metric for the resource. + If omitted or null, false is used. with_resource_constant_labels: $ref: common.yaml#/$defs/IncludeExclude + description: Configure Prometheus Exporter to add resource attributes as metrics attributes, where the resource attribute keys match the patterns. translation_strategy: $ref: "#/$defs/ExperimentalPrometheusTranslationStrategy" + description: | + Configure how Prometheus metrics are exposed. Values include: + + * UnderscoreEscapingWithSuffixes, the default. This fully escapes metric names for classic Prometheus metric name compatibility, and includes appending type and unit suffixes. + * UnderscoreEscapingWithoutSuffixes, metric names will continue to escape special characters to _, but suffixes won't be attached. + * NoUTF8EscapingWithSuffixes will disable changing special characters to _. Special suffixes like units and _total for counters will be attached. + * NoTranslation. This strategy bypasses all metric and label name translation, passing them through unaltered. + + If omitted or null, UnderscoreEscapingWithSuffixes is used. ExperimentalPrometheusTranslationStrategy: type: - string @@ -189,6 +278,11 @@ $defs: - UnderscoreEscapingWithoutSuffixes - NoUTF8EscapingWithSuffixes - NoTranslation + enumDescriptions: + NoTranslation: Special character escaping is disabled. Type and unit suffixes are disabled. Metric names are unaltered. + NoUTF8EscapingWithSuffixes: Special character escaping is disabled. Type and unit suffixes are enabled. + UnderscoreEscapingWithoutSuffixes: Special character escaping is enabled. Type and unit suffixes are disabled. This represents classic Prometheus metric name compatibility. + UnderscoreEscapingWithSuffixes: Special character escaping is enabled. Type and unit suffixes are enabled. 
MetricReader: type: object additionalProperties: false @@ -197,8 +291,10 @@ $defs: properties: periodic: $ref: "#/$defs/PeriodicMetricReader" + description: Configure a periodic metric reader. pull: $ref: "#/$defs/PullMetricReader" + description: Configure a pull based metric reader. ExporterTemporalityPreference: type: - string @@ -207,6 +303,10 @@ $defs: - cumulative - delta - low_memory + enumDescriptions: + cumulative: TODO + delta: TODO + low_memory: TODO ExporterDefaultHistogramAggregation: type: - string @@ -214,6 +314,9 @@ $defs: enum: - explicit_bucket_histogram - base2_exponential_bucket_histogram + enumDescriptions: + base2_exponential_bucket_histogram: TODO + explicit_bucket_histogram: TODO OtlpHttpMetricExporter: type: - object @@ -224,32 +327,63 @@ $defs: type: - string - "null" + description: | + Configure endpoint. + If omitted or null, http://localhost:4318/v1/metrics is used. tls: $ref: common.yaml#/$defs/HttpTls + description: Configure TLS settings for the exporter. headers: type: array minItems: 1 items: $ref: common.yaml#/$defs/NameStringValuePair + description: | + Configure headers. Entries have higher priority than entries from .headers_list. + If an entry's .value is null, the entry is ignored. headers_list: type: - string - "null" + description: | + Configure headers. Entries have lower priority than entries from .headers. + The value is a list of comma separated key-value pairs matching the format of OTEL_EXPORTER_OTLP_HEADERS. See https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/protocol/exporter.md#configuration-options for details. + If omitted or null, no headers are added. compression: type: - string - "null" + description: | + Configure compression. + Values include: gzip, none. Implementations may support other compression algorithms. + If omitted or null, none is used. timeout: type: - integer - "null" minimum: 0 + description: | + Configure max time (in milliseconds) to wait for each export. 
+ Value must be non-negative. A value of 0 indicates no limit (infinity). + If omitted or null, 10000 is used. encoding: $ref: common.yaml#/$defs/OtlpHttpEncoding + description: | + Configure the encoding used for messages. + Values include: protobuf, json. Implementations may not support json. + If omitted or null, protobuf is used. temporality_preference: $ref: "#/$defs/ExporterTemporalityPreference" + description: | + Configure temporality preference. + Values include: cumulative, delta, low_memory. For behavior of values, see https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/metrics/sdk_exporters/otlp.md. + If omitted or null, cumulative is used. default_histogram_aggregation: $ref: "#/$defs/ExporterDefaultHistogramAggregation" + description: | + Configure default histogram aggregation. + Values include: explicit_bucket_histogram, base2_exponential_bucket_histogram. For behavior of values, see https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/metrics/sdk_exporters/otlp.md. + If omitted or null, explicit_bucket_histogram is used. OtlpGrpcMetricExporter: type: - object @@ -260,30 +394,57 @@ $defs: type: - string - "null" + description: | + Configure endpoint. + If omitted or null, http://localhost:4317 is used. tls: $ref: common.yaml#/$defs/GrpcTls + description: Configure TLS settings for the exporter. headers: type: array minItems: 1 items: $ref: common.yaml#/$defs/NameStringValuePair + description: | + Configure headers. Entries have higher priority than entries from .headers_list. + If an entry's .value is null, the entry is ignored. headers_list: type: - string - "null" + description: | + Configure headers. Entries have lower priority than entries from .headers. + The value is a list of comma separated key-value pairs matching the format of OTEL_EXPORTER_OTLP_HEADERS. 
See https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/protocol/exporter.md#configuration-options for details. + If omitted or null, no headers are added. compression: type: - string - "null" + description: | + Configure compression. + Values include: gzip, none. Implementations may support other compression algorithms. + If omitted or null, none is used. timeout: type: - integer - "null" minimum: 0 + description: | + Configure max time (in milliseconds) to wait for each export. + Value must be non-negative. A value of 0 indicates no limit (infinity). + If omitted or null, 10000 is used. temporality_preference: $ref: "#/$defs/ExporterTemporalityPreference" + description: | + Configure temporality preference. + Values include: cumulative, delta, low_memory. For behavior of values, see https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/metrics/sdk_exporters/otlp.md. + If omitted or null, cumulative is used. default_histogram_aggregation: $ref: "#/$defs/ExporterDefaultHistogramAggregation" + description: | + Configure default histogram aggregation. + Values include: explicit_bucket_histogram, base2_exponential_bucket_histogram. For behavior of values, see https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/metrics/sdk_exporters/otlp.md. + If omitted or null, explicit_bucket_histogram is used. ExperimentalOtlpFileMetricExporter: type: - object @@ -294,10 +455,22 @@ $defs: type: - string - "null" + description: | + Configure output stream. + Values include stdout, or scheme+destination. For example: file:///path/to/file.jsonl. + If omitted or null, stdout is used. temporality_preference: $ref: "#/$defs/ExporterTemporalityPreference" + description: | + Configure temporality preference. + Values include: cumulative, delta, low_memory. 
For behavior of values, see https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/metrics/sdk_exporters/otlp.md. + If omitted or null, cumulative is used. default_histogram_aggregation: $ref: "#/$defs/ExporterDefaultHistogramAggregation" + description: | + Configure default histogram aggregation. + Values include: explicit_bucket_histogram, base2_exponential_bucket_histogram. For behavior of values, see https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/metrics/sdk_exporters/otlp.md. + If omitted or null, explicit_bucket_histogram is used. ConsoleMetricExporter: type: - object @@ -306,16 +479,28 @@ $defs: properties: temporality_preference: $ref: "#/$defs/ExporterTemporalityPreference" + description: | + Configure temporality preference. + Values include: cumulative, delta, low_memory. For behavior of values, see https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/metrics/sdk_exporters/otlp.md. + If omitted or null, cumulative is used. default_histogram_aggregation: $ref: "#/$defs/ExporterDefaultHistogramAggregation" + description: | + Configure default histogram aggregation. + Values include: explicit_bucket_histogram, base2_exponential_bucket_histogram. For behavior of values, see https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/metrics/sdk_exporters/otlp.md. + If omitted or null, explicit_bucket_histogram is used. View: type: object additionalProperties: false properties: selector: $ref: "#/$defs/ViewSelector" + description: | + Configure view selector. + Selection criteria is additive as described in https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/metrics/sdk.md#instrument-selection-criteria. stream: $ref: "#/$defs/ViewStream" + description: Configure view stream. 
required: - selector - stream @@ -327,24 +512,43 @@ $defs: type: - string - "null" + description: | + Configure instrument name selection criteria. + If omitted or null, all instrument names match. instrument_type: $ref: "#/$defs/InstrumentType" + description: | + Configure instrument type selection criteria. + Values include: counter, gauge, histogram, observable_counter, observable_gauge, observable_up_down_counter, up_down_counter. + If omitted or null, all instrument types match. unit: type: - string - "null" + description: | + Configure the instrument unit selection criteria. + If omitted or null, all instrument units match. meter_name: type: - string - "null" + description: | + Configure meter name selection criteria. + If omitted or null, all meter names match. meter_version: type: - string - "null" + description: | + Configure meter version selection criteria. + If omitted or null, all meter versions match. meter_schema_url: type: - string - "null" + description: | + Configure meter schema url selection criteria. + If omitted or null, all meter schema URLs match. InstrumentType: type: - string @@ -357,6 +561,14 @@ $defs: - observable_gauge - observable_up_down_counter - up_down_counter + enumDescriptions: + counter: TODO + gauge: TODO + histogram: TODO + observable_counter: TODO + observable_gauge: TODO + observable_up_down_counter: TODO + up_down_counter: TODO ViewStream: type: object additionalProperties: false @@ -365,19 +577,34 @@ $defs: type: - string - "null" + description: | + Configure metric name of the resulting stream(s). + If omitted or null, the instrument's original name is used. description: type: - string - "null" + description: | + Configure metric description of the resulting stream(s). + If omitted or null, the instrument's original description is used. aggregation: $ref: "#/$defs/Aggregation" + description: | + Configure aggregation of the resulting stream(s).
+ Values include: default, drop, explicit_bucket_histogram, base2_exponential_bucket_histogram, last_value, sum. For behavior of values see https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/metrics/sdk.md#aggregation. + If omitted, default is used. aggregation_cardinality_limit: type: - integer - "null" exclusiveMinimum: 0 + description: | + Configure the aggregation cardinality limit. + If omitted or null, the metric reader's default cardinality limit is used. attribute_keys: $ref: common.yaml#/$defs/IncludeExclude + description: | + Configure attribute keys retained in the resulting stream(s). Aggregation: type: object additionalProperties: false @@ -386,16 +613,22 @@ $defs: properties: default: $ref: "#/$defs/DefaultAggregation" + description: TODO drop: $ref: "#/$defs/DropAggregation" + description: TODO explicit_bucket_histogram: $ref: "#/$defs/ExplicitBucketHistogramAggregation" + description: Configure aggregation to be explicit_bucket_histogram. base2_exponential_bucket_histogram: $ref: "#/$defs/Base2ExponentialBucketHistogramAggregation" + description: TODO last_value: $ref: "#/$defs/LastValueAggregation" + description: TODO sum: $ref: "#/$defs/SumAggregation" + description: TODO DefaultAggregation: type: - object @@ -417,10 +650,16 @@ $defs: minItems: 0 items: type: number + description: | + Configure bucket boundaries. + If omitted, [0, 5, 10, 25, 50, 75, 100, 250, 500, 750, 1000, 2500, 5000, 7500, 10000] is used. record_min_max: type: - boolean - "null" + description: | + Configure record min and max. + If omitted or null, true is used. 
Base2ExponentialBucketHistogramAggregation: type: - object @@ -433,15 +672,18 @@ $defs: - "null" minimum: -10 maximum: 20 + description: TODO max_size: type: - integer - "null" minimum: 2 + description: TODO record_min_max: type: - boolean - "null" + description: TODO LastValueAggregation: type: - object @@ -459,11 +701,13 @@ $defs: properties: default_config: $ref: "#/$defs/ExperimentalMeterConfig" + description: Configure the default meter config used when there is no matching entry in .meter_configurator/development.meters. meters: type: array minItems: 1 items: $ref: "#/$defs/ExperimentalMeterMatcherAndConfig" + description: Configure meters. ExperimentalMeterMatcherAndConfig: type: - object @@ -472,8 +716,14 @@ $defs: name: type: - string + description: | + Configure meter names to match, evaluated as follows: + + * If the meter name exactly matches. + * If the meter name matches the wildcard pattern, where '?' matches any single character and '*' matches any number of characters including none. config: $ref: "#/$defs/ExperimentalMeterConfig" + description: The meter config. required: - name - config @@ -485,3 +735,4 @@ $defs: disabled: type: - boolean + description: Configure if the meter is enabled or not. diff --git a/schema/opentelemetry_configuration.yaml b/schema/opentelemetry_configuration.yaml index dbd51ad0..d6a8d509 100644 --- a/schema/opentelemetry_configuration.yaml +++ b/schema/opentelemetry_configuration.yaml @@ -4,28 +4,57 @@ additionalProperties: true properties: file_format: type: string + description: | + The file format version. + The yaml format is documented at + https://github.com/open-telemetry/opentelemetry-configuration/tree/main/schema disabled: type: - boolean - "null" + description: | + Configure if the SDK is disabled or not. + If omitted or null, false is used. log_level: type: - string - "null" + description: | + Configure the log level of the internal logger used by the SDK. + If omitted, info is used.
attribute_limits: $ref: "#/$defs/AttributeLimits" + description: | + Configure general attribute limits. See also tracer_provider.limits, logger_provider.limits. logger_provider: $ref: "#/$defs/LoggerProvider" + description: | + Configure logger provider. + If omitted, a noop logger provider is used. meter_provider: $ref: "#/$defs/MeterProvider" + description: | + Configure meter provider. + If omitted, a noop meter provider is used. propagator: $ref: "#/$defs/Propagator" + description: | + Configure text map context propagators. + If omitted, a noop propagator is used. tracer_provider: $ref: "#/$defs/TracerProvider" + description: | + Configure tracer provider. + If omitted, a noop tracer provider is used. resource: $ref: "#/$defs/Resource" + description: | + Configure resource for all signals. + If omitted, the default resource is used. instrumentation/development: $ref: "#/$defs/ExperimentalInstrumentation" + description: | + Configure instrumentation. required: - file_format $defs: @@ -38,11 +67,19 @@ $defs: - integer - "null" minimum: 0 + description: | + Configure max attribute value size. + Value must be non-negative. + If omitted or null, there is no limit. attribute_count_limit: type: - integer - "null" minimum: 0 + description: | + Configure max attribute count. + Value must be non-negative. + If omitted or null, 128 is used. LoggerProvider: $ref: logger_provider.yaml MeterProvider: diff --git a/schema/propagator.yaml b/schema/propagator.yaml index 9c3e106a..29544f82 100644 --- a/schema/propagator.yaml +++ b/schema/propagator.yaml @@ -6,10 +6,19 @@ properties: minItems: 1 items: $ref: "#/$defs/TextMapPropagator" + description: | + Configure the propagators in the composite text map propagator. Entries from .composite_list are appended to the list here with duplicates filtered out. + Built-in propagator keys include: tracecontext, baggage, b3, b3multi, jaeger, ottrace. Known third party keys include: xray. 
+ If the resolved list of propagators (from .composite and .composite_list) is empty, a noop propagator is used. composite_list: type: - string - "null" + description: | + Configure the propagators in the composite text map propagator. Entries are appended to .composite with duplicates filtered out. + The value is a comma separated list of propagator identifiers matching the format of OTEL_PROPAGATORS. See https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/configuration/sdk-environment-variables.md#general-sdk-configuration for details. + Built-in propagator identifiers include: tracecontext, baggage, b3, b3multi, jaeger, ottrace. Known third party identifiers include: xray. + If the resolved list of propagators (from .composite and .composite_list) is empty, a noop propagator is used. $defs: TextMapPropagator: type: object @@ -22,16 +31,23 @@ $defs: properties: tracecontext: $ref: "#/$defs/TraceContextPropagator" + description: Include the w3c trace context propagator. baggage: $ref: "#/$defs/BaggagePropagator" + description: Include the w3c baggage propagator. b3: $ref: "#/$defs/B3Propagator" + description: Include the zipkin b3 propagator. b3multi: $ref: "#/$defs/B3MultiPropagator" + description: Include the zipkin b3 multi propagator. jaeger: $ref: "#/$defs/JaegerPropagator" + description: Include the jaeger propagator. ottrace: $ref: "#/$defs/OpenTracingPropagator" + description: Include the opentracing propagator. + isSdkExtensionPlugin: true TraceContextPropagator: type: - object diff --git a/schema/resource.yaml b/schema/resource.yaml index 0b2b1456..cfc8d5ed 100644 --- a/schema/resource.yaml +++ b/schema/resource.yaml @@ -6,16 +6,28 @@ properties: minItems: 1 items: $ref: "#/$defs/AttributeNameValue" + description: | + Configure resource attributes. Entries have higher priority than entries from .resource.attributes_list. 
detection/development: $ref: "#/$defs/ExperimentalResourceDetection" + description: | + Configure resource detection. + If omitted or null, resource detection is disabled. schema_url: type: - string - "null" + description: | + Configure resource schema URL. + If omitted or null, no schema URL is used. attributes_list: type: - string - "null" + description: | + Configure resource attributes. Entries have lower priority than entries from .resource.attributes. + The value is a list of comma separated key-value pairs matching the format of OTEL_RESOURCE_ATTRIBUTES. See https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/configuration/sdk-environment-variables.md#general-sdk-configuration for details. + If omitted or null, no resource attributes are added. $defs: AttributeNameValue: type: object @@ -23,6 +35,8 @@ $defs: properties: name: type: string + description: | + The attribute name. value: oneOf: - type: string @@ -41,8 +55,15 @@ $defs: items: type: number minItems: 1 + description: | + The attribute value. + The type of value must match .type. type: $ref: "#/$defs/AttributeType" + description: | + The attribute type. + Values include: string, bool, int, double, string_array, bool_array, int_array, double_array. + If omitted or null, string is used. required: - name - value @@ -59,17 +80,31 @@ $defs: - bool_array - int_array - double_array + enumDescriptions: + bool: TODO + bool_array: TODO + double: TODO + double_array: TODO + int: TODO + int_array: TODO + string: TODO + string_array: TODO ExperimentalResourceDetection: type: object additionalProperties: false properties: attributes: $ref: common.yaml#/$defs/IncludeExclude + description: Configure attributes provided by resource detectors. detectors: type: array minItems: 1 items: $ref: "#/$defs/ExperimentalResourceDetector" + description: | + Configure resource detectors. + Resource detector names are dependent on the SDK language ecosystem. 
Please consult documentation for each respective language. + If omitted or null, no resource detectors are enabled. ExperimentalResourceDetector: type: object additionalProperties: @@ -81,12 +116,21 @@ $defs: properties: container: $ref: "#/$defs/ExperimentalContainerResourceDetector" + description: | + Enable the container resource detector, which populates container.* attributes. host: $ref: "#/$defs/ExperimentalHostResourceDetector" + description: | + Enable the host resource detector, which populates host.* and os.* attributes. process: $ref: "#/$defs/ExperimentalProcessResourceDetector" + description: | + Enable the process resource detector, which populates process.* attributes. service: $ref: "#/$defs/ExperimentalServiceResourceDetector" + description: | + Enable the service detector, which populates service.name based on the OTEL_SERVICE_NAME environment variable and service.instance.id. + isSdkExtensionPlugin: true ExperimentalContainerResourceDetector: type: - object diff --git a/schema/tracer_provider.yaml b/schema/tracer_provider.yaml index 9d8bab54..0163ac1d 100644 --- a/schema/tracer_provider.yaml +++ b/schema/tracer_provider.yaml @@ -6,12 +6,19 @@ properties: minItems: 1 items: $ref: "#/$defs/SpanProcessor" + description: Configure span processors. limits: $ref: "#/$defs/SpanLimits" + description: Configure span limits. See also attribute_limits. sampler: $ref: "#/$defs/Sampler" + description: | + Configure the sampler. + If omitted, parent based sampler with a root of always_on is used. tracer_configurator/development: $ref: "#/$defs/ExperimentalTracerConfigurator" + description: | + Configure tracers. required: - processors $defs: @@ -24,23 +31,38 @@ $defs: - integer - "null" minimum: 0 + description: | + Configure delay interval (in milliseconds) between two consecutive exports. + Value must be non-negative. + If omitted or null, 5000 is used. 
export_timeout: type: - integer - "null" minimum: 0 + description: | + Configure maximum allowed time (in milliseconds) to export data. + Value must be non-negative. A value of 0 indicates no limit (infinity). + If omitted or null, 30000 is used. max_queue_size: type: - integer - "null" exclusiveMinimum: 0 + description: | + Configure maximum queue size. Value must be positive. + If omitted or null, 2048 is used. max_export_batch_size: type: - integer - "null" exclusiveMinimum: 0 + description: | + Configure maximum batch size. Value must be positive. + If omitted or null, 512 is used. exporter: $ref: "#/$defs/SpanExporter" + description: Configure exporter. required: - exporter Sampler: @@ -54,18 +76,26 @@ $defs: properties: always_off: $ref: "#/$defs/AlwaysOffSampler" + description: Configure sampler to be always_off. always_on: $ref: "#/$defs/AlwaysOnSampler" + description: Configure sampler to be always_on. composite/development: $ref: "#/$defs/ExperimentalComposableSampler" + description: Configure sampler to be composite. jaeger_remote/development: $ref: "#/$defs/ExperimentalJaegerRemoteSampler" + description: TODO parent_based: $ref: "#/$defs/ParentBasedSampler" + description: Configure sampler to be parent_based. probability/development: $ref: "#/$defs/ExperimentalProbabilitySampler" + description: Configure sampler to be probability. trace_id_ratio_based: $ref: "#/$defs/TraceIdRatioBasedSampler" + description: Configure sampler to be trace_id_ratio_based. + isSdkExtensionPlugin: true AlwaysOffSampler: type: - object @@ -86,13 +116,16 @@ $defs: type: - string - "null" + description: TODO interval: type: - integer - "null" minimum: 0 + description: TODO initial_sampler: $ref: "#/$defs/Sampler" + description: TODO ParentBasedSampler: type: - object @@ -101,14 +134,29 @@ $defs: properties: root: $ref: "#/$defs/Sampler" + description: | + Configure root sampler. + If omitted or null, always_on is used. 
remote_parent_sampled: $ref: "#/$defs/Sampler" + description: | + Configure remote_parent_sampled sampler. + If omitted or null, always_on is used. remote_parent_not_sampled: $ref: "#/$defs/Sampler" + description: | + Configure remote_parent_not_sampled sampler. + If omitted or null, always_off is used. local_parent_sampled: $ref: "#/$defs/Sampler" + description: | + Configure local_parent_sampled sampler. + If omitted or null, always_on is used. local_parent_not_sampled: $ref: "#/$defs/Sampler" + description: | + Configure local_parent_not_sampled sampler. + If omitted or null, always_off is used. ExperimentalProbabilitySampler: type: - object @@ -121,6 +169,9 @@ $defs: - "null" minimum: 0 maximum: 1 + description: | + Configure ratio. + If omitted or null, 1.0 is used. TraceIdRatioBasedSampler: type: - object @@ -133,6 +184,9 @@ $defs: - "null" minimum: 0 maximum: 1 + description: | + Configure trace_id_ratio. + If omitted or null, 1.0 is used. ExperimentalComposableAlwaysOffSampler: type: - object @@ -151,14 +205,19 @@ $defs: properties: root: $ref: "#/$defs/ExperimentalComposableSampler" + description: Configures the sampler for spans with no parent. remote_parent_sampled: $ref: "#/$defs/ExperimentalComposableSampler" + description: Configures the sampler for spans with a remote parent that is sampled. remote_parent_not_sampled: $ref: "#/$defs/ExperimentalComposableSampler" + description: Configures the sampler for spans with a remote parent that is not sampled. local_parent_sampled: $ref: "#/$defs/ExperimentalComposableSampler" + description: Configures the sampler for spans with a local parent that is sampled. local_parent_not_sampled: $ref: "#/$defs/ExperimentalComposableSampler" + description: Configures the sampler for spans with a local parent that is not sampled. ExperimentalComposableProbabilitySampler: type: - object @@ -171,6 +230,9 @@ $defs: - "null" minimum: 0 maximum: 1 + description: | + Configure ratio. + If omitted or null, 1.0 is used. 
ExperimentalComposableSampler: type: object additionalProperties: @@ -182,18 +244,23 @@ $defs: properties: always_off: $ref: "#/$defs/ExperimentalComposableAlwaysOffSampler" + description: Configure sampler to be always_off. always_on: $ref: "#/$defs/ExperimentalComposableAlwaysOnSampler" + description: Configure sampler to be always_on. parent_based: $ref: "#/$defs/ExperimentalComposableParentBasedSampler" + description: Configure sampler to be parent_based. probability: $ref: "#/$defs/ExperimentalComposableProbabilitySampler" + description: Configure sampler to be probability. SimpleSpanProcessor: type: object additionalProperties: false properties: exporter: $ref: "#/$defs/SpanExporter" + description: Configure exporter. required: - exporter SpanExporter: @@ -207,14 +274,21 @@ $defs: properties: otlp_http: $ref: common.yaml#/$defs/OtlpHttpExporter + description: Configure exporter to be OTLP with HTTP transport. otlp_grpc: $ref: common.yaml#/$defs/OtlpGrpcExporter + description: Configure exporter to be OTLP with gRPC transport. otlp_file/development: $ref: common.yaml#/$defs/ExperimentalOtlpFileExporter + description: | + Configure exporter to be OTLP with file transport. console: $ref: common.yaml#/$defs/ConsoleExporter + description: Configure exporter to be console. zipkin: $ref: "#/$defs/ZipkinSpanExporter" + description: Configure exporter to be zipkin. + isSdkExtensionPlugin: true SpanLimits: type: object additionalProperties: false @@ -224,31 +298,55 @@ $defs: - integer - "null" minimum: 0 + description: | + Configure max attribute value size. Overrides .attribute_limits.attribute_value_length_limit. + Value must be non-negative. + If omitted or null, there is no limit. attribute_count_limit: type: - integer - "null" minimum: 0 + description: | + Configure max attribute count. Overrides .attribute_limits.attribute_count_limit. + Value must be non-negative. + If omitted or null, 128 is used. 
event_count_limit: type: - integer - "null" minimum: 0 + description: | + Configure max span event count. + Value must be non-negative. + If omitted or null, 128 is used. link_count_limit: type: - integer - "null" minimum: 0 + description: | + Configure max span link count. + Value must be non-negative. + If omitted or null, 128 is used. event_attribute_count_limit: type: - integer - "null" minimum: 0 + description: | + Configure max attributes per span event. + Value must be non-negative. + If omitted or null, 128 is used. link_attribute_count_limit: type: - integer - "null" minimum: 0 + description: | + Configure max attributes per span link. + Value must be non-negative. + If omitted or null, 128 is used. SpanProcessor: type: object additionalProperties: @@ -260,8 +358,11 @@ $defs: properties: batch: $ref: "#/$defs/BatchSpanProcessor" + description: Configure a batch span processor. simple: $ref: "#/$defs/SimpleSpanProcessor" + description: Configure a simple span processor. + isSdkExtensionPlugin: true ZipkinSpanExporter: type: - object @@ -272,11 +373,18 @@ $defs: type: - string - "null" + description: | + Configure endpoint. + If omitted or null, http://localhost:9411/api/v2/spans is used. timeout: type: - integer - "null" minimum: 0 + description: | + Configure max time (in milliseconds) to wait for each export. + Value must be non-negative. A value of 0 indicates indefinite. + If omitted or null, 10000 is used. ExperimentalTracerConfigurator: type: - object @@ -284,11 +392,13 @@ $defs: properties: default_config: $ref: "#/$defs/ExperimentalTracerConfig" + description: Configure the default tracer config used when there is no matching entry in .tracer_configurator/development.tracers. tracers: type: array minItems: 1 items: $ref: "#/$defs/ExperimentalTracerMatcherAndConfig" + description: Configure tracers.
ExperimentalTracerMatcherAndConfig: type: - object @@ -297,8 +407,14 @@ $defs: name: type: - string + description: | + Configure tracer names to match, evaluated as follows: + + * If the tracer name exactly matches. + * If the tracer name matches the wildcard pattern, where '?' matches any single character and '*' matches any number of characters including none. config: $ref: "#/$defs/ExperimentalTracerConfig" + description: The tracer config. required: - name - config @@ -310,3 +426,4 @@ $defs: disabled: type: - boolean + description: Configure if the tracer is enabled or not. diff --git a/scripts/compile-schema.js b/scripts/compile-schema.js new file mode 100644 index 00000000..0d5049d0 --- /dev/null +++ b/scripts/compile-schema.js @@ -0,0 +1,133 @@ +import fs from 'fs'; +import {schemaOutDirPath} from "./util.js"; +import yaml from "yaml"; +import {readSourceSchema} from "./source-schema.js"; + +// Delete and recreate schema out directory +fs.rmSync(schemaOutDirPath, {recursive: true, force: true}); +fs.mkdirSync(schemaOutDirPath); + +// Read source schema +const {sourceContentByFile, sourceTypesByType} = readSourceSchema(); + +// Validate source types and exit early if there are any errors +const messages = []; +Object.entries(sourceTypesByType).forEach(([type, sourceSchemaType]) => { + allPropertiesShouldHaveDescriptions(sourceSchemaType, messages); + allEnumValuesShouldHaveDescriptions(sourceSchemaType, messages); + sdkExtensionPluginSchema(sourceSchemaType, messages); + noSubschemas(sourceSchemaType, messages); +}); +if (messages.length > 0) { + messages.forEach(message => console.log(message)); + process.exit(1); +} + +// If we make it here, source schema is valid. 
+ +// Replace refs with new JSON file paths +Object.keys(sourceContentByFile).forEach(file => { + const jsonFile = file.replace('.yaml', '.json'); + Object.entries(sourceContentByFile).forEach(([otherFile, otherContent]) => { + const otherContentString = yaml.stringify(otherContent, {lineWidth: 0}); + sourceContentByFile[otherFile] = yaml.parse(otherContentString.replaceAll(`$ref: ${file}`, `$ref: ${jsonFile}`)); + }); +}); + +// For each file, massage the schema a bit and write it to the output directory in JSON format. +Object.entries(sourceContentByFile).forEach(([file, content]) => { + const jsonFile = file.replace('.yaml', '.json'); + + // Remove bits which are not part of JSON schema spec + stripExtraSourceSchemaMetadata(content); + const defs = content['$defs']; + if (defs) { + Object.values(defs).forEach(type => stripExtraSourceSchemaMetadata(type)); + } + + // Annotate with constant info + const annotated = { + '$id': `https://opentelemetry.io/otelconfig/${jsonFile}`, + '$schema': 'https://json-schema.org/draft/2020-12/schema', + ...content + } + + fs.writeFileSync(schemaOutDirPath + jsonFile, JSON.stringify(annotated, null, 2)); +}); + +// Helper functions + +function stripExtraSourceSchemaMetadata(type) { + delete type['enumDescriptions']; + delete type['isSdkExtensionPlugin']; +} + +// Validation functions + +function allPropertiesShouldHaveDescriptions(sourceSchemaType, messages) { + sourceSchemaType.properties.forEach(property => { + if (!property.schema.description) { + messages.push(`Please add 'description' to ${sourceSchemaType.type}.${property.property}.`); + } + }); +} + +function allEnumValuesShouldHaveDescriptions(sourceSchemaType, messages) { + if (!sourceSchemaType.isEnumType()) { + return; + } + const enumDescriptions = sourceSchemaType.schema['enumDescriptions']; + if (!enumDescriptions) { + messages.push(`Please add 'enumDescriptions' to ${sourceSchemaType.type}.`); + return; + } + sourceSchemaType.enumValues.forEach(enumValue => { + if 
(!enumDescriptions[enumValue]) { + messages.push(`Please add entry for ${enumValue} to 'enumDescriptions' for ${sourceSchemaType.type}.`); + } + }); + Object.keys(enumDescriptions).forEach(enumValue => { + if (!sourceSchemaType.enumValues.includes(enumValue)) { + messages.push(`Please remove entry for ${enumValue} from 'enumDescriptions' for ${sourceSchemaType.type}.`); + } + }); +} + +function sdkExtensionPluginSchema(sourceSchemaType, messages) { + const sdkExtensionPluginAdditionalProperties = JSON.stringify({"type": ["object", "null"]}); + + const schema = sourceSchemaType.schema; + if (!schema['isSdkExtensionPlugin']) { + return; + } + const type = schema['type']; + if (type !== 'object') { + messages.push(`Please set 'type' to 'object' for ${sourceSchemaType.type}.`); + } + const additionalProperties = schema['additionalProperties']; + if (JSON.stringify(additionalProperties) !== sdkExtensionPluginAdditionalProperties) { + messages.push(`Please set 'additionalProperties' to ${sdkExtensionPluginAdditionalProperties} for ${sourceSchemaType.type}.`); + } + const minProperties = schema['minProperties']; + if (minProperties !== 1) { + messages.push(`Please set 'minProperties' to 1 for ${sourceSchemaType.type}.`); + } + const maxProperties = schema['maxProperties']; + if (maxProperties !== 1) { + messages.push(`Please set 'maxProperties' to 1 for ${sourceSchemaType.type}.`); + } +} + +function noSubschemas(sourceSchemaType, messages) { + if (sourceSchemaType.isEnumType()) { + return; + } + sourceSchemaType.properties.forEach(property => { + property.types.forEach(type => { + if (type === 'object') { + messages.push(`Please move subschema for ${sourceSchemaType.type}.${property.property} to top level type in $defs.`) + } + }); + }); + +} diff --git a/scripts/fix-language-implementations.js b/scripts/fix-language-implementations.js new file mode 100644 index 00000000..65648e3a --- /dev/null +++ b/scripts/fix-language-implementations.js @@ -0,0 +1,12 @@ +import { + 
readAndFixLanguageImplementations, writeLanguageImplementations +} from "./language-implementations.js"; + +const { messages, languageImplementations } = readAndFixLanguageImplementations(); + +writeLanguageImplementations(languageImplementations); + +// Write messages to console +messages.forEach(message => { + console.log(message); +}); diff --git a/scripts/fix-meta-schema.js b/scripts/fix-meta-schema.js deleted file mode 100644 index 1d91f545..00000000 --- a/scripts/fix-meta-schema.js +++ /dev/null @@ -1,12 +0,0 @@ -import { - readAndFixMetaSchema, writeMetaSchema -} from "./meta-schema.js"; - -const { messages, metaSchema } = readAndFixMetaSchema(); - -writeMetaSchema(metaSchema); - -// Write messages to console -messages.forEach(message => { - console.log(message); -}); diff --git a/scripts/generate-markdown.js b/scripts/generate-markdown.js index f8755fa3..60301d11 100644 --- a/scripts/generate-markdown.js +++ b/scripts/generate-markdown.js @@ -1,17 +1,19 @@ -import {readJsonSchemaTypes} from "./json-schema.js"; -import {KNOWN_LANGUAGES, readAndFixMetaSchema} from "./meta-schema.js"; +import {KNOWN_LANGUAGES, readAndFixLanguageImplementations} from "./language-implementations.js"; import fs from "node:fs"; -import {isExperimentalProperty, isExperimentalType, markdownDocPath, rootTypeName} from "./util.js"; +import {isExperimentalProperty, isExperimentalType, markdownDocPath, rootTypeName, schemaOutDirPath} from "./util.js"; +import {readSourceSchema} from "./source-schema.js"; -const { messages, metaSchema } = readAndFixMetaSchema(); +const { sourceTypesByType } = readSourceSchema(); +const { messages, languageImplementations } = readAndFixLanguageImplementations(); if (messages.length > 0) { - throw new Error("Meta schema has problems. Please run fix-meta-schema and try again."); + throw new Error("Language implementations have problems. 
Please run fix-language-implementations and try again."); } -const jsonSchemaTypes = readJsonSchemaTypes(); -const jsonSchemaTypesByType = {}; -jsonSchemaTypes.forEach(type => jsonSchemaTypesByType[type.type] = type); +const outputSchemaByFile = {}; +fs.readdirSync(schemaOutDirPath).forEach(file => { + outputSchemaByFile[file] = JSON.parse(fs.readFileSync(schemaOutDirPath + file, 'utf8')); +}); const output = []; @@ -26,15 +28,15 @@ This document is an auto-generated view of the declarative configuration JSON sc `); - +const allTypes = Object.values(sourceTypesByType); const types = []; const experimentalTypes = []; -metaSchema.types.sort((a, b) => a.type.localeCompare(b.type)); -metaSchema.types.forEach(metaSchemaType => { - if (isExperimentalType(metaSchemaType.type)) { - experimentalTypes.push(metaSchemaType); +allTypes.sort((a, b) => a.type.localeCompare(b.type)); +allTypes.forEach(sourceSchemaType => { + if (isExperimentalType(sourceSchemaType.type)) { + experimentalTypes.push(sourceSchemaType); } else { - types.push(metaSchemaType); + types.push(sourceSchemaType); } }); @@ -46,13 +48,9 @@ types.forEach(writeType); addHeader('Experimental Types', 'experimental-types', 1); experimentalTypes.forEach(writeType); -function writeType(metaSchemaType) { - const type = metaSchemaType.type; - const jsonSchemaType = jsonSchemaTypesByType[type]; - if (!jsonSchemaType) { - throw new Error(`JSON schema type not found for meta schema type ${type}.`); - } - const required = jsonSchemaType.schema['required']; +function writeType(sourceSchemaType) { + const type = sourceSchemaType.type; + const required = sourceSchemaType.schema['required']; // Heading addHeader(type, type.toLowerCase(), 2); @@ -64,44 +62,42 @@ function writeType(metaSchemaType) { } // SDK extension plugin - if (metaSchemaType.isSdkExtensionPlugin) { + if (sourceSchemaType.schema.isSdkExtensionPlugin) { output.push(`\`${type}\` is an [SDK extension plugin](#sdk-extension-plugins).\n\n`); } - if 
(jsonSchemaType.isEnumType()) { + if (sourceSchemaType.isEnumType()) { // Enum values output.push("This is a enum type.\n\n"); output.push(`| Value | Description |\n`); output.push(`|---|---|\n`); - metaSchemaType.enumValues.forEach(enumValue => { - const formattedDescription = enumValue.description.split("\n").join("
"); - output.push(`| \`${enumValue.enumValue}\` | ${formattedDescription} |\n`); + sourceSchemaType.sortedEnumValues().forEach(enumValue => { + const description = sourceSchemaType.schema['enumDescriptions'][enumValue]; + const formattedDescription = description.split("\n").join("
"); + output.push(`| \`${enumValue}\` | ${formattedDescription} |\n`); }); output.push('\n'); } else { // Properties - if (metaSchemaType.properties.length === 0) { + const properties = sourceSchemaType.sortedProperties(); + if (properties.length === 0) { output.push("No properties.\n\n"); } else { // Property type and description table output.push(`| Property | Type | Required? | Constraints | Description |\n`); output.push("|---|---|---|---|---|\n"); - metaSchemaType.properties.forEach(property => { - const jsonSchemaProperty = jsonSchemaType.properties.find(item => item.property === property.property); - if (!jsonSchemaProperty) { - throw new Error(`JSON schema property not found for property ${property.property} and type ${type}.`); - } - let formattedProperty = `\`${property.property}\`` - if (isExperimentalProperty(property.property)) { + properties.forEach(sourceSchemaProperty => { + let formattedProperty = `\`${sourceSchemaProperty.property}\`` + if (isExperimentalProperty(sourceSchemaProperty.property)) { formattedProperty += '
**WARNING:** This property is [experimental](README.md#experimental-features).' } - const formattedPropertyType = formatJsonSchemaPropertyType(jsonSchemaProperty, jsonSchemaTypesByType); - const isRequired = required !== undefined && required.includes(property.property); - let formattedConstraints = resolveAndFormatConstraints(jsonSchemaProperty.schema, '
'); + const formattedPropertyType = formatPropertyType(sourceSchemaProperty, sourceTypesByType); + const isRequired = required !== undefined && required.includes(sourceSchemaProperty.property); + let formattedConstraints = resolveAndFormatConstraints(sourceSchemaProperty.schema, '
'); if (formattedConstraints.length === 0) { formattedConstraints = 'No constraints.'; } - const formattedDescription = property.description.split("\n").join("
"); + const formattedDescription = sourceSchemaProperty.schema.description.split("\n").join("
"); output.push(`| ${formattedProperty} | ${formattedPropertyType} | \`${isRequired}\` | ${formattedConstraints} | ${formattedDescription} |\n`); }); @@ -110,12 +106,12 @@ function writeType(metaSchemaType) { } // Write language support status for type - if ((jsonSchemaType.isEnumType() && metaSchemaType.enumValues.length > 0) || (!jsonSchemaType.isEnumType() && jsonSchemaType.properties.length > 0)) { + if ((sourceSchemaType.isEnumType() && sourceSchemaType.enumValues.length > 0) || (!sourceSchemaType.isEnumType() && sourceSchemaType.properties.length > 0)) { output.push(`
\n`); output.push('Language support status\n\n'); const languageImplementationsByLanguage = {}; - metaSchema.languageImplementations.forEach(languageImplementation => languageImplementationsByLanguage[languageImplementation.language] = languageImplementation); - const rowHeader = jsonSchemaType.isEnumType() ? 'Value' : 'Property'; + languageImplementations.forEach(languageImplementation => languageImplementationsByLanguage[languageImplementation.language] = languageImplementation); + const rowHeader = sourceSchemaType.isEnumType() ? 'Value' : 'Property'; output.push(`| ${rowHeader} |`); KNOWN_LANGUAGES.forEach(language => { output.push(` [${language}](#${language}) |`); @@ -127,22 +123,22 @@ function writeType(metaSchemaType) { output.push('|---|'); KNOWN_LANGUAGES.forEach(language => output.push(`---|`)); output.push('\n'); - if (jsonSchemaType.isEnumType()) { - metaSchemaType.enumValues.forEach(enumValue => { - output.push(`| \`${enumValue.enumValue}\` |`); + if (sourceSchemaType.isEnumType()) { + sourceSchemaType.sortedEnumValues().forEach(enumValue => { + output.push(`| \`${enumValue}\` |`); KNOWN_LANGUAGES.forEach(language => { const typeSupportStatus = languageImplementationsByLanguage[language].typeSupportStatuses.find(item => item.type === type); if (!typeSupportStatus) { throw new Error(`Meta schema LanguageImplementation for language ${language} missing type ${type}.`); } - const enumValueOverride = typeSupportStatus.enumOverrides.find(enumOverride => enumOverride.enumValue === enumValue.enumValue); + const enumValueOverride = typeSupportStatus.enumOverrides.find(enumOverride => enumOverride.enumValue === enumValue); const status = enumValueOverride ? 
enumValueOverride.status : typeSupportStatus.status; output.push(` ${status} |`); }); output.push('\n'); }); } else { - metaSchemaType.properties.forEach(property => { + sourceSchemaType.sortedProperties().forEach(property => { output.push(`| \`${property.property}\` |`); KNOWN_LANGUAGES.forEach(language => { const typeSupportStatus = languageImplementationsByLanguage[language].typeSupportStatuses.find(item => item.type === type); @@ -161,7 +157,7 @@ function writeType(metaSchemaType) { } // Constraints - const formattedConstraints = resolveAndFormatConstraints(jsonSchemaType.schema, '\n'); + const formattedConstraints = resolveAndFormatConstraints(sourceSchemaType.schema, '\n'); if (formattedConstraints.length > 0) { output.push('Constraints: \n\n'); output.push(formattedConstraints); @@ -172,10 +168,10 @@ function writeType(metaSchemaType) { // Usages const usages = []; - jsonSchemaTypes.forEach(otherJsonSchemaType => { - otherJsonSchemaType.properties.forEach(property => { + Object.values(sourceTypesByType).forEach(otherSourceType => { + otherSourceType.properties.forEach(property => { if (property.types.find(item => item === type)) { - usages.push([ otherJsonSchemaType, property ]); + usages.push([ otherSourceType, property ]); } }); }); @@ -190,17 +186,59 @@ function writeType(metaSchemaType) { // JSON schema collapsible section output.push(`
\n`); output.push(`JSON Schema\n\n`); - output.push(`[JSON Schema Source File](./schema/${jsonSchemaType.sourceFile})\n`) - output.push(`
${JSON.stringify(jsonSchemaType.schema, null, 2)}
\n`); + output.push(`[JSON Schema Source File](./schema/${sourceSchemaType.sourceFile})\n`); + // cleanSchema is a temp hack to minimize the diff while merging the meta schema + // TODO: come back and remove + const schemaSource = cleanSchema(getSchemaSource(sourceSchemaType)); + output.push(`
${JSON.stringify(schemaSource, null, 2)}
\n`); output.push(`
\n`); output.push('\n'); } +function getSchemaSource(sourceSchemaType) { + const outputFile = sourceSchemaType.sourceFile.replace('.yaml', '.json'); + const outputFileContent = outputSchemaByFile[outputFile]; + if (!outputSchemaByFile[outputFile]) { + throw new Error(`Output schema file ${outputFile} not found. Please run "make generate-schema" first.`); + } + if (sourceSchemaType.jsonSchemaPath === '.') { + return outputFileContent; + } + const defs = outputFileContent['$defs']; + if (!defs) { + throw new Error(`Output schema file ${outputFile} does not have $defs.`); + } + let def = defs[sourceSchemaType.type]; + if (!def) { + throw new Error(`Output schema file ${outputFile} does not $def entry for ${sourceSchemaType.type}.`); + } + return def; +} + +function cleanSchema(schemaSource) { + const adjustedSchema = JSON.parse(JSON.stringify(schemaSource)); + const properties = adjustedSchema.properties; + if (properties) { + Object.values(properties).forEach(property => { + delete property.description; + }); + } + const defs = adjustedSchema['$defs']; + if (defs) { + const adjustedDefs = {}; + Object.entries(defs).forEach(([key, value]) => { + adjustedDefs[key] = cleanSchema(value); + }); + adjustedSchema['$defs'] = adjustedDefs; + } + return adjustedSchema; +} + // Write language support status addHeader('Language Support Status', 'language-support-status', 1); KNOWN_LANGUAGES.forEach(language => { addHeader(language, language, 2); - const languageImplementation = metaSchema.languageImplementations.find(item => item.language === language); + const languageImplementation = languageImplementations.find(item => item.language === language); if (!languageImplementation) { throw new Error(`Meta schema LanguageImplementation not found for language ${language}.`); } @@ -209,9 +247,9 @@ KNOWN_LANGUAGES.forEach(language => { output.push(`| Type | Status | Notes | Support Status Details |\n`); output.push(`|---|---|---|---|\n`); 
languageImplementation.typeSupportStatuses.forEach(typeSupportStatus => { - const metaSchemaType = metaSchema.types.find(item => item.type === typeSupportStatus.type); - if (!metaSchemaType) { - throw new Error(`MetaSchemaType not found for type ${typeSupportStatus.type}.`); + const sourceSchemaType = allTypes.find(item => item.type === typeSupportStatus.type); + if (!sourceSchemaType) { + throw new Error(`SourceSchemaType not found for type ${typeSupportStatus.type}.`); } let formattedNotes = typeSupportStatus.notes; @@ -221,17 +259,17 @@ KNOWN_LANGUAGES.forEach(language => { const supportStatusDetails = []; - if (metaSchemaType.properties !== null) { - metaSchemaType.properties.forEach(metaSchemaProperty => { - const propertyOverride = typeSupportStatus.propertyOverrides.find(propertyOverride => propertyOverride.property === metaSchemaProperty.property); + if (!sourceSchemaType.isEnumType()) { + sourceSchemaType.sortedProperties().forEach(sourceSchemaProperty => { + const propertyOverride = typeSupportStatus.propertyOverrides.find(propertyOverride => propertyOverride.property === sourceSchemaProperty.property); const status = propertyOverride ? propertyOverride.status : typeSupportStatus.status; - supportStatusDetails.push(`* \`${metaSchemaProperty.property}\`: ${status}
`); + supportStatusDetails.push(`* \`${sourceSchemaProperty.property}\`: ${status}
`); }); } else { - metaSchemaType.enumValues.forEach(metaSchemaEnumValue => { - const enumValueOverride = typeSupportStatus.enumOverrides.find(enumOverride => enumOverride.enumValue === metaSchemaEnumValue.enumValue); + sourceSchemaType.sortedEnumValues().forEach(enumValue => { + const enumValueOverride = typeSupportStatus.enumOverrides.find(enumOverride => enumOverride.enumValue === enumValue); const status = enumValueOverride ? enumValueOverride.status : typeSupportStatus.status; - supportStatusDetails.push(`* \`${metaSchemaEnumValue.enumValue}\`: ${status}
`); + supportStatusDetails.push(`* \`${enumValue}\`: ${status}
`); }); } @@ -252,29 +290,29 @@ Each of the following types support referencing custom interface implementations SDK extension plugin types may have properties defined corresponding to built-in implementations of the interface. For example, the \`otlp_http\` property of \`SpanExporter\` defines the OTLP http/protobuf exporter. `); -metaSchema.types.filter(metaSchemaType => metaSchemaType.isSdkExtensionPlugin) - .forEach(metaSchemaType => { - output.push(`* [${metaSchemaType.type}](#${metaSchemaType.type})\n`) +allTypes.filter(sourceSchemaType => sourceSchemaType.schema.isSdkExtensionPlugin) + .forEach(sourceSchemaType => { + output.push(`* [${sourceSchemaType.type}](#${sourceSchemaType.type})\n`) }); output.unshift('\n\n') fs.writeFileSync(markdownDocPath, output.join("")); // Helper functions -function formatJsonSchemaPropertyType(jsonSchemaProperty, jsonSchemaTypesByType) { +function formatPropertyType(sourceProperty, sourceTypesByType) { const output = []; - if (jsonSchemaProperty.isSeq) { + if (sourceProperty.isSeq) { output.push('`array` of '); } let prefix = ''; let suffix = ''; - if (jsonSchemaProperty.types.length > 1) { + if (sourceProperty.types.length > 1) { output.push('one of:
'); prefix = '* '; suffix = '
'; } - jsonSchemaProperty.types.forEach(type => { - let resolvedType = jsonSchemaTypesByType[type]; + sourceProperty.types.forEach(type => { + let resolvedType = sourceTypesByType[type]; output.push(prefix); output.push(resolvedType ? `[\`${resolvedType.type}\`](#${resolvedType.type.toLowerCase()})` : `\`${type}\``) output.push(suffix); diff --git a/scripts/language-implementations.js b/scripts/language-implementations.js new file mode 100644 index 00000000..ca96b590 --- /dev/null +++ b/scripts/language-implementations.js @@ -0,0 +1,342 @@ +import fs from 'fs'; +import yaml from 'yaml'; +import { + metaSchemaLanguageStatusPath, + metaSchemaLanguageStatusFileName, + schemaSourceDirPath, + metaSchemaLanguageFilePrefix, + isExperimentalType +} from "./util.js"; +import {readSourceSchema} from "./source-schema.js"; + +export const KNOWN_LANGUAGES = [ + 'cpp', + 'go', + 'java', + 'js', +]; + +const IMPLEMENTATION_STATUS_UNKNOWN = 'unknown'; +const IMPLEMENTATION_STATUSES = ['supported', IMPLEMENTATION_STATUS_UNKNOWN, 'not_implemented', 'ignored', 'not_applicable'] + +export function writeLanguageImplementations(languageImplementations) { + languageImplementations.forEach(languageImplementation => { + fs.writeFileSync(metaSchemaLanguageStatusPath(languageImplementation.language), yaml.stringify(languageImplementation.toJson(), {lineWidth: 0})); + }); +} + +export function readAndFixLanguageImplementations() { + // Track messages tracking schema fixes + const messages = []; + + const { sourceTypesByType } = readSourceSchema(); + + // Parse meta schema language implementations and sanitize + const metaSchemaLanguageImplementations = []; + KNOWN_LANGUAGES.forEach(language => { + let metaSchemaLanguageStatusContent; + try { + metaSchemaLanguageStatusContent = fs.readFileSync(metaSchemaLanguageStatusPath(language), "utf-8"); + } catch (error) { + messages.push(`Error reading ${metaSchemaLanguageStatusFileName(language)}: ${error.message}. 
Skipping.`); + return; + } + const metaSchemaLanguageStatusDoc = yaml.parse(metaSchemaLanguageStatusContent); + const languageImplementation = LanguageImplementation.parseJson(language, metaSchemaLanguageStatusDoc, messages); + metaSchemaLanguageImplementations.push(languageImplementation); + }); + + const languageImplementations = reconcileLanguageImplementations(metaSchemaLanguageImplementations, sourceTypesByType, messages); + + return {messages, languageImplementations}; +} + +// Types + +export class LanguageImplementation { + language; + latestSupportedFileFormat; + typeSupportStatuses; + + constructor(language, latestSupportedFileFormat, typeSupportStatuses) { + this.language = language; + this.latestSupportedFileFormat = latestSupportedFileFormat; + this.typeSupportStatuses = typeSupportStatuses; + } + + toJson() { + // Types in lexicographical order, with non-experimental first + const typeSupportStatuses = this.typeSupportStatuses.map(typeSupportStatus => typeSupportStatus.toJson()); + typeSupportStatuses.sort((a, b) => { + const differentMaturities = isExperimentalType(a.type) - isExperimentalType(b.type); + return differentMaturities === 0 ? a.type.localeCompare(b.type) : +differentMaturities; + }); + + return { + latestSupportedFileFormat: this.latestSupportedFileFormat, + typeSupportStatuses + }; + } + + static parseJson(language, rawJson, messages) { + const latestSupportedFileFormat = parseString(rawJson, 'latestSupportedFileFormat', `LanguageImplementation has invalid 'latestSupportedFileFormat'`); + const typeSupportStatuses = parseArray( + rawJson, + 'typeSupportStatuses', + entryJson => TypeSupportStatus.parseJson(entryJson, messages), + `LanguageImplementation '${language}' has invalid 'typeSupportStatuses'`, + error => `LanguageImplementation '${language}' has invalid TypeSupportStatus: ${error.message}. 
Skipping.`, + messages); + return new LanguageImplementation(language, latestSupportedFileFormat, typeSupportStatuses); + } +} + +export class TypeSupportStatus { + type; + status; + propertyOverrides; // null if enum + enumOverrides; // null if not enum + notes; + + constructor(type, status, propertyOverrides, enumOverrides, notes) { + this.type = type; + this.status = status; + this.propertyOverrides = propertyOverrides; + this.enumOverrides = enumOverrides; + this.notes = notes; + } + + toJson() { + const json = {type: this.type, status: this.status}; + + if (this.enumOverrides !== null) { + const enumOverrides = this.enumOverrides.map(enumValueStatus => enumValueStatus.toJson()); + enumOverrides.sort((a, b) => a.enumValue.localeCompare(b.enumValue)); + json.enumOverrides = enumOverrides; + } + if (this.propertyOverrides !== null) { + const propertyOverrides = this.propertyOverrides.map(propertyStatus => propertyStatus.toJson()); + propertyOverrides.sort((a, b) => a.property.localeCompare(b.property)); + json.propertyOverrides = propertyOverrides; + } + if(this.notes !== null) { + json.notes = this.notes; + } + + return json; + } + + static parseJson(rawJson, messages) { + const type = parseString(rawJson, 'type', `TypeSupportStatus has invalid 'type'`); + const status = parseEnum(rawJson, 'status', `TypeSupportStatus has invalid 'status'`, IMPLEMENTATION_STATUSES); + const propertyOverrides = parseArray( + rawJson, + 'propertyOverrides', + entryJson => PropertyStatus.parseJson(entryJson, messages), + `TypeSupportStatus '${type}' has invalid 'propertyOverrides'`, + error => `TypeSupportStatus '${type}' has invalid PropertyStatus: ${error.message}. Skipping.`, + messages, + true); + const enumOverrides = parseArray( + rawJson, + 'enumOverrides', + entryJson => EnumValueStatus.parseJson(entryJson, messages), + `TypeSupportStatus '${type}' has invalid 'enumOverrides'`, + error => `TypeSupportStatus '${type}' has invalid EnumValueStatus: ${error.message}. 
Skipping.`, + messages, + true); + const notes = parseString(rawJson, 'notes', `TypeSupportStatus has invalid 'notes'`, true); + return new TypeSupportStatus(type, status, propertyOverrides, enumOverrides, notes); + } +} + +export class PropertyStatus { + property; + status; + + constructor(property, status) { + this.property = property; + this.status = status; + } + + toJson() { + return { + property: this.property, + status: this.status + }; + } + + static parseJson(rawJson, messages) { + const property = parseString(rawJson, 'property', `PropertyStatus has invalid 'property'`); + const status = parseEnum(rawJson, 'status', `PropertyStatus has invalid 'status'`, IMPLEMENTATION_STATUSES); + return new PropertyStatus(property, status); + } +} + +export class EnumValueStatus { + enumValue; + status; + + constructor(enumValue, status) { + this.enumValue = enumValue; + this.status = status; + } + + toJson() { + return { + enumValue: this.enumValue, + status: this.status + }; + } + + static parseJson(rawJson, messages) { + const enumValue = parseString(rawJson, 'enumValue', `EnumValueStatus has invalid 'enumValue'`); + const status = parseEnum(rawJson, 'status', `EnumValueStatus has invalid 'status'`, IMPLEMENTATION_STATUSES); + return new EnumValueStatus(enumValue, status); + } +} + +// Helper functions + +function reconcileLanguageImplementations(languageImplementations, sourceTypesByType, messages) { + const languageImplementationsByLanguage = {}; + languageImplementations.forEach(languageImplementation => languageImplementationsByLanguage[languageImplementation.language] = languageImplementation); + + // Find any types in both json schema and meta schema and make sure all json schema properties match + Object.entries(languageImplementationsByLanguage).forEach(([language, languageImplementation]) => { + if (!KNOWN_LANGUAGES.includes(language)) { + return; + } + const reconciledTypeSupportStatuses = []; + const expectedTypeSupportStatuses = 
emptyLanguageImplementation(language, sourceTypesByType).typeSupportStatuses; + + // Remove extra types + languageImplementation.typeSupportStatuses.forEach(typeSupportStatus => { + const sourceSchemaType = sourceTypesByType[typeSupportStatus.type]; + if (!sourceSchemaType) { + messages.push(`LanguageImplementation ${language} has type ${typeSupportStatus.type} not in source. Removing.`); + return; + } + if (!sourceSchemaType.isEnumType()) { + if (typeSupportStatus.propertyOverrides === null) { + messages.push(`LanguageImplementation ${language} type ${typeSupportStatus.type} is missing propertyOverrides and is not an enum type. Adding.`); + typeSupportStatus.propertyOverrides = []; + } + if (typeSupportStatus.enumOverrides !== null) { + messages.push(`LanguageImplementation ${language} type ${typeSupportStatus.type} has propertyOverrides but is not an enum type. Removing.`); + typeSupportStatus.propertyOverrides = null; + } + // Remove any propertyOverrides which occur in meta schema but not json schema + const reconciledPropertyOverrides = []; + typeSupportStatus.propertyOverrides.forEach(propertyStatus => { + if (!sourceSchemaType.properties.find(sourceSchemaProperty => sourceSchemaProperty.property === propertyStatus.property)) { + messages.push(`LanguageImplementation ${language} type ${typeSupportStatus.type} has propertyOverride ${propertyStatus.property} not in source. Removing.`); + return; + } + reconciledPropertyOverrides.push(propertyStatus); + }); + typeSupportStatus.propertyOverrides = reconciledPropertyOverrides; + } else { + if (typeSupportStatus.enumOverrides === null) { + messages.push(`LanguageImplementation ${language} type ${typeSupportStatus.type} is missing enumOverrides and is an enum type. Adding.`); + typeSupportStatus.enumOverrides = []; + } + if (typeSupportStatus.propertyOverrides !== null) { + messages.push(`LanguageImplementation ${language} type ${typeSupportStatus.type} has propertyOverrides but is an enum type. 
Removing.`); + typeSupportStatus.propertyOverrides = null; + } + // Remove any enumOverrides which occur in meta schema but not json schema + const reconciledEnumOverrides = []; + typeSupportStatus.enumOverrides.forEach(enumValueStatus => { + if (!sourceSchemaType.enumValues.includes(enumValueStatus.enumValue)) { + messages.push(`LanguageImplementation ${language} type ${typeSupportStatus.type} has enumOverride ${enumValueStatus.enumValue} not in source. Removing.`); + return; + } + reconciledEnumOverrides.push(enumValueStatus); + }); + typeSupportStatus.enumOverrides = reconciledEnumOverrides; + } + + reconciledTypeSupportStatuses.push(typeSupportStatus); + }); + + // Add missing types + expectedTypeSupportStatuses.forEach(typeSupportStatus => { + if (!reconciledTypeSupportStatuses.find(item => item.type === typeSupportStatus.type)) { + messages.push(`LanguageImplementation ${language} is missing type ${typeSupportStatus.type} in meta schema. Adding.`); + reconciledTypeSupportStatuses.push(typeSupportStatus); + } + }); + + languageImplementation.typeSupportStatuses = reconciledTypeSupportStatuses; + }); + + // Find and remove any language implementations which are extra + fs.readdirSync(schemaSourceDirPath) + .filter(file => file.startsWith(metaSchemaLanguageFilePrefix)) + .filter(file => !KNOWN_LANGUAGES.some(language => metaSchemaLanguageStatusFileName(language) === file)) + .forEach(file => { + messages.push(`LanguageImplementation file ${file} found for unrecognized language. Removing.`); + fs.unlinkSync(schemaSourceDirPath + file); + }); + + // Find and add any language implementations not in meta schema + KNOWN_LANGUAGES.forEach(language => { + const languageImplementation = languageImplementationsByLanguage[language]; + if (!languageImplementation) { + messages.push(`LanguageImplementation ${language} not found. 
Adding.`); + languageImplementationsByLanguage[language] = emptyLanguageImplementation(language, metaSchema); + } + }); + + return Object.values(languageImplementationsByLanguage); +} + +function emptyLanguageImplementation(language, sourceTypesByType) { + return new LanguageImplementation( + language, + 'TODO', + Object.values(sourceTypesByType).map(sourceSchemaType => new TypeSupportStatus(sourceSchemaType.type, IMPLEMENTATION_STATUS_UNKNOWN, [], sourceSchemaType.enumValues === null ? null : [], null))); +} + +function parseEnum(rawJson, propertyName, errorMessage, knownValues) { + const string = parseString(rawJson, propertyName, errorMessage); + if (!knownValues.includes(string)) { + throw new Error(errorMessage); + } + return string; +} + +function parseString(rawJson, propertyName, errorMessage, nullable = false) { + const property = rawJson[propertyName]; + if ((property === null || property === undefined) && nullable) { + return null; + } + if (typeof property !== 'string') { + throw new Error(errorMessage); + } + return property; +} + +function parseArray(rawJson, propertyName, entryParser, errorMessage, entryErrorFormatter, messages, nullable = false) { + const property = rawJson[propertyName]; + if ((property === null || property === undefined) && nullable) { + return null; + } + return parseArrayValue(property, entryParser, errorMessage, entryErrorFormatter, messages); +} + +function parseArrayValue(arrayValue, entryParser, errorMessage, entryErrorFormatter, messages) { + if (!Array.isArray(arrayValue)) { + throw new Error(errorMessage); + } + const entries = []; + arrayValue.forEach(entry => { + try { + entries.push(entryParser(entry)); + } catch (error) { + messages.push(entryErrorFormatter(error)); + } + }); + return entries; +} diff --git a/scripts/meta-schema.js b/scripts/meta-schema.js deleted file mode 100644 index 60ca9a25..00000000 --- a/scripts/meta-schema.js +++ /dev/null @@ -1,590 +0,0 @@ -import {readJsonSchemaTypes} from 
"./json-schema.js"; -import fs from 'fs'; -import yaml from 'yaml'; -import { - metaSchemaTypesFileName, - metaSchemaLanguageStatusPath, - metaSchemaTypesPath, - metaSchemaLanguageStatusFileName, - schemaSourceDirPath, - metaSchemaLanguageFilePrefix, isExperimentalProperty, isExperimentalType -} from "./util.js"; - -export const KNOWN_LANGUAGES = [ - 'cpp', - 'go', - 'java', - 'js', -]; - -const IMPLEMENTATION_STATUS_UNKNOWN = 'unknown'; -const IMPLEMENTATION_STATUSES = ['supported', IMPLEMENTATION_STATUS_UNKNOWN, 'not_implemented', 'ignored', 'not_applicable'] - -export function writeMetaSchema(metaSchema) { - fs.writeFileSync(metaSchemaTypesPath, yaml.stringify(metaSchema.toJson().types, {lineWidth: 0})); - - metaSchema.languageImplementations.forEach(languageImplementation => { - fs.writeFileSync(metaSchemaLanguageStatusPath(languageImplementation.language), yaml.stringify(languageImplementation.toJson(), {lineWidth: 0})); - }); -} - -export function readAndFixMetaSchema() { - // Track messages tracking schema fixes - const messages = []; - - // Parse meta schema types and sanitize - const metaSchemaTypesContent = fs.readFileSync(metaSchemaTypesPath, "utf-8"); - const metaSchemaTypesDoc = yaml.parse(metaSchemaTypesContent); - const metaSchemaTypes = parseArrayValue( - metaSchemaTypesDoc, - entryJson => MetaSchemaType.parseJson(entryJson, messages), - `${metaSchemaTypesFileName} is invalid`, - error => `${metaSchemaTypesFileName} has invalid MetaSchemaType: ${error.message}. Skipping.`, - messages); - - // Parse meta schema language implementations and sanitize - const metaSchemaLanguageImplementations = []; - KNOWN_LANGUAGES.forEach(language => { - let metaSchemaLanguageStatusContent; - try { - metaSchemaLanguageStatusContent = fs.readFileSync(metaSchemaLanguageStatusPath(language), "utf-8"); - } catch (error) { - messages.push(`Error reading ${metaSchemaLanguageStatusFileName(language)}: ${error.message}. 
Skipping.`); - return; - } - const metaSchemaLanguageStatusDoc = yaml.parse(metaSchemaLanguageStatusContent); - const languageImplementation = LanguageImplementation.parseJson(language, metaSchemaLanguageStatusDoc, messages); - metaSchemaLanguageImplementations.push(languageImplementation); - }); - - // Create full MetaSchema from meta schema types and language implementations - let metaSchema = new MetaSchema(metaSchemaTypes, metaSchemaLanguageImplementations); - - const jsonSchemaTypesByType = {}; - readJsonSchemaTypes().forEach(type => jsonSchemaTypesByType[type.type] = type); - - // Reconcile meta schema with json schema - reconcileTypes(metaSchema, jsonSchemaTypesByType, messages); - reconcileLanguageImplementations(metaSchema, jsonSchemaTypesByType, messages); - - return {messages, metaSchema}; -} - -// Types - -export class MetaSchema { - types; - languageImplementations; - - constructor(types, languageImplementations) { - this.types = types; - this.languageImplementations = languageImplementations; - } - - toJson() { - let sortedTypes = this.types.map(type => type.toJson()); - // Types in lexicographical order, with non-experimental first - sortedTypes.sort((a, b) => { - const differentMaturities = isExperimentalType(a.type) - isExperimentalType(b.type); - return differentMaturities === 0 ? 
a.type.localeCompare(b.type) : +differentMaturities; - }); - - return {types: sortedTypes, languageImplementations: this.languageImplementations.map(languageImplementation => languageImplementation.toJson())}; - } -} - -export class MetaSchemaType { - type; - properties; // null if enum - enumValues; // null if not enum - isSdkExtensionPlugin; - - constructor(type, properties, enumValues, isSdkExtensionPlugin) { - this.type = type; - this.properties = properties; - this.enumValues = enumValues; - this.isSdkExtensionPlugin = isSdkExtensionPlugin; - - } - - toJson() { - const json = {type: this.type}; - - if (this.enumValues !== null) { - const enumValues = this.enumValues.map(enumValue => enumValue.toJson()); - enumValues.sort((a, b) => a.enumValue.localeCompare(b.enumValue)); - json.enumValues = enumValues; - } - if (this.properties !== null) { - const properties = this.properties.map(property => property.toJson()); - // Properties in lexicographical order, with non-experimental first - properties.sort((a, b) => { - const differentMaturities = isExperimentalProperty(a.property) - isExperimentalProperty(b.property); - return differentMaturities === 0 ? a.property.localeCompare(b.property) : +differentMaturities; - }); - json.properties = properties; - } - - json.isSdkExtensionPlugin = this.isSdkExtensionPlugin; - - return json; - } - - static parseJson(rawJson, messages) { - const type = parseString(rawJson, 'type', `MetaSchemaType has invalid 'type'`); - const properties = parseArray( - rawJson, - 'properties', - entryJson => MetaSchemaProperty.parseJson(entryJson, messages), - `MetaSchemaType '${type}' has invalid 'properties'`, - error => `MetaSchemaType '${type}' has invalid property: ${error.message}. 
Skipping.`, - messages, - true); - const enumValues = parseArray( - rawJson, - 'enumValues', - entryJson => MetaSchemaEnumValue.parseJson(entryJson, messages), - `MetaSchemaType '${type}' has invalid 'enumValues'`, - error => `MetaSchemaType '${type}' has invalid enumValue: ${error.message}. Skipping.`, - messages, - true); - const isSdkExtensionPlugin = parseBoolean(rawJson, 'isSdkExtensionPlugin', `MetaSchemaType has invalid 'isSdkExtensionPlugin'`); - return new MetaSchemaType(type, properties, enumValues, isSdkExtensionPlugin); - } -} - -export class MetaSchemaProperty { - property; - description; - - constructor(property, description) { - this.property = property; - this.description = description; - } - - toJson() { - return {property: this.property, description: this.description}; - } - - static parseJson(rawJson, messages) { - const property = parseString(rawJson, 'property', `MetaSchemaProperty has invalid 'property'`); - const description = parseString(rawJson, 'description', `MetaSchemaProperty has invalid 'description'`); - return new MetaSchemaProperty(property, description); - } -} - -export class MetaSchemaEnumValue { - enumValue; - description; - - constructor(enumValue, description) { - this.enumValue = enumValue; - this.description = description; - } - - toJson() { - return {enumValue: this.enumValue, description: this.description}; - } - - static parseJson(rawJson, messages) { - const enumValue = parseString(rawJson, 'enumValue', `MetaSchemaEnumValue has invalid 'enumValue'`); - const description = parseString(rawJson, 'description', `MetaSchemaEnumValue has invalid 'description'`); - return new MetaSchemaEnumValue(enumValue, description); - } -} - -export class LanguageImplementation { - language; - latestSupportedFileFormat; - typeSupportStatuses; - - constructor(language, latestSupportedFileFormat, typeSupportStatuses) { - this.language = language; - this.latestSupportedFileFormat = latestSupportedFileFormat; - this.typeSupportStatuses = 
typeSupportStatuses; - } - - toJson() { - // Types in lexicographical order, with non-experimental first - const typeSupportStatuses = this.typeSupportStatuses.map(typeSupportStatus => typeSupportStatus.toJson()); - typeSupportStatuses.sort((a, b) => { - const differentMaturities = isExperimentalType(a.type) - isExperimentalType(b.type); - return differentMaturities === 0 ? a.type.localeCompare(b.type) : +differentMaturities; - }); - - return { - latestSupportedFileFormat: this.latestSupportedFileFormat, - typeSupportStatuses - }; - } - - static parseJson(language, rawJson, messages) { - const latestSupportedFileFormat = parseString(rawJson, 'latestSupportedFileFormat', `LanguageImplementation has invalid 'latestSupportedFileFormat'`); - const typeSupportStatuses = parseArray( - rawJson, - 'typeSupportStatuses', - entryJson => TypeSupportStatus.parseJson(entryJson, messages), - `LanguageImplementation '${language}' has invalid 'typeSupportStatuses'`, - error => `LanguageImplementation '${language}' has invalid TypeSupportStatus: ${error.message}. 
Skipping.`, - messages); - return new LanguageImplementation(language, latestSupportedFileFormat, typeSupportStatuses); - } -} - -export class TypeSupportStatus { - type; - status; - propertyOverrides; // null if enum - enumOverrides; // null if not enum - notes; - - constructor(type, status, propertyOverrides, enumOverrides, notes) { - this.type = type; - this.status = status; - this.propertyOverrides = propertyOverrides; - this.enumOverrides = enumOverrides; - this.notes = notes; - } - - toJson() { - const json = {type: this.type, status: this.status}; - - if (this.enumOverrides !== null) { - const enumOverrides = this.enumOverrides.map(enumValueStatus => enumValueStatus.toJson()); - enumOverrides.sort((a, b) => a.enumValue.localeCompare(b.enumValue)); - json.enumOverrides = enumOverrides; - } - if (this.propertyOverrides !== null) { - const propertyOverrides = this.propertyOverrides.map(propertyStatus => propertyStatus.toJson()); - propertyOverrides.sort((a, b) => a.property.localeCompare(b.property)); - json.propertyOverrides = propertyOverrides; - } - if(this.notes !== null) { - json.notes = this.notes; - } - - return json; - } - - static parseJson(rawJson, messages) { - const type = parseString(rawJson, 'type', `TypeSupportStatus has invalid 'type'`); - const status = parseEnum(rawJson, 'status', `TypeSupportStatus has invalid 'status'`, IMPLEMENTATION_STATUSES); - const propertyOverrides = parseArray( - rawJson, - 'propertyOverrides', - entryJson => PropertyStatus.parseJson(entryJson, messages), - `TypeSupportStatus '${type}' has invalid 'propertyOverrides'`, - error => `TypeSupportStatus '${type}' has invalid PropertyStatus: ${error.message}. Skipping.`, - messages, - true); - const enumOverrides = parseArray( - rawJson, - 'enumOverrides', - entryJson => EnumValueStatus.parseJson(entryJson, messages), - `TypeSupportStatus '${type}' has invalid 'enumOverrides'`, - error => `TypeSupportStatus '${type}' has invalid EnumValueStatus: ${error.message}. 
Skipping.`, - messages, - true); - const notes = parseString(rawJson, 'notes', `TypeSupportStatus has invalid 'notes'`, true); - return new TypeSupportStatus(type, status, propertyOverrides, enumOverrides, notes); - } -} - -export class PropertyStatus { - property; - status; - - constructor(property, status) { - this.property = property; - this.status = status; - } - - toJson() { - return { - property: this.property, - status: this.status - }; - } - - static parseJson(rawJson, messages) { - const property = parseString(rawJson, 'property', `PropertyStatus has invalid 'property'`); - const status = parseEnum(rawJson, 'status', `PropertyStatus has invalid 'status'`, IMPLEMENTATION_STATUSES); - return new PropertyStatus(property, status); - } -} - -export class EnumValueStatus { - enumValue; - status; - - constructor(enumValue, status) { - this.enumValue = enumValue; - this.status = status; - } - - toJson() { - return { - enumValue: this.enumValue, - status: this.status - }; - } - - static parseJson(rawJson, messages) { - const enumValue = parseString(rawJson, 'enumValue', `EnumValueStatus has invalid 'enumValue'`); - const status = parseEnum(rawJson, 'status', `EnumValueStatus has invalid 'status'`, IMPLEMENTATION_STATUSES); - return new EnumValueStatus(enumValue, status); - } -} - -// Helper functions - -function reconcileTypes(metaSchema, jsonSchemaTypesByType, messages) { - const metaSchemaTypesByType = {}; - metaSchema.types.forEach(type => metaSchemaTypesByType[type.type] = type); - - // Find any types in both json schema and meta schema and make sure all json schema properties match - Object.entries(metaSchemaTypesByType).forEach(([type, metaSchemaType]) => { - const jsonSchemaType = jsonSchemaTypesByType[type]; - if (!jsonSchemaType) { - return; - } - const sanitizedProperties = []; - const emptyMetaSchemaType = jsonSchemaType.toMetaSchemaType(); - const jsonSchemaProperties = emptyMetaSchemaType.properties; - - if (!jsonSchemaType.isEnumType()) { - if 
(metaSchemaType.properties === null) { - messages.push(`Type ${type} in meta schema is missing properties and is not an enum type. Adding.`); - metaSchemaType.properties = []; - } - if (metaSchemaType.enumValues !== null) { - messages.push(`Type ${type} in meta schema has enumValues but is not an enum type. Removing.`); - metaSchemaType.enumValues = null; - } - - const jsonSchemaPropertiesByProperty = {}; - - // Remove properties in meta schema and not in json schema - jsonSchemaProperties.forEach(property => jsonSchemaPropertiesByProperty[property.property] = property); - metaSchemaType.properties.forEach(property => { - const propertyName = property.property; - if (!(propertyName in jsonSchemaPropertiesByProperty)) { - messages.push(`Type ${type} has property ${propertyName} in meta schema and not in JSON schema. Removing.`); - return; - } - sanitizedProperties.push(property); - }); - // Add properties in json schema and not in meta schema - const metaSchemaPropertiesByProperty = {}; - metaSchemaType.properties.forEach(property => metaSchemaPropertiesByProperty[property.property] = property); - jsonSchemaProperties.forEach(property => { - const propertyName = property.property; - if (!(propertyName in metaSchemaPropertiesByProperty)) { - messages.push(`Type ${type} has property ${propertyName} in JSON schema and not in meta schema. Adding.`); - sanitizedProperties.push(property); - } - }); - - metaSchemaType.properties = sanitizedProperties; - } else { - if (metaSchemaType.enumValues === null) { - messages.push(`Type ${type} in meta schema is missing enumValues and is an enum type. Adding.`); - metaSchemaType.enumValues = []; - } - if (metaSchemaType.properties !== null) { - messages.push(`Type ${type} in meta schema has properties but is an enum type. 
Removing.`); - metaSchemaType.properties = null; - } - - const sanitizedEnumValues = []; - - // Remove enumValues in meta schema and not in json schema - metaSchemaType.enumValues.forEach(enumValue => { - if (!(emptyMetaSchemaType.enumValues.find(item => item.enumValue === enumValue.enumValue))) { - messages.push(`Type ${type} has enumValue ${enumValue.enumValue} in meta schema and not in JSON schema. Removing.`); - return; - } - sanitizedEnumValues.push(enumValue); - }); - // Add enumValues in json schema and not in meta schema - emptyMetaSchemaType.enumValues.forEach(enumValue => { - if (!(metaSchemaType.enumValues.find(item => item.enumValue === enumValue.enumValue))) { - messages.push(`Type ${type} has enumValue ${enumValue.enumValue} in JSON schema and not in meta schema. Adding.`); - sanitizedEnumValues.push(enumValue); - } - }); - - metaSchemaType.enumValues = sanitizedEnumValues; - } - }); - - // Find and remove any types in meta schema not in json schema - Object.entries(metaSchemaTypesByType).forEach(([type, unused]) => { - if (!(type in jsonSchemaTypesByType)) { - messages.push(`Type ${type} found in ${metaSchemaTypesFileName} but not in JSON schema. Removing.`); - delete metaSchemaTypesByType[type]; - } - }); - - // Find and add any types in json schema not in meta schema - Object.entries(jsonSchemaTypesByType).forEach(([type, jsonSchemaType]) => { - if (!(type in metaSchemaTypesByType)) { - messages.push(`Type ${type} in ${jsonSchemaType.file} and path ${jsonSchemaType.jsonSchemaPath} is missing from ${metaSchemaTypesFileName}. 
Adding.`); - const metaSchemaType = jsonSchemaType.toMetaSchemaType(); - metaSchemaTypesByType[metaSchemaType.type] = metaSchemaType; - } - }); - - metaSchema.types = Object.values(metaSchemaTypesByType); -} - -function reconcileLanguageImplementations(metaSchema, jsonSchemaTypesByType, messages) { - const languageImplementationsByLanguage = {}; - metaSchema.languageImplementations.forEach(languageImplementation => languageImplementationsByLanguage[languageImplementation.language] = languageImplementation); - - // Find any types in both json schema and meta schema and make sure all json schema properties match - Object.entries(languageImplementationsByLanguage).forEach(([language, languageImplementation]) => { - if (!KNOWN_LANGUAGES.includes(language)) { - return; - } - const reconciledTypeSupportStatuses = []; - const expectedTypeSupportStatuses = emptyLanguageImplementation(language, metaSchema).typeSupportStatuses; - - // Remove extra types - languageImplementation.typeSupportStatuses.forEach(typeSupportStatus => { - const jsonSchemaType = jsonSchemaTypesByType[typeSupportStatus.type]; - if (!jsonSchemaType) { - messages.push(`LanguageImplementation ${language} has type ${typeSupportStatus.type} in meta schema and not in JSON schema. Removing.`); - return; - } - if (!jsonSchemaType.isEnumType()) { - if (typeSupportStatus.propertyOverrides === null) { - messages.push(`LanguageImplementation ${language} type ${typeSupportStatus.type} is missing propertyOverrides in meta schema and is not an enum type. Adding.`); - typeSupportStatus.propertyOverrides = []; - } - if (typeSupportStatus.enumOverrides !== null) { - messages.push(`LanguageImplementation ${language} type ${typeSupportStatus.type} has propertyOverrides in meta schema but is not an enum type. 
Removing.`); - typeSupportStatus.propertyOverrides = null; - } - // Remove any propertyOverrides which occur in meta schema but not json schema - const reconciledPropertyOverrides = []; - typeSupportStatus.propertyOverrides.forEach(propertyStatus => { - if (!jsonSchemaType.properties.find(jsonSchemaProperty => jsonSchemaProperty.property === propertyStatus.property)) { - messages.push(`LanguageImplementation ${language} type ${typeSupportStatus.type} has propertyOverride ${propertyStatus.property} in meta schema and not in JSON schema. Removing.`); - return; - } - reconciledPropertyOverrides.push(propertyStatus); - }); - typeSupportStatus.propertyOverrides = reconciledPropertyOverrides; - } else { - if (typeSupportStatus.enumOverrides === null) { - messages.push(`LanguageImplementation ${language} type ${typeSupportStatus.type} is missing enumOverrides in meta schema and is an enum type. Adding.`); - typeSupportStatus.enumOverrides = []; - } - if (typeSupportStatus.propertyOverrides !== null) { - messages.push(`LanguageImplementation ${language} type ${typeSupportStatus.type} has propertyOverrides in meta schema but is an enum type. Removing.`); - typeSupportStatus.propertyOverrides = null; - } - // Remove any enumOverrides which occur in meta schema but not json schema - const reconciledEnumOverrides = []; - typeSupportStatus.enumOverrides.forEach(enumValueStatus => { - if (!jsonSchemaType.enumValues.includes(enumValueStatus.enumValue)) { - messages.push(`LanguageImplementation ${language} type ${typeSupportStatus.type} has enumOverride ${enumValueStatus.enumValue} in meta schema and not in JSON schema. 
Removing.`); - return; - } - reconciledEnumOverrides.push(enumValueStatus); - }); - typeSupportStatus.enumOverrides = reconciledEnumOverrides; - } - - reconciledTypeSupportStatuses.push(typeSupportStatus); - }); - - // Add missing types - expectedTypeSupportStatuses.forEach(typeSupportStatus => { - if (!reconciledTypeSupportStatuses.find(item => item.type === typeSupportStatus.type)) { - messages.push(`LanguageImplementation ${language} is missing type ${typeSupportStatus.type} in meta schema. Adding.`); - reconciledTypeSupportStatuses.push(typeSupportStatus); - } - }); - - languageImplementation.typeSupportStatuses = reconciledTypeSupportStatuses; - }); - - // Find and remove any language implementations which are extra - fs.readdirSync(schemaSourceDirPath) - .filter(file => file.startsWith(metaSchemaLanguageFilePrefix)) - .filter(file => !KNOWN_LANGUAGES.some(language => metaSchemaLanguageStatusFileName(language) === file)) - .forEach(file => { - messages.push(`LanguageImplementation file ${file} found for unrecognized language. Removing.`); - fs.unlinkSync(schemaSourceDirPath + file); - }); - - // Find and add any language implementations not in meta schema - KNOWN_LANGUAGES.forEach(language => { - const languageImplementation = languageImplementationsByLanguage[language]; - if (!languageImplementation) { - messages.push(`LanguageImplementation ${language} not found. Adding.`); - languageImplementationsByLanguage[language] = emptyLanguageImplementation(language, metaSchema); - } - }); - - metaSchema.languageImplementations = Object.values(languageImplementationsByLanguage); -} - -function emptyLanguageImplementation(language, metaSchema) { - return new LanguageImplementation( - language, - 'TODO', - metaSchema.types.map(metaSchemaType => new TypeSupportStatus(metaSchemaType.type, IMPLEMENTATION_STATUS_UNKNOWN, [], metaSchemaType.enumValues === null ? 
null : [], null))); -} - -function parseEnum(rawJson, propertyName, errorMessage, knownValues) { - const string = parseString(rawJson, propertyName, errorMessage); - if (!knownValues.includes(string)) { - throw new Error(errorMessage); - } - return string; -} - -function parseString(rawJson, propertyName, errorMessage, nullable = false) { - const property = rawJson[propertyName]; - if ((property === null || property === undefined) && nullable) { - return null; - } - if (typeof property !== 'string') { - throw new Error(errorMessage); - } - return property; -} - -function parseBoolean(rawJson, propertyName, errorMessage) { - const property = rawJson[propertyName]; - if (typeof property !== 'boolean') { - throw new Error(errorMessage); - } - return property; -} - -function parseArray(rawJson, propertyName, entryParser, errorMessage, entryErrorFormatter, messages, nullable = false) { - const property = rawJson[propertyName]; - if ((property === null || property === undefined) && nullable) { - return null; - } - return parseArrayValue(property, entryParser, errorMessage, entryErrorFormatter, messages); -} - -function parseArrayValue(arrayValue, entryParser, errorMessage, entryErrorFormatter, messages) { - if (!Array.isArray(arrayValue)) { - throw new Error(errorMessage); - } - const entries = []; - arrayValue.forEach(entry => { - try { - entries.push(entryParser(entry)); - } catch (error) { - messages.push(entryErrorFormatter(error)); - } - }); - return entries; -} diff --git a/scripts/json-schema.js b/scripts/source-schema.js similarity index 50% rename from scripts/json-schema.js rename to scripts/source-schema.js index 8c7e6ea8..2a966487 100644 --- a/scripts/json-schema.js +++ b/scripts/source-schema.js @@ -1,66 +1,74 @@ import fs from 'fs'; -import {MetaSchemaEnumValue, MetaSchemaProperty, MetaSchemaType} from "./meta-schema.js"; -import {rootTypeName, schemaOutDirPath} from "./util.js"; +import { + isExperimentalProperty, + metaSchemaFilePrefix, + rootTypeName, + 
schemaSourceDirPath +} from "./util.js"; +import yaml from "yaml"; const localDefPrefix = '#/$defs/'; -export function readJsonSchemaTypes() { - const typesByType = {}; - const topLevelSchemas = {}; +export function readSourceSchema() { + const sourceTypesByType = {}; + const sourceContentByFile = {}; - fs.readdirSync(schemaOutDirPath) - .filter(file => file.endsWith(".json")) + fs.readdirSync(schemaSourceDirPath) + .filter(file => file.endsWith('.yaml') && !file.startsWith(metaSchemaFilePrefix)) .forEach(file => { - const fileContent = JSON.parse(fs.readFileSync(schemaOutDirPath + file, "utf-8")); + const sourceContent = yaml.parse(fs.readFileSync(schemaSourceDirPath + file, "utf-8")); - topLevelSchemas[file] = fileContent; + sourceContentByFile[file] = sourceContent; - if (file === 'opentelemetry_configuration.json') { - typesByType[rootTypeName] = new JsonSchemaType(rootTypeName, file, fileContent, '.', fileContent); + if (file === 'opentelemetry_configuration.yaml') { + sourceTypesByType[rootTypeName] = new SourceSchemaType(rootTypeName, file, sourceContent, '.', sourceContent); } - Object.entries(getDefs(fileContent)).forEach(([type, schema]) => { + Object.entries(getDefs(sourceContent)).forEach(([type, schema]) => { const jsonSchemaPath = `${localDefPrefix}${type}`; - if (type in typesByType) { - throw new Error(`${type} already exists in schemasByName with definition: ` + typesByType[type]); + if (type in sourceTypesByType) { + throw new Error(`${type} already exists in schemasByName with definition: ` + sourceTypesByType[type]); } - typesByType[type] = new JsonSchemaType(type, file, fileContent, jsonSchemaPath, schema); + sourceTypesByType[type] = new SourceSchemaType(type, file, sourceContent, jsonSchemaPath, schema); }); }); // Resolve refs to top-level types - Object.values(typesByType).forEach(jsonSchemaType => { - const ref = jsonSchemaType.schema['$ref']; + Object.values(sourceTypesByType).forEach(sourceSchemaType => { + const ref = 
sourceSchemaType.schema['$ref']; if (!ref) { return; } - const topLevelSchema = topLevelSchemas[ref]; + const topLevelSchema = sourceContentByFile[ref]; if (!topLevelSchema) { throw new Error("Could not resolve top level $ref:" + ref); } - jsonSchemaType.file = ref; - jsonSchemaType.jsonSchemaPath = '.'; - jsonSchemaType.schema = topLevelSchema; + sourceSchemaType.sourceFile = ref; + sourceSchemaType.jsonSchemaPath = '.'; + sourceSchemaType.schema = topLevelSchema; }); // Resolve properties, enum values - Object.values(typesByType).forEach(jsonSchemaType => { - jsonSchemaType.properties = resolveJsonSchemaProperties(jsonSchemaType.schema, typesByType); - jsonSchemaType.enumValues = resolveEnumValues(jsonSchemaType); - if (jsonSchemaType.properties.length > 0 && jsonSchemaType.enumValues !== null) { - throw new Error(`${jsonSchemaType.type} has enum values and properties`); + Object.values(sourceTypesByType).forEach(sourceSchemaType => { + sourceSchemaType.properties = resolveSourceSchemaProperties(sourceSchemaType.schema, sourceTypesByType); + sourceSchemaType.enumValues = resolveEnumValues(sourceSchemaType); + if (sourceSchemaType.properties.length > 0 && sourceSchemaType.enumValues !== null) { + throw new Error(`${sourceSchemaType.type} has enum values and properties`); } }); - return Object.values(typesByType); + return { + sourceContentByFile, + sourceTypesByType, + }; } -function resolveJsonSchemaProperties(jsonSchema, typesByType) { - const properties = jsonSchema['properties']; +function resolveSourceSchemaProperties(sourceSchema, typesByType) { + const properties = sourceSchema['properties']; if (!properties) { return []; } - const requiredProperties = jsonSchema['required'] || []; + const requiredProperties = sourceSchema['required'] || []; const resolvedProperties = []; Object.entries(properties).forEach(([propertyKey, propertySchema]) => { const type = propertySchema['type']; @@ -96,14 +104,14 @@ function resolveJsonSchemaProperties(jsonSchema, 
typesByType) { } else if (oneOf) { types.push('oneOf'); } - resolvedProperties.push(new JsonSchemaProperty(propertyKey, types, isSeq, isRequired, propertySchema)); + resolvedProperties.push(new SourceSchemaProperty(propertyKey, types, isSeq, isRequired, propertySchema)); }); return resolvedProperties; } -function resolveEnumValues(jsonSchemaType) { - const enumValues = jsonSchemaType.schema['enum']; +function resolveEnumValues(sourceSchemaType) { + const enumValues = sourceSchemaType.schema['enum']; if (!enumValues) { return null; } @@ -116,7 +124,7 @@ export function resolveRef(ref, typesByType) { const type = ref.substring(localDefPrefix.length); response = typesByType[type]; } else { - response = Object.values(typesByType).find(jsonSchemaType => jsonSchemaType.jsonSchemaRef() === ref); + response = Object.values(typesByType).find(sourceSchemaType => sourceSchemaType.jsonSchemaRef() === ref); } if (!response) { throw new Error(`Unable to find type for JSON schema ref ${ref}`); @@ -132,7 +140,7 @@ function getDefs(jsonSchema) { return defs; } -export class JsonSchemaProperty { +export class SourceSchemaProperty { property; types; isSeq; @@ -148,9 +156,8 @@ export class JsonSchemaProperty { } } -export class JsonSchemaType { +export class SourceSchemaType { type; - file; sourceFile; fileContent; jsonSchemaPath; @@ -158,10 +165,9 @@ export class JsonSchemaType { properties; enumValues; // null if not enum - constructor(type, file, fileContent, jsonSchemaPath, schema) { + constructor(type, sourceFile, fileContent, jsonSchemaPath, schema) { this.type = type; - this.file = file; - this.sourceFile = file.replace(".json", ".yaml"); + this.sourceFile = sourceFile; this.fileContent = fileContent; this.jsonSchemaPath = jsonSchemaPath; this.schema = schema; @@ -174,19 +180,26 @@ export class JsonSchemaType { } jsonSchemaRef() { - let ref = this.file; + let ref = this.sourceFile; if (this.jsonSchemaPath !== '.') { ref += this.jsonSchemaPath; } return ref; } - 
toMetaSchemaType() { - return new MetaSchemaType( - this.type, - this.properties.map(jsonSchemaProperty => new MetaSchemaProperty(jsonSchemaProperty.property, "TODO")), - this.enumValues === null ? null : this.enumValues.map(enumValue => new MetaSchemaEnumValue(enumValue, "TODO")), - false - ); + sortedEnumValues() { + const sorted = this.enumValues.slice(); + sorted.sort((a, b) => a.localeCompare(b)); + return sorted; + } + + sortedProperties() { + const sorted = this.properties.slice(); + // Sort in lexigraphical order, with non-experimental properties first + sorted.sort((a, b) => { + const differentMaturities = isExperimentalProperty(a.property) - isExperimentalProperty(b.property); + return differentMaturities === 0 ? a.property.localeCompare(b.property) : +differentMaturities; + }); + return sorted; } } diff --git a/scripts/util.js b/scripts/util.js index 849c76ed..cae86b35 100644 --- a/scripts/util.js +++ b/scripts/util.js @@ -9,9 +9,6 @@ export const schemaSourceDirPath = __dirname + "/../schema/"; export const schemaOutDirPath = __dirname + "/../schema_out/"; export const markdownDocPath = __dirname + "/../schema-docs.md"; -export const metaSchemaTypesFileName = `${metaSchemaFilePrefix}_types.yaml`; -export const metaSchemaTypesPath = schemaSourceDirPath + metaSchemaTypesFileName; - export const metaSchemaLanguageFilePrefix = `${metaSchemaFilePrefix}_language`; export const metaSchemaLanguageStatusFileName = (language) => `${metaSchemaLanguageFilePrefix}_${language}.yaml`; export const metaSchemaLanguageStatusPath = (language) => schemaSourceDirPath + metaSchemaLanguageStatusFileName(language); diff --git a/scripts/yaml-to-json.js b/scripts/yaml-to-json.js deleted file mode 100644 index 6377a599..00000000 --- a/scripts/yaml-to-json.js +++ /dev/null @@ -1,36 +0,0 @@ -import fs from 'fs'; -import {metaSchemaFilePrefix, schemaSourceDirPath, schemaOutDirPath} from "./util.js"; -import yaml from "yaml"; - -// Delete and recreate schema out directory 
-fs.rmSync(schemaOutDirPath, {recursive: true, force: true}); -fs.mkdirSync(schemaOutDirPath); - -// Read YAML source files and create a map of their content -const fileContentByFile = {}; -fs.readdirSync(schemaSourceDirPath) - .filter(file => file.endsWith('.yaml') && !file.startsWith(metaSchemaFilePrefix)) - .forEach(file => fileContentByFile[file] = fs.readFileSync(schemaSourceDirPath + file, "utf-8")); - -// Iterate through source files, replacing references -Object.keys(fileContentByFile).forEach(file => { - const jsonFile = file.replace('.yaml', '.json'); - Object.entries(fileContentByFile).forEach(([otherFile, otherContent]) => { - fileContentByFile[otherFile] = otherContent.replaceAll(`$ref: ${file}`, `$ref: ${jsonFile}`); - }); -}); - -// For each file, parse the YAML, annotate, and write to output directory -Object.entries(fileContentByFile).forEach(([file, content]) => { - const parsedContent = yaml.parse(content); - const jsonFile = file.replace('.yaml', '.json'); - - // Annotate with constant info - const annotated = { - '$id': `https://opentelemetry.io/otelconfig/${jsonFile}`, - '$schema': 'https://json-schema.org/draft/2020-12/schema', - ...parsedContent - } - - fs.writeFileSync(schemaOutDirPath + jsonFile, JSON.stringify(annotated, null, 2)); -});