Fix exception in Urllib3 when dealing with filelike body.
isra17 committed Feb 13, 2023
1 parent b8d7448 commit 6845539
Showing 4 changed files with 292 additions and 142 deletions.
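Before the file-by-file diff, a minimal sketch of the kind of call that could hit the old `len(body)` path. The URL and payload are placeholders, and the exact traceback is an assumption based on the pre-fix code shown further down:

```python
import io

import urllib3
from opentelemetry.instrumentation.urllib3 import URLLib3Instrumentor

# Instrument urllib3 so the client metrics (including request size) are recorded.
URLLib3Instrumentor().instrument()

with urllib3.PoolManager() as pool:
    # A file-like body has no __len__, so the old instrumentation line
    # (request_size = 0 if body is None else len(body)) raised:
    #   TypeError: object of type '_io.BytesIO' has no len()
    # The URL below is a placeholder for any reachable HTTP endpoint.
    pool.urlopen("POST", "http://localhost:8000/upload", body=io.BytesIO(b"payload"))
```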
5 changes: 4 additions & 1 deletion CHANGELOG.md
@@ -7,10 +7,13 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

## Unreleased

- Fix exception in Urllib3 when dealing with filelike body.
([#1399](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/1399))

### Added

- `opentelemetry-instrumentation-redis` Add `sanitize_query` config option to allow query sanitization. ([#1572](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/1572))
- `opentelemetry-instrumentation-elasticsearch` Add optional db.statement query sanitization.
([#1598](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/1598))
- `opentelemetry-instrumentation-celery` Record exceptions as events on the span.
([#1573](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/1573))
@@ -64,7 +64,9 @@ def response_hook(span, request, response):
---
"""

+import collections.abc
import contextlib
+import io
import typing
from timeit import default_timer
from typing import Collection
@@ -213,18 +215,20 @@ def instrumented_urlopen(wrapped, instance, args, kwargs):
            if callable(response_hook):
                response_hook(span, instance, response)

-            request_size = 0 if body is None else len(body)
+            request_size = _get_body_size(body)
            response_size = int(response.headers.get("Content-Length", 0))

            metric_attributes = _create_metric_attributes(
                instance, response, method
            )

            duration_histogram.record(
                elapsed_time, attributes=metric_attributes
            )
-            request_size_histogram.record(
-                request_size, attributes=metric_attributes
-            )
+            if request_size is not None:
+                request_size_histogram.record(
+                    request_size, attributes=metric_attributes
+                )
            response_size_histogram.record(
                response_size, attributes=metric_attributes
            )
@@ -268,6 +272,16 @@ def _get_url(
    return url


+def _get_body_size(body: object) -> typing.Optional[int]:
+    if body is None:
+        return 0
+    if isinstance(body, collections.abc.Sized):
+        return len(body)
+    if isinstance(body, io.BytesIO):
+        return body.getbuffer().nbytes
+    return None
+
+
def _should_append_port(scheme: str, port: typing.Optional[int]) -> bool:
    if not port:
        return False
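To make the new helper's behaviour concrete, here is a small standalone sketch: sized bodies such as `bytes` report their length, `io.BytesIO` reports its buffer size, and anything else (generators, raw file objects) yields `None`, in which case the `http.client.request.size` histogram is simply not recorded instead of raising. The helper body is copied from the diff above; the sample bodies are illustrative assumptions:

```python
import collections.abc
import io
import typing


def _get_body_size(body: object) -> typing.Optional[int]:
    # Copied from the instrumentation module above, for illustration.
    if body is None:
        return 0
    if isinstance(body, collections.abc.Sized):
        return len(body)
    if isinstance(body, io.BytesIO):
        return body.getbuffer().nbytes
    return None


print(_get_body_size(None))                        # 0
print(_get_body_size(b"hello"))                    # 5  (bytes implement __len__)
print(_get_body_size(io.BytesIO(b"hello")))        # 5  (no __len__, but a known buffer size)
print(_get_body_size(chunk for chunk in [b"a"]))   # None -> request-size metric is skipped
```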
@@ -12,11 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from timeit import default_timer

import urllib3
import urllib3.exceptions
from urllib3.request import encode_multipart_formdata

from opentelemetry import trace
from opentelemetry.instrumentation.urllib3 import URLLib3Instrumentor
@@ -87,136 +83,3 @@ def assert_success_span(
"net.peer.ip": self.assert_ip,
}
self.assertGreaterEqual(span.attributes.items(), attributes.items())


class TestURLLib3InstrumentorMetric(HttpTestBase, TestBase):
def setUp(self):
super().setUp()
self.assert_ip = self.server.server_address[0]
self.assert_port = self.server.server_address[1]
self.http_host = ":".join(map(str, self.server.server_address[:2]))
self.http_url_base = "http://" + self.http_host
self.http_url = self.http_url_base + "/status/200"
URLLib3Instrumentor().instrument(meter_provider=self.meter_provider)

def tearDown(self):
super().tearDown()
URLLib3Instrumentor().uninstrument()

def test_metric_uninstrument(self):
with urllib3.PoolManager() as pool:
pool.request("GET", self.http_url)
URLLib3Instrumentor().uninstrument()
pool.request("GET", self.http_url)

metrics_list = self.memory_metrics_reader.get_metrics_data()
for resource_metric in metrics_list.resource_metrics:
for scope_metric in resource_metric.scope_metrics:
for metric in scope_metric.metrics:
for point in list(metric.data.data_points):
self.assertEqual(point.count, 1)

def test_basic_metric_check_client_size_get(self):
with urllib3.PoolManager() as pool:
start_time = default_timer()
response = pool.request("GET", self.http_url)
client_duration_estimated = (default_timer() - start_time) * 1000

expected_attributes = {
"http.status_code": 200,
"http.host": self.assert_ip,
"http.method": "GET",
"http.flavor": "1.1",
"http.scheme": "http",
"net.peer.name": self.assert_ip,
"net.peer.port": self.assert_port,
}
expected_data = {
"http.client.request.size": 0,
"http.client.response.size": len(response.data),
}
expected_metrics = [
"http.client.duration",
"http.client.request.size",
"http.client.response.size",
]

resource_metrics = (
self.memory_metrics_reader.get_metrics_data().resource_metrics
)
for metrics in resource_metrics:
for scope_metrics in metrics.scope_metrics:
self.assertEqual(len(scope_metrics.metrics), 3)
for metric in scope_metrics.metrics:
for data_point in metric.data.data_points:
if metric.name in expected_data:
self.assertEqual(
data_point.sum, expected_data[metric.name]
)
if metric.name == "http.client.duration":
self.assertAlmostEqual(
data_point.sum,
client_duration_estimated,
delta=1000,
)
self.assertIn(metric.name, expected_metrics)
self.assertDictEqual(
expected_attributes,
dict(data_point.attributes),
)
self.assertEqual(data_point.count, 1)

def test_basic_metric_check_client_size_post(self):
with urllib3.PoolManager() as pool:
start_time = default_timer()
data_fields = {"data": "test"}
response = pool.request("POST", self.http_url, fields=data_fields)
client_duration_estimated = (default_timer() - start_time) * 1000

expected_attributes = {
"http.status_code": 501,
"http.host": self.assert_ip,
"http.method": "POST",
"http.flavor": "1.1",
"http.scheme": "http",
"net.peer.name": self.assert_ip,
"net.peer.port": self.assert_port,
}

body = encode_multipart_formdata(data_fields)[0]

expected_data = {
"http.client.request.size": len(body),
"http.client.response.size": len(response.data),
}
expected_metrics = [
"http.client.duration",
"http.client.request.size",
"http.client.response.size",
]

resource_metrics = (
self.memory_metrics_reader.get_metrics_data().resource_metrics
)
for metrics in resource_metrics:
for scope_metrics in metrics.scope_metrics:
self.assertEqual(len(scope_metrics.metrics), 3)
for metric in scope_metrics.metrics:
for data_point in metric.data.data_points:
if metric.name in expected_data:
self.assertEqual(
data_point.sum, expected_data[metric.name]
)
if metric.name == "http.client.duration":
self.assertAlmostEqual(
data_point.sum,
client_duration_estimated,
delta=1000,
)
self.assertIn(metric.name, expected_metrics)

self.assertDictEqual(
expected_attributes,
dict(data_point.attributes),
)
self.assertEqual(data_point.count, 1)
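Only three of the four changed files appear above; the metric tests removed here presumably move to a dedicated test module elsewhere in this change. For reference, a hedged sketch of a test that would exercise the new file-like body path — the class name, test name, and assertions are assumptions, not part of this commit, though the base classes and fixtures mirror the removed tests:

```python
import io

import urllib3

from opentelemetry.instrumentation.urllib3 import URLLib3Instrumentor
from opentelemetry.test.httptest import HttpTestBase
from opentelemetry.test.test_base import TestBase


class TestURLLib3FilelikeBody(HttpTestBase, TestBase):
    def setUp(self):
        super().setUp()
        host, port = self.server.server_address[:2]
        self.http_url = f"http://{host}:{port}/status/200"
        URLLib3Instrumentor().instrument(meter_provider=self.meter_provider)

    def tearDown(self):
        super().tearDown()
        URLLib3Instrumentor().uninstrument()

    def test_filelike_body_does_not_raise(self):
        # Before this fix, recording http.client.request.size called len(body),
        # which raises TypeError for file-like bodies such as BytesIO.
        with urllib3.PoolManager() as pool:
            pool.urlopen("POST", self.http_url, body=io.BytesIO(b"test"))

        # The request-size histogram should report the buffer size (4 bytes).
        metrics = self.memory_metrics_reader.get_metrics_data()
        for resource_metric in metrics.resource_metrics:
            for scope_metric in resource_metric.scope_metrics:
                for metric in scope_metric.metrics:
                    if metric.name == "http.client.request.size":
                        for point in metric.data.data_points:
                            self.assertEqual(point.sum, 4)
```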