Retry connection errors #214

Merged: 3 commits, May 27, 2024
1 change: 1 addition & 0 deletions logfire/_internal/config.py
@@ -673,6 +673,7 @@ def add_span_processor(span_processor: SpanProcessor) -> None:
endpoint=self.metrics_endpoint,
headers=headers,
preferred_temporality=METRICS_PREFERRED_TEMPORALITY,
session=session,
)
)
]
18 changes: 17 additions & 1 deletion logfire/_internal/exporters/otlp.py
@@ -1,8 +1,11 @@
from __future__ import annotations

import contextlib
import time
from random import random
from typing import Any, Iterable, Sequence, cast

import requests.exceptions
from opentelemetry.sdk.trace import ReadableSpan
from opentelemetry.sdk.trace.export import SpanExportResult
from requests import Session
@@ -38,7 +41,20 @@ def gen() -> Iterable[bytes]:
yield chunk

request.body = gen() # type: ignore
return super().send(request, **kwargs)

max_attempts = 7
Member:

Should this be configurable in any way?

Contributor Author:
I don't think so. Making it configurable would be painful with little benefit, and OTEL's retrying of status codes isn't configurable.

for attempt in range(max_attempts): # pragma: no branch
try:
response = super().send(request, **kwargs)
except requests.exceptions.RequestException:
if attempt < max_attempts - 1:
# Exponential backoff with jitter
time.sleep(2**attempt + random())
continue
raise
return response

raise RuntimeError('Unreachable code') # for pyright # pragma: no cover

def _check_body_size(self, size: int) -> None:
if size > self.max_body_size:
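For a sense of what max_attempts = 7 means in practice, here is a small sketch (not part of the PR) of the back-off schedule the retry loop above produces, assuming the same 2**attempt + random() delay:

from random import random

max_attempts = 7

# One sleep after each failed attempt except the last, which re-raises.
delays = [2**attempt + random() for attempt in range(max_attempts - 1)]
print(delays)       # roughly [1.x, 2.x, 4.x, 8.x, 16.x, 32.x] seconds
print(sum(delays))  # roughly 63-69 seconds of total back-off

So a request is only abandoned after roughly a minute of retrying, which matches the sleep intervals asserted in the test below.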
27 changes: 27 additions & 0 deletions tests/exporters/test_otlp_session.py
@@ -1,6 +1,9 @@
from typing import Any, Iterable, cast
from unittest.mock import Mock

import pytest
import requests.exceptions
from dirty_equals import IsFloat
from requests.models import PreparedRequest, Response as Response
from requests.sessions import HTTPAdapter

@@ -42,3 +45,27 @@ def test_max_body_size_generator() -> None:
with pytest.raises(BodyTooLargeError) as e:
s.post('http://example.com', data=iter([b'abc'] * 100))
assert str(e.value) == 'Request body is too large (12 bytes), must be less than 10 bytes.'


def test_connection_error_retries(monkeypatch: pytest.MonkeyPatch) -> None:
sleep_mock = Mock(return_value=0)
monkeypatch.setattr('time.sleep', sleep_mock)

class ConnectionErrorAdapter(HTTPAdapter):
def send(self, request: PreparedRequest, *args: Any, **kwargs: Any) -> Response:
raise requests.exceptions.ConnectionError()

session = OTLPExporterHttpSession(max_body_size=10)
session.mount('http://', ConnectionErrorAdapter())

with pytest.raises(requests.exceptions.ConnectionError):
session.post('http://example.com', data='123')

assert [call.args for call in sleep_mock.call_args_list] == [
(IsFloat(gt=1, lt=2),),
(IsFloat(gt=2, lt=3),),
(IsFloat(gt=4, lt=5),),
(IsFloat(gt=8, lt=9),),
(IsFloat(gt=16, lt=17),),
(IsFloat(gt=32, lt=33),),
]
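The test above forces every attempt to fail; in normal use the retries are invisible to callers. A minimal usage sketch, assuming the import path matches the module changed above and using an illustrative OTLP endpoint URL:

import requests

from logfire._internal.exporters.otlp import OTLPExporterHttpSession

session = OTLPExporterHttpSession(max_body_size=10 * 1024 * 1024)
try:
    # Transient connection errors are retried with exponential backoff;
    # only a failure on the final attempt propagates to the caller.
    session.post('http://localhost:4318/v1/traces', data=b'payload')
except requests.exceptions.RequestException:
    # Reached only after all 7 attempts have failed.
    ...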