Drop Python 2.7, 3.4, and 3.5 #1697

Merged · 2 commits · Aug 26, 2021
Changes from all commits
11 changes: 3 additions & 8 deletions elasticsearch/__init__.py
@@ -16,7 +16,6 @@
 # under the License.

 # flake8: noqa
-from __future__ import absolute_import

 import logging
 import re
@@ -25,9 +24,9 @@

 from ._version import __versionstr__

-_major, _minor, _patch = [
+_major, _minor, _patch = (
     int(x) for x in re.search(r"^(\d+)\.(\d+)\.(\d+)", __versionstr__).groups()
-]
+)
 VERSION = __version__ = (_major, _minor, _patch)

 logger = logging.getLogger("elasticsearch")
@@ -88,10 +87,6 @@
 ]

 try:
-    # Asyncio only supported on Python 3.6+
-    if sys.version_info < (3, 6):
-        raise ImportError
-
     from ._async.client import AsyncElasticsearch
     from ._async.http_aiohttp import AIOHttpConnection, AsyncConnection
     from ._async.transport import AsyncTransport
@@ -102,5 +97,5 @@
         "AsyncTransport",
         "AsyncElasticsearch",
     ]
-except (ImportError, SyntaxError):
+except ImportError:  # pragma: nocover
     pass
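
In the second hunk the version components switch from a list comprehension to a parenthesised generator expression; tuple unpacking works identically with either, so the change is purely stylistic. The later hunks drop the runtime guard against Python < 3.6, since the async modules can now be imported unconditionally. A minimal standalone sketch of the version parsing, using a hypothetical version string rather than the package's real __versionstr__:

import re

version_string = "8.0.0"  # hypothetical stand-in for __versionstr__
_major, _minor, _patch = (
    int(x) for x in re.search(r"^(\d+)\.(\d+)\.(\d+)", version_string).groups()
)
print((_major, _minor, _patch))  # -> (8, 0, 0)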
5 changes: 1 addition & 4 deletions elasticsearch/__init__.pyi
@@ -45,13 +45,10 @@ from .serializer import JSONSerializer as JSONSerializer
 from .transport import Transport as Transport

 try:
-    if sys.version_info < (3, 6):
-        raise ImportError
-
     from ._async.client import AsyncElasticsearch as AsyncElasticsearch
     from ._async.http_aiohttp import AIOHttpConnection as AIOHttpConnection
     from ._async.transport import AsyncTransport as AsyncTransport
-except (ImportError, SyntaxError):
+except ImportError:
     pass

 VERSION: Tuple[int, int, int]
2 changes: 1 addition & 1 deletion elasticsearch/_async/_extra_imports.py
@@ -37,7 +37,7 @@
 # See AIOHttpConnection.request() for more information why.
 try:
     import yarl
-except ImportError:
+except ImportError:  # pragma: nocover
     yarl = False

 __all__ = ["aiohttp", "aiohttp_exceptions", "yarl"]
8 changes: 3 additions & 5 deletions elasticsearch/_async/client/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Licensed to Elasticsearch B.V. under one or more contributor
 # license agreements. See the NOTICE file distributed with
 # this work for additional information regarding copyright
@@ -16,7 +15,6 @@
 # specific language governing permissions and limitations
 # under the License.

-from __future__ import unicode_literals

 import logging

@@ -59,7 +57,7 @@
 logger = logging.getLogger("elasticsearch")


-class AsyncElasticsearch(object):
+class AsyncElasticsearch:
     """
     Elasticsearch low-level client. Provides a straightforward mapping from
     Python to ES REST endpoints.
@@ -244,10 +242,10 @@ def __repr__(self):
             # truncate to 5 if there are too many
             if len(cons) > 5:
                 cons = cons[:5] + ["..."]
-            return "<{cls}({cons})>".format(cls=self.__class__.__name__, cons=cons)
+            return f"<{self.__class__.__name__}({cons})>"
         except Exception:
             # probably operating on custom transport and connection_pool, ignore
-            return super(AsyncElasticsearch, self).__repr__()
+            return super().__repr__()

     async def __aenter__(self):
         if hasattr(self.transport, "_async_call"):
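
Two Python 3-only idioms recur in this hunk and throughout the PR: f-strings replace str.format, and super() no longer needs explicit class/instance arguments. A small self-contained sketch of the same __repr__ shape (class and attribute names here are illustrative, not the client's real ones):

class FakeClient:
    def __init__(self, hosts):
        self.hosts = list(hosts)

    def __repr__(self):
        try:
            cons = self.hosts
            if len(cons) > 5:
                cons = cons[:5] + ["..."]
            return f"<{self.__class__.__name__}({cons})>"
        except Exception:
            # Same fallback as above: argument-free super() on Python 3.
            return super().__repr__()


print(FakeClient(["https://localhost:9200"]))  # -> <FakeClient(['https://localhost:9200'])>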
5 changes: 1 addition & 4 deletions elasticsearch/_async/client/__init__.pyi
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Licensed to Elasticsearch B.V. under one or more contributor
 # license agreements. See the NOTICE file distributed with
 # this work for additional information regarding copyright
@@ -16,8 +15,6 @@
 # specific language governing permissions and limitations
 # under the License.

-from __future__ import unicode_literals
-
 import logging
 from typing import Any, Collection, MutableMapping, Optional, Tuple, Type, Union

@@ -59,7 +56,7 @@ from .xpack import XPackClient

 logger: logging.Logger

-class AsyncElasticsearch(object):
+class AsyncElasticsearch:
     transport: AsyncTransport

     async_search: AsyncSearchClient
7 changes: 3 additions & 4 deletions elasticsearch/_async/helpers.py
@@ -18,7 +18,6 @@
 import asyncio
 import logging

-from ..compat import map
 from ..exceptions import NotFoundError, TransportError
 from ..helpers.actions import (
     _ActionChunker,
@@ -57,7 +56,7 @@ async def _process_bulk_chunk(
     raise_on_error=True,
     ignore_status=(),
     *args,
-    **kwargs
+    **kwargs,
 ):
     """
     Send a bulk request to elasticsearch and process the output.
@@ -127,7 +126,7 @@ async def async_streaming_bulk(
     yield_ok=True,
     ignore_status=(),
     *args,
-    **kwargs
+    **kwargs,
 ):

     """
@@ -287,7 +286,7 @@ async def async_scan(
     request_timeout=None,
     clear_scroll=True,
     scroll_kwargs=None,
-    **kwargs
+    **kwargs,
 ):
     """
     Simple abstraction on top of the
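
The only signature change in these helpers is the trailing comma after **kwargs, which the interpreters this PR drops rejected as a syntax error; on Python 3.6+ it is legal and keeps the formatting consistent. A trivial sketch with illustrative names:

async def forward_to_bulk(
    client,
    actions,
    *args,
    **kwargs,  # trailing comma after **kwargs is valid syntax on Python 3.6 and newer
):
    # Illustrative only: the real helpers forward *args/**kwargs to the bulk API.
    return client, actions, args, kwargs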
8 changes: 4 additions & 4 deletions elasticsearch/_async/helpers.pyi
@@ -50,7 +50,7 @@ def _process_bulk_chunk(
     raise_on_error: bool = ...,
     ignore_status: Optional[Union[int, Collection[int]]] = ...,
     *args: Any,
-    **kwargs: Any
+    **kwargs: Any,
 ) -> AsyncGenerator[Tuple[bool, Any], None]: ...
 def aiter(x: Union[Iterable[T], AsyncIterable[T]]) -> AsyncGenerator[T, None]: ...
 def azip(
@@ -70,15 +70,15 @@ def async_streaming_bulk(
     yield_ok: bool = ...,
     ignore_status: Optional[Union[int, Collection[int]]] = ...,
     *args: Any,
-    **kwargs: Any
+    **kwargs: Any,
 ) -> AsyncGenerator[Tuple[bool, Any], None]: ...
 async def async_bulk(
     client: AsyncElasticsearch,
     actions: Union[Iterable[Any], AsyncIterable[Any]],
     stats_only: bool = ...,
     ignore_status: Optional[Union[int, Collection[int]]] = ...,
     *args: Any,
-    **kwargs: Any
+    **kwargs: Any,
 ) -> Tuple[int, Union[int, List[Any]]]: ...
 def async_scan(
     client: AsyncElasticsearch,
@@ -90,7 +90,7 @@ def async_scan(
     request_timeout: Optional[Union[float, int]] = ...,
     clear_scroll: bool = ...,
     scroll_kwargs: Optional[Mapping[str, Any]] = ...,
-    **kwargs: Any
+    **kwargs: Any,
 ) -> AsyncGenerator[int, None]: ...
 async def async_reindex(
     client: AsyncElasticsearch,
2 changes: 1 addition & 1 deletion elasticsearch/_async/http_aiohttp.py
@@ -275,7 +275,7 @@ async def perform_request(
         else:
             url = self.url_prefix + url
         if query_string:
-            url = "%s?%s" % (url, query_string)
+            url = f"{url}?{query_string}"
         url = self.host + url

         timeout = aiohttp.ClientTimeout(
4 changes: 2 additions & 2 deletions elasticsearch/_async/transport.py
@@ -64,7 +64,7 @@ def __init__(
         retry_on_timeout=False,
         send_get_body_as="GET",
         meta_header=True,
-        **kwargs
+        **kwargs,
     ):
         """
         :arg hosts: list of dictionaries, each containing keyword arguments to
@@ -110,7 +110,7 @@ def __init__(
         self._async_init_called = False
         self._sniff_on_start_event = None  # type: asyncio.Event

-        super(AsyncTransport, self).__init__(
+        super().__init__(
            hosts=[],
            connection_class=connection_class,
            connection_pool_class=connection_pool_class,
4 changes: 2 additions & 2 deletions elasticsearch/_async/transport.pyi
@@ -21,7 +21,7 @@ from ..connection import Connection
 from ..connection_pool import ConnectionPool
 from ..serializer import Deserializer, Serializer

-class AsyncTransport(object):
+class AsyncTransport:
     DEFAULT_CONNECTION_CLASS: Type[Connection]
     connection_pool: ConnectionPool
     deserializer: Deserializer
@@ -64,7 +64,7 @@ class AsyncTransport(object):
         retry_on_timeout: bool = ...,
         send_get_body_as: str = ...,
         meta_header: bool = ...,
-        **kwargs: Any
+        **kwargs: Any,
     ) -> None: ...
     def add_connection(self, host: Any) -> None: ...
     def set_connections(self, hosts: Collection[Any]) -> None: ...
8 changes: 3 additions & 5 deletions elasticsearch/client/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Licensed to Elasticsearch B.V. under one or more contributor
 # license agreements. See the NOTICE file distributed with
 # this work for additional information regarding copyright
@@ -16,7 +15,6 @@
 # specific language governing permissions and limitations
 # under the License.

-from __future__ import unicode_literals

 import logging

@@ -59,7 +57,7 @@
 logger = logging.getLogger("elasticsearch")


-class Elasticsearch(object):
+class Elasticsearch:
     """
     Elasticsearch low-level client. Provides a straightforward mapping from
     Python to ES REST endpoints.
@@ -244,10 +242,10 @@ def __repr__(self):
             # truncate to 5 if there are too many
             if len(cons) > 5:
                 cons = cons[:5] + ["..."]
-            return "<{cls}({cons})>".format(cls=self.__class__.__name__, cons=cons)
+            return f"<{self.__class__.__name__}({cons})>"
         except Exception:
             # probably operating on custom transport and connection_pool, ignore
-            return super(Elasticsearch, self).__repr__()
+            return super().__repr__()

     def __enter__(self):
         if hasattr(self.transport, "_async_call"):
5 changes: 1 addition & 4 deletions elasticsearch/client/__init__.pyi
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Licensed to Elasticsearch B.V. under one or more contributor
 # license agreements. See the NOTICE file distributed with
 # this work for additional information regarding copyright
@@ -16,8 +15,6 @@
 # specific language governing permissions and limitations
 # under the License.

-from __future__ import unicode_literals
-
 import logging
 from typing import Any, Collection, MutableMapping, Optional, Tuple, Type, Union

@@ -59,7 +56,7 @@ from .xpack import XPackClient

 logger: logging.Logger

-class Elasticsearch(object):
+class Elasticsearch:
     transport: Transport

     async_search: AsyncSearchClient
25 changes: 9 additions & 16 deletions elasticsearch/client/utils.py
@@ -15,14 +15,13 @@
 # specific language governing permissions and limitations
 # under the License.

-from __future__ import unicode_literals

 import base64
 import weakref
 from datetime import date, datetime
 from functools import wraps

-from ..compat import PY2, quote, string_types, to_bytes, to_str, unquote, urlparse
+from ..compat import quote, string_types, to_bytes, to_str, unquote, urlparse

 # parts of URL to be omitted
 SKIP_IN_PATH = (None, "", b"", [], ())
@@ -46,7 +45,7 @@ def _normalize_hosts(hosts):
     for host in hosts:
         if isinstance(host, string_types):
             if "://" not in host:
-                host = "//%s" % host
+                host = f"//{host}"

             parsed_url = urlparse(host)
             h = {"host": parsed_url.hostname}
@@ -59,7 +58,7 @@ def _normalize_hosts(hosts):
                 h["use_ssl"] = True

             if parsed_url.username or parsed_url.password:
-                h["http_auth"] = "%s:%s" % (
+                h["http_auth"] = "{}:{}".format(
                     unquote(parsed_url.username),
                     unquote(parsed_url.password),
                 )
@@ -96,13 +95,9 @@ def _escape(value):
         return value

     # encode strings to utf-8
-    if isinstance(value, string_types):
-        if PY2 and isinstance(value, unicode):  # noqa: F821
-            return value.encode("utf-8")
-        if not PY2 and isinstance(value, str):
-            return value.encode("utf-8")
-
-    return str(value)
+    if not isinstance(value, str):
+        value = str(value)
+    return value.encode("utf-8")


 def _make_path(*parts):
@@ -149,11 +144,9 @@ def _wrapped(*args, **kwargs):
                 "Only one of 'http_auth' and 'api_key' may be passed at a time"
             )
         elif http_auth is not None:
-            headers["authorization"] = "Basic %s" % (
-                _base64_auth_header(http_auth),
-            )
+            headers["authorization"] = f"Basic {_base64_auth_header(http_auth)}"
         elif api_key is not None:
-            headers["authorization"] = "ApiKey %s" % (_base64_auth_header(api_key),)
+            headers["authorization"] = f"ApiKey {_base64_auth_header(api_key)}"

         for p in es_query_params + GLOBAL_PARAMS:
             if p in kwargs:
@@ -197,7 +190,7 @@ def _base64_auth_header(auth_value):
     return to_str(auth_value)


-class NamespacedClient(object):
+class NamespacedClient:
     def __init__(self, client):
         self.client = client

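
The _escape rewrite is the main behavioural simplification in this file: the PY2/unicode branches disappear and every non-bytes value is coerced to str and then encoded as UTF-8. A standalone sketch of the simplified function, assuming the usual normalisation branches for sequences, dates, bools and bytes that precede the lines shown in the hunk:

from datetime import date, datetime


def escape(value):
    # Sketch of the simplified _escape: normalise, let bytes through,
    # and encode everything else as UTF-8.
    if isinstance(value, (list, tuple)):
        value = ",".join(value)
    elif isinstance(value, (date, datetime)):
        value = value.isoformat()
    elif isinstance(value, bool):
        value = str(value).lower()
    elif isinstance(value, bytes):
        return value
    if not isinstance(value, str):
        value = str(value)
    return value.encode("utf-8")


print(escape(True))               # -> b'true'
print(escape(["a", "b"]))         # -> b'a,b'
print(escape(date(2021, 8, 26)))  # -> b'2021-08-26'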
2 changes: 0 additions & 2 deletions elasticsearch/client/utils.pyi
@@ -15,8 +15,6 @@
 # specific language governing permissions and limitations
 # under the License.

-from __future__ import unicode_literals
-
 from typing import (
     Any,
     Callable,