✅ Added Elastic client test cases
nikhilbadyal committed Sep 24, 2023
1 parent c7187ac commit 01050bf
Showing 7 changed files with 275 additions and 3 deletions.
4 changes: 4 additions & 0 deletions .github/workflows/pytest.yml
@@ -40,6 +40,10 @@ jobs:
- name: Display Python version
run: python -c "import sys; print(sys.version)"

- name: Setup ElasticSearch
run: |
sh test/es_bootstrap.sh
- name: Install Requirements
run: |
python -m pip install --upgrade pip
1 change: 1 addition & 0 deletions requirements.txt
@@ -11,6 +11,7 @@ pytest-loguru==0.2.0
pytest-md==0.2.0
pytest-mock==3.11.1
pytest-xdist==3.3.1
python-dotenv==1.0.0
tenacity==8.2.3
tqdm==4.66.1
typing-extensions==4.8.0
6 changes: 6 additions & 0 deletions test/.env
@@ -0,0 +1,6 @@
STACK_VERSION=8.9.0
NODES=1
PORT=9200
SECURITY_ENABLED=true
ELASTICSEARCH_PASSWORD=verysecure
PLUGINS=""
82 changes: 79 additions & 3 deletions test/conftest.py
@@ -1,15 +1,28 @@
"""Conftest for Pytest."""
from __future__ import annotations

import csv
import os
import sys
from typing import Any
from unittest.mock import Mock
from pathlib import Path
from typing import TYPE_CHECKING, Any, Iterator
from unittest.mock import Mock, patch

import pytest
from dotenv import load_dotenv
from elasticsearch.helpers import bulk
from pytest_elasticsearch import factories

from esxport.click_opt.cli_options import CliOptions
from esxport.elastic import ElasticsearchClient
from esxport.esxport import EsXport

if TYPE_CHECKING:
from elasticsearch import Elasticsearch

DATASET_PATH = Path(Path(Path(__file__).resolve().parent, "nyc-restaurants.csv"))
load_dotenv(Path(DATASET_PATH.parent, ".env"))


@pytest.fixture()
def cli_options() -> CliOptions:
@@ -19,7 +32,7 @@ def cli_options() -> CliOptions:
{
"query": query,
"output_file": "output.csv",
"url": "http://localhost:9200",
"url": "https://localhost:9200",
"user": "admin",
"password": "password",
"index_prefixes": ["index1", "index2"],
@@ -116,3 +129,66 @@ def _capture_wrap() -> None:
"""Avoid https://github.com/pytest-dev/pytest/issues/5502."""
sys.stderr.close = lambda *args: None # type: ignore[method-assign] #noqa: ARG005
sys.stdout.close = lambda *args: None # type: ignore[method-assign] #noqa: ARG005


elasticsearch_nooproc = factories.elasticsearch_noproc(
port=9200,
scheme="https",
host="localhost",
user="elastic",
password=os.getenv("ELASTICSEARCH_PASSWORD"),
)
elasticsearch_proc = factories.elasticsearch("elasticsearch_nooproc")


@pytest.fixture()
def index_name() -> str:
"""Index name."""
return "nyc_index"


@pytest.fixture()
def nyc_index(index_name: str, elasticsearch_proc: Elasticsearch) -> Any:
"""Create index."""
elasticsearch_proc.indices.create(index=index_name)
return index_name


# noinspection PyTypeChecker
def generate_actions() -> Iterator[dict[str, Any]]:
"""Reads the file through csv.DictReader() and for each row yields a single document.
This function is passed into the bulk() helper to create many documents in sequence.
"""
with Path(DATASET_PATH).open() as f:
reader = csv.DictReader(f)

for row in reader:
doc = {
"_id": row["CAMIS"],
"name": row["DBA"],
"borough": row["BORO"],
"cuisine": row["CUISINE DESCRIPTION"],
"grade": row["GRADE"] or None,
}

lat = row["Latitude"]
lon = row["Longitude"]
if lat not in ("", "0") and lon not in ("", "0"):
doc["location"] = {"lat": float(lat), "lon": float(lon)}
yield doc


@pytest.fixture()
def populate_data(nyc_index: str, elasticsearch_proc: Elasticsearch) -> Elasticsearch:
"""Populates the data in elastic instances."""
bulk(client=elasticsearch_proc, index=nyc_index, actions=generate_actions())
return elasticsearch_proc


@pytest.fixture()
def elastic_client(cli_options: CliOptions, populate_data: Elasticsearch) -> Iterator[ElasticsearchClient]:
"""Patches Elasticsearch client."""
es_client = ElasticsearchClient(cli_options)
with patch.object(es_client, "client", populate_data):
yield es_client
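
For context, a minimal sketch (not part of this commit) of how a test could consume the fixtures above. The test name and expected document count are assumptions; what is grounded in the diff is that the elastic_client fixture patches .client with the populated pytest-elasticsearch instance, so plain Elasticsearch API calls work against it:

from esxport.elastic import ElasticsearchClient


def test_bulk_loaded_documents_visible(elastic_client: ElasticsearchClient) -> None:
    """Sketch: the bulk-loaded nyc_index should contain documents."""
    # elastic_client.client is the patched test cluster from the fixtures above.
    elastic_client.client.indices.refresh(index="nyc_index")
    assert elastic_client.client.count(index="nyc_index")["count"] > 0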
1 change: 1 addition & 0 deletions test/elastic/__init__.py
@@ -0,0 +1 @@
"""Elasticsearch test cases."""
17 changes: 17 additions & 0 deletions test/elastic/client_test.py
@@ -0,0 +1,17 @@
"""Client Test cases."""
from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
from typing_extensions import Self

from esxport.elastic import ElasticsearchClient


class TestElasticsearchClient:
"""Elastic Client Test cases."""

def test_index_exists(self: Self, nyc_index: str, elastic_client: ElasticsearchClient) -> None:
"""Test client return true when index exists."""
assert elastic_client.indices_exists(index=nyc_index) is True
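
A possible follow-up case, shown only as a sketch: the assumption that indices_exists returns False for an index no fixture creates is not confirmed by this diff, and "no_such_index" is a hypothetical name:

    def test_index_does_not_exist(self: Self, elastic_client: ElasticsearchClient) -> None:
        """Assumed behaviour: an index that was never created reports as absent."""
        # "no_such_index" is hypothetical; no fixture in conftest.py creates it.
        assert elastic_client.indices_exists(index="no_such_index") is False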
167 changes: 167 additions & 0 deletions test/es_bootstrap.sh
@@ -0,0 +1,167 @@
#!/bin/bash

set -o allexport
source test/.env
set +o allexport
set -euxo pipefail

if [[ -z $STACK_VERSION ]]; then
echo -e "\033[31;1mERROR:\033[0m Required environment variable [STACK_VERSION] not set\033[0m"
exit 1
fi

MAJOR_VERSION="$(echo "${STACK_VERSION}" | cut -c 1)"
network_name=elastic
if ! docker network inspect "$network_name" &>/dev/null; then
docker network create "$network_name"
echo "Created network: $network_name"
else
echo "Network $network_name already exists."
fi

mkdir -p "$(pwd)"/es/plugins

if [[ ! -z $PLUGINS ]]; then
docker run --rm \
--network=elastic \
-v "$(pwd)"/es/plugins/:/usr/share/elasticsearch/plugins/ \
--entrypoint=/usr/share/elasticsearch/bin/elasticsearch-plugin \
docker.elastic.co/elasticsearch/elasticsearch:"${STACK_VERSION}" \
install "${PLUGINS/\\n/ }" --batch
fi

for (( node=1; node<=${NODES-1}; node++ ))
do
port_com=$((9300 + node - 1))
UNICAST_HOSTS+="es$node:${port_com},"
done

for (( node=1; node<=${NODES-1}; node++ ))
do
port=$((${PORT:-9200} + node - 1))
port_com=$((9300 + node - 1))
if [ "x${MAJOR_VERSION}" == 'x6' ]; then
docker run \
--rm \
--env "node.name=es${node}" \
--env "cluster.name=docker-elasticsearch" \
--env "cluster.routing.allocation.disk.threshold_enabled=false" \
--env "bootstrap.memory_lock=true" \
--env "ES_JAVA_OPTS=-Xms1g -Xmx1g" \
--env "xpack.security.enabled=false" \
--env "xpack.license.self_generated.type=basic" \
--env "discovery.zen.ping.unicast.hosts=${UNICAST_HOSTS}" \
--env "discovery.zen.minimum_master_nodes=${NODES}" \
--env "http.port=${port}" \
--ulimit nofile=65536:65536 \
--ulimit memlock=-1:-1 \
--publish "${port}:${port}" \
--publish "${port_com}:${port_com}" \
--detach \
--network=elastic \
--name="es${node}" \
-v "$(pwd)"/es/plugins/:/usr/share/elasticsearch/plugins/ \
docker.elastic.co/elasticsearch/elasticsearch:"${STACK_VERSION}"
elif [ "x${MAJOR_VERSION}" == 'x7' ]; then
docker run \
--rm \
--env "node.name=es${node}" \
--env "cluster.name=docker-elasticsearch" \
--env "cluster.initial_master_nodes=es1" \
--env "discovery.seed_hosts=es1" \
--env "cluster.routing.allocation.disk.threshold_enabled=false" \
--env "bootstrap.memory_lock=true" \
--env "ES_JAVA_OPTS=-Xms1g -Xmx1g" \
--env "xpack.security.enabled=false" \
--env "xpack.license.self_generated.type=basic" \
--env "http.port=${port}" \
--env "action.destructive_requires_name=false" \
--ulimit nofile=65536:65536 \
--ulimit memlock=-1:-1 \
--publish "${port}:${port}" \
--detach \
--network=elastic \
--name="es${node}" \
-v "$(pwd)"/es/plugins/:/usr/share/elasticsearch/plugins/ \
docker.elastic.co/elasticsearch/elasticsearch:"${STACK_VERSION}"
elif [ "x${MAJOR_VERSION}" == 'x8' ]; then
if [ "${SECURITY_ENABLED}" == 'true' ]; then
elasticsearch_password=${ELASTICSEARCH_PASSWORD-'changeme'}
docker run \
--rm \
--env "ELASTIC_PASSWORD=${elasticsearch_password}" \
--env "xpack.license.self_generated.type=basic" \
--env "node.name=es${node}" \
--env "cluster.name=docker-elasticsearch" \
--env "cluster.initial_master_nodes=es1" \
--env "discovery.seed_hosts=es1" \
--env "cluster.routing.allocation.disk.threshold_enabled=false" \
--env "bootstrap.memory_lock=true" \
--env "ES_JAVA_OPTS=-Xms1g -Xmx1g" \
--env "http.port=${port}" \
--env "action.destructive_requires_name=false" \
--ulimit nofile=65536:65536 \
--ulimit memlock=-1:-1 \
--publish "${port}:${port}" \
--network=elastic \
--name="es${node}" \
--detach \
-v "$(pwd)"/es/plugins/:/usr/share/elasticsearch/plugins/ \
docker.elastic.co/elasticsearch/elasticsearch:"${STACK_VERSION}"
else
docker run \
--rm \
--env "xpack.security.enabled=false" \
--env "node.name=es${node}" \
--env "cluster.name=docker-elasticsearch" \
--env "cluster.initial_master_nodes=es1" \
--env "discovery.seed_hosts=es1" \
--env "cluster.routing.allocation.disk.threshold_enabled=false" \
--env "bootstrap.memory_lock=true" \
--env "ES_JAVA_OPTS=-Xms1g -Xmx1g" \
--env "xpack.license.self_generated.type=basic" \
--env "http.port=${port}" \
--env "action.destructive_requires_name=false" \
--ulimit nofile=65536:65536 \
--ulimit memlock=-1:-1 \
--publish "${port}:${port}" \
--network=elastic \
--name="es${node}" \
--detach \
-v "$(pwd)"/es/plugins/:/usr/share/elasticsearch/plugins/ \
docker.elastic.co/elasticsearch/elasticsearch:"${STACK_VERSION}"
fi
fi
done

if [ "x${MAJOR_VERSION}" == 'x8' ] && [ "${SECURITY_ENABLED}" == 'true' ]; then
docker run \
--network elastic \
--rm \
alpine/curl \
--max-time 120 \
--retry 120 \
--retry-delay 1 \
--retry-connrefused \
--show-error \
--silent \
-k \
-u elastic:"${ELASTICSEARCH_PASSWORD-'changeme'}" \
https://es1:"$PORT"
else
docker run \
--network elastic \
--rm \
alpine/curl \
--max-time 120 \
--retry 120 \
--retry-delay 1 \
--retry-connrefused \
--show-error \
--silent \
http://es1:"$PORT"
fi

sleep 10

echo "Elasticsearch up and running"
