From cf87d657951ceb9d54acfe43690ce293ec4d4989 Mon Sep 17 00:00:00 2001 From: Nikhil Badyal Date: Sun, 24 Sep 2023 21:16:23 +0530 Subject: [PATCH] =?UTF-8?q?=E2=9C=85=20Added=20Elastic=20client=20test=20c?= =?UTF-8?q?ases?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/workflows/pytest.yml | 4 + requirements.txt | 2 + test/.env | 6 ++ test/conftest.py | 82 ++++++++++++++++- test/elastic/__init__.py | 1 + test/elastic/client_test.py | 17 ++++ test/es_bootstrap.sh | 166 +++++++++++++++++++++++++++++++++++ 7 files changed, 275 insertions(+), 3 deletions(-) create mode 100644 test/.env create mode 100644 test/elastic/__init__.py create mode 100644 test/elastic/client_test.py create mode 100644 test/es_bootstrap.sh diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 175d049..a2ddfe1 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -40,6 +40,10 @@ jobs: - name: Display Python version run: python -c "import sys; print(sys.version)" + - name: Setup ElasticSearch + run: | + bash test/es_bootstrap.sh + - name: Install Requirements run: | python -m pip install --upgrade pip diff --git a/requirements.txt b/requirements.txt index b4754c9..6d3609e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -6,11 +6,13 @@ loguru==0.7.2 pytest==7.4.2 pytest-click==1.1.0 pytest-cov==4.1.0 +pytest-elasticsearch @ git+https://github.com/nikhilbadyal/pytest-elasticsearch/@main pytest-emoji==0.2.0 pytest-loguru==0.2.0 pytest-md==0.2.0 pytest-mock==3.11.1 pytest-xdist==3.3.1 +python-dotenv==1.0.0 tenacity==8.2.3 tqdm==4.66.1 typing-extensions==4.8.0 diff --git a/test/.env b/test/.env new file mode 100644 index 0000000..03cd894 --- /dev/null +++ b/test/.env @@ -0,0 +1,6 @@ +STACK_VERSION=8.9.0 +NODES=1 +PORT=9200 +SECURITY_ENABLED=true +ELASTICSEARCH_PASSWORD=verysecure +PLUGINS="" diff --git a/test/conftest.py b/test/conftest.py index 37b0645..acc109b 100644 --- a/test/conftest.py +++ b/test/conftest.py @@ -1,15 +1,28 @@ """Conftest for Pytest.""" from __future__ import annotations +import csv +import os import sys -from typing import Any -from unittest.mock import Mock +from pathlib import Path +from typing import TYPE_CHECKING, Any, Iterator +from unittest.mock import Mock, patch import pytest +from dotenv import load_dotenv +from elasticsearch.helpers import bulk +from pytest_elasticsearch import factories from esxport.click_opt.cli_options import CliOptions +from esxport.elastic import ElasticsearchClient from esxport.esxport import EsXport +if TYPE_CHECKING: + from elasticsearch import Elasticsearch + +DATASET_PATH = Path(Path(Path(__file__).resolve().parent, "nyc-restaurants.csv")) +load_dotenv(Path(DATASET_PATH.parent, ".env")) + @pytest.fixture() def cli_options() -> CliOptions: @@ -19,7 +32,7 @@ def cli_options() -> CliOptions: { "query": query, "output_file": "output.csv", - "url": "http://localhost:9200", + "url": "https://localhost:9200", "user": "admin", "password": "password", "index_prefixes": ["index1", "index2"], @@ -116,3 +129,66 @@ def _capture_wrap() -> None: """Avoid https://github.com/pytest-dev/pytest/issues/5502.""" sys.stderr.close = lambda *args: None # type: ignore[method-assign] #noqa: ARG005 sys.stdout.close = lambda *args: None # type: ignore[method-assign] #noqa: ARG005 + + +elasticsearch_nooproc = factories.elasticsearch_noproc( + port=9200, + scheme="https", + host="localhost", + user="elastic", + password=os.getenv("ELASTICSEARCH_PASSWORD"), +) +elasticsearch_proc = 
factories.elasticsearch("elasticsearch_nooproc") + + +@pytest.fixture() +def index_name() -> str: + """Index name.""" + return "nyc_index" + + +@pytest.fixture() +def nyc_index(index_name: str, elasticsearch_proc: Elasticsearch) -> Any: + """Create index.""" + elasticsearch_proc.indices.create(index=index_name) + return index_name + + +# noinspection PyTypeChecker +def generate_actions() -> Iterator[dict[str, Any]]: + """Reads the file through csv.DictReader() and for each row yields a single document. + + This function is passed into the bulk() helper to create many documents in sequence. + """ + with Path(DATASET_PATH).open() as f: + reader = csv.DictReader(f) + + for row in reader: + doc = { + "_id": row["CAMIS"], + "name": row["DBA"], + "borough": row["BORO"], + "cuisine": row["CUISINE DESCRIPTION"], + "grade": row["GRADE"] or None, + } + + lat = row["Latitude"] + lon = row["Longitude"] + if lat not in ("", "0") and lon not in ("", "0"): + doc["location"] = {"lat": float(lat), "lon": float(lon)} + yield doc + + +@pytest.fixture() +def populate_data(nyc_index: str, elasticsearch_proc: Elasticsearch) -> Elasticsearch: + """Populates the data in elastic instances.""" + bulk(client=elasticsearch_proc, index=nyc_index, actions=generate_actions()) + return elasticsearch_proc + + +@pytest.fixture() +def elastic_client(cli_options: CliOptions, populate_data: Elasticsearch) -> Iterator[ElasticsearchClient]: + """Patches Elasticsearch client.""" + es_client = ElasticsearchClient(cli_options) + with patch.object(es_client, "client", populate_data): + yield es_client diff --git a/test/elastic/__init__.py b/test/elastic/__init__.py new file mode 100644 index 0000000..a0fdfe7 --- /dev/null +++ b/test/elastic/__init__.py @@ -0,0 +1 @@ +"""Elasticsearch test cases.""" diff --git a/test/elastic/client_test.py b/test/elastic/client_test.py new file mode 100644 index 0000000..33f393e --- /dev/null +++ b/test/elastic/client_test.py @@ -0,0 +1,17 @@ +"""Client Test cases.""" +from __future__ import annotations + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing_extensions import Self + + from esxport.elastic import ElasticsearchClient + + +class TestElasticsearchClient: + """Elastic Client Test cases.""" + + def test_index_exists(self: Self, nyc_index: str, elastic_client: ElasticsearchClient) -> None: + """Test client return true when index exists.""" + assert elastic_client.indices_exists(index=nyc_index) is True diff --git a/test/es_bootstrap.sh b/test/es_bootstrap.sh new file mode 100644 index 0000000..b1492b8 --- /dev/null +++ b/test/es_bootstrap.sh @@ -0,0 +1,166 @@ +#!/bin/bash + +set -o allexport +source test/.env +set +o allexport +set -euxo pipefail + +if [[ -z $STACK_VERSION ]]; then + echo -e "\033[31;1mERROR:\033[0m Required environment variable [STACK_VERSION] not set\033[0m" + exit 1 +fi + +MAJOR_VERSION="$(echo "${STACK_VERSION}" | cut -c 1)" +network_name=elastic +if ! docker network inspect "$network_name" &>/dev/null; then + docker network create "$network_name" + echo "Created network: $network_name" +else + echo "Network $network_name already exists." +fi + +mkdir -p "$(pwd)"/es/plugins + +if [[ ! 
-z $PLUGINS ]]; then + docker run --rm \ + --network=elastic \ + -v "$(pwd)"/es/plugins/:/usr/share/elasticsearch/plugins/ \ + --entrypoint=/usr/share/elasticsearch/bin/elasticsearch-plugin \ + docker.elastic.co/elasticsearch/elasticsearch:"${STACK_VERSION}" \ + install "${PLUGINS/\\n/ }" --batch +fi + +for (( node=1; node<=${NODES-1}; node++ )) +do + port_com=$((9300 + node - 1)) + UNICAST_HOSTS+="es$node:${port_com}," +done + +for (( node=1; node<=${NODES-1}; node++ )) +do + port=$((${PORT:-9200} + node - 1)) + port_com=$((9300 + node - 1)) + if [ "x${MAJOR_VERSION}" == 'x6' ]; then + docker run \ + --rm \ + --env "node.name=es${node}" \ + --env "cluster.name=docker-elasticsearch" \ + --env "cluster.routing.allocation.disk.threshold_enabled=false" \ + --env "bootstrap.memory_lock=true" \ + --env "ES_JAVA_OPTS=-Xms1g -Xmx1g" \ + --env "xpack.security.enabled=false" \ + --env "xpack.license.self_generated.type=basic" \ + --env "discovery.zen.ping.unicast.hosts=${UNICAST_HOSTS}" \ + --env "discovery.zen.minimum_master_nodes=${NODES}" \ + --env "http.port=${port}" \ + --ulimit nofile=65536:65536 \ + --ulimit memlock=-1:-1 \ + --publish "${port}:${port}" \ + --publish "${port_com}:${port_com}" \ + --detach \ + --network=elastic \ + --name="es${node}" \ + -v "$(pwd)"/es/plugins/:/usr/share/elasticsearch/plugins/ \ + docker.elastic.co/elasticsearch/elasticsearch:"${STACK_VERSION}" + elif [ "x${MAJOR_VERSION}" == 'x7' ]; then + docker run \ + --rm \ + --env "node.name=es${node}" \ + --env "cluster.name=docker-elasticsearch" \ + --env "cluster.initial_master_nodes=es1" \ + --env "discovery.seed_hosts=es1" \ + --env "cluster.routing.allocation.disk.threshold_enabled=false" \ + --env "bootstrap.memory_lock=true" \ + --env "ES_JAVA_OPTS=-Xms1g -Xmx1g" \ + --env "xpack.security.enabled=false" \ + --env "xpack.license.self_generated.type=basic" \ + --env "http.port=${port}" \ + --env "action.destructive_requires_name=false" \ + --ulimit nofile=65536:65536 \ + --ulimit memlock=-1:-1 \ + --publish "${port}:${port}" \ + --detach \ + --network=elastic \ + --name="es${node}" \ + -v "$(pwd)"/es/plugins/:/usr/share/elasticsearch/plugins/ \ + docker.elastic.co/elasticsearch/elasticsearch:"${STACK_VERSION}" + elif [ "x${MAJOR_VERSION}" == 'x8' ]; then + if [ "${SECURITY_ENABLED}" == 'true' ]; then + elasticsearch_password=${ELASTICSEARCH_PASSWORD-'changeme'} + docker run \ + --rm \ + --env "ELASTIC_PASSWORD=${elasticsearch_password}" \ + --env "xpack.license.self_generated.type=basic" \ + --env "node.name=es${node}" \ + --env "cluster.name=docker-elasticsearch" \ + --env "cluster.initial_master_nodes=es1" \ + --env "discovery.seed_hosts=es1" \ + --env "cluster.routing.allocation.disk.threshold_enabled=false" \ + --env "bootstrap.memory_lock=true" \ + --env "ES_JAVA_OPTS=-Xms1g -Xmx1g" \ + --env "http.port=${port}" \ + --env "action.destructive_requires_name=false" \ + --ulimit nofile=65536:65536 \ + --ulimit memlock=-1:-1 \ + --publish "${port}:${port}" \ + --network=elastic \ + --name="es${node}" \ + --detach \ + -v "$(pwd)"/es/plugins/:/usr/share/elasticsearch/plugins/ \ + docker.elastic.co/elasticsearch/elasticsearch:"${STACK_VERSION}" + else + docker run \ + --rm \ + --env "xpack.security.enabled=false" \ + --env "node.name=es${node}" \ + --env "cluster.name=docker-elasticsearch" \ + --env "cluster.initial_master_nodes=es1" \ + --env "discovery.seed_hosts=es1" \ + --env "cluster.routing.allocation.disk.threshold_enabled=false" \ + --env "bootstrap.memory_lock=true" \ + --env "ES_JAVA_OPTS=-Xms1g -Xmx1g" \ 
+ --env "xpack.license.self_generated.type=basic" \ + --env "http.port=${port}" \ + --env "action.destructive_requires_name=false" \ + --ulimit nofile=65536:65536 \ + --ulimit memlock=-1:-1 \ + --publish "${port}:${port}" \ + --network=elastic \ + --name="es${node}" \ + --detach \ + -v "$(pwd)"/es/plugins/:/usr/share/elasticsearch/plugins/ \ + docker.elastic.co/elasticsearch/elasticsearch:"${STACK_VERSION}" + fi + fi +done + +if [ "x${MAJOR_VERSION}" == 'x8' ] && [ "${SECURITY_ENABLED}" == 'true' ]; then + docker run \ + --network elastic \ + --rm \ + alpine/curl \ + --max-time 120 \ + --retry 120 \ + --retry-delay 1 \ + --retry-connrefused \ + --show-error \ + --silent \ + -k \ + -u elastic:"${ELASTICSEARCH_PASSWORD-'changeme'}" \ + https://es1:"$PORT" +else + docker run \ + --network elastic \ + --rm \ + alpine/curl \ + --max-time 120 \ + --retry 120 \ + --retry-delay 1 \ + --retry-connrefused \ + --show-error \ + --silent \ + http://es1:"$PORT" +fi + + +echo "Elasticsearch up and running"
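
Usage note: the fixtures added in test/conftest.py compose as cli_options -> elastic_client -> populate_data -> nyc_index, so a test only needs to request `elastic_client` to receive an `ElasticsearchClient` whose underlying connection is patched to the bulk-loaded pytest-elasticsearch instance. The sketch below illustrates that wiring; it is an illustrative example rather than part of the change set, and it assumes only what the patch itself sets up (`indices_exists` on `ElasticsearchClient` and the patched `client` attribute) plus the standard elasticsearch-py `indices.refresh` and `count` calls.

"""Illustrative sketch: consuming the fixtures introduced in this patch."""
from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from typing_extensions import Self

    from esxport.elastic import ElasticsearchClient


class TestNycIndexData:
    """Sketch of a data-level assertion against the populated index."""

    def test_index_is_populated(self: Self, nyc_index: str, elastic_client: ElasticsearchClient) -> None:
        """The bulk-loaded index should exist and contain documents."""
        # indices_exists is the method exercised in test/elastic/client_test.py above.
        assert elastic_client.indices_exists(index=nyc_index) is True
        # conftest.py patches elastic_client.client to the pytest-elasticsearch client,
        # so the plain elasticsearch-py API is available here.
        elastic_client.client.indices.refresh(index=nyc_index)
        assert elastic_client.client.count(index=nyc_index)["count"] > 0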