From debbfba51d6ebe81de10e0b978581cdba41bca92 Mon Sep 17 00:00:00 2001
From: Simeon Widdis
Date: Wed, 3 Sep 2025 22:46:49 +0000
Subject: [PATCH] Don't recreate indices on every test

Signed-off-by: Simeon Widdis
---
 doctest/build.gradle |  2 ++
 doctest/test_docs.py | 29 +++++++++++++++++++++++------
 2 files changed, 25 insertions(+), 6 deletions(-)

diff --git a/doctest/build.gradle b/doctest/build.gradle
index 92b1a1c9e2c..b7058b04500 100644
--- a/doctest/build.gradle
+++ b/doctest/build.gradle
@@ -66,6 +66,8 @@ task startPrometheus(type: SpawnProcessTask) {
 
 //evaluationDependsOn(':')
 task startOpenSearch(type: SpawnProcessTask) {
+    dependsOn ':opensearch-sql-plugin:bundlePlugin'
+
     if( getOSFamilyType() == "windows") {
         command "${path}\\gradlew.bat -p ${plugin_path} runRestTestCluster"
     }
diff --git a/doctest/test_docs.py b/doctest/test_docs.py
index d79e5b1cf55..636902f6390 100644
--- a/doctest/test_docs.py
+++ b/doctest/test_docs.py
@@ -11,6 +11,7 @@
 import subprocess
 import sys
 import unittest
+from concurrent.futures import ThreadPoolExecutor, as_completed
 from functools import partial
 
 import click
@@ -205,6 +206,7 @@ class TestDataManager:
 
     def __init__(self):
         self.client = OpenSearch([ENDPOINT], verify_certs=True)
+        self.is_loaded = False
 
     def load_file(self, filename, index_name):
         mapping_file_path = './test_mapping/' + filename
@@ -218,18 +220,33 @@ def load_json():
                 for line in f:
                     yield json.loads(line)
 
-        helpers.bulk(self.client, load_json(), stats_only=True, index=index_name, refresh='wait_for')
+        helpers.bulk(self.client, load_json(), stats_only=True, index=index_name, refresh="wait_for")
 
     def load_all_test_data(self):
-        for index_name, filename in TEST_DATA.items():
+        if self.is_loaded:
+            return
+
+        def load_index(index_name, filename):
             if filename is not None:
                 self.load_file(filename, index_name)
             else:
                 debug(f"Skipping index '{index_name}' - filename is None")
 
-    def cleanup_indices(self):
-        indices_to_delete = list(TEST_DATA.keys())
-        self.client.indices.delete(index=indices_to_delete, ignore_unavailable=True)
+        with ThreadPoolExecutor() as executor:
+            futures = {
+                executor.submit(load_index, index_name, filename): index_name
+                for index_name, filename in TEST_DATA.items()
+            }
+
+            for future in as_completed(futures):
+                index_name = futures[future]
+                try:
+                    future.result()
+                except Exception as e:
+                    debug(f"Error loading index '{index_name}': {str(e)}")
+                    raise
+
+        self.is_loaded = True
 
 
 def sql_cli_transform(s):
@@ -282,7 +299,7 @@ def set_up_test_indices_without_calcite(test):
 
 
 def tear_down(test):
-    get_test_data_manager().cleanup_indices()
+    pass
 
 
 docsuite = partial(doctest.DocFileSuite,