Skip to content

Commit

Permalink
Fix load test to account for the newly added load node, fixes #544
Browse files Browse the repository at this point in the history
Signed-off-by: Evgeniy Zayats <zayatsevgeniy@nspcc.io>
  • Loading branch information
Evgeniy Zayats committed Aug 13, 2023
1 parent bda67ee commit 355602e
Show file tree
Hide file tree
Showing 4 changed files with 35 additions and 37 deletions.
4 changes: 1 addition & 3 deletions pytest_tests/helpers/k6.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,6 @@ class LoadResults:

class K6:
def __init__(self, load_params: LoadParams, shell: Shell):

self.load_params = load_params
self.shell = shell

Expand All @@ -68,7 +67,7 @@ def process_dir(self) -> str:
def k6_dir(self) -> str:
if not self._k6_dir:
self._k6_dir = self.shell.exec(
r"sudo find . -name 'k6' -exec dirname {} \; -quit"
r"sudo find / -name 'k6' -exec dirname {} \; -quit"
).stdout.strip("\n")
return self._k6_dir

Expand Down Expand Up @@ -126,7 +125,6 @@ def _generate_env_variables(self, load_params: LoadParams, k6_dir: str) -> str:

@allure.step("Start K6 on initiator")
def start(self) -> None:

self._k6_dir = self.k6_dir
command = (
f"{self.k6_dir}/k6 run {self._generate_env_variables(self.load_params, self.k6_dir)} "
Expand Down
26 changes: 14 additions & 12 deletions pytest_tests/resources/load_params.py
Original file line number Diff line number Diff line change
@@ -1,27 +1,29 @@
import os

# Load node parameters: where the k6 load generators run and how to SSH in.
LOAD_NODES = os.getenv("LOAD_NODES", "127.0.0.1").split(",")
LOAD_NODE_SSH_USER = os.getenv("LOAD_NODE_SSH_USER", "k6")
LOAD_NODE_SSH_PRIVATE_KEY_PATH = os.getenv(
    "LOAD_NODE_SSH_PRIVATE_KEY_PATH", "../neofs-dev-env/services/k6_node/id_ed25519"
)
# Background-load knobs.  Wrapped in int() so the type is the same whether the
# value comes from the environment (always a str) or from the default.
BACKGROUND_WRITERS_COUNT = int(os.getenv("BACKGROUND_WRITERS_COUNT", "10"))
BACKGROUND_READERS_COUNT = int(os.getenv("BACKGROUND_READERS_COUNT", "10"))
BACKGROUND_OBJ_SIZE = int(os.getenv("BACKGROUND_OBJ_SIZE", "1024"))
BACKGROUND_LOAD_MAX_TIME = int(os.getenv("BACKGROUND_LOAD_MAX_TIME", "600"))

# Load run parameters.  Each variable accepts a comma-separated list so one
# run can sweep several values; numeric ones are parsed to lists of int.

OBJ_SIZE = [int(o) for o in os.getenv("OBJ_SIZE", "1000").split(",")]
CONTAINERS_COUNT = [int(c) for c in os.getenv("CONTAINERS_COUNT", "1").split(",")]
OUT_FILE = os.getenv("OUT_FILE", "1mb_200.json").split(",")
OBJ_COUNT = [int(o) for o in os.getenv("OBJ_COUNT", "4").split(",")]
WRITERS = [int(w) for w in os.getenv("WRITERS", "200").split(",")]
# NOTE(review): reads env var "READER" (singular) while the constant is
# READERS — looks like a typo, but renaming the env var would change the
# externally visible configuration interface, so it is only flagged here.
READERS = [int(r) for r in os.getenv("READER", "0").split(",")]
DELETERS = [int(d) for d in os.getenv("DELETERS", "0").split(",")]
LOAD_TIME = [int(ld) for ld in os.getenv("LOAD_TIME", "200").split(",")]
LOAD_TYPE = os.getenv("LOAD_TYPE", "grpc").split(",")
LOAD_NODES_COUNT = [int(ldc) for ldc in os.getenv("LOAD_NODES_COUNT", "1").split(",")]
STORAGE_NODE_COUNT = [int(s) for s in os.getenv("STORAGE_NODE_COUNT", "4").split(",")]
CONTAINER_PLACEMENT_POLICY = os.getenv(
    "CONTAINER_PLACEMENT_POLICY", "REP 1 IN X CBF 1 SELECT 1 FROM * AS X"
)
28 changes: 14 additions & 14 deletions pytest_tests/steps/load.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,12 @@ def start_stopped_nodes():

@allure.title("Init s3 client")
def init_s3_client(
load_nodes: list, login: str, pkey: str, container_placement_policy: str, hosting: Hosting
load_nodes: list,
login: str,
pkey: str,
container_placement_policy: str,
hosting: Hosting,
ssh_port: int,
):
service_configs = hosting.find_service_configs(STORAGE_NODE_SERVICE_NAME_REGEX)
host = hosting.get_host_by_service(service_configs[0].name)
Expand All @@ -52,7 +57,7 @@ def init_s3_client(
node_endpoint = service_configs[0].attributes["rpc_endpoint"]
# prompt_pattern doesn't work at the moment
for load_node in load_nodes:
ssh_client = SSHShell(host=load_node, login=login, private_key_path=pkey)
ssh_client = SSHShell(host=load_node, port=ssh_port, login=login, private_key_path=pkey)
path = ssh_client.exec(r"sudo find . -name 'k6' -exec dirname {} \; -quit").stdout.strip(
"\n"
)
Expand Down Expand Up @@ -88,28 +93,23 @@ def init_s3_client(
ssh_client.exec("aws configure", CommandOptions(interactive_inputs=configure_input))


@allure.title("Clear cache and data from storage nodes")
def clear_cache_and_data(hosting: Hosting):
    """Wipe every storage node's data.

    For each storage-node service found via the hosting config, the node is
    stopped, its storage data deleted, and the service started again.
    """
    for cfg in hosting.find_service_configs(STORAGE_NODE_SERVICE_NAME_REGEX):
        node_host = hosting.get_host_by_service(cfg.name)
        node_host.stop_service(cfg.name)
        node_host.delete_storage_node_data(cfg.name)
        node_host.start_service(cfg.name)


@allure.title("Prepare objects")
def prepare_objects(k6_instance: K6):
    # Thin wrapper so object preparation shows up as its own allure step.
    # Delegates entirely to K6.prepare() — presumably this pre-creates the
    # objects the load scenario will read/delete; confirm in K6 helper.
    k6_instance.prepare()


@allure.title("Prepare K6 instances and objects")
def prepare_k6_instances(
load_nodes: list, login: str, pkey: str, load_params: LoadParams, prepare: bool = True
load_nodes: list,
login: str,
pkey: str,
load_params: LoadParams,
ssh_port: int,
prepare: bool = True,
) -> list[K6]:
k6_load_objects = []
for load_node in load_nodes:
ssh_client = SSHShell(host=load_node, login=login, private_key_path=pkey)
ssh_client = SSHShell(port=ssh_port, host=load_node, login=login, private_key_path=pkey)
k6_load_object = K6(load_params, ssh_client)
k6_load_objects.append(k6_load_object)
for k6_load_object in k6_load_objects:
Expand Down
14 changes: 6 additions & 8 deletions pytest_tests/testsuites/load/test_load.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@
)
from k6 import LoadParams
from load import (
clear_cache_and_data,
get_services_endpoints,
init_s3_client,
multi_node_k6_run,
Expand Down Expand Up @@ -37,18 +36,16 @@

ENDPOINTS_ATTRIBUTES = {
"http": {"regex": HTTP_GATE_SERVICE_NAME_REGEX, "endpoint_attribute": "endpoint"},
"grpc": {"regex": STORAGE_NODE_SERVICE_NAME_REGEX, "endpoint_attribute": "rpc_endpoint"},
"grpc": {"regex": STORAGE_NODE_SERVICE_NAME_REGEX, "endpoint_attribute": "endpoint_data0"},
"s3": {"regex": S3_GATE_SERVICE_NAME_REGEX, "endpoint_attribute": "endpoint"},
}


@pytest.mark.load
@pytest.mark.skip(reason="https://github.com/nspcc-dev/neofs-testcases/issues/544")
@pytest.mark.nspcc_dev__neofs_testcases__issue_544
class TestLoad(ClusterTestBase):
@pytest.fixture(autouse=True)
def clear_cache_and_data(self, hosting: Hosting):
clear_cache_and_data(hosting=hosting)
def restore_nodes(self, hosting: Hosting):
yield
start_stopped_nodes()

Expand All @@ -61,6 +58,7 @@ def init_s3_client(self, hosting: Hosting):
pkey=LOAD_NODE_SSH_PRIVATE_KEY_PATH,
hosting=hosting,
container_placement_policy=CONTAINER_PLACEMENT_POLICY,
ssh_port=2222,
)

@pytest.mark.parametrize("obj_size, out_file", list(zip(OBJ_SIZE, OUT_FILE)))
Expand All @@ -73,7 +71,6 @@ def init_s3_client(self, hosting: Hosting):
@pytest.mark.parametrize("load_nodes_count", LOAD_NODES_COUNT)
@pytest.mark.benchmark
@pytest.mark.grpc
@pytest.mark.skip(reason="https://github.com/nspcc-dev/neofs-testcases/issues/544")
@pytest.mark.nspcc_dev__neofs_testcases__issue_544
def test_custom_load(
self,
Expand All @@ -100,8 +97,8 @@ def test_custom_load(
with allure.step("Get endpoints"):
endpoints_list = get_services_endpoints(
hosting=hosting,
service_name_regex=ENDPOINTS_ATTRIBUTES[LOAD_TYPE]["regex"],
endpoint_attribute=ENDPOINTS_ATTRIBUTES[LOAD_TYPE]["endpoint_attribute"],
service_name_regex=ENDPOINTS_ATTRIBUTES[load_type]["regex"],
endpoint_attribute=ENDPOINTS_ATTRIBUTES[load_type]["endpoint_attribute"],
)
endpoints = ",".join(endpoints_list[:node_count])
load_params = LoadParams(
Expand All @@ -122,6 +119,7 @@ def test_custom_load(
login=LOAD_NODE_SSH_USER,
pkey=LOAD_NODE_SSH_PRIVATE_KEY_PATH,
load_params=load_params,
ssh_port=2222,
)
with allure.step("Run load"):
multi_node_k6_run(k6_load_instances)

0 comments on commit 355602e

Please sign in to comment.