Skip to content

Commit

Permalink
feat: add api test
Browse files Browse the repository at this point in the history
  • Loading branch information
namchuai committed Sep 11, 2024
1 parent 7616f4a commit 67a7b2e
Show file tree
Hide file tree
Showing 4 changed files with 72 additions and 7 deletions.
4 changes: 2 additions & 2 deletions engine/e2e-test/main.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import pytest

from test_api_engine_list import TestApiEngineList
from test_cli_engine_get import TestCliEngineGet
from test_cli_engine_list import TestCliEngineList

if __name__ == "__main__":
    # Importing the Test* classes above is what makes pytest collect them
    # from this module; run everything verbosely.
    pytest.main([__file__, "-v"])
20 changes: 20 additions & 0 deletions engine/e2e-test/test_api_engine_list.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
import pytest
import requests
from test_runner import start_server, stop_server


class TestApiEngineList:
    """E2E checks for the GET /engines HTTP endpoint of cortex-cpp."""

    @pytest.fixture(autouse=True)
    def setup_and_teardown(self):
        """Boot the server before each test and always shut it down after."""
        start_server()
        try:
            yield
        finally:
            stop_server()

    def test_engines_list_api_run_successfully(self):
        """The engines listing endpoint should answer with HTTP 200."""
        resp = requests.get("http://localhost:3928/engines")
        assert resp.status_code == 200
49 changes: 46 additions & 3 deletions engine/e2e-test/test_runner.py
Original file line number Diff line number Diff line change
@@ -1,13 +1,56 @@
import platform
import select
import subprocess
import os
from typing import List, Tuple
import time
from typing import List


def run(name: str, arguments: List[str]):
    """Run the cortex-cpp CLI binary with *arguments* and capture its output.

    Args:
        name: Human-readable label for the command, printed for debugging.
        arguments: CLI arguments to pass to the executable (may be empty).

    Returns:
        Tuple of (returncode, stdout, stderr) from the finished process.

    Raises:
        subprocess.TimeoutExpired: if the command runs longer than 5 seconds.
    """
    if platform.system() == "Windows":
        executable = "build\\cortex-cpp.exe"
    else:
        executable = "build/cortex-cpp"
    print("Command name", name)
    print("Running command: ", [executable] + arguments)
    # A single argv-list call covers both cases: with shell=False,
    # [executable] + [] launches the bare executable, so the previous
    # empty-arguments branch (which also left a stray duplicate
    # subprocess.run call in the diff) is unnecessary.
    result = subprocess.run(
        [executable] + arguments, capture_output=True, text=True, timeout=5
    )
    return result.returncode, result.stdout, result.stderr


def start_server(timeout=5):
    """Launch the cortex-cpp server binary and wait until it reports readiness.

    Polls the child's stdout/stderr for the "Server started" banner for up to
    *timeout* seconds, echoing every line read for debugging.

    Args:
        timeout: Maximum number of seconds to wait for the banner.

    Returns:
        (True, process)  if the banner was seen (server assumed accepting),
        (False, process) if the process exited early or the timeout elapsed.
    """
    if platform.system() == "Windows":
        executable = "build\\cortex-cpp.exe"
    else:
        executable = "build/cortex-cpp"
    # Pipes are captured so the readiness banner can be observed below.
    process = subprocess.Popen(
        executable, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True
    )

    success_message = "Server started"
    start_time = time.time()
    while time.time() - start_time < timeout:
        # Use select to check if there's data to read from stdout or stderr
        # NOTE(review): select() on pipe file objects is POSIX-only; on
        # Windows this call raises OSError — confirm these e2e tests are
        # intended to run on POSIX hosts only.
        readable, _, _ = select.select([process.stdout, process.stderr], [], [], 0.1)

        for stream in readable:
            line = stream.readline()
            if line:
                print(line.strip())  # Print output for debugging
                if success_message in line:
                    # have to wait a bit for server to really up and accept connection
                    time.sleep(0.3)
                    return True, process  # Success condition met

        # Check if the process has ended
        if process.poll() is not None:
            return False, process  # Process ended without success message

    return False, process  # Timeout reached


def stop_server():
    """Ask a running cortex-cpp server to shut down via the CLI `stop` command."""
    run(name="Stop server", arguments=["stop"])
6 changes: 4 additions & 2 deletions engine/main.cc
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,8 @@

void RunServer() {
auto config = file_manager_utils::GetCortexConfig();
LOG_INFO << "Host: " << config.apiServerHost << " Port: " << config.apiServerPort << "\n";
LOG_INFO << "Host: " << config.apiServerHost
<< " Port: " << config.apiServerPort << "\n";

// Create logs/ folder and setup log to file
std::filesystem::create_directory(config.logFolderPath + "/" +
Expand Down Expand Up @@ -72,7 +73,8 @@ void RunServer() {
LOG_INFO << "Server started, listening at: " << config.apiServerHost << ":"
<< config.apiServerPort;
LOG_INFO << "Please load your model";
drogon::app().addListener(config.apiServerHost, std::stoi(config.apiServerPort));
drogon::app().addListener(config.apiServerHost,
std::stoi(config.apiServerPort));
drogon::app().setThreadNum(drogon_thread_num);
LOG_INFO << "Number of thread is:" << drogon::app().getThreadNum();

Expand Down

0 comments on commit 67a7b2e

Please sign in to comment.