This repository was archived by the owner on Jul 4, 2025. It is now read-only.

Commit 0f2fa6e

chore: cleanup

1 parent 220a974 commit 0f2fa6e

11 files changed, +123 −79 lines

engine/cli/command_line_parser.cc
8 additions, 8 deletions

@@ -124,14 +124,14 @@ bool CommandLineParser::SetupCommand(int argc, char** argv) {
     }
   }
 #endif
-  // auto config = file_manager_utils::GetCortexConfig();
-  // if (!config.llamacppVersion.empty() &&
-  //     config.latestLlamacppRelease != config.llamacppVersion) {
-  //   CLI_LOG(
-  //       "\nNew llama.cpp version available: " << config.latestLlamacppRelease);
-  //   CLI_LOG("To update, run: " << commands::GetCortexBinary()
-  //           << " engines update llama-cpp");
-  // }
+  auto config = file_manager_utils::GetCortexConfig();
+  if (!config.llamacppVersion.empty() &&
+      config.latestLlamacppRelease != config.llamacppVersion) {
+    CLI_LOG(
+        "\nNew llama.cpp version available: " << config.latestLlamacppRelease);
+    CLI_LOG("To update, run: " << commands::GetCortexBinary()
+            << " engines update llama-cpp");
+  }

   return true;
 }

engine/cli/commands/cortex_upd_cmd.cc
2 additions, 2 deletions

@@ -515,10 +515,10 @@ bool CortexUpdCmd::GetLinuxInstallScript(const std::string& v,
                                          const std::string& channel) {
   std::vector<std::string> path_list;
   if (channel == "nightly") {
-    path_list = {"menloresearch", "cortex.cpp", "dev", "engine",
+    path_list = {kMenloOrg, "cortex.cpp", "dev", "engine",
                  "templates", "linux", "install.sh"};
   } else {
-    path_list = {"menloresearch", "cortex.cpp", "main", "engine",
+    path_list = {kMenloOrg, "cortex.cpp", "main", "engine",
                  "templates", "linux", "install.sh"};
   }
   auto url_obj = url_parser::Url{

engine/cli/commands/cortex_upd_cmd.h
2 additions, 2 deletions

@@ -79,9 +79,9 @@ inline std::vector<std::string> GetReleasePath() {
   if (CORTEX_VARIANT == file_manager_utils::kNightlyVariant) {
     return {"cortex", "latest", "version.json"};
   } else if (CORTEX_VARIANT == file_manager_utils::kBetaVariant) {
-    return {"repos", "menloresearch", "cortex.cpp", "releases"};
+    return {"repos", kMenloOrg, "cortex.cpp", "releases"};
   } else {
-    return {"repos", "menloresearch", "cortex.cpp", "releases", "latest"};
+    return {"repos", kMenloOrg, "cortex.cpp", "releases", "latest"};
   }
 }

engine/cli/main.cc
4 additions, 3 deletions

@@ -148,14 +148,14 @@ int main(int argc, char* argv[]) {
                         std::chrono::hours(24);
     should_check_for_latest_llamacpp_version = now > last_check;
   }
-
-  if (false) {
+
+  if (should_check_for_latest_llamacpp_version) {
     std::thread t1([]() {
       // TODO: namh current we only check for llamacpp. Need to add support for other engine
       auto get_latest_version = []() -> cpp::result<std::string, std::string> {
        try {
          auto res = github_release_utils::GetReleaseByVersion(
-             "menloresearch", "cortex.llamacpp", "latest");
+             kGgmlOrg, "llama.cpp", "latest");
          if (res.has_error()) {
            CTL_ERR("Failed to get latest llama.cpp version: " << res.error());
            return cpp::fail("Failed to get latest llama.cpp version: " +

@@ -171,6 +171,7 @@ int main(int argc, char* argv[]) {
       };

       auto res = get_latest_version();
+
       if (res.has_error()) {
         CTL_ERR("Failed to get latest llama.cpp version: " << res.error());
         return;
engine/extensions/local-engine/local_engine.cc
3 additions, 3 deletions

@@ -530,21 +530,21 @@ void LocalEngine::LoadModel(std::shared_ptr<Json::Value> json_body,

   std::vector<std::string> v;
   v.reserve(params.size() + 1);
-  auto engine_dir = engine_service_.GetEngineDirPath("llama.cpp");
+  auto engine_dir = engine_service_.GetEngineDirPath(kLlamaRepo);
   if (engine_dir.has_error()) {
     CTL_WRN(engine_dir.error());
     server_map_.erase(model_id);
     return;
   }
-  auto exe = (engine_dir.value().first / "llama-server").string();
+  auto exe = (engine_dir.value().first / kLlamaServer).string();

   v.push_back(exe);
   v.insert(v.end(), params.begin(), params.end());
   engine_service_.RegisterEngineLibPath();

   auto log_path =
       (file_manager_utils::GetCortexLogPath() / "logs" / "cortex.log").string();
-  CTL_INF("log: " << log_path);
+  CTL_DBG("log: " << log_path);
   auto result = cortex::process::SpawnProcess(v, log_path, log_path);
   if (result.has_error()) {
     CTL_ERR("Fail to spawn process. " << result.error());

engine/services/engine_service.cc
25 additions, 35 deletions

@@ -47,13 +47,6 @@ std::string Repo2Engine(const std::string& r) {
   }
   return r;
 };
-
-std::string GetEnginePath(std::string_view e) {
-  if (e == kLlamaRepo) {
-    return kLlamaLibPath;
-  }
-  return kLlamaLibPath;
-};
 }  // namespace

 cpp::result<void, std::string> EngineService::InstallEngineAsync(

@@ -238,11 +231,10 @@ cpp::result<void, std::string> EngineService::DownloadEngine(
           : normalized_version;
   std::unordered_set<std::string> merged_variant_name = {
       "llama-" + latest_version_semantic + "-bin-" + variant_name.value() +
-          ".tar.gz",
+          ".tar.gz",  // menlo
       "llama-" + latest_version_semantic + "-bin-" + variant_name.value() +
-          ".zip"};
+          ".zip"};  // ggml

-  // CTL_INF("merged_variant_name: " << merged_variant_name);
   for (const auto& asset : res.value()) {
     if (merged_variant_name.find(asset.name) != merged_variant_name.end()) {
       selected_variant = asset;

@@ -279,36 +271,35 @@ cpp::result<void, std::string> EngineService::DownloadEngine(
     }
   }

-  // auto normalize_version = "v" + selected_variant->version;
-  auto normalize_version = selected_variant->version;
   auto variant_folder_name = engine_matcher_utils::GetVariantFromNameAndVersion(
       selected_variant->name, engine, selected_variant->version);
   auto variant_folder_path = file_manager_utils::GetEnginesContainerPath() /
                              engine / variant_folder_name.value() /
-                             normalize_version;
+                             selected_variant->version;
   auto variant_path = variant_folder_path / selected_variant->name;

   std::filesystem::create_directories(variant_folder_path);

   CTL_INF("variant_folder_path: " + variant_folder_path.string());
-  auto on_finished = [this, engine, selected_variant, variant_folder_path,
-                      normalize_version](const DownloadTask& finishedTask) {
+  auto on_finished = [this, engine, selected_variant,
+                      variant_folder_path](const DownloadTask& finishedTask) {
     // try to unzip the downloaded file
     CTL_INF("Engine zip path: " << finishedTask.items[0].localPath.string());
-    CTL_INF("Version: " + normalize_version);
+    CTL_INF("Version: " + selected_variant->version);

     auto extract_path = finishedTask.items[0].localPath.parent_path();
     archive_utils::ExtractArchive(finishedTask.items[0].localPath.string(),
                                   extract_path.string(), true);
     CTL_INF("local path: " << finishedTask.items[0].localPath.string()
                            << ", extract path: " << extract_path.string());
     auto variant = engine_matcher_utils::GetVariantFromNameAndVersion(
-        selected_variant->name, engine, normalize_version);
+        selected_variant->name, engine, selected_variant->version);
     CTL_INF("Extracted variant: " + variant.value());
     try {
+      // Create version file
       std::ofstream meta(extract_path / "version.txt", std::ios::out);
       meta << "name: " << variant.value() << std::endl;
-      meta << "version: " << normalize_version << std::endl;
+      meta << "version: " << selected_variant->version << std::endl;
       meta.close();
       namespace fs = std::filesystem;

@@ -326,7 +317,7 @@ cpp::result<void, std::string> EngineService::DownloadEngine(
       if (!fs::exists(extract_path.parent_path().parent_path() / "deps")) {
         fs::create_directory(extract_path.parent_path().parent_path() / "deps");
       }
-      std::filesystem::permissions(extract_path / "llama-server",
+      std::filesystem::permissions(extract_path / kLlamaServer,
                                    std::filesystem::perms::owner_exec |
                                        std::filesystem::perms::group_exec |
                                        std::filesystem::perms::others_exec,

@@ -337,17 +328,17 @@ cpp::result<void, std::string> EngineService::DownloadEngine(
     }

     // set as default
-
-    auto res =
-        SetDefaultEngineVariant(engine, normalize_version, variant.value());
+    auto res = SetDefaultEngineVariant(engine, selected_variant->version,
+                                       variant.value());
     if (res.has_error()) {
       CTL_ERR("Failed to set default engine variant: " << res.error());
     } else {
       CTL_INF("Set default engine variant: " << res.value().variant);
     }
-    auto create_res = EngineService::UpsertEngine(
-        engine,  // engine_name
-        kLocal, "", "", normalize_version, variant.value(), "Default", "");
+    auto create_res =
+        EngineService::UpsertEngine(engine,  // engine_name
+                                    kLocal, "", "", selected_variant->version,
+                                    variant.value(), "Default", "");

     if (create_res.has_error()) {
       CTL_ERR("Failed to create engine entry: " << create_res->engine_name);

@@ -358,7 +349,7 @@ cpp::result<void, std::string> EngineService::DownloadEngine(
     for (const auto& entry : std::filesystem::directory_iterator(
              variant_folder_path.parent_path())) {
       if (entry.is_directory() &&
-          entry.path().filename() != normalize_version) {
+          entry.path().filename() != selected_variant->version) {
         try {
           std::filesystem::remove_all(entry.path());
         } catch (const std::exception& e) {

@@ -472,8 +463,8 @@ std::string EngineService::GetMatchedVariant(
 cpp::result<std::vector<EngineService::EngineRelease>, std::string>
 EngineService::GetEngineReleases(const std::string& engine) const {
   auto ne = cortex::engine::NormalizeEngine(engine);
-  auto ggml_org = github_release_utils::GetReleases("ggml-org", ne);
-  auto menlo = github_release_utils::GetReleases("menloresearch", ne);
+  auto ggml_org = github_release_utils::GetReleases(kGgmlOrg, ne);
+  auto menlo = github_release_utils::GetReleases(kMenloOrg, ne);
   if (ggml_org.has_error() && menlo.has_error()) {
     return cpp::fail(ggml_org.error());
   }

@@ -500,13 +491,13 @@ EngineService::GetEngineVariants(const std::string& engine,
                                  bool filter_compatible_only) const {
   auto ne = cortex::engine::NormalizeEngine(engine);
   auto engine_release_menlo =
-      github_release_utils::GetReleaseByVersion("menloresearch", ne, version);
+      github_release_utils::GetReleaseByVersion(kMenloOrg, ne, version);
   auto engine_release_ggml =
-      github_release_utils::GetReleaseByVersion("ggml-org", ne, version);
+      github_release_utils::GetReleaseByVersion(kGgmlOrg, ne, version);

   if (engine_release_menlo.has_error() && engine_release_ggml.has_error()) {
     return cpp::fail("Failed to get engine release: " +
-                    engine_release_menlo.error());
+                     engine_release_menlo.error());
   }
   if (engine_release_menlo.has_error()) {
     CTL_WRN("Failed to get engine release: " << engine_release_menlo.error());

@@ -835,8 +826,8 @@ EngineService::GetEngineDirPath(const std::string& engine_name) {
   CTL_DBG("user defined engine path: " << user_defined_engine_path);
   const std::filesystem::path engine_dir_path = [&] {
     if (user_defined_engine_path != nullptr) {
-      return std::filesystem::path(user_defined_engine_path) /
-             GetEnginePath(ne) / selected_engine_variant->variant /
+      return std::filesystem::path(user_defined_engine_path) / kLlamaLibPath /
+             selected_engine_variant->variant /
              selected_engine_variant->version;
     } else {
       return file_manager_utils::GetEnginesContainerPath() / ne /

@@ -897,8 +888,7 @@ std::vector<EngineV> EngineService::GetLoadedEngines() {
 cpp::result<github_release_utils::GitHubRelease, std::string>
 EngineService::GetLatestEngineVersion(const std::string& engine) const {
   auto ne = cortex::engine::NormalizeEngine(engine);
-  auto res =
-      github_release_utils::GetReleaseByVersion("menloresearch", ne, "latest");
+  auto res = github_release_utils::GetReleaseByVersion(kMenloOrg, ne, "latest");
   if (res.has_error()) {
     return cpp::fail("Failed to fetch engine " + engine + " latest version!");
   }

engine/test/components/test_github_release_utils.cc
2 additions, 2 deletions

@@ -6,14 +6,14 @@ class GitHubReleaseUtilsTest : public ::testing::Test {};
 TEST_F(GitHubReleaseUtilsTest, AbleToGetReleaseByVersion) {
   auto version{"v0.1.36"};
   auto result = github_release_utils::GetReleaseByVersion(
-      "menloresearch", "cortex.llamacpp", version);
+      kMenloOrg, "cortex.llamacpp", version);

   ASSERT_TRUE(result.has_value());
   ASSERT_EQ(result->tag_name, version);
 }

 TEST_F(GitHubReleaseUtilsTest, AbleToGetReleaseList) {
-  auto result = github_release_utils::GetReleases("menloresearch", "cortex.llamacpp");
+  auto result = github_release_utils::GetReleases(kMenloOrg, "cortex.llamacpp");

   ASSERT_TRUE(result.has_value());
   ASSERT_TRUE(result->size() > 0);

engine/test/components/test_string_utils.cc
40 additions, 0 deletions

@@ -288,4 +288,44 @@ TEST_F(StringUtilsTestSuite, LargeInputPerformance) {
   EXPECT_EQ(RemoveSubstring(large_input, to_remove), "");
 }

+TEST(LTrimTest, EmptyString) {
+  std::string s = "";
+  LTrim(s);
+  EXPECT_EQ(s, "");
+}
+
+TEST(LTrimTest, NoSpaces) {
+  std::string s = "HelloWorld";
+  LTrim(s);
+  EXPECT_EQ(s, "HelloWorld");
+}
+
+TEST(LTrimTest, LeadingSpaces) {
+  std::string s = " HelloWorld";
+  LTrim(s);
+  EXPECT_EQ(s, "HelloWorld");
+}

+TEST(LTrimTest, LeadingTabs) {
+  std::string s = "\t\tHelloWorld";
+  LTrim(s);
+  EXPECT_EQ(s, "HelloWorld");
+}
+
+TEST(LTrimTest, LeadingNewlines) {
+  std::string s = "\n\nHelloWorld";
+  LTrim(s);
+  EXPECT_EQ(s, "HelloWorld");
+}
+
+TEST(LTrimTest, OnlySpaces) {
+  std::string s = " ";
+  LTrim(s);
+  EXPECT_EQ(s, "");
+}
+
+TEST(LTrimTest, MixedSpaces) {
+  std::string s = " \t\nHelloWorld ";
+  LTrim(s);
+  EXPECT_EQ(s, "HelloWorld ");
+}

engine/utils/engine_constants.h
6 additions, 2 deletions

@@ -5,9 +5,12 @@ constexpr const auto kLlamaEngine = "llama-cpp";
 constexpr const auto kRemote = "remote";
 constexpr const auto kLocal = "local";

-
 constexpr const auto kLlamaRepo = "llama.cpp";
-constexpr const auto kLlamaLibPath = "./engines/cortex.llamacpp";
+constexpr const auto kLlamaLibPath = "./engines/llama.cpp";
+constexpr const auto kLlamaServer = "llama-server";
+
+constexpr const auto kMenloOrg = "menloresearch";
+constexpr const auto kGgmlOrg = "ggml-org";

 // other constants
 constexpr auto static kHuggingFaceHost = "huggingface.co";

@@ -18,6 +21,7 @@ constexpr auto static kDefaultGHUserAgent = "cortexcpp";
 constexpr auto static kWindowsOs = "win";
 constexpr auto static kMacOs = "mac";
 constexpr auto static kLinuxOs = "linux";
+constexpr auto static kUbuntuOs = "ubuntu";
 constexpr auto static kUnsupportedOs = "Unsupported OS";

 constexpr auto static kCurlGetTimeout = 10;
