Template tests #18

Merged · 5 commits · Dec 24, 2024

4 changes: 4 additions & 0 deletions Cargo.toml
@@ -31,6 +31,10 @@ anyhow = "1.0"
colog = "1.3.0"
log = "0.4.22"

[lib]
name = "pike"
path = "src/lib.rs"

[[bin]]
name = "cargo-pike"
path = "src/main.rs"
4 changes: 3 additions & 1 deletion README.md
@@ -75,7 +75,8 @@ cargo pike run --topology topology.toml --data-dir ./tmp
- `--base-http-port <BASE_HTTP_PORT>` - Base HTTP port from which the HTTP ports of individual instances will be assigned. Default: `8000`
- `--base-pg-port <BASE_PG_PORT>` - Base Postgres protocol port from which the ports of individual instances will be assigned. Default: `5432`
- `--picodata-path <BINARY_PATH>` - Path to the Picodata executable. Default: `picodata`
- `--release` - Build and run the release version of the plugin.
- `--release` - Build and run the release version of the plugin
- `--target-dir <TARGET_DIR>` - Directory for the built binaries. Default: `target`
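For illustration, the new `--target-dir` option can be combined with the flags already documented above; the directory name here is only a placeholder:

```shell
cargo pike run --topology topology.toml --data-dir ./tmp --release --target-dir ./custom-target
```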

#### config.yaml

@@ -187,6 +188,7 @@ cargo pike plugin pack
#### Available options

- `--debug` - Build and pack the debug version of the plugin
- `--target-dir <TARGET_DIR>` - Directory for the built binaries. Default: `target`
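As a sketch of the new option, packing a debug build from a non-default target directory could look like this (the directory name is only an example):

```shell
cargo pike plugin pack --debug --target-dir ./custom-target
```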

### `config apply`

7 changes: 6 additions & 1 deletion plugin_template/_Cargo.toml
@@ -5,8 +5,13 @@ edition = "2021"
publish = false

[dependencies]
serde = { version = "1", features = ["derive"] }
picodata-plugin = "24.6.1"
serde = { version = "1", features = ["derive"] }
log = "0.4"

[dev-dependencies]
picodata-pike = { git = "https://github.com/picodata/pike.git", branch = "template_tests" } # TODO: change after publish on crates.io
reqwest = { version = "0.12", features = ["blocking"] }

[build-dependencies]
liquid = "0.26"
42 changes: 25 additions & 17 deletions plugin_template/build.rs
@@ -56,17 +56,19 @@ fn main() {
.expect("invalid manifest template");

let migrations_dir = crate_dir.join("migrations");
let migrations: Vec<String> = fs::read_dir(&migrations_dir)
.unwrap()
.map(|path| {
path.unwrap()
.path()
.strip_prefix(crate_dir)
.unwrap()
.to_string_lossy()
.into()
})
.collect();
let migrations: Vec<String> = match fs::read_dir(&migrations_dir) {
Ok(dir) => dir
.map(|path| {
path.unwrap()
.path()
.strip_prefix(crate_dir)
.unwrap()
.to_string_lossy()
.into()
})
.collect(),
Err(_) => Vec::new(),
};

let pkg_version = env::var("CARGO_PKG_VERSION").unwrap();

@@ -79,9 +81,11 @@ fn main() {
let out_manifest_path = Path::new(&out_dir).join("manifest.yaml");
fs::write(&out_manifest_path, template.render(&template_ctx).unwrap()).unwrap();

let mut cp_opts = CopyOptions::new();
cp_opts.overwrite = true;
dir::copy(migrations_dir, &out_dir, &cp_opts).unwrap();
if !migrations.is_empty() {
let mut cp_opts = CopyOptions::new();
cp_opts.overwrite = true;
dir::copy(migrations_dir, &out_dir, &cp_opts).unwrap();
}

// create symbolic link
let pkg_name = env::var("CARGO_PKG_NAME").unwrap();
@@ -91,10 +95,14 @@ fn main() {
std::os::unix::fs::symlink(out_manifest_path, plugin_path.join("manifest.yaml")).unwrap();
let lib_name = format!("lib{}.{}", pkg_name, LIB_EXT);
std::os::unix::fs::symlink(out_dir.join(&lib_name), plugin_path.join(lib_name)).unwrap();
std::os::unix::fs::symlink(out_dir.join("migrations"), plugin_path.join("migrations")).unwrap();

for m in &migrations {
println!("cargo::rerun-if-changed={m}");
if !migrations.is_empty() {
std::os::unix::fs::symlink(out_dir.join("migrations"), plugin_path.join("migrations"))
.unwrap();

for m in &migrations {
println!("cargo::rerun-if-changed={m}");
}
}

println!("cargo::rerun-if-changed={MANIFEST_TEMPLATE_NAME}");
63 changes: 63 additions & 0 deletions plugin_template/tests/helpers/mod.rs
@@ -0,0 +1,63 @@
use log::info;
use std::{
    fs::{self},
    io::ErrorKind,
    path::{Path, PathBuf},
};

// TODO: check in workspaces
pub const TMP_DIR: &str = "tmp/";
pub const TOPOLOGY_PATH: &str = "topology.toml";
pub const TARGET_DIR: &str = "target";

pub struct Cluster {}

impl Drop for Cluster {
    fn drop(&mut self) {
        let data_dir = PathBuf::from(TMP_DIR.to_owned());
        pike::cluster::stop(&data_dir).unwrap();
    }
}

impl Cluster {
    pub fn new() -> Cluster {
        info!("cleaning artefacts from previous run");

        match fs::remove_file(Path::new(TMP_DIR).join("instance.log")) {
            Ok(()) => info!("Clearing logs."),
            Err(e) if e.kind() == ErrorKind::NotFound => {
                info!("instance.log not found, skipping cleanup");
            },
            Err(e) => panic!("failed to delete instance.log: {e}"),
        }

        match fs::remove_dir_all(TMP_DIR) {
            Ok(()) => info!("clearing test plugin dir."),
            Err(e) if e.kind() == ErrorKind::NotFound => {
                info!("plugin dir not found, skipping cleanup");
            },
            Err(e) => panic!("failed to delete plugin_dir: {e}"),
        }

        Cluster {}
    }
}

pub fn run_cluster() -> Cluster {
    let cluster_handle = Cluster::new();
    let data_dir = PathBuf::from(TMP_DIR.to_owned());
    let topology_path = PathBuf::from(TOPOLOGY_PATH.to_owned());
    let target_dir = PathBuf::from(TARGET_DIR.to_owned());
    pike::cluster::run(
        &topology_path,
        &data_dir,
        false,
        8000,
        &PathBuf::from("picodata".to_owned()),
        5432,
        false,
        &target_dir,
    )
    .unwrap();
    cluster_handle
}
11 changes: 11 additions & 0 deletions plugin_template/tests/metrics.rs
@@ -0,0 +1,11 @@
mod helpers;

use helpers::run_cluster;
use reqwest::blocking as req;

#[test]
fn test_metrics() {
    let _cluster_handle = run_cluster();
    let resp = req::get("http://localhost:8001/metrics").unwrap();
    assert!(resp.status().is_success());
}
6 changes: 3 additions & 3 deletions src/commands/plugin/pack.rs
@@ -28,7 +28,7 @@ const LIB_EXT: &str = "so";
#[cfg(target_os = "macos")]
const LIB_EXT: &str = "dylib";

pub fn cmd(pack_debug: bool) -> Result<()> {
pub fn cmd(pack_debug: bool, target_dir: &Path) -> Result<()> {
let root_dir = env::current_dir()?;
let plugin_name = &root_dir
.file_name()
@@ -38,10 +38,10 @@ pub fn cmd(pack_debug: bool) -> Result<()> {

let build_dir = if pack_debug {
cargo_build(BuildType::Debug).context("building release version of plugin")?;
Path::new(&root_dir).join("target").join("release")
Path::new(&root_dir).join(target_dir).join("release")
} else {
cargo_build(BuildType::Release).context("building debug version of plugin")?;
Path::new(&root_dir).join("target").join("debug")
Path::new(&root_dir).join(target_dir).join("debug")
};

let mut manifest_dir = root_dir.clone();
65 changes: 46 additions & 19 deletions src/commands/run.rs
@@ -162,29 +162,17 @@ fn kill_picodata_instances() -> Result<()> {
Ok(())
}

pub fn cmd(
#[allow(clippy::too_many_arguments)]
pub fn cluster(
topology_path: &PathBuf,
data_dir: &Path,
disable_plugin_install: bool,
base_http_port: i32,
picodata_path: &PathBuf,
base_pg_port: i32,
use_release: bool,
target_dir: &Path,
) -> Result<()> {
fs::create_dir_all(data_dir).unwrap();

{
ctrlc::set_handler(move || {
info!("{}", "received Ctrl+C. Shutting down ...");

kill_picodata_instances()
.unwrap_or_else(|e| error!("failed to kill picodata instances: {:#}", e));

exit(0);
})
.context("failed to set Ctrl+c handler")?;
}

let topology: &Topology = &toml::from_str(
&fs::read_to_string(topology_path)
.context(format!("failed to read {}", topology_path.display()))?,
@@ -196,10 +184,10 @@ pub fn cmd(

let plugins_dir = if use_release {
cargo_build(lib::BuildType::Release)?;
"target/release"
target_dir.join("release")
} else {
cargo_build(lib::BuildType::Debug)?;
"target/debug"
target_dir.join("debug")
};

let first_instance_bin_port = 3001;
@@ -222,7 +210,7 @@
.to_str()
.context("Invalid data dir path")?,
"--plugin-dir",
plugins_dir,
plugins_dir.to_str().unwrap_or("target"),
"--listen",
&format!("127.0.0.1:{bin_port}"),
"--peer",
@@ -253,15 +241,54 @@
}

if !disable_plugin_install {
enable_plugins(topology, data_dir, picodata_path, Path::new(plugins_dir))
enable_plugins(topology, data_dir, picodata_path, &plugins_dir)
.inspect_err(|_| {
kill_picodata_instances().unwrap_or_else(|e| {
error!("failed to kill picodata instances: {:#}", e);
});
})
.context("failed to enable plugins")?;
};

Ok(())
}

#[allow(clippy::too_many_arguments)]
pub fn cmd(
topology_path: &PathBuf,
data_dir: &Path,
disable_plugin_install: bool,
base_http_port: i32,
picodata_path: &PathBuf,
base_pg_port: i32,
use_release: bool,
target_dir: &Path,
) -> Result<()> {
fs::create_dir_all(data_dir).unwrap();

{
ctrlc::set_handler(move || {
info!("{}", "received Ctrl+C. Shutting down ...");

kill_picodata_instances()
.unwrap_or_else(|e| error!("failed to kill picodata instances: {:#}", e));

exit(0);
})
.context("failed to set Ctrl+c handler")?;
}

cluster(
topology_path,
data_dir,
disable_plugin_install,
base_http_port,
picodata_path,
base_pg_port,
use_release,
target_dir,
)?;

// Run in the loop until the child processes are killed
// with cargo stop or Ctrl+C signal is received
loop {
11 changes: 11 additions & 0 deletions src/lib.rs
@@ -0,0 +1,11 @@
#![allow(dead_code, clippy::missing_errors_doc, clippy::missing_panics_doc)]
mod commands;

pub mod cluster {
    pub use crate::commands::run::cluster as run;
    pub use crate::commands::stop::cmd as stop;
}

pub mod config {
    pub use crate::commands::config::apply::cmd as apply;
}
13 changes: 11 additions & 2 deletions src/main.rs
@@ -40,6 +40,9 @@ enum Command {
/// Run release version of plugin
#[arg(long)]
release: bool,
/// Change target folder
#[arg(long, value_name = "TARGET_DIR", default_value = "target")]
target_dir: PathBuf,
// TODO: add daemon flag, if true then set output logs to file and release stdin
},
/// Stop Picodata cluster
@@ -71,6 +74,9 @@ enum Plugin {
/// Pack the archive with debug version of plugin
#[arg(long)]
debug: bool,
/// Change target folder
#[arg(long, value_name = "TARGET_DIR", default_value = "target")]
target_dir: PathBuf,
},
/// Create a new Picodata plugin
New {
@@ -125,6 +131,7 @@ fn main() -> Result<()> {
picodata_path,
base_pg_port,
release,
target_dir,
} => commands::run::cmd(
&topology,
&data_dir,
@@ -133,6 +140,7 @@
&picodata_path,
base_pg_port,
release,
&target_dir,
)
.context("failed to execute Run command")?,
Command::Stop { data_dir } => {
@@ -142,8 +150,9 @@
commands::clean::cmd(&data_dir).context("failed to execute \"clean\" command")?;
}
Command::Plugin { command } => match command {
Plugin::Pack { debug } => {
commands::plugin::pack::cmd(debug).context("failed to execute \"pack\" command")?;
Plugin::Pack { debug, target_dir } => {
commands::plugin::pack::cmd(debug, &target_dir)
.context("failed to execute \"pack\" command")?;
}
Plugin::New {
path,