Add sub-command "rename"
ad-si committed Dec 7, 2024
1 parent 4392e51 commit 889a63c
Showing 5 changed files with 156 additions and 4 deletions.
64 changes: 63 additions & 1 deletion Cargo.lock

Cargo.lock is a generated file, so its diff is not rendered here.

1 change: 1 addition & 0 deletions Cargo.toml
@@ -18,6 +18,7 @@ serde_derive = "1.0.197"
serde_json = "1.0.115"
textwrap = { version = "0.16.1", features = ["terminal_size"] }
tokio = { version = "1.37.0", features = ["rt-multi-thread", "macros"] }
chrono = "0.4"
xdg = "2.5.2"
futures = "0.3.30"
bat = "0.24.0"
1 change: 1 addition & 0 deletions readme.md
@@ -84,6 +84,7 @@ Commands:
- Ollama Llama 3
- Llamafile
changelog Generate a changelog starting from a given commit using OpenAI's GPT-4o
rename Analyze and rename a file with timestamp and description
bash Use Bash development as the prompt context
c Use C development as the prompt context
cpp Use C++ development as the prompt context
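The readme entry above is terse, so here is a minimal sketch of the naming scheme the new command applies, lifted from the src/main.rs hunk further down. `build_new_name` is a hypothetical helper used only for illustration; the committed code inlines this logic and gets the description from OpenAI's gpt-4o-mini via `analyze_file_content` rather than from a literal. It assumes the `chrono` dependency added in Cargo.toml above.

```rust
use std::path::Path;

// Builds "<local timestamp>_<lowercased, underscored description>.<original extension>",
// mirroring the logic of the new `rename` handler.
fn build_new_name(original: &str, description: &str) -> String {
  let timestamp = chrono::Local::now().format("%Y-%m-%dT%H%M");
  let description = description.trim().to_lowercase().replace(' ', "_");
  let extension = Path::new(original)
    .extension()
    .and_then(|ext| ext.to_str())
    .unwrap_or("");
  format!("{}_{}.{}", timestamp, description, extension)
}

fn main() {
  // Prints something like "2024-12-07T1430_meeting_notes.txt" (the date depends on the clock).
  println!("{}", build_new_name("notes.txt", "Meeting Notes"));
}
```

So `cai rename notes.txt` would rename the file to something like `2024-12-07T1430_meeting_notes.txt`, assuming the model summarizes its contents as "Meeting Notes".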
60 changes: 60 additions & 0 deletions src/lib.rs
@@ -426,6 +426,66 @@ pub async fn generate_changelog(
  exec_tool(&Some(&model), &opts, &prompt).await
}

pub async fn analyze_file_content(
  _opts: &ExecOptions,
  file_path: &str,
) -> Result<String, Box<dyn Error + Send + Sync>> {
  let content = std::fs::read_to_string(file_path)?;

  let prompt = format!(
    "Analyze this file content and provide a very short (2-4 words) description that captures its main purpose:\n\n{}",
    content
  );

  let model = Model::Model(Provider::OpenAI, "gpt-4o-mini".to_string());

  let client = reqwest::Client::new();
  let xdg_dirs = BaseDirectories::with_prefix("cai").unwrap();
  let secrets_path = xdg_dirs
    .place_config_file("secrets.yaml")
    .expect("Couldn't create configuration directory");
  let secrets_path_str = secrets_path.to_str().unwrap();

  let config = Config::builder()
    .set_default("openai_api_key", env::var("OPENAI_API_KEY").unwrap_or_default())?
    .add_source(config::File::with_name(secrets_path_str))
    .add_source(config::Environment::with_prefix("CAI"))
    .build()
    .unwrap();

  let full_config = config.try_deserialize::<HashMap<String, String>>().unwrap();
  let http_req = get_api_request(&full_config, secrets_path_str, &model)?;

  let req_body_obj = {
    let mut map = Map::new();
    map.insert("model".to_string(), Value::String(http_req.model));
    map.insert("max_tokens".to_string(), Value::Number(100.into()));
    map.insert(
      "messages".to_string(),
      Value::Array(vec![Value::Object(Map::from_iter([
        ("role".to_string(), "user".into()),
        ("content".to_string(), Value::String(prompt)),
      ]))]),
    );
    Value::Object(map)
  };

  let resp = client
    .post(http_req.url)
    .json(&req_body_obj)
    .bearer_auth(http_req.api_key)
    .send()
    .await?;

  if !resp.status().is_success() {
    let resp_json = resp.json::<Value>().await?;
    Err(format!("API error: {}", resp_json))?
  } else {
    let ai_response = resp.json::<AiResponse>().await?;
    Ok(ai_response.choices[0].message.content.clone())
  }
}

pub async fn prompt_with_lang_cntxt(
  opts: &ExecOptions,
  prog_lang: &str,
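For reference, the request body that `analyze_file_content` assembles by hand with `Map` and `Value` is a plain chat-completions payload. Below is a minimal standalone sketch of the equivalent literal, written with serde_json's `json!` macro (serde_json is already a dependency) and assuming `get_api_request` passes the "gpt-4o-mini" model id through unchanged; `prompt` stands in for the string built in the function above.

```rust
// Illustration only: prints the JSON body equivalent to what analyze_file_content sends.
fn main() {
  let prompt = "Analyze this file content and provide a very short (2-4 words) description \
                that captures its main purpose:\n\n<file contents>";
  let body = serde_json::json!({
    "model": "gpt-4o-mini",
    "max_tokens": 100,
    "messages": [
      { "role": "user", "content": prompt }
    ]
  });
  println!("{}", serde_json::to_string_pretty(&body).unwrap());
}
```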
34 changes: 31 additions & 3 deletions src/main.rs
@@ -2,9 +2,9 @@ use std::io::stdin
use std::io::{read_to_string, IsTerminal};

use cai::{
-  exec_tool, generate_changelog, groq_models_pretty, ollama_models_pretty,
-  openai_models_pretty, prompt_with_lang_cntxt, submit_prompt, ExecOptions,
-  Model, Provider,
+  analyze_file_content, exec_tool, generate_changelog, groq_models_pretty,
+  ollama_models_pretty, openai_models_pretty, prompt_with_lang_cntxt,
+  submit_prompt, ExecOptions, Model, Provider,
};
use clap::{builder::styling, crate_version, Parser, Subcommand};
use color_print::cformat;
@@ -129,6 +129,13 @@ for all supported model ids):"
    commit_hash: String,
  },

  /// Analyze and rename a file with timestamp and description
  #[clap()]
  Rename {
    /// The file to analyze and rename
    file: String,
  },

  /////////////////////////////////////////
  //========== LANGUAGE CONTEXTS ==========
  /////////////////////////////////////////
@@ -527,6 +534,27 @@ async fn exec_with_args(args: Args, stdin: &str) {
        std::process::exit(1);
      }
    }
    Commands::Rename { file } => {
      match analyze_file_content(&opts, &file).await {
        Ok(description) => {
          let timestamp = chrono::Local::now().format("%Y-%m-%dT%H%M");
          let description = description.trim().to_lowercase().replace(' ', "_");
          let path = std::path::Path::new(&file);
          let extension = path.extension().and_then(|ext| ext.to_str()).unwrap_or("");
          let new_name = format!("{}_{}.{}", timestamp, description, extension);

          if let Err(err) = std::fs::rename(&file, &new_name) {
            eprintln!("Error renaming file: {}", err);
            std::process::exit(1);
          }
          println!("Renamed {} to {}", file, new_name);
        }
        Err(err) => {
          eprintln!("Error analyzing file: {}", err);
          std::process::exit(1);
        }
      }
    }
    /////////////////////////////////////////
    //========== LANGUAGE CONTEXTS ==========
    /////////////////////////////////////////
