diff --git a/Cargo.lock b/Cargo.lock index 548c412..df4712a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -168,17 +168,6 @@ dependencies = [ "syn 2.0.52", ] -[[package]] -name = "atty" -version = "0.2.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" -dependencies = [ - "hermit-abi 0.1.19", - "libc", - "winapi", -] - [[package]] name = "autocfg" version = "1.1.0" @@ -206,6 +195,12 @@ version = "0.21.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" +[[package]] +name = "base64" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9475866fec1451be56a3c2400fd081ff546538961565ccb5b7142cbd22bc7a51" + [[package]] name = "base64ct" version = "1.6.0" @@ -379,6 +374,16 @@ version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" +[[package]] +name = "cookie" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3cd91cf61412820176e137621345ee43b3f4423e589e7ae4e50d601d93e35ef8" +dependencies = [ + "time", + "version_check", +] + [[package]] name = "crc32fast" version = "1.4.0" @@ -574,6 +579,15 @@ version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" +[[package]] +name = "form_urlencoded" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +dependencies = [ + "percent-encoding", +] + [[package]] name = "futures-channel" version = "0.3.30" @@ -696,31 +710,12 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" -[[package]] -name = "hermit-abi" -version = "0.1.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" -dependencies = [ - "libc", -] - [[package]] name = "hermit-abi" version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" -[[package]] -name = "hexplay" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2da1f4f846e8dcc1b5225caf702924816cabd855e4b46115c334ba09d5254a21" -dependencies = [ - "atty", - "termcolor", -] - [[package]] name = "http" version = "1.1.0" @@ -826,6 +821,16 @@ dependencies = [ "tracing", ] +[[package]] +name = "idna" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + [[package]] name = "indexmap" version = "2.2.5" @@ -834,6 +839,7 @@ checksum = "7b0b929d511467233429c45a44ac1dcaa21ba0f5ba11e4879e6ed28ddb4f9df4" dependencies = [ "equivalent", "hashbrown", + "serde", ] [[package]] @@ -1041,7 +1047,7 @@ version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" dependencies = [ - "hermit-abi 0.3.9", + "hermit-abi", "libc", ] @@ -1104,7 +1110,7 @@ version 
= "3.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b8fcc794035347fb64beda2d3b462595dd2753e3f268d89c5aae77e8cf2c310" dependencies = [ - "base64", + "base64 0.21.7", "serde", ] @@ -1117,6 +1123,12 @@ dependencies = [ "base64ct", ] +[[package]] +name = "percent-encoding" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" + [[package]] name = "pin-project" version = "1.1.4" @@ -1182,6 +1194,12 @@ version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" +[[package]] +name = "pretty-hex" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbc83ee4a840062f368f9096d80077a9841ec117e17e7f700df81958f1451254" + [[package]] name = "pretty_assertions" version = "1.4.0" @@ -1207,26 +1225,35 @@ version = "0.2.0" dependencies = [ "anyhow", "async-compression", + "base64 0.22.0", "bytes", "clap", + "cookie", "dirs", "fancy-regex", - "hexplay", + "futures-util", "http", "http-body-util", "hyper", "hyper-rustls", "hyper-util", + "indexmap", "moka", + "pin-project-lite", + "pretty-hex", "pretty_assertions", "rand", "rcgen", "rsa", "rustls-pemfile", + "serde", + "serde_json", "time", "tokio", "tokio-rustls", + "tokio-stream", "tokio-util", + "url", ] [[package]] @@ -1460,7 +1487,7 @@ version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f48172685e6ff52a556baa527774f61fcaa884f59daf3375c62a3f1cd2549dab" dependencies = [ - "base64", + "base64 0.21.7", "rustls-pki-types", ] @@ -1537,6 +1564,7 @@ version = "1.0.114" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c5f09b1bd632ef549eaa9f60a1f8de742bdbc698e6cee2095fc84dde5f549ae0" dependencies = [ + "indexmap", "itoa", "ryu", "serde", @@ -1687,15 +1715,6 @@ dependencies = [ "windows-sys 0.52.0", ] -[[package]] -name = "termcolor" -version = "1.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755" -dependencies = [ - "winapi-util", -] - [[package]] name = "thiserror" version = "1.0.57" @@ -1747,6 +1766,21 @@ dependencies = [ "time-core", ] +[[package]] +name = "tinyvec" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + [[package]] name = "tokio" version = "1.36.0" @@ -1787,6 +1821,18 @@ dependencies = [ "tokio", ] +[[package]] +name = "tokio-stream" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "397c988d37662c7dda6d2208364a706264bf3d6138b11d436cbac0ad38832842" +dependencies = [ + "futures-core", + "pin-project-lite", + "tokio", + "tokio-util", +] + [[package]] name = "tokio-util" version = "0.7.10" @@ -1877,12 +1923,27 @@ dependencies = [ "version_check", ] +[[package]] +name = "unicode-bidi" +version = "0.3.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75" + [[package]] name = "unicode-ident" version = 
"1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" +[[package]] +name = "unicode-normalization" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a56d1686db2308d901306f92a263857ef59ea39678a5458e7cb17f01415101f5" +dependencies = [ + "tinyvec", +] + [[package]] name = "unicode-xid" version = "0.2.4" @@ -1901,6 +1962,17 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" +[[package]] +name = "url" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", +] + [[package]] name = "utf8parse" version = "0.2.1" diff --git a/Cargo.toml b/Cargo.toml index ef99ab6..bb6ac39 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -13,25 +13,34 @@ keywords = ["proxy", "mitm", "https", "http"] [dependencies] anyhow = "1.0" async-compression = { version = "0.4.6", features = ["brotli", "gzip", "deflate", "tokio"] } +base64 = "0.22.0" bytes = "1.5" clap = { version = "4.5.1", features = ["derive"] } +cookie = "0.18.0" dirs = "5.0.1" fancy-regex = "0.13.0" -hexplay = "0.3.0" +futures-util = "0.3.30" http = "1.1.0" http-body-util = "0.1" hyper = { version = "1.0", features = ["full"] } hyper-rustls = { version = "0.26.0", default-features = false, features = ["webpki-roots", "webpki-tokio", "ring", "http1", "http2", "tls12"] } hyper-util = { version = "0.1", features = ["server-auto", "client-legacy"] } +indexmap = { version = "2.2.5", features = ["serde"] } moka = { version = "0.12.5", features = ["future"] } +pin-project-lite = "0.2.13" +pretty-hex = "0.4.1" rand = "0.8.5" rcgen = { version = "0.12.0", features = ["x509-parser"] } rsa = "0.9.6" rustls-pemfile = "2.0.0" +serde = { version = "1.0.197", features = ["derive"] } +serde_json = { version = "1.0.114", features = ["preserve_order"] } time = "0.3.34" tokio = { version = "1", features = ["rt-multi-thread", "net", "macros", "fs", "io-util", "signal"]} tokio-util = { version = "0.7", features = ["io-util", "compat"] } tokio-rustls = "0.25.0" +tokio-stream = { version = "0.1.14", default-features = false, features = ["sync"] } +url = "2.5.0" [dev-dependencies] pretty_assertions = "1.4.0" @@ -39,4 +48,4 @@ pretty_assertions = "1.4.0" [profile.release] lto = true strip = true -opt-level = "z" \ No newline at end of file +opt-level = "z" diff --git a/README.md b/README.md index 85f9964..e88f7fe 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# proxyfor +# Proxyfor [![CI](https://github.com/sigoden/proxyfor/actions/workflows/ci.yaml/badge.svg)](https://github.com/sigoden/proxyfor/actions/workflows/ci.yaml) [![Crates](https://img.shields.io/crates/v/proxyfor.svg)](https://crates.io/crates/proxyfor) @@ -10,8 +10,9 @@ A simple and portable proxy for capturing HTTP and HTTPS traffic. 
- Support forward proxy - Support reverse proxy - Support filtering -- Log traffic to markdown -- Hosted self-signed CA certificate site +- Integrate web inteface +- Integrate certificates installation webapp +- Export in Markdown, cURL, or HAR formats ## Installation @@ -52,7 +53,13 @@ $ curl http://127.0.0.1:8080/ip ![reverse-proxy](https://github.com/sigoden/proxyfor/assets/4012553/789ad353-9fe3-4bff-9f47-f19fd8dc5ce6) -# CLI +## Web Interface + +Proxyfor provides a web-based user interface that allows you to interactively inspect the HTTP traffic. All traffic is kept in memory, which means that it’s intended for small-ish samples. + +![proxyfor-webui](https://github.com/sigoden/proxyfor/assets/4012553/a88b5a11-5191-4b4d-ac61-5ff2e7a70a88) + +## Command Line ``` Usage: proxyfor [OPTIONS] [URL] @@ -64,6 +71,7 @@ Options: -l, --listen Listening ip and port address [default: 0.0.0.0:8080] -f, --filters Only inspect http(s) traffic whose `{method} {uri}` matches the regex -m, --mime-filters Only inspect http(s) traffic whose content-type matches the value + -w, --web Enable web interface -h, --help Print help -V, --version Print version ``` @@ -76,6 +84,12 @@ proxyfor -l 127.0.0.1 proxyfor -l 127.0.0.1:18080 ``` +Enable web inteface with `-w/--web` + +```sh +proxyfor --web +``` + Use `-f/--filters` to filter traffic by matching `{method} {uri}`. ```sh @@ -96,15 +110,9 @@ Pipe it to a markdown file, then view the captured traffic using your favorite e proxyfor > proxyfor.md ``` -Use grep to dump title only - -```sh -proxyfor | grep '^# [A-Z]' -``` - ## Certificates -proxyfor can decrypt encrypted traffic on the fly, as long as the client trusts proxyfor’s built-in certificate authority. Usually this means that the proxyfor CA certificate has to be installed on the client device. +Proxyfor can decrypt encrypted traffic on the fly, as long as the client trusts proxyfor’s built-in certificate authority. Usually this means that the proxyfor CA certificate has to be installed on the client device. By far the easiest way to [install the proxyfor CA certificate](./assets/install-certificate.md) is to use the built-in certificate installation app. To do this, start proxyfor and configure your target device with the correct proxy settings. @@ -116,6 +124,6 @@ Now start a browser on the device, and visit the magic domain [proxyfor.local](h Copyright (c) 2024-∞ proxyfor-developers. -proxyfor is made available under the terms of either the MIT License or the Apache License 2.0, at your option. +Proxyfor is made available under the terms of either the MIT License or the Apache License 2.0, at your option. See the LICENSE-APACHE and LICENSE-MIT files for license details. \ No newline at end of file diff --git a/assets/index.html b/assets/index.html new file mode 100644 index 0000000..bbc7a2b --- /dev/null +++ b/assets/index.html @@ -0,0 +1,663 @@ + + + + + + + proxyfor + + + + + + + + + +
[assets/index.html: 663 added lines of web-UI markup and script were not preserved in this extract; the surviving text indicates a page titled "proxyfor" with a traffic table (Path / Method / Status columns) and Request / Response detail panes.]
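That table is driven by the `/__proxyfor__/subscribe` endpoint added in `src/server.rs` below, which streams one JSON object per captured traffic entry as NDJSON (see `ndjson_frame` and `state::Head`). A minimal, hypothetical sketch of parsing such a stream line by line; the struct here merely mirrors the fields `Head` serializes and is not part of this patch:

```rust
use serde::Deserialize;

// Mirrors the fields serialized by `state::Head` (id, method, uri, status).
#[derive(Debug, Deserialize)]
struct HeadLine {
    id: usize,
    method: String,
    uri: String,
    status: Option<u16>,
}

fn main() -> serde_json::Result<()> {
    // One JSON object per line, as produced by `ndjson_frame`.
    let ndjson = "{\"id\":1,\"method\":\"GET\",\"uri\":\"http://example.com/\",\"status\":200}\n";
    for line in ndjson.lines() {
        let head: HeadLine = serde_json::from_str(line)?;
        println!("#{} {} {} {:?}", head.id, head.method, head.uri, head.status);
    }
    Ok(())
}
```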
+ + + + \ No newline at end of file diff --git a/src/cli.rs b/src/cli.rs index 0328d99..d87704b 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -12,6 +12,9 @@ pub struct Cli { /// Only inspect http(s) traffic whose content-type matches the value #[clap(short = 'm', long, value_name = "VALUE")] pub mime_filters: Vec, + /// Enable web interface + #[clap(short = 'w', long)] + pub web: bool, /// Reverse proxy url #[clap(value_name = "URL")] pub reverse_proxy_url: Option, diff --git a/src/main.rs b/src/main.rs index d7f5bcd..7bb3d15 100644 --- a/src/main.rs +++ b/src/main.rs @@ -4,8 +4,15 @@ mod filter; mod recorder; mod rewind; mod server; +mod state; +mod traffic; -use crate::{certificate_authority::load_ca, cli::Cli, filter::parse_filters, server::Server}; +use crate::{ + certificate_authority::load_ca, + cli::Cli, + filter::parse_filters, + server::{Server, WEB_PREFIX}, +}; use anyhow::{anyhow, Result}; use clap::Parser; @@ -37,18 +44,24 @@ async fn main() -> Result<()> { }); let filters = parse_filters(&cli.filters)?; let mime_filters: Vec = cli.mime_filters.iter().map(|v| v.to_lowercase()).collect(); - let no_filter = filters.is_empty() && mime_filters.is_empty(); let server = Arc::new(Server { reverse_proxy_url, ca, - no_filter, filters, mime_filters, + state: state::State::new(), + web: cli.web, running: running.clone(), }); let handle = run(server, ip, port).await?; let running = Arc::new(AtomicBool::new(true)); - eprintln!("Listening on {}:{}", ip, port); + eprintln!("HTTP(S) proxy listening at {}:{}", ip, port); + if cli.web { + eprintln!( + "Web inteface accessible at http://{}:{}{}/", + ip, port, WEB_PREFIX + ); + } tokio::select! { ret = handle => { if let Err(e) = ret { @@ -86,16 +99,9 @@ where let hyper_service = service_fn(move |request: hyper::Request| handle.clone().handle(request)); - let ret = Builder::new(TokioExecutor::new()) + let _ = Builder::new(TokioExecutor::new()) .serve_connection_with_upgrades(stream, hyper_service) .await; - - if let Err(err) = ret { - match err.downcast_ref::() { - Some(err) if err.kind() == std::io::ErrorKind::UnexpectedEof => {} - _ => eprintln!("Serving connection {}", err), - } - } } fn parse_addr(value: &str) -> Option<(IpAddr, u16)> { diff --git a/src/recorder.rs b/src/recorder.rs index b1d890c..d1575ba 100644 --- a/src/recorder.rs +++ b/src/recorder.rs @@ -1,220 +1,194 @@ -use bytes::Bytes; -use http::{HeaderMap, Method, StatusCode}; +use crate::traffic::{Body, Header, Headers, Traffic}; -const HEX_VIEW_SIZE: usize = 320; +use http::{HeaderMap, StatusCode, Version}; #[derive(Debug)] pub(crate) struct Recorder { - path: String, - method: Method, - req_headers: Option, - req_body: Option, - res_status: Option, - res_headers: Option, - res_body: Option, - error: Option, + traffic: Traffic, + valid: bool, + print_mode: PrintMode, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub(crate) enum PrintMode { + Oneline, + Markdown, } impl Recorder { - pub fn new(path: String, method: Method) -> Self { + pub(crate) fn new(uri: &str, method: &str) -> Self { + let traffic = Traffic::new(uri, method); Self { - path, - method, - req_headers: None, - req_body: None, - res_status: None, - res_headers: None, - res_body: None, - error: None, + traffic, + valid: true, + print_mode: PrintMode::Markdown, } } - // Remove the unused functions - pub fn set_req_headers(mut self, headers: HeaderMap) -> Self { - self.req_headers = Some(headers); + pub(crate) fn set_req_version(&mut self, http_version: &Version) -> &mut Self { + self.traffic.req_version = 
Some(format!("{http_version:?}")); self } - pub fn set_req_body(mut self, body: Bytes) -> Self { + pub(crate) fn set_req_headers(&mut self, headers: &HeaderMap) -> &mut Self { + self.traffic.req_headers = Some(convert_headers(headers)); + self + } + + pub(crate) fn set_req_body(&mut self, body: &[u8]) -> &mut Self { if body.is_empty() { self } else { - self.req_body = Some(body); + self.traffic.req_body = Some(Body::new(body)); self } } - pub fn set_res_status(mut self, status: StatusCode) -> Self { - self.res_status = Some(status); + pub(crate) fn set_res_status(&mut self, status: StatusCode) -> &mut Self { + self.traffic.status = Some(status.into()); self } - pub fn set_res_headers(mut self, headers: HeaderMap) -> Self { - self.res_headers = Some(headers); + pub(crate) fn set_res_version(&mut self, http_version: &Version) -> &mut Self { + self.traffic.res_version = Some(format!("{http_version:?}")); self } - pub fn set_res_body(mut self, body: Bytes) -> Self { + pub(crate) fn set_res_headers(&mut self, headers: &HeaderMap) -> &mut Self { + self.traffic.res_headers = Some(convert_headers(headers)); + self + } + + pub(crate) fn set_res_body(&mut self, body: &[u8]) -> &mut Self { if body.is_empty() { self } else { - self.res_body = Some(body); + self.traffic.res_body = Some(Body::new(body)); self } } - pub fn set_error(mut self, error: String) -> Self { - self.error = Some(error); + pub(crate) fn add_error(&mut self, error: String) -> &mut Self { + self.traffic.add_error(error); self } - pub fn render(&self) -> String { - let mut lines: Vec = vec![]; - lines.push(format!("\n# {} {}", self.method, self.path)); - - if let Some(headers) = &self.req_headers { - lines.push(render_header("REQUEST HEADERS", headers)); - } - - if let Some(body) = &self.req_body { - lines.push(render_body("REQUEST BODY", body, &self.req_headers)); - } - - if let Some(status) = &self.res_status { - lines.push(format!("RESPONSE STATUS: {status}")); - } - - if let Some(headers) = &self.res_headers { - lines.push(render_header("RESPONSE HEADERS", headers)); - } + pub(crate) fn check_match(&mut self, is_match: bool) -> &mut Self { + self.valid = self.valid && is_match; + self + } - if let Some(body) = &self.res_body { - lines.push(render_body("RESPONSE BODY", body, &self.res_headers)); - } + pub(crate) fn change_print_mode(&mut self, print_mode: PrintMode) -> &mut Self { + self.print_mode = print_mode; + self + } - if let Some(error) = &self.error { - lines.push(render_error(error)); - } - lines.join("\n\n") + pub(crate) fn is_valid(&self) -> bool { + self.valid } - pub fn print(&self) { - println!("{}", self.render()); + pub(crate) fn take_traffic(self) -> Traffic { + self.traffic } -} -fn render_header(title: &str, headers: &HeaderMap) -> String { - let value = headers - .iter() - .map(|(key, value)| format!("{key}: {}", value.to_str().unwrap_or_default())) - .collect::>() - .join("\n"); - format!( - r#"{title} -``` -{value} -```"# - ) + pub(crate) fn print(&self) { + match self.print_mode { + PrintMode::Oneline => { + println!("# {}", self.traffic.oneline()); + } + PrintMode::Markdown => { + println!("{}", self.traffic.markdown(true)); + } + } + } } -fn render_body(title: &str, body: &Bytes, headers: &Option) -> String { - let (body, is_utf8) = render_bytes(body); - let lang = recognize_lang(is_utf8, headers); - format!( - r#"{title} -```{lang} -{body} -```"# - ) +#[derive(Debug)] +pub(crate) struct ErrorRecorder { + recorder: Recorder, } -fn render_error(error: &str) -> String { - if error.contains('\n') { - format!( 
- r#"ERROR -``` -{} -```"#, - error - ) - } else { - format!("ERROR: {}", error) +impl ErrorRecorder { + pub(crate) fn new(recorder: Recorder) -> Self { + Self { recorder } } -} -fn render_bytes(data: &[u8]) -> (String, bool) { - if let Ok(value) = std::str::from_utf8(data) { - (value.to_string(), true) - } else if data.len() > HEX_VIEW_SIZE * 2 { - let value = format!( - "{}\n......\n{}", - hexplay::HexView::new(&data[0..HEX_VIEW_SIZE]), - hexplay::HexView::new(&data[data.len() - HEX_VIEW_SIZE..]) - ); - (value, false) - } else { - let value = hexplay::HexView::new(data).to_string(); - (value, false) + pub(crate) fn add_error(&mut self, error: String) -> &mut Self { + self.recorder.add_error(error); + self } } -fn recognize_lang(is_utf8: bool, headers: &Option) -> &str { - if !is_utf8 { - return ""; +impl Drop for ErrorRecorder { + fn drop(&mut self) { + if self.recorder.is_valid() { + self.recorder.print(); + } } - headers - .as_ref() - .and_then(|v| v.get("content-type")) - .and_then(|v| v.to_str().ok()) - .map(md_lang) - .unwrap_or_default() } -fn md_lang(content_type: &str) -> &str { - let content_type = match content_type.split_once(';') { - Some((v, _)) => v.trim(), - None => content_type, - }; - if let Some(value) = content_type - .strip_prefix("text/") - .or_else(|| content_type.strip_prefix("application/")) - { - if let Some(value) = value.strip_prefix("x-") { - value - } else { - value - } - } else { - "" - } +fn convert_headers(headers: &HeaderMap) -> Headers { + headers + .iter() + .map(|(key, value)| Header::new(key.as_str(), value.to_str().unwrap_or_default())) + .collect() } #[cfg(test)] mod tests { use super::*; - use http::{header::CONTENT_TYPE, HeaderValue}; + use http::{HeaderName, HeaderValue, Method}; use pretty_assertions::assert_eq; - #[test] - fn test_render() { - let mut req_readers = HeaderMap::new(); - req_readers.insert(CONTENT_TYPE, HeaderValue::from_static("plain/text")); - let mut res_headers = HeaderMap::new(); - res_headers.insert(CONTENT_TYPE, HeaderValue::from_static("application/json")); - let render = Recorder::new("http://example.com/".to_string(), Method::GET) - .set_req_headers(req_readers) - .set_req_body(Bytes::from("req_body")) + fn create_recorder1() -> Recorder { + let mut recorder = Recorder::new("http://example.com/?q1=3", Method::PUT.as_str()); + recorder + .set_req_headers(&create_headers(&[ + ("content-type", "plain/text"), + ("cookie", "c1=1; c2=2"), + ("cookie", "c3=3"), + ])) + .set_req_body("req_body".as_bytes()) .set_res_status(StatusCode::OK) - .set_res_headers(res_headers) - .set_res_body(Bytes::from(r#"{"message":"OK"}"#)) - .set_error("error".to_string()); + .set_res_headers(&create_headers(&[ + ("content-type", "application/json; charset=utf-8"), + ( + "set-cookie", + "sc1=1; path=/; domain=example.com; expires=Wed, 21 Oct 2015 07:28:00 GMT", + ), + ( + "set-cookie", + "sc2=2; path=/; domain=example.com; expires=Wed, 21 Oct 2015 07:28:00 GMT", + ), + ])) + .set_res_body(r#"{"message":"OK"}"#.as_bytes()) + .add_error("error".to_string()); + recorder + } + + fn create_headers(list: &[(&'static str, &'static str)]) -> HeaderMap { + let mut headers = HeaderMap::new(); + for (key, value) in list { + headers.append( + HeaderName::from_static(key), + HeaderValue::from_static(value), + ); + } + headers + } + + #[test] + fn test_recorder() { + let recorder = create_recorder1(); let expect = r#" -# GET http://example.com/ +# PUT http://example.com/?q1=3 200 REQUEST HEADERS ``` content-type: plain/text +cookie: c1=1; c2=2 +cookie: c3=3 ``` 
REQUEST BODY @@ -222,11 +196,11 @@ REQUEST BODY req_body ``` -RESPONSE STATUS: 200 OK - RESPONSE HEADERS ``` -content-type: application/json +content-type: application/json; charset=utf-8 +set-cookie: sc1=1; path=/; domain=example.com; expires=Wed, 21 Oct 2015 07:28:00 GMT +set-cookie: sc2=2; path=/; domain=example.com; expires=Wed, 21 Oct 2015 07:28:00 GMT ``` RESPONSE BODY @@ -235,16 +209,6 @@ RESPONSE BODY ``` ERROR: error"#; - assert_eq!(expect, render.render()); - } - - #[test] - fn test_md_lang() { - assert_eq!(md_lang("application/json"), "json"); - assert_eq!(md_lang("application/xml"), "xml"); - assert_eq!(md_lang("application/octet-stream"), "octet-stream"); - assert_eq!(md_lang("application/javascript"), "javascript"); - assert_eq!(md_lang("text/x-rust"), "rust"); - assert_eq!(md_lang("text/css"), "css"); + assert_eq!(recorder.traffic.markdown(true), expect); } } diff --git a/src/server.rs b/src/server.rs index 6c3de3d..f1aee3e 100644 --- a/src/server.rs +++ b/src/server.rs @@ -1,21 +1,26 @@ use crate::{ certificate_authority::CertificateAuthority, filter::{is_match_title, is_match_type, Filter}, - recorder::Recorder, + recorder::{ErrorRecorder, PrintMode, Recorder}, rewind::Rewind, + state::State, }; -use anyhow::Result; +use anyhow::{anyhow, Result}; use async_compression::tokio::write::{BrotliDecoder, DeflateDecoder, GzipDecoder}; use bytes::Bytes; +use futures_util::{stream, StreamExt, TryStreamExt}; use http::{ - header::{CONTENT_DISPOSITION, CONTENT_LENGTH, CONTENT_TYPE}, + header::{ + CACHE_CONTROL, CONNECTION, CONTENT_DISPOSITION, CONTENT_LENGTH, CONTENT_TYPE, + PROXY_AUTHORIZATION, + }, uri::{Authority, Scheme}, HeaderValue, }; -use http_body_util::{combinators::BoxBody, BodyExt, Empty, Full}; +use http_body_util::{combinators::BoxBody, BodyExt, Full, StreamBody}; use hyper::{ - body::Incoming, + body::{Frame, Incoming}, header::{CONTENT_ENCODING, HOST}, service::service_fn, Method, StatusCode, Uri, @@ -25,26 +30,30 @@ use hyper_util::{ client::legacy::{connect::HttpConnector, Client}, rt::{TokioExecutor, TokioIo}, }; +use serde::Serialize; use std::sync::{atomic::AtomicBool, Arc}; -use std::{convert::Infallible, fmt::Display}; use tokio::{ io::{AsyncRead, AsyncReadExt, AsyncWrite, AsyncWriteExt}, net::TcpStream, }; use tokio_rustls::TlsAcceptor; +use tokio_stream::wrappers::BroadcastStream; -const CERT_SITE_INDEX: &[u8] = include_bytes!("../assets/install-certificate.html"); -const CERT_SITE_URL: &str = "http://proxyfor.local/"; +const WEB_INDEX: &str = include_str!("../assets/index.html"); +const CERT_INDEX: &str = include_str!("../assets/install-certificate.html"); +const CERT_PREFIX: &str = "http://proxyfor.local/"; +pub(crate) const WEB_PREFIX: &str = "/__proxyfor__"; type Request = hyper::Request; -type Response = hyper::Response>; +type Response = hyper::Response>; pub(crate) struct Server { pub(crate) reverse_proxy_url: Option, pub(crate) ca: CertificateAuthority, pub(crate) filters: Vec, pub(crate) mime_filters: Vec, - pub(crate) no_filter: bool, + pub(crate) state: State, + pub(crate) web: bool, #[allow(unused)] pub(crate) running: Arc, } @@ -53,67 +62,105 @@ impl Server { pub(crate) async fn handle(self: Arc, req: Request) -> Result { let mut res = Response::default(); - let path = req.uri().to_string(); + let req_uri = req.uri().to_string(); let req_headers = req.headers().clone(); let method = req.method().clone(); - let url = if !path.starts_with('/') { - path.clone() + let url = if !req_uri.starts_with('/') || req_uri.starts_with(WEB_PREFIX) { + 
req_uri.clone() } else if let Some(base_url) = &self.reverse_proxy_url { - if path == "/" { + if req_uri == "/" { base_url.clone() } else { - format!("{base_url}{path}") + format!("{base_url}{req_uri}") } } else { - Recorder::new(path.clone(), method.clone()) - .set_error("No reserver proxy url".to_string()) - .print(); - *res.status_mut() = StatusCode::INTERNAL_SERVER_ERROR; + let mut recorder = Recorder::new(&req_uri, method.as_str()); + if self.web { + recorder.change_print_mode(PrintMode::Oneline); + } + self.internal_server_error(&mut res, "No reserver proxy url", recorder); return Ok(res); }; - if let Some(path) = url.strip_prefix(CERT_SITE_URL) { - return match self.handle_cert_site(&mut res, path).await { - Ok(()) => Ok(res), - Err(err) => { - let body = err.to_string(); - let body = Bytes::from(body); - *res.body_mut() = Full::new(body).boxed(); - *res.status_mut() = StatusCode::INTERNAL_SERVER_ERROR; - Ok(res) - } + let path = match url.split_once('?') { + Some((v, _)) => v, + None => url.as_str(), + }; + + if let Some(path) = path.strip_prefix(CERT_PREFIX) { + if let Err(err) = self.handle_cert_index(&mut res, path).await { + *res.status_mut() = StatusCode::INTERNAL_SERVER_ERROR; + set_res_body(&mut res, err.to_string()); + }; + return Ok(res); + } else if let Some(path) = path.strip_prefix(WEB_PREFIX) { + if !self.web { + *res.status_mut() = StatusCode::BAD_REQUEST; + set_res_body( + &mut res, + "The web interface is disabled. To enable it, run the command with the `--web` flag.".to_string(), + ); + return Ok(res); + } + if method != Method::GET { + *res.status_mut() = StatusCode::METHOD_NOT_ALLOWED; + return Ok(res); + } + set_cors_header(&mut res); + let ret = if path.is_empty() || path == "/" { + self.handle_web_index(&mut res).await + } else if path == "/subscribe" { + self.handle_subscribe(&mut res).await + } else if path == "/traffics" { + self.handle_list_traffics(&mut res).await + } else if let Some(id) = path.strip_prefix("/traffic/") { + let query = req.uri().query().unwrap_or_default(); + self.handle_get_traffic(&mut res, id, query).await + } else { + *res.status_mut() = StatusCode::NOT_FOUND; + return Ok(res); }; + if let Err(err) = ret { + *res.status_mut() = StatusCode::INTERNAL_SERVER_ERROR; + set_res_body(&mut res, err.to_string()); + } + return Ok(res); + } + + let mut recorder = Recorder::new(&req_uri, method.as_str()); + if self.web { + recorder.change_print_mode(PrintMode::Oneline); } - let mut should_print = is_match_title(&self.filters, &format!("{method} {url}")); + let req_version = req.version(); + recorder.set_req_version(&req_version); + + recorder.check_match(is_match_title(&self.filters, &format!("{method} {url}"))); if method == Method::CONNECT { - let recorder = if should_print && !self.no_filter { - Some(Recorder::new(path.clone(), method.clone())) - } else { - None - }; + recorder.check_match(!self.filters.is_empty() || !self.mime_filters.is_empty()); return self.handle_connect(req, recorder); } - let mut recorder = Recorder::new(path.clone(), method.clone()); - - recorder = recorder.set_req_headers(req_headers.clone()); + recorder.set_req_headers(&req_headers); let req_body = match req.collect().await { Ok(v) => v.to_bytes(), Err(err) => { - internal_server_error(&mut res, err, should_print, recorder); + self.internal_server_error(&mut res, err, recorder); return Ok(res); } }; - recorder = recorder.set_req_body(req_body.clone()); + recorder.set_req_body(&req_body); - let mut builder = hyper::Request::builder().uri(&url).method(method.clone()); 
+ let mut builder = hyper::Request::builder() + .uri(&url) + .method(method.clone()) + .version(req_version); for (key, value) in req_headers.iter() { - if key == HOST { + if matches!(key, &HOST | &CONNECTION | &PROXY_AUTHORIZATION) { continue; } builder = builder.header(key.clone(), value.clone()); @@ -122,7 +169,7 @@ impl Server { let proxy_req = match builder.body(Full::new(req_body)) { Ok(v) => v, Err(err) => { - internal_server_error(&mut res, err, should_print, recorder); + self.internal_server_error(&mut res, err, recorder); return Ok(res); } }; @@ -134,7 +181,7 @@ impl Server { HttpsConnectorBuilder::new() .with_webpki_roots() .https_only() - .enable_http1() + .enable_all_versions() .build(), ) .request(proxy_req) @@ -145,7 +192,7 @@ impl Server { let proxy_res = match proxy_res { Ok(v) => v, Err(err) => { - internal_server_error(&mut res, err, should_print, recorder); + self.internal_server_error(&mut res, err, recorder); return Ok(res); } }; @@ -153,14 +200,12 @@ impl Server { let proxy_res_status = proxy_res.status(); let proxy_res_headers = proxy_res.headers().clone(); - if should_print { - if let Some(header_value) = proxy_res_headers - .get(CONTENT_TYPE) - .and_then(|v| v.to_str().ok()) - { - should_print = is_match_type(&self.mime_filters, header_value) - } - }; + if let Some(header_value) = proxy_res_headers + .get(CONTENT_TYPE) + .and_then(|v| v.to_str().ok()) + { + recorder.check_match(is_match_type(&self.mime_filters, header_value)); + } *res.status_mut() = proxy_res_status; let mut encoding = ""; @@ -173,59 +218,49 @@ impl Server { res.headers_mut().insert(key.clone(), value.clone()); } + recorder + .set_res_status(proxy_res_status) + .set_res_version(&proxy_res.version()) + .set_res_headers(&proxy_res_headers); + let proxy_res_body = match proxy_res.collect().await { Ok(v) => v.to_bytes(), Err(err) => { - internal_server_error(&mut res, err, should_print, recorder); + self.internal_server_error(&mut res, err, recorder); return Ok(res); } }; - if should_print { - recorder = recorder - .set_res_status(proxy_res_status) - .set_res_headers(proxy_res_headers.clone()); - - if !proxy_res_body.is_empty() { - let decompress_body = decompress(&proxy_res_body, encoding) - .await - .unwrap_or_else(|| proxy_res_body.to_vec()); - recorder = recorder.set_res_body(Bytes::from(decompress_body)); - } - recorder.print(); + if !proxy_res_body.is_empty() && recorder.is_valid() { + let decompress_body = decompress(&proxy_res_body, encoding) + .await + .unwrap_or_else(|| proxy_res_body.to_vec()); + recorder.set_res_body(&decompress_body); } - *res.body_mut() = Full::new(proxy_res_body).boxed(); + self.take_recorder(recorder); + + *res.body_mut() = Full::new(proxy_res_body) + .map_err(|err| anyhow!("{err}")) + .boxed(); Ok(res) } - async fn handle_cert_site(self: Arc, res: &mut Response, path: &str) -> Result<()> { + async fn handle_cert_index(&self, res: &mut Response, path: &str) -> Result<()> { if path.is_empty() { - let body = Bytes::from_static(CERT_SITE_INDEX); - let body_size = body.len(); - *res.body_mut() = Full::new(body).boxed(); + set_res_body(res, CERT_INDEX.to_string()); res.headers_mut().insert( CONTENT_TYPE, HeaderValue::from_static("text/html; charset=UTF-8"), ); - res.headers_mut().insert( - CONTENT_LENGTH, - HeaderValue::from_str(&body_size.to_string())?, - ); } else if path == "proxyfor-ca-cert.cer" || path == "proxyfor-ca-cert.pem" { let body = self.ca.ca_cert_pem(); - let body = Bytes::from(body); - let body_size = body.len(); - *res.body_mut() = Full::new(body).boxed(); 
+ set_res_body(res, body); res.headers_mut().insert( CONTENT_TYPE, HeaderValue::from_static("application/x-x509-ca-cert"), ); - res.headers_mut().insert( - CONTENT_LENGTH, - HeaderValue::from_str(&body_size.to_string())?, - ); res.headers_mut().insert( CONTENT_DISPOSITION, HeaderValue::from_str(&format!(r#"attachment; filename="{path}""#))?, @@ -236,12 +271,86 @@ impl Server { Ok(()) } + async fn handle_web_index(&self, res: &mut Response) -> Result<()> { + set_res_body(res, WEB_INDEX.to_string()); + res.headers_mut().insert( + CONTENT_TYPE, + HeaderValue::from_static("text/html; charset=UTF-8"), + ); + res.headers_mut() + .insert(CACHE_CONTROL, HeaderValue::from_static("no-cache")); + Ok(()) + } + + async fn handle_subscribe(&self, res: &mut Response) -> Result<()> { + let (init_data, receiver) = (self.state.list(), self.state.subscribe()); + let stream = BroadcastStream::new(receiver); + let stream = stream + .map_ok(|head| ndjson_frame(&head)) + .map_err(|err| anyhow!("{err}")); + let body = if init_data.is_empty() { + BodyExt::boxed(StreamBody::new(stream)) + } else { + let init_stream = + stream::iter(init_data.into_iter().map(|head| Ok(ndjson_frame(&head)))); + let combined_stream = init_stream.chain(stream); + BodyExt::boxed(StreamBody::new(combined_stream)) + }; + *res.body_mut() = body; + res.headers_mut().insert( + CONTENT_TYPE, + HeaderValue::from_static("application/x-ndjson; charset=UTF-8"), + ); + res.headers_mut() + .insert(CACHE_CONTROL, HeaderValue::from_static("no-cache")); + Ok(()) + } + + async fn handle_list_traffics(&self, res: &mut Response) -> Result<()> { + set_res_body(res, serde_json::to_string_pretty(&self.state.list())?); + res.headers_mut().insert( + CONTENT_TYPE, + HeaderValue::from_static("application/json; charset=UTF-8"), + ); + res.headers_mut() + .insert(CACHE_CONTROL, HeaderValue::from_static("no-cache")); + Ok(()) + } + + async fn handle_get_traffic(&self, res: &mut Response, id: &str, query: &str) -> Result<()> { + match id.parse().ok().and_then(|id| self.state.get_traffic(id)) { + Some(traffic) => { + match query { + "markdown" | "curl" | "har" | "res-body" => { + let (data, mime) = traffic.export(query)?; + set_res_body(res, data); + res.headers_mut() + .insert(CONTENT_TYPE, HeaderValue::from_str(mime)?); + } + _ => { + set_res_body(res, serde_json::to_string_pretty(&traffic)?); + res.headers_mut().insert( + CONTENT_TYPE, + HeaderValue::from_static("application/json; charset=UTF-8"), + ); + } + } + res.headers_mut() + .insert(CACHE_CONTROL, HeaderValue::from_static("no-cache")); + } + None => { + *res.status_mut() = StatusCode::NOT_FOUND; + } + } + Ok(()) + } + fn handle_connect( self: Arc, mut req: Request, - mut recorder: Option, + recorder: Recorder, ) -> Result { - let mut res = Response::new(BoxBody::new(Empty::new())); + let mut res = Response::default(); let authority = match req.uri().authority().cloned() { Some(authority) => authority, None => { @@ -250,20 +359,17 @@ impl Server { } }; let fut = async move { + let mut recorder = ErrorRecorder::new(recorder); match hyper::upgrade::on(&mut req).await { Ok(upgraded) => { - let mut record_error = |err: String| { - if let Some(recorder) = recorder.take() { - recorder.set_error(err).print() - } - }; let mut upgraded = TokioIo::new(upgraded); let mut buffer = [0; 4]; let bytes_read = match upgraded.read_exact(&mut buffer).await { Ok(bytes_read) => bytes_read, Err(e) => { - record_error(format!("Failed to read from upgraded connection: {e}")); + recorder + .add_error(format!("Failed to read from 
upgraded connection: {e}")); return; } }; @@ -278,13 +384,13 @@ impl Server { .serve_connect_stream(upgraded, Scheme::HTTP, authority) .await { - record_error(format!("Websocket connect error: {e}")); + recorder.add_error(format!("Websocket connect error: {e}")); } } else if buffer[..2] == *b"\x16\x03" { let server_config = match self.ca.gen_server_config(&authority).await { Ok(server_config) => server_config, Err(e) => { - record_error(format!("Failed to build server config: {e}")); + recorder.add_error(format!("Failed to build server config: {e}")); return; } }; @@ -292,7 +398,8 @@ impl Server { let stream = match TlsAcceptor::from(server_config).accept(upgraded).await { Ok(stream) => stream, Err(e) => { - record_error(format!("Failed to establish TLS Connection: {e}")); + recorder + .add_error(format!("Failed to establish TLS Connection: {e}")); return; } }; @@ -302,11 +409,11 @@ impl Server { .await { if !e.to_string().starts_with("error shutting down connection") { - record_error(format!("HTTPS connect error: {e}")); + recorder.add_error(format!("HTTPS connect error: {e}")); } } } else { - record_error(format!( + recorder.add_error(format!( "Unknown protocol, read '{:02X?}' from upgraded connection", &buffer[..bytes_read] )); @@ -314,7 +421,8 @@ impl Server { let mut server = match TcpStream::connect(authority.as_str()).await { Ok(server) => server, Err(e) => { - record_error(format! {"Failed to connect to {authority}: {e}"}); + recorder + .add_error(format! {"Failed to connect to {authority}: {e}"}); return; } }; @@ -322,7 +430,7 @@ impl Server { if let Err(e) = tokio::io::copy_bidirectional(&mut upgraded, &mut server).await { - record_error(format!( + recorder.add_error(format!( "Failed to tunnel unknown protocol to {}: {}", authority, e )); @@ -330,15 +438,13 @@ impl Server { } } Err(e) => { - if let Some(recorder) = recorder.take() { - recorder.set_error(format!("Upgrade error: {e}")).print(); - } + recorder.add_error(format!("Upgrade error: {e}")); } }; }; tokio::spawn(fut); - Ok(Response::new(BoxBody::new(Empty::new()))) + Ok(Response::default()) } async fn serve_connect_stream( @@ -372,18 +478,55 @@ impl Server { .serve_connection_with_upgrades(TokioIo::new(stream), service) .await } + + fn take_recorder(&self, recorder: Recorder) { + if recorder.is_valid() { + recorder.print(); + self.state.add_traffic(recorder.take_traffic()) + } + } + + fn internal_server_error( + &self, + res: &mut Response, + error: T, + mut recorder: Recorder, + ) { + recorder.add_error(error.to_string()); + self.take_recorder(recorder); + *res.status_mut() = StatusCode::INTERNAL_SERVER_ERROR; + } } -fn internal_server_error( - res: &mut Response, - err: T, - should_print: bool, - recorder: Recorder, -) { - if should_print { - recorder.set_error(err.to_string()).print(); +fn set_res_body(res: &mut Response, body: String) { + let body = Bytes::from(body); + if let Ok(header_value) = HeaderValue::from_str(&body.len().to_string()) { + res.headers_mut().insert(CONTENT_LENGTH, header_value); } - *res.status_mut() = StatusCode::INTERNAL_SERVER_ERROR; + *res.body_mut() = Full::new(body).map_err(|err| anyhow!("{err}")).boxed(); +} + +fn set_cors_header(res: &mut Response) { + res.headers_mut().insert( + hyper::header::ACCESS_CONTROL_ALLOW_ORIGIN, + hyper::header::HeaderValue::from_static("*"), + ); + res.headers_mut().insert( + hyper::header::ACCESS_CONTROL_ALLOW_METHODS, + hyper::header::HeaderValue::from_static("GET,POST,PUT,PATCH,DELETE"), + ); + res.headers_mut().insert( + 
hyper::header::ACCESS_CONTROL_ALLOW_HEADERS, + hyper::header::HeaderValue::from_static("Content-Type,Authorization"), + ); +} + +fn ndjson_frame(head: &T) -> Frame { + let data = match serde_json::to_string(head) { + Ok(data) => format!("{data}\n"), + Err(_) => String::new(), + }; + Frame::data(Bytes::from(data)) } async fn decompress(data: &Bytes, encoding: &str) -> Option> { diff --git a/src/state.rs b/src/state.rs new file mode 100644 index 0000000..9ecb585 --- /dev/null +++ b/src/state.rs @@ -0,0 +1,71 @@ +use crate::traffic::Traffic; + +use indexmap::IndexMap; +use serde::Serialize; +use std::sync::Mutex; +use tokio::sync::broadcast; + +#[derive(Debug)] +pub(crate) struct State { + entries: Mutex>, + tx: broadcast::Sender, +} + +#[derive(Debug, Clone, Serialize)] +pub(crate) struct Head { + id: usize, + method: String, + uri: String, + status: Option, +} + +impl State { + pub(crate) fn new() -> Self { + let (tx, _) = broadcast::channel(16); + Self { + entries: Mutex::new(IndexMap::new()), + tx, + } + } + + pub(crate) fn add_traffic(&self, traffic: Traffic) { + let Ok(mut entries) = self.entries.lock() else { + return; + }; + let id = entries.len() + 1; + let head = Head::new(id, &traffic); + entries.insert(id, traffic); + let _ = self.tx.send(head); + } + + pub(crate) fn get_traffic(&self, id: usize) -> Option { + let entries = self.entries.lock().ok()?; + entries.get(&id).cloned() + } + + pub(crate) fn subscribe(&self) -> broadcast::Receiver { + self.tx.subscribe() + } + + pub(crate) fn list(&self) -> Vec { + let Ok(entries) = self.entries.lock() else { + return vec![]; + }; + entries + .iter() + .map(|(id, traffic)| Head::new(*id, traffic)) + .collect() + } +} + +impl Head { + pub(crate) fn new(id: usize, traffic: &Traffic) -> Self { + let (method, uri, status) = traffic.head(); + Self { + id, + method: method.to_string(), + uri: uri.to_string(), + status, + } + } +} diff --git a/src/traffic.rs b/src/traffic.rs new file mode 100644 index 0000000..d2e6e1c --- /dev/null +++ b/src/traffic.rs @@ -0,0 +1,659 @@ +use anyhow::{bail, Result}; +use base64::{engine::general_purpose::STANDARD, Engine as _}; +use serde::Serialize; +use serde_json::{json, Value}; + +const HEX_VIEW_SIZE: usize = 320; + +#[derive(Debug, Clone, Serialize)] +pub struct Traffic { + pub uri: String, + pub method: String, + pub req_version: Option, + pub req_headers: Option, + pub req_body: Option, + pub status: Option, + pub res_headers: Option, + pub res_version: Option, + pub res_body: Option, + pub error: Option, +} + +impl Traffic { + pub fn new(uri: &str, method: &str) -> Self { + Self { + uri: uri.to_string(), + method: method.to_string(), + req_version: None, + req_headers: None, + req_body: None, + status: None, + res_version: None, + res_headers: None, + res_body: None, + error: None, + } + } + + pub fn add_error(&mut self, error: String) { + match self.error.as_mut() { + Some(current_error) => current_error.push_str(&error), + None => { + self.error = Some(error); + } + } + } + + pub fn head(&self) -> (&str, &str, Option) { + (&self.method, &self.uri, self.status) + } + + pub fn oneline(&self) -> String { + let mut output = format!("{} {}", self.method, self.uri,); + if let Some(status) = self.status { + output.push_str(&format!(" {}", status)); + } + output + } + + pub fn markdown(&self, print: bool) -> String { + let mut lines: Vec = vec![]; + lines.push(format!("\n# {}", self.oneline())); + + if let Some(headers) = &self.req_headers { + lines.push(render_header("REQUEST HEADERS", headers)); + } + + if let 
Some(body) = &self.req_body { + lines.push(render_body("REQUEST BODY", body, &self.req_headers, print)); + } + + if let Some(headers) = &self.res_headers { + lines.push(render_header("RESPONSE HEADERS", headers)); + } + + if let Some(body) = &self.res_body { + lines.push(render_body("RESPONSE BODY", body, &self.res_headers, print)); + } + + if let Some(error) = &self.error { + lines.push(render_error(error)); + } + lines.join("\n\n") + } + + pub fn har(&self) -> Value { + let request = json!({ + "method": self.method, + "url": self.uri, + "httpVersion": self.req_version, + "cookies": har_req_cookies(&self.req_headers), + "headers": har_headers(&self.req_headers), + "queryString": har_query_string(&self.uri), + "postData": har_body(&self.req_body, &self.req_headers), + "headersSize": -1, + "bodySize": -1, + }); + let response = match self.status { + Some(status) => json!({ + "status": status, + "statusText": "", + "httpVersion": self.res_version, + "cookies": har_res_cookies(&self.res_headers), + "headers": har_headers(&self.res_headers), + "content": har_body(&self.res_body, &self.res_headers), + "redirectURL": get_header_value(&self.res_headers, "location").unwrap_or_default(), + "headersSize": -1, + "bodySize": -1, + }), + None => json!({}), + }; + json!({ + "log": { + "version": "1.2", + "creator": { + "name": "proxyfor", + "version": env!("CARGO_PKG_VERSION"), + "comment": "", + }, + "pages": [], + "entries": [ + { + "request": request, + "response": response + } + ] + } + }) + } + + pub fn curl(&self) -> String { + let mut output = format!("curl {}", self.uri); + let escape_single_quote = |v: &str| v.replace('\'', r#"'\''"#); + if self.method != "GET" { + output.push_str(&format!(" \\\n -X {}", self.method)); + } + for header in self.req_headers.iter().flatten() { + if header.name != "content-length" { + output.push_str(&format!( + " \\\n -H '{}: {}'", + header.name, + escape_single_quote(&header.value) + )) + } + } + if let Some(body) = &self.req_body { + if body.is_utf8() { + output.push_str(&format!(" \\\n -d '{}'", escape_single_quote(&body.value))) + } else { + output.push_str(" \\\n --data-binary @-"); + output = format!( + "echo {} | \\\n base64 --decode | \\\n {}", + body.value, output + ); + } + } + output + } + + pub fn export<'a>(&'a self, format: &str) -> Result<(String, &'a str)> { + match format { + "markdown" => Ok((self.markdown(false), "text/markdown; charset=UTF-8")), + "har" => Ok(( + serde_json::to_string_pretty(&self.har())?, + "application/json; charset=UTF-8", + )), + "curl" => Ok((self.curl(), "text/plain; charset=UTF-8")), + _ => bail!("unsupported format: {}", format), + } + } +} + +pub type Headers = Vec
; + +#[derive(Debug, Clone, Serialize)] +pub struct Header { + pub name: String, + pub value: String, +} + +impl Header { + pub fn new(name: &str, value: &str) -> Self { + Self { + name: name.to_string(), + value: value.to_string(), + } + } +} + +#[derive(Debug, Clone, Serialize)] +pub struct Body { + pub encode: String, + pub value: String, +} + +impl Body { + pub fn new(bytes: &[u8]) -> Self { + match std::str::from_utf8(bytes) { + Ok(value) => Body { + encode: "utf8".to_string(), + value: value.to_string(), + }, + Err(_) => Body { + encode: "base64".to_string(), + value: STANDARD.encode(bytes), + }, + } + } + + pub fn is_utf8(&self) -> bool { + self.encode == "utf8" + } +} + +fn render_header(title: &str, headers: &Headers) -> String { + let value = headers + .iter() + .map(|header| format!("{}: {}", header.name, header.value)) + .collect::>() + .join("\n"); + format!( + r#"{title} +``` +{value} +```"# + ) +} + +fn render_body(title: &str, body: &Body, headers: &Option, print: bool) -> String { + let content_type = extract_content_type(headers).unwrap_or_default(); + if body.is_utf8() { + let body_value = &body.value; + let lang = md_lang(content_type); + format!( + r#"{title} +```{lang} +{body_value} +```"# + ) + } else if print { + let Ok(bytes) = STANDARD.decode(&body.value) else { + return String::new(); + }; + let body_bytes = if bytes.len() > HEX_VIEW_SIZE * 2 { + let dots = "⋅".repeat(67); + format!( + "{}\n{}\n{}", + render_bytes(&bytes[0..HEX_VIEW_SIZE]), + dots, + render_bytes(&bytes[bytes.len() - HEX_VIEW_SIZE..]), + ) + } else { + render_bytes(&bytes).to_string() + }; + format!( + r#"{title} +``` +{body_bytes} +```"# + ) + } else { + let body_value = &body.value; + format!( + r#"{title} +``` +data:{content_type};base64,{body_value} +```"# + ) + } +} + +fn render_error(error: &str) -> String { + if error.contains('\n') { + format!( + r#"ERROR +``` +{} +```"#, + error + ) + } else { + format!("ERROR: {}", error) + } +} + +fn render_bytes(source: &[u8]) -> String { + let config = pretty_hex::HexConfig { + title: false, + chunk: 2, + ..Default::default() + }; + + pretty_hex::config_hex(&source, config) +} + +fn har_headers(headers: &Option) -> Value { + match headers { + Some(headers) => headers.iter().map(|header| json!(header)).collect(), + None => json!([]), + } +} + +fn har_query_string(url: &str) -> Value { + match url::Url::parse(url) { + Ok(url) => url + .query_pairs() + .into_iter() + .map(|(k, v)| json!({ "name": &k, "value": &v })) + .collect(), + Err(_) => json!([]), + } +} + +fn har_req_cookies(headers: &Option) -> Value { + match headers { + Some(headers) => headers + .iter() + .filter(|header| header.name == "cookie") + .flat_map(|header| { + header + .value + .split(';') + .map(|v| v.trim()) + .collect::>() + }) + .filter_map(|value| { + value + .split_once('=') + .map(|(k, v)| json!({ "name": k, "value": v })) + }) + .collect(), + None => json!([]), + } +} + +fn har_body(body: &Option, headers: &Option) -> Value { + let content_type = get_header_value(headers, "content-type").unwrap_or_default(); + match body { + Some(body) => { + let mut value = json!({"mimeType": content_type, "text": body.value}); + if !body.is_utf8() { + value["encoding"] = "base64".into(); + } + value + } + None => json!({"mimeType": content_type, "text":""}), + } +} + +fn har_res_cookies(headers: &Option) -> Value { + match headers { + Some(headers) => headers + .iter() + .filter(|header| header.name.as_str() == "set-cookie") + .filter_map(|header| { + 
cookie::Cookie::parse(&header.value).ok().map(|cookie| { + let mut json_cookie = + json!({ "name": cookie.name(), "value": cookie.value(), }); + if let Some(value) = cookie.path() { + json_cookie["path"] = value.into(); + } + if let Some(value) = cookie.domain() { + json_cookie["domain"] = value.into(); + } + if let Some(cookie::Expiration::DateTime(datetime)) = cookie.expires() { + if let Ok(datetime) = + datetime.format(&time::format_description::well_known::Rfc3339) + { + json_cookie["expries"] = datetime.into(); + } + } + if let Some(value) = cookie.http_only() { + json_cookie["httpOnly"] = value.into(); + } + if let Some(value) = cookie.secure() { + json_cookie["secure"] = value.into(); + } + json_cookie + }) + }) + .collect(), + None => json!([]), + } +} + +fn extract_content_type(headers: &Option) -> Option<&str> { + get_header_value(headers, "content-type").map(|v| match v.split_once(';') { + Some((v, _)) => v.trim(), + None => v, + }) +} + +fn get_header_value<'a>(headers: &'a Option, key: &str) -> Option<&'a str> { + headers.as_ref().and_then(|v| { + v.iter() + .find(|header| header.name == key) + .map(|header| header.value.as_str()) + }) +} + +fn md_lang(content_type: &str) -> &str { + if let Some(value) = content_type + .strip_prefix("text/") + .or_else(|| content_type.strip_prefix("application/")) + { + if let Some(value) = value.strip_prefix("x-") { + value + } else { + value + } + } else { + "" + } +} + +#[cfg(test)] +mod tests { + use super::*; + use pretty_assertions::assert_eq; + + fn create_traffic1() -> Traffic { + Traffic { + uri: "http://example.com/?q1=3".to_string(), + method: "PUT".to_string(), + req_version: None, + req_headers: Some(vec![ + Header::new("content-type", "plain/text"), + Header::new("cookie", "c1=1; c2=2"), + Header::new("cookie", "c3=3"), + ]), + req_body: Some(Body::new(b"req_body")), + status: Some(200), + res_version: None, + res_headers: Some(vec![ + Header::new("content-type", "application/json; charset=utf-8"), + Header::new( + "set-cookie", + "sc1=1; path=/; domain=example.com; expires=Wed, 21 Oct 2015 07:28:00 GMT", + ), + Header::new( + "set-cookie", + "sc2=2; path=/; domain=example.com; expires=Wed, 21 Oct 2015 07:28:00 GMT", + ), + ]), + res_body: Some(Body::new(r#"{"message":"OK"}"#.as_bytes())), + error: Some("error".to_string()), + } + } + + #[test] + fn test_render_markdown() { + let traffic = create_traffic1(); + let expect = r#" +# PUT http://example.com/?q1=3 200 + +REQUEST HEADERS +``` +content-type: plain/text +cookie: c1=1; c2=2 +cookie: c3=3 +``` + +REQUEST BODY +``` +req_body +``` + +RESPONSE HEADERS +``` +content-type: application/json; charset=utf-8 +set-cookie: sc1=1; path=/; domain=example.com; expires=Wed, 21 Oct 2015 07:28:00 GMT +set-cookie: sc2=2; path=/; domain=example.com; expires=Wed, 21 Oct 2015 07:28:00 GMT +``` + +RESPONSE BODY +```json +{"message":"OK"} +``` + +ERROR: error"#; + assert_eq!(traffic.markdown(false), expect); + } + + #[test] + fn test_render_curl() { + let traffic = create_traffic1(); + let expect = r#"curl http://example.com/?q1=3 \ + -X PUT \ + -H 'content-type: plain/text' \ + -H 'cookie: c1=1; c2=2' \ + -H 'cookie: c3=3' \ + -d 'req_body'"#; + assert_eq!(traffic.curl(), expect); + } + + #[test] + fn test_render_har() { + let traffic = create_traffic1(); + let expect = r#"{ + "log": { + "version": "1.2", + "creator": { + "name": "proxyfor", + "version": "0.2.0", + "comment": "" + }, + "pages": [], + "entries": [ + { + "request": { + "method": "PUT", + "url": "http://example.com/?q1=3", + 
"httpVersion": null, + "cookies": [ + { + "name": "c1", + "value": "1" + }, + { + "name": "c2", + "value": "2" + }, + { + "name": "c3", + "value": "3" + } + ], + "headers": [ + { + "name": "content-type", + "value": "plain/text" + }, + { + "name": "cookie", + "value": "c1=1; c2=2" + }, + { + "name": "cookie", + "value": "c3=3" + } + ], + "queryString": [ + { + "name": "q1", + "value": "3" + } + ], + "postData": { + "mimeType": "plain/text", + "text": "req_body" + }, + "headersSize": -1, + "bodySize": -1 + }, + "response": { + "status": 200, + "statusText": "", + "httpVersion": null, + "cookies": [ + { + "name": "sc1", + "value": "1", + "path": "/", + "domain": "example.com", + "expries": "2015-10-21T07:28:00Z" + }, + { + "name": "sc2", + "value": "2", + "path": "/", + "domain": "example.com", + "expries": "2015-10-21T07:28:00Z" + } + ], + "headers": [ + { + "name": "content-type", + "value": "application/json; charset=utf-8" + }, + { + "name": "set-cookie", + "value": "sc1=1; path=/; domain=example.com; expires=Wed, 21 Oct 2015 07:28:00 GMT" + }, + { + "name": "set-cookie", + "value": "sc2=2; path=/; domain=example.com; expires=Wed, 21 Oct 2015 07:28:00 GMT" + } + ], + "content": { + "mimeType": "application/json; charset=utf-8", + "text": "{\"message\":\"OK\"}" + }, + "redirectURL": "", + "headersSize": -1, + "bodySize": -1 + } + } + ] + } +}"#; + assert_eq!( + serde_json::to_string_pretty(&traffic.har()).unwrap(), + expect, + ); + } + + #[test] + fn test_render_body() { + let body = Body::new(&[ + 0x6b, 0x4e, 0x1a, 0xc3, 0xaf, 0x03, 0xd2, 0x1e, 0x7e, 0x73, 0xba, 0xc8, 0xbd, 0x84, + 0x0f, 0x83, + ]); + let output = render_body( + "REQUEST BODY", + &body, + &Some(vec![Header { + name: "content-type".into(), + value: "application/octet-stream".into(), + }]), + false, + ); + let expect = r#"REQUEST BODY +``` +data:application/octet-stream;base64,a04aw68D0h5+c7rIvYQPgw== +```"#; + assert_eq!(output, expect); + } + + #[test] + fn test_render_body_print() { + let body = Body::new(&[ + 0x6b, 0x4e, 0x1a, 0xc3, 0xaf, 0x03, 0xd2, 0x1e, 0x7e, 0x73, 0xba, 0xc8, 0xbd, 0x84, + 0x0f, 0x83, + ]); + let output = render_body( + "REQUEST BODY", + &body, + &Some(vec![Header { + name: "content-type".into(), + value: "plain/text".into(), + }]), + true, + ); + let expect = r#"REQUEST BODY +``` +0000: 6b4e 1ac3 af03 d21e 7e73 bac8 bd84 0f83 kN......~s...... +```"#; + assert_eq!(output, expect); + } + + #[test] + fn test_md_lang() { + assert_eq!(md_lang("application/json"), "json"); + assert_eq!(md_lang("application/xml"), "xml"); + assert_eq!(md_lang("application/octet-stream"), "octet-stream"); + assert_eq!(md_lang("application/javascript"), "javascript"); + assert_eq!(md_lang("text/x-rust"), "rust"); + assert_eq!(md_lang("text/css"), "css"); + } +}