
Commit

Update dependencies of the Elasticsearch package
swallez committed Aug 18, 2024
1 parent e04f08e commit fe436a0
Showing 11 changed files with 80 additions and 152 deletions.
49 changes: 25 additions & 24 deletions elasticsearch/Cargo.toml
@@ -26,31 +26,32 @@ native-tls = ["reqwest/native-tls"]
rustls-tls = ["reqwest/rustls-tls"]

[dependencies]
-base64 = "^0.11"
-bytes = "^1.0"
-dyn-clone = "~1"
-lazy_static = "1.4"
-percent-encoding = "2.1.0"
-reqwest = { version = "~0.11", default-features = false, features = ["gzip", "json"] }
-url = "^2.1"
-serde = { version = "~1", features = ["derive"] }
-serde_json = "~1"
-serde_with = "~1"
-void = "1.0.2"
+base64 = "0.22"
+bytes = "1"
+dyn-clone = "1"
+lazy_static = "1"
+percent-encoding = "2"
+reqwest = { version = "0.12", default-features = false, features = ["gzip", "json"] }
+url = "2"
+serde = { version = "1", features = ["derive"] }
+serde_json = "1"
+serde_with = "1"
+void = "1"

[dev-dependencies]
-chrono = { version = "^0.4", features = ["serde"] }
-clap = "~2"
-failure = "0.1.5"
-futures = "0.3.1"
-http = "0.2"
-hyper = { version = "0.14", default-features = false, features = ["tcp", "stream", "server"] }
-os_type = "2.2"
-regex="1.4"
-sysinfo = "0.12.0"
-textwrap = "^0.11"
-tokio = { version = "1.0", default-features = false, features = ["macros", "net", "time", "rt-multi-thread"] }
-xml-rs = "^0.8"
+chrono = { version = "0.4", features = ["serde"] }
+clap = { version = "4", features = ["env"] }
+failure = "0.1"
+futures = "0.3"
+http = "1"
+axum = "0.7"
+hyper = { version = "1", features = ["server", "http1"] }
+os_type = "2"
+regex = "1"
+#sysinfo = "0.31"
+textwrap = "0.16"
+tokio = { version = "1", default-features = false, features = ["macros", "net", "time", "rt-multi-thread"] }
+xml-rs = "0.8"

[build-dependencies]
-rustc_version = "0.2"
+rustc_version = "0.4"
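
For context: in Cargo, a bare requirement such as "1" is shorthand for the caret requirement ^1, so dropping the ~ and ^ sigils mostly normalizes the ranges; the substantive bumps are base64 0.11 → 0.22 and reqwest 0.11 → 0.12, which moves the HTTP stack onto hyper 1.x. reqwest's high-level API is unchanged across that bump, so a minimal smoke test like the following sketch compiles against either version (the localhost:9200 address is illustrative, just the default Elasticsearch port):

```rust
// A minimal sketch exercising the upgraded reqwest 0.12 dependency;
// the `json` feature enabled above provides Response::json().
use serde_json::Value;

#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    let client = reqwest::Client::new();
    let info: Value = client
        .get("http://localhost:9200") // illustrative address
        .send()
        .await?
        .json()
        .await?;
    println!("{info}");
    Ok(())
}
```
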
12 changes: 0 additions & 12 deletions elasticsearch/examples/cat_indices.rs
@@ -24,7 +24,6 @@ use elasticsearch::{
http::transport::{SingleNodeConnectionPool, TransportBuilder},
Elasticsearch, Error, DEFAULT_ADDRESS,
};
-use sysinfo::SystemExt;
use url::Url;

#[tokio::main]
@@ -51,12 +50,6 @@ fn create_client() -> Result<Elasticsearch, Error> {
}
}

-/// Determines if Fiddler.exe proxy process is running
-fn running_proxy() -> bool {
-let system = sysinfo::System::new();
-!system.get_process_by_name("Fiddler").is_empty()
-}
-
let mut url = Url::parse(cluster_addr().as_ref()).unwrap();

// if the url is https and specifies a username and password, remove from the url and set credentials
@@ -100,11 +93,6 @@ fn create_client() -> Result<Elasticsearch, Error> {
None => builder,
};

-if running_proxy() {
-let proxy_url = Url::parse("http://localhost:8888").unwrap();
-builder = builder.proxy(proxy_url, None, None);
-}
-
let transport = builder.build()?;
Ok(Elasticsearch::new(transport))
}
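
The deleted helper auto-enabled a Fiddler debugging proxy whenever the process was found, which was the sole use of the sysinfo dev-dependency. A proxy can still be attached explicitly; a minimal sketch reusing the same `TransportBuilder::proxy` call the removed code made (addresses are illustrative):

```rust
use elasticsearch::{
    http::transport::{SingleNodeConnectionPool, TransportBuilder},
    Elasticsearch, Error,
};
use url::Url;

fn main() -> Result<(), Error> {
    // Single-node pool pointing at a local cluster.
    let url = Url::parse("http://localhost:9200").unwrap();
    let pool = SingleNodeConnectionPool::new(url);

    // Route traffic through a local debugging proxy such as Fiddler,
    // which the removed helper used to do automatically.
    let proxy_url = Url::parse("http://localhost:8888").unwrap();
    let transport = TransportBuilder::new(pool)
        .proxy(proxy_url, None, None)
        .build()?;

    let _client = Elasticsearch::new(transport);
    Ok(())
}
```
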
52 changes: 16 additions & 36 deletions elasticsearch/examples/index_questions_answers/main.rs
@@ -19,7 +19,7 @@
#[macro_use]
extern crate serde_json;

-use clap::{App, Arg};
+use clap::{Arg, Command};
#[cfg(any(feature = "native-tls", feature = "rustls-tls"))]
use elasticsearch::cert::CertificateValidation;
use elasticsearch::{
@@ -31,7 +31,6 @@ use elasticsearch::{
BulkOperation, BulkParts, Elasticsearch, Error, DEFAULT_ADDRESS,
};
use serde_json::Value;
-use sysinfo::SystemExt;
use url::Url;

mod stack_overflow;
@@ -49,58 +48,50 @@ static POSTS_INDEX: &'static str = "posts";
// TODO: Concurrent bulk requests
#[tokio::main]
pub async fn main() -> Result<(), Box<dyn std::error::Error>> {
-let matches = App::new("index_questions_answers")
+let matches = Command::new("index_questions_answers")
.about(
"indexes Stack Overflow questions and answers into Elasticsearch with the Rust client",
)
.arg(
Arg::with_name("path")
.short("p")
Arg::new("path")
.short('p')
.long("path")
.value_name("PATH")
.help("The path to the Posts.xml file containing questions and answers. Can be obtained from https://archive.org/download/stackexchange/stackoverflow.com-Posts.7z (large file)")
.required(true)
-.takes_value(true),
)
.arg(
Arg::with_name("limit")
.short("l")
Arg::new("limit")
.short('l')
.long("limit")
.value_name("LIMIT")
.help("The number of questions and answers from Posts.xml to index")
.required(false)
-.takes_value(true),
)
.arg(
Arg::with_name("size")
.short("s")
Arg::new("size")
.short('s')
.long("size")
.value_name("SIZE")
.help("The number of documents in each bulk request")
.required(false)
-.takes_value(true),
)
.arg(
Arg::with_name("delete")
.short("d")
Arg::new("delete")
.short('d')
.long("delete")
.help("Whether to delete the index before indexing")
.required(false)
-.takes_value(false),
)
.get_matches();

-let path = matches.value_of("path").expect("missing 'path' argument");
-let limit = match matches.value_of("limit") {
-Some(l) => Some(l.parse::<usize>()?),
-_ => None,
-};
-let size = match matches.value_of("size") {
-Some(l) => l.parse::<usize>()?,
-_ => 1000,
-};
+let path = matches
+.get_one::<String>("path")
+.expect("missing 'path' argument");
+let limit = matches.get_one::<usize>("limit").copied();
+let size = matches.get_one::<usize>("size").copied().unwrap_or(1000);

-let delete = matches.is_present("delete");
+let delete = matches.contains_id("delete");
let client = create_client()?;

create_index_if_not_exists(&client, delete).await?;
@@ -368,12 +359,6 @@ fn create_client() -> Result<Elasticsearch, Error> {
}
}

-/// Determines if Fiddler.exe proxy process is running
-fn running_proxy() -> bool {
-let system = sysinfo::System::new();
-!system.get_process_by_name("Fiddler").is_empty()
-}
-
let mut url = Url::parse(cluster_addr().as_ref()).unwrap();

// if the url is https and specifies a username and password, remove from the url and set credentials
@@ -417,11 +402,6 @@ fn create_client() -> Result<Elasticsearch, Error> {
None => builder,
};

-if running_proxy() {
-let proxy_url = Url::parse("http://localhost:8888").unwrap();
-builder = builder.proxy(proxy_url, None, None);
-}
-
let transport = builder.build()?;
Ok(Elasticsearch::new(transport))
}
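
The clap 2 → 4 migration in this file swaps `App` for `Command`, `Arg::with_name` for `Arg::new`, string shorts for `char` shorts, and the stringly `value_of`/`is_present` lookups for typed `ArgMatches` getters. A condensed, self-contained sketch of the same pattern (argument names mirror the example's). One caveat worth noting: `get_one::<usize>` only succeeds when a matching typed value parser is registered on the `Arg`, and the idiomatic clap 4 way to read a flag is `get_flag` with an explicit `ArgAction::SetTrue`:

```rust
use clap::{value_parser, Arg, ArgAction, Command};

fn main() {
    let matches = Command::new("index_questions_answers")
        .arg(Arg::new("path").short('p').long("path").required(true))
        .arg(
            Arg::new("limit")
                .short('l')
                .long("limit")
                // Needed for the typed get_one::<usize>() lookup below.
                .value_parser(value_parser!(usize)),
        )
        .arg(
            Arg::new("delete")
                .short('d')
                .long("delete")
                // clap 4 flags carry an explicit action instead of
                // clap 2's takes_value(false).
                .action(ArgAction::SetTrue),
        )
        .get_matches();

    let path: &String = matches.get_one("path").expect("missing 'path' argument");
    let limit: Option<usize> = matches.get_one("limit").copied();
    let delete = matches.get_flag("delete");
    println!("path={path} limit={limit:?} delete={delete}");
}
```
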
12 changes: 0 additions & 12 deletions elasticsearch/examples/search_questions/main.rs
@@ -25,7 +25,6 @@ use elasticsearch::{
};
use serde_json::{json, Value};
use std::env;
-use sysinfo::SystemExt;
use url::Url;
mod stack_overflow;
use stack_overflow::*;
@@ -118,12 +117,6 @@ fn create_client() -> Result<Elasticsearch, Error> {
}
}

-/// Determines if Fiddler.exe proxy process is running
-fn running_proxy() -> bool {
-let system = sysinfo::System::new();
-!system.get_process_by_name("Fiddler").is_empty()
-}
-
let mut url = Url::parse(cluster_addr().as_ref()).unwrap();

// if the url is https and specifies a username and password, remove from the url and set credentials
@@ -167,11 +160,6 @@ fn create_client() -> Result<Elasticsearch, Error> {
None => builder,
};

-if running_proxy() {
-let proxy_url = Url::parse("http://localhost:8888").unwrap();
-builder = builder.proxy(proxy_url, None, None);
-}
-
let transport = builder.build()?;
Ok(Elasticsearch::new(transport))
}
12 changes: 0 additions & 12 deletions elasticsearch/examples/search_questions_answers/main.rs
@@ -28,7 +28,6 @@ use elasticsearch::{
};
use serde_json::Value;
use std::env;
-use sysinfo::SystemExt;
use url::Url;
mod stack_overflow;
use stack_overflow::*;
@@ -105,12 +104,6 @@ fn create_client() -> Result<Elasticsearch, Error> {
}
}

-/// Determines if Fiddler.exe proxy process is running
-fn running_proxy() -> bool {
-let system = sysinfo::System::new();
-!system.get_process_by_name("Fiddler").is_empty()
-}
-
let mut url = Url::parse(cluster_addr().as_ref()).unwrap();

// if the url is https and specifies a username and password, remove from the url and set credentials
@@ -154,11 +147,6 @@ fn create_client() -> Result<Elasticsearch, Error> {
None => builder,
};

-if running_proxy() {
-let proxy_url = Url::parse("http://localhost:8888").unwrap();
-builder = builder.proxy(proxy_url, None, None);
-}
-
let transport = builder.build()?;
Ok(Elasticsearch::new(transport))
}
18 changes: 9 additions & 9 deletions elasticsearch/src/http/transport.rs
@@ -35,7 +35,7 @@ use crate::{
Method,
},
};
-use base64::write::EncoderWriter as Base64Encoder;
+use base64::{engine::general_purpose::STANDARD as BASE64_STANDARD, write::EncoderWriter, Engine};
use bytes::BytesMut;
use lazy_static::lazy_static;
use serde::Serialize;
@@ -429,7 +429,7 @@ impl Transport {
Credentials::ApiKey(i, k) => {
let mut header_value = b"ApiKey ".to_vec();
{
-let mut encoder = Base64Encoder::new(&mut header_value, base64::STANDARD);
+let mut encoder = EncoderWriter::new(&mut header_value, &BASE64_STANDARD);
write!(encoder, "{}:", i).unwrap();
write!(encoder, "{}", k).unwrap();
}
@@ -561,7 +561,7 @@ impl CloudId {
}

let data = parts[1];
-let decoded_result = base64::decode(data);
+let decoded_result = BASE64_STANDARD.decode(data);
if decoded_result.is_err() {
return Err(crate::error::lib(format!(
"cannot base 64 decode '{}'",
@@ -685,7 +685,7 @@ pub mod tests {

#[test]
fn can_parse_cloud_id_with_kibana_uuid() {
-let base64 = base64::encode("cloud-endpoint.example$3dadf823f05388497ea684236d918a1a$3f26e1609cf54a0f80137a80de560da4");
+let base64 = BASE64_STANDARD.encode("cloud-endpoint.example$3dadf823f05388497ea684236d918a1a$3f26e1609cf54a0f80137a80de560da4");
let cloud_id = format!("my_cluster:{}", base64);
let result = CloudId::parse(&cloud_id);
assert!(result.is_ok());
@@ -699,7 +699,7 @@

#[test]
fn can_parse_cloud_id_without_kibana_uuid() {
-let base64 = base64::encode("cloud-endpoint.example$3dadf823f05388497ea684236d918a1a$");
+let base64 = BASE64_STANDARD.encode("cloud-endpoint.example$3dadf823f05388497ea684236d918a1a$");
let cloud_id = format!("my_cluster:{}", base64);
let result = CloudId::parse(&cloud_id);
assert!(result.is_ok());
@@ -713,7 +713,7 @@

#[test]
fn can_parse_cloud_id_with_different_port() {
-let base64 = base64::encode("cloud-endpoint.example:4463$3dadf823f05388497ea684236d918a1a$3f26e1609cf54a0f80137a80de560da4");
+let base64 = BASE64_STANDARD.encode("cloud-endpoint.example:4463$3dadf823f05388497ea684236d918a1a$3f26e1609cf54a0f80137a80de560da4");
let cloud_id = format!("my_cluster:{}", base64);
let result = CloudId::parse(&cloud_id);
assert!(result.is_ok());
@@ -728,7 +728,7 @@

#[test]
fn cloud_id_must_contain_colon() {
-let base64 = base64::encode("cloud-endpoint.example$3dadf823f05388497ea684236d918a1a$3f26e1609cf54a0f80137a80de560da4");
+let base64 = BASE64_STANDARD.encode("cloud-endpoint.example$3dadf823f05388497ea684236d918a1a$3f26e1609cf54a0f80137a80de560da4");
let cloud_id = format!("my_cluster{}", base64);
let cloud = CloudId::parse(&cloud_id);
assert!(cloud.is_err());
@@ -743,7 +743,7 @@

#[test]
fn cloud_id_first_part_cannot_be_empty() {
-let base64 = base64::encode("cloud-endpoint.example$3dadf823f05388497ea684236d918a1a$3f26e1609cf54a0f80137a80de560da4");
+let base64 = BASE64_STANDARD.encode("cloud-endpoint.example$3dadf823f05388497ea684236d918a1a$3f26e1609cf54a0f80137a80de560da4");
let cloud_id = format!(":{}", base64);
let cloud = CloudId::parse(&cloud_id);
assert!(cloud.is_err());
@@ -758,7 +758,7 @@

#[test]
fn cloud_id_second_part_must_have_at_least_two_parts() {
let base64 = base64::encode("cloud-endpoint.example");
let base64 = BASE64_STANDARD.encode("cloud-endpoint.example");
let cloud_id = format!("my_cluster:{}", base64);
let result = CloudId::parse(&cloud_id);
assert!(result.is_err());
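
base64 0.22 (up from 0.11) removes the module-level `encode`/`decode` free functions: one-shot calls now go through the `Engine` trait on a concrete engine such as `general_purpose::STANDARD`, and `EncoderWriter` borrows an engine instead of taking a config value. A self-contained sketch of both call sites touched above:

```rust
use base64::{engine::general_purpose::STANDARD as BASE64_STANDARD, write::EncoderWriter, Engine};
use std::io::Write;

fn main() {
    // One-shot encode/decode, formerly base64::encode / base64::decode.
    let encoded = BASE64_STANDARD.encode("cloud-endpoint.example");
    let decoded = BASE64_STANDARD.decode(&encoded).unwrap();
    assert_eq!(decoded, b"cloud-endpoint.example");

    // Streaming writer, as used when building the ApiKey header value.
    let mut header_value = b"ApiKey ".to_vec();
    {
        let mut encoder = EncoderWriter::new(&mut header_value, &BASE64_STANDARD);
        write!(encoder, "{}:{}", "id", "api_key").unwrap();
    }
    assert!(header_value.starts_with(b"ApiKey "));
}
```
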
8 changes: 3 additions & 5 deletions elasticsearch/tests/auth.rs
@@ -21,17 +21,15 @@ use common::*;

use elasticsearch::auth::Credentials;

-use base64::{self, write::EncoderWriter as Base64Encoder};
-// use std::fs::File;
-// use std::io::Read;
+use base64::{write::EncoderWriter, engine::general_purpose::STANDARD as BASE64_STANDARD};
use std::io::Write;

#[tokio::test]
async fn basic_auth_header() -> Result<(), failure::Error> {
let server = server::http(move |req| async move {
let mut header_value = b"Basic ".to_vec();
{
-let mut encoder = Base64Encoder::new(&mut header_value, base64::STANDARD);
+let mut encoder = EncoderWriter::new(&mut header_value, &BASE64_STANDARD);
write!(encoder, "username:password").unwrap();
}

@@ -56,7 +54,7 @@ async fn api_key_header() -> Result<(), failure::Error> {
let server = server::http(move |req| async move {
let mut header_value = b"ApiKey ".to_vec();
{
-let mut encoder = Base64Encoder::new(&mut header_value, base64::STANDARD);
+let mut encoder = EncoderWriter::new(&mut header_value, &BASE64_STANDARD);
write!(encoder, "id:api_key").unwrap();
}

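
The dev-dependency change pairs these edits with a move from raw hyper 0.14 to axum 0.7 on hyper 1, which suggests the `server::http` test helper used here is now built on axum. A minimal sketch of a throwaway mock server in that style (the helper shape and canned body are assumptions, not the crate's actual test code):

```rust
use axum::{routing::any, Router};
use tokio::net::TcpListener;

#[tokio::main]
async fn main() {
    // Answer any method/path with a canned body, as a mock node might.
    let app = Router::new().route(
        "/",
        any(|| async { r#"{"tagline":"You Know, for Search"}"# }),
    );

    // Bind an ephemeral port so parallel tests don't collide.
    let listener = TcpListener::bind("127.0.0.1:0").await.unwrap();
    println!("mock server on {}", listener.local_addr().unwrap());

    axum::serve(listener, app).await.unwrap();
}
```
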
4 changes: 2 additions & 2 deletions elasticsearch/tests/cert.rs
@@ -65,7 +65,7 @@ async fn default_certificate_validation() -> Result<(), failure::Error> {
))),
Err(e) => {
let expected = expected_error_message();
-let actual = e.to_string();
+let actual = format!("{:?}", e);
assert!(
actual.contains(&expected),
"Expected error message to contain '{}' but was '{}'",
@@ -285,7 +285,7 @@ async fn fail_certificate_certificate_validation() -> Result<(), failure::Error>
))),
Err(e) => {
let expected = expected_error_message();
-let actual = e.to_string();
+let actual = format!("{:?}", e);
assert!(
actual.contains(&expected),
"Expected error message to contain '{}' but was '{}'",
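
The assertions here switch from `e.to_string()` to `format!("{:?}", e)`: after the reqwest/hyper upgrade the interesting TLS failure detail tends to sit in the error's source chain, which the `Debug` representation includes while `Display` may not. A standard-library illustration of that difference (the types are invented for the sketch):

```rust
use std::{error::Error, fmt};

// A wrapper whose Display hides the underlying cause, the shape
// many HTTP client errors have.
#[derive(Debug)]
struct RequestError(Box<dyn Error>);

impl fmt::Display for RequestError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "error sending request")
    }
}

impl Error for RequestError {
    fn source(&self) -> Option<&(dyn Error + 'static)> {
        Some(self.0.as_ref())
    }
}

fn main() {
    let e = RequestError("certificate verify failed".into());
    // Display loses the cause; Debug keeps it.
    assert!(!e.to_string().contains("certificate"));
    assert!(format!("{:?}", e).contains("certificate"));
}
```
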
(Diffs for the remaining 3 changed files not shown.)
