From 25c982dab85226991c6ea58e8b5b0a67afed137a Mon Sep 17 00:00:00 2001
From: Renning Bruns
Date: Tue, 18 Jul 2023 18:28:43 -0700
Subject: [PATCH 01/16] wrap the send in a tokio timeout with a 5s timeout

---
 src/sources/util/http_client.rs | 24 +++++++++++++++++++-----
 1 file changed, 19 insertions(+), 5 deletions(-)

diff --git a/src/sources/util/http_client.rs b/src/sources/util/http_client.rs
index 08f14b878608f..64cd7a70b2701 100644
--- a/src/sources/util/http_client.rs
+++ b/src/sources/util/http_client.rs
@@ -25,7 +25,7 @@ use crate::{
     },
     sources::util::http::HttpMethod,
     tls::TlsSettings,
-    Error, SourceSender,
+    SourceSender,
 };
 use vector_common::shutdown::ShutdownSignal;
 use vector_core::{config::proxy::ProxyConfig, event::Event, EstimatedJsonEncodedSizeOf};
@@ -51,6 +51,9 @@ pub(crate) const fn default_interval() -> Duration {
     Duration::from_secs(15)
 }
 
+/// The default timeout for the HTTP request if none is configured.
+const DEFAULT_TARGET_TIMEOUT: Duration = Duration::from_secs(5);
+
 /// Builds the context, allowing the source-specific implementation to leverage data from the
 /// config and the current HTTP request.
 pub(crate) trait HttpClientBuilder {
@@ -157,9 +160,19 @@ pub(crate) async fn call<
         }
 
         let start = Instant::now();
-        client
-            .send(request)
-            .map_err(Error::from)
+        let timeout = std::cmp::min(DEFAULT_TARGET_TIMEOUT, inputs.interval);
+        tokio::time::timeout(timeout, client.send(request))
+            .then(move |result| async move {
+                match result {
+                    Ok(Ok(response)) => Ok(response),
+                    Ok(Err(error)) => Err(error.into()),
+                    Err(_) => Err(format!(
+                        "Timeout error: request exceeded {}s",
+                        timeout.as_secs_f32()
+                    )
+                    .into()),
+                }
+            })
            .and_then(|response| async move {
                 let (header, body) = response.into_parts();
                 let body = hyper::body::to_bytes(body).await?;
@@ -224,8 +237,9 @@ pub(crate) async fn call<
                     })
             })
             .flatten()
+            .boxed()
         })
-        .flatten()
+        .flatten_unordered(None)
         .boxed();
 
     match out.send_event_stream(&mut stream).await {

From 4a74b30f6a17894428c963ba75dc6b9082ca0951 Mon Sep 17 00:00:00 2001
From: Renning Bruns
Date: Wed, 19 Jul 2023 09:47:03 -0700
Subject: [PATCH 02/16] make target timeout configurable

---
 src/sources/http_client/client.rs | 12 +++++++++++-
 src/sources/http_client/tests.rs  |  8 ++++++++
 src/sources/prometheus/scrape.rs  | 16 ++++++++++++++++
 src/sources/util/http_client.rs   |  8 ++++++--
 4 files changed, 41 insertions(+), 3 deletions(-)

diff --git a/src/sources/http_client/client.rs b/src/sources/http_client/client.rs
index fc2a39e0e5743..82496264e4d81 100644
--- a/src/sources/http_client/client.rs
+++ b/src/sources/http_client/client.rs
@@ -20,7 +20,8 @@ use crate::{
     sources::util::{
         http::HttpMethod,
         http_client::{
-            build_url, call, default_interval, GenericHttpClientInputs, HttpClientBuilder,
+            build_url, call, default_interval, default_target_timeout, GenericHttpClientInputs,
+            HttpClientBuilder,
         },
     },
     tls::{TlsConfig, TlsSettings},
@@ -58,6 +59,13 @@ pub struct HttpClientConfig {
     #[configurable(metadata(docs::human_name = "Scrape Interval"))]
     pub interval: Duration,
 
+    /// The timeout for each scrape request, in seconds.
+    #[serde(default = "default_target_timeout")]
+    #[serde_as(as = "serde_with::DurationSeconds")]
+    #[serde(rename = "scrape_target_timeout_secs")]
+    #[configurable(metadata(docs::human_name = "Scrape Target Timeout"))]
+    pub target_timeout: Duration,
+
     /// Custom parameters for the HTTP request query string.
     ///
     /// One or more values for the same parameter key can be provided.
@@ -153,6 +161,7 @@ impl Default for HttpClientConfig {
             endpoint: "http://localhost:9898/logs".to_string(),
             query: HashMap::new(),
             interval: default_interval(),
+            target_timeout: default_target_timeout(),
             decoding: default_decoding(),
             framing: default_framing_message_based(),
             headers: HashMap::new(),
@@ -196,6 +205,7 @@ impl SourceConfig for HttpClientConfig {
         let inputs = GenericHttpClientInputs {
             urls,
             interval: self.interval,
+            target_timeout: self.target_timeout,
             headers: self.headers.clone(),
             content_type,
             auth: self.auth.clone(),
diff --git a/src/sources/http_client/tests.rs b/src/sources/http_client/tests.rs
index ecd799a73696b..16a01ec76ee0b 100644
--- a/src/sources/http_client/tests.rs
+++ b/src/sources/http_client/tests.rs
@@ -16,6 +16,8 @@ use crate::test_util::{
 
 pub(crate) const INTERVAL: Duration = Duration::from_secs(1);
 
+pub(crate) const TARGET_TIMEOUT: Duration = Duration::from_secs(1);
+
 /// The happy path should yield at least one event and must emit the required internal events for sources.
 pub(crate) async fn run_compliance(config: HttpClientConfig) -> Vec<Event> {
     let events =
@@ -47,6 +49,7 @@ async fn bytes_decoding() {
     run_compliance(HttpClientConfig {
         endpoint: format!("http://{}/endpoint", in_addr),
         interval: INTERVAL,
+        target_timeout: TARGET_TIMEOUT,
         query: HashMap::new(),
         decoding: default_decoding(),
         framing: default_framing_message_based(),
@@ -75,6 +78,7 @@ async fn json_decoding_newline_delimited() {
     run_compliance(HttpClientConfig {
         endpoint: format!("http://{}/endpoint", in_addr),
         interval: INTERVAL,
+        target_timeout: TARGET_TIMEOUT,
         query: HashMap::new(),
         decoding: DeserializerConfig::Json(Default::default()),
         framing: FramingConfig::NewlineDelimited(Default::default()),
@@ -103,6 +107,7 @@ async fn json_decoding_character_delimited() {
     run_compliance(HttpClientConfig {
         endpoint: format!("http://{}/endpoint", in_addr),
         interval: INTERVAL,
+        target_timeout: TARGET_TIMEOUT,
         query: HashMap::new(),
         decoding: DeserializerConfig::Json(Default::default()),
         framing: FramingConfig::CharacterDelimited(CharacterDelimitedDecoderConfig {
@@ -135,6 +140,7 @@ async fn request_query_applied() {
     let events = run_compliance(HttpClientConfig {
         endpoint: format!("http://{}/endpoint?key1=val1", in_addr),
         interval: INTERVAL,
+        target_timeout: TARGET_TIMEOUT,
         query: HashMap::from([
             ("key1".to_string(), vec!["val2".to_string()]),
             (
@@ -203,6 +209,7 @@ async fn headers_applied() {
     run_compliance(HttpClientConfig {
         endpoint: format!("http://{}/endpoint", in_addr),
         interval: INTERVAL,
+        target_timeout: TARGET_TIMEOUT,
         query: HashMap::new(),
         decoding: default_decoding(),
         framing: default_framing_message_based(),
@@ -234,6 +241,7 @@ async fn accept_header_override() {
     run_compliance(HttpClientConfig {
         endpoint: format!("http://{}/endpoint", in_addr),
         interval: INTERVAL,
+        target_timeout: TARGET_TIMEOUT,
         query: HashMap::new(),
         decoding: DeserializerConfig::Bytes,
         framing: default_framing_message_based(),
diff --git a/src/sources/prometheus/scrape.rs b/src/sources/prometheus/scrape.rs
index 4a7c66425359e..652bc322dce7a 100644
--- a/src/sources/prometheus/scrape.rs
+++ b/src/sources/prometheus/scrape.rs
@@ -11,6 +11,7 @@ use vector_core::{config::LogNamespace, event::Event};
 
 use super::parser;
 use crate::sources::util::http::HttpMethod;
+use crate::sources::util::http_client::default_target_timeout;
 use crate::{
     config::{GenerateConfig, SourceConfig, SourceContext, SourceOutput},
     http::Auth,
@@ -60,6 +61,13 @@ pub struct PrometheusScrapeConfig {
     #[configurable(metadata(docs::human_name = "Scrape Interval"))]
     interval: Duration,
 
+    /// The timeout for each scrape request, in seconds.
+    #[serde(default = "default_target_timeout")]
+    #[serde_as(as = "serde_with::DurationSeconds")]
+    #[serde(rename = "scrape_target_timeout_secs")]
+    #[configurable(metadata(docs::human_name = "Scrape Target Timeout"))]
+    target_timeout: Duration,
+
     /// The tag name added to each event representing the scraped instance's `host:port`.
     ///
     /// The tag value is the host and port of the scraped instance.
@@ -114,6 +122,7 @@ impl GenerateConfig for PrometheusScrapeConfig {
         toml::Value::try_from(Self {
             endpoints: vec!["http://localhost:9090/metrics".to_string()],
             interval: default_interval(),
+            target_timeout: default_target_timeout(),
             instance_tag: Some("instance".to_string()),
             endpoint_tag: Some("endpoint".to_string()),
             honor_labels: false,
@@ -146,6 +155,7 @@ impl SourceConfig for PrometheusScrapeConfig {
         let inputs = GenericHttpClientInputs {
             urls,
             interval: self.interval,
+            target_timeout: self.target_timeout,
             headers: HashMap::new(),
             content_type: "text/plain".to_string(),
             auth: self.auth.clone(),
@@ -351,6 +361,7 @@ mod test {
         let config = PrometheusScrapeConfig {
             endpoints: vec![format!("http://{}/metrics", in_addr)],
             interval: Duration::from_secs(1),
+            target_timeout: default_target_timeout(),
             instance_tag: Some("instance".to_string()),
             endpoint_tag: Some("endpoint".to_string()),
             honor_labels: true,
@@ -384,6 +395,7 @@ mod test {
         let config = PrometheusScrapeConfig {
             endpoints: vec![format!("http://{}/metrics", in_addr)],
             interval: Duration::from_secs(1),
+            target_timeout: default_target_timeout(),
             instance_tag: Some("instance".to_string()),
             endpoint_tag: Some("endpoint".to_string()),
             honor_labels: true,
@@ -435,6 +447,7 @@ mod test {
         let config = PrometheusScrapeConfig {
             endpoints: vec![format!("http://{}/metrics", in_addr)],
             interval: Duration::from_secs(1),
+            target_timeout: default_target_timeout(),
             instance_tag: Some("instance".to_string()),
             endpoint_tag: Some("endpoint".to_string()),
             honor_labels: false,
@@ -500,6 +513,7 @@ mod test {
         let config = PrometheusScrapeConfig {
             endpoints: vec![format!("http://{}/metrics", in_addr)],
             interval: Duration::from_secs(1),
+            target_timeout: default_target_timeout(),
             instance_tag: Some("instance".to_string()),
             endpoint_tag: Some("endpoint".to_string()),
             honor_labels: true,
@@ -555,6 +569,7 @@ mod test {
         let config = PrometheusScrapeConfig {
             endpoints: vec![format!("http://{}/metrics?key1=val1", in_addr)],
             interval: Duration::from_secs(1),
+            target_timeout: default_target_timeout(),
             instance_tag: Some("instance".to_string()),
             endpoint_tag: Some("endpoint".to_string()),
             honor_labels: false,
@@ -668,6 +683,7 @@ mod test {
                 honor_labels: false,
                 query: HashMap::new(),
                 interval: Duration::from_secs(1),
+                target_timeout: default_target_timeout(),
                 tls: None,
                 auth: None,
             },
diff --git a/src/sources/util/http_client.rs b/src/sources/util/http_client.rs
index 64cd7a70b2701..a64c0ee6a198d 100644
--- a/src/sources/util/http_client.rs
+++ b/src/sources/util/http_client.rs
@@ -36,6 +38,8 @@ pub(crate) struct GenericHttpClientInputs {
     pub urls: Vec<Uri>,
     /// Interval between calls.
     pub interval: Duration,
+    /// Timeout for the HTTP request.
+    pub target_timeout: Duration,
     /// Map of Header+Value to apply to HTTP request.
     pub headers: HashMap<String, Vec<String>>,
     /// Content type of the HTTP request, determined by the source.
@@ -52,7 +54,9 @@ pub(crate) const fn default_interval() -> Duration {
 }
 
 /// The default timeout for the HTTP request if none is configured.
-const DEFAULT_TARGET_TIMEOUT: Duration = Duration::from_secs(5);
+pub(crate) const fn default_target_timeout() -> Duration {
+    Duration::from_secs(5)
+}
 
 /// Builds the context, allowing the source-specific implementation to leverage data from the
 /// config and the current HTTP request.
@@ -160,7 +164,7 @@ pub(crate) async fn call<
         }
 
         let start = Instant::now();
-        let timeout = std::cmp::min(DEFAULT_TARGET_TIMEOUT, inputs.interval);
+        let timeout = std::cmp::min(inputs.target_timeout, inputs.interval);
         tokio::time::timeout(timeout, client.send(request))
             .then(move |result| async move {
                 match result {

From 3889e7522a7fc9409fbd0aa642ee6301c0b5d824 Mon Sep 17 00:00:00 2001
From: Renning Bruns
Date: Wed, 19 Jul 2023 20:48:45 -0700
Subject: [PATCH 03/16] creating http clients is actually pretty slow, but
 cloning them is fast

---
 src/sources/util/http_client.rs | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/src/sources/util/http_client.rs b/src/sources/util/http_client.rs
index a64c0ee6a198d..4a049c1cf483a 100644
--- a/src/sources/util/http_client.rs
+++ b/src/sources/util/http_client.rs
@@ -121,15 +121,16 @@ pub(crate) async fn call<
     mut out: SourceSender,
     http_method: HttpMethod,
 ) -> Result<(), ()> {
+    // Building the HttpClient should not fail as it is just setting up the client with the
+    // proxy and tls settings.
+    let client =
+        HttpClient::new(inputs.tls.clone(), &inputs.proxy).expect("Building HTTP client failed");
     let mut stream = IntervalStream::new(tokio::time::interval(inputs.interval))
         .take_until(inputs.shutdown)
         .map(move |_| stream::iter(inputs.urls.clone()))
         .flatten()
         .map(move |url| {
-            // Building the HttpClient should not fail as it is just setting up the client with the
-            // proxy and tls settings.
-            let client = HttpClient::new(inputs.tls.clone(), &inputs.proxy)
-                .expect("Building HTTP client failed");
+            let client = client.clone();
             let endpoint = url.to_string();
 
             let context_builder = context_builder.clone();

From 3b06af5c33bfd6f505485adccd133aad91ee74d6 Mon Sep 17 00:00:00 2001
From: Renning Bruns
Date: Thu, 20 Jul 2023 10:22:18 -0700
Subject: [PATCH 04/16] Update src/sources/http_client/client.rs

Co-authored-by: Doug Smith
---
 src/sources/http_client/client.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/sources/http_client/client.rs b/src/sources/http_client/client.rs
index 82496264e4d81..f3b62fd4a57c5 100644
--- a/src/sources/http_client/client.rs
+++ b/src/sources/http_client/client.rs
@@ -61,7 +61,7 @@ pub struct HttpClientConfig {
 
     /// The timeout for each scrape request, in seconds.
     #[serde(default = "default_target_timeout")]
-    #[serde_as(as = "serde_with::DurationSeconds")]
+    #[serde_as(as = "serde_with::DurationSecondsWithFrac")]
     #[serde(rename = "scrape_target_timeout_secs")]
     #[configurable(metadata(docs::human_name = "Scrape Target Timeout"))]
     pub target_timeout: Duration,

From d753d5ff51210b2fb447c5bed0ca4fccceb09632 Mon Sep 17 00:00:00 2001
From: Renning Bruns
Date: Thu, 20 Jul 2023 10:22:24 -0700
Subject: [PATCH 05/16] Update src/sources/prometheus/scrape.rs

Co-authored-by: Doug Smith
---
 src/sources/prometheus/scrape.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/sources/prometheus/scrape.rs b/src/sources/prometheus/scrape.rs
index 652bc322dce7a..604635f632fe9 100644
--- a/src/sources/prometheus/scrape.rs
+++ b/src/sources/prometheus/scrape.rs
@@ -63,7 +63,7 @@ pub struct PrometheusScrapeConfig {
 
     /// The timeout for each scrape request, in seconds.
#[serde(default = "default_target_timeout")] - #[serde_as(as = "serde_with::DurationSeconds")] + #[serde_as(as = "serde_with:: DurationSecondsWithFrac")] #[serde(rename = "scrape_target_timeout_secs")] #[configurable(metadata(docs::human_name = "Scrape Target Timeout"))] target_timeout: Duration, From 996bca73a1d20b6a9cd9f92e7a69974cff48ccd9 Mon Sep 17 00:00:00 2001 From: Renning Bruns Date: Thu, 20 Jul 2023 10:26:00 -0700 Subject: [PATCH 06/16] update config names and interval behavior --- src/sources/http_client/client.rs | 10 +++++----- src/sources/prometheus/scrape.rs | 8 ++++---- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/src/sources/http_client/client.rs b/src/sources/http_client/client.rs index f3b62fd4a57c5..09a562eca176d 100644 --- a/src/sources/http_client/client.rs +++ b/src/sources/http_client/client.rs @@ -52,18 +52,18 @@ pub struct HttpClientConfig { #[configurable(metadata(docs::examples = "http://127.0.0.1:9898/logs"))] pub endpoint: String, - /// The interval between calls. + /// The interval between scrapes. Requests run concurrently. #[serde(default = "default_interval")] #[serde_as(as = "serde_with::DurationSeconds")] #[serde(rename = "scrape_interval_secs")] #[configurable(metadata(docs::human_name = "Scrape Interval"))] pub interval: Duration, - /// The timeout for each scrape request, in seconds. + /// The timeout for each scrape request. #[serde(default = "default_target_timeout")] - #[serde_as(as = "serde_with::DurationSecondsWithFrac")] - #[serde(rename = "scrape_target_timeout_secs")] - #[configurable(metadata(docs::human_name = "Scrape Target Timeout"))] + #[serde_as(as = "serde_with:: DurationSecondsWithFrac")] + #[serde(rename = "scrape_timeout")] + #[configurable(metadata(docs::human_name = "Scrape Timeout"))] pub target_timeout: Duration, /// Custom parameters for the HTTP request query string. diff --git a/src/sources/prometheus/scrape.rs b/src/sources/prometheus/scrape.rs index 604635f632fe9..bb9e6402c2b27 100644 --- a/src/sources/prometheus/scrape.rs +++ b/src/sources/prometheus/scrape.rs @@ -54,18 +54,18 @@ pub struct PrometheusScrapeConfig { #[serde(alias = "hosts")] endpoints: Vec, - /// The interval between scrapes, in seconds. + /// The interval between scrapes. Requests run concurrently. #[serde(default = "default_interval")] #[serde_as(as = "serde_with::DurationSeconds")] #[serde(rename = "scrape_interval_secs")] #[configurable(metadata(docs::human_name = "Scrape Interval"))] interval: Duration, - /// The timeout for each scrape request, in seconds. + /// The timeout for each scrape request. #[serde(default = "default_target_timeout")] #[serde_as(as = "serde_with:: DurationSecondsWithFrac")] - #[serde(rename = "scrape_target_timeout_secs")] - #[configurable(metadata(docs::human_name = "Scrape Target Timeout"))] + #[serde(rename = "scrape_timeout")] + #[configurable(metadata(docs::human_name = "Scrape Timeout"))] target_timeout: Duration, /// The tag name added to each event representing the scraped instance's `host:port`. 
From bf8b3ddd6e3fb80e79c2ddf10cac67fbc230bf5e Mon Sep 17 00:00:00 2001
From: Renning Bruns
Date: Thu, 20 Jul 2023 10:27:58 -0700
Subject: [PATCH 07/16] let user specify timeout directly, no extra logic

---
 src/sources/util/http_client.rs | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/src/sources/util/http_client.rs b/src/sources/util/http_client.rs
index 4a049c1cf483a..fbe92ccbeeac7 100644
--- a/src/sources/util/http_client.rs
+++ b/src/sources/util/http_client.rs
@@ -165,15 +165,14 @@ pub(crate) async fn call<
         }
 
         let start = Instant::now();
-        let timeout = std::cmp::min(inputs.target_timeout, inputs.interval);
-        tokio::time::timeout(timeout, client.send(request))
+        tokio::time::timeout(inputs.target_timeout, client.send(request))
             .then(move |result| async move {
                 match result {
                     Ok(Ok(response)) => Ok(response),
                     Ok(Err(error)) => Err(error.into()),
                     Err(_) => Err(format!(
                         "Timeout error: request exceeded {}s",
-                        timeout.as_secs_f32()
+                        inputs.target_timeout.as_secs_f64()
                     )
                     .into()),
                 }

From 55d8da1093311fb5e5fbaa5631f0a47b303f00c2 Mon Sep 17 00:00:00 2001
From: Renning Bruns
Date: Thu, 20 Jul 2023 11:15:17 -0700
Subject: [PATCH 08/16] warn if timeout is larger than scrape interval

---
 src/sources/http_client/client.rs | 8 ++++++++
 src/sources/prometheus/scrape.rs  | 8 ++++++++
 2 files changed, 16 insertions(+)

diff --git a/src/sources/http_client/client.rs b/src/sources/http_client/client.rs
index 09a562eca176d..c23bcff627a81 100644
--- a/src/sources/http_client/client.rs
+++ b/src/sources/http_client/client.rs
@@ -202,6 +202,14 @@ impl SourceConfig for HttpClientConfig {
             log_namespace,
         };
 
+        if self.target_timeout > self.interval {
+            warn!(
+                interval_secs = %self.interval.as_secs_f64(),
+                target_timeout_secs = %self.target_timeout.as_secs_f64(),
+                message = "Having a scrape timeout that exceeds the scrape interval can lead to excessive resource consumption.",
+            );
+        }
+
         let inputs = GenericHttpClientInputs {
             urls,
             interval: self.interval,
diff --git a/src/sources/prometheus/scrape.rs b/src/sources/prometheus/scrape.rs
index bb9e6402c2b27..9dfcf88f1bab2 100644
--- a/src/sources/prometheus/scrape.rs
+++ b/src/sources/prometheus/scrape.rs
@@ -152,6 +152,14 @@ impl SourceConfig for PrometheusScrapeConfig {
             endpoint_tag: self.endpoint_tag.clone(),
         };
 
+        if self.target_timeout > self.interval {
+            warn!(
+                interval_secs = %self.interval.as_secs_f64(),
+                target_timeout_secs = %self.target_timeout.as_secs_f64(),
+                message = "Having a scrape timeout that exceeds the scrape interval can lead to excessive resource consumption.",
+            );
+        }
+
         let inputs = GenericHttpClientInputs {
             urls,
             interval: self.interval,

From 1e38e01408351473460f02724465a28dad976ab5 Mon Sep 17 00:00:00 2001
From: Renning Bruns
Date: Thu, 20 Jul 2023 11:24:46 -0700
Subject: [PATCH 09/16] updated docs

---
 .../reference/components/sources/base/http_client.cue | 10 +++++++++-
 .../components/sources/base/prometheus_scrape.cue     | 10 +++++++++-
 2 files changed, 18 insertions(+), 2 deletions(-)

diff --git a/website/cue/reference/components/sources/base/http_client.cue b/website/cue/reference/components/sources/base/http_client.cue
index efe28fb6a7827..e78984aca8c2b 100644
--- a/website/cue/reference/components/sources/base/http_client.cue
+++ b/website/cue/reference/components/sources/base/http_client.cue
@@ -309,13 +309,21 @@ base: components: sources: http_client: configuration: {
 		}
 	}
 	scrape_interval_secs: {
-		description: "The interval between calls."
+		description: "The interval between scrapes. Requests run concurrently."
 		required:    false
 		type: uint: {
 			default: 15
 			unit:    "seconds"
 		}
 	}
+	scrape_timeout: {
+		description: "The timeout for each scrape request."
+		required:    false
+		type: float: {
+			default: 5.0
+			unit:    "seconds"
+		}
+	}
 	tls: {
 		description: "TLS configuration."
 		required:    false
diff --git a/website/cue/reference/components/sources/base/prometheus_scrape.cue b/website/cue/reference/components/sources/base/prometheus_scrape.cue
index f0c9b02351f33..12caae1450aa4 100644
--- a/website/cue/reference/components/sources/base/prometheus_scrape.cue
+++ b/website/cue/reference/components/sources/base/prometheus_scrape.cue
@@ -104,13 +104,21 @@ base: components: sources: prometheus_scrape: configuration: {
 		}
 	}
 	scrape_interval_secs: {
-		description: "The interval between scrapes, in seconds."
+		description: "The interval between scrapes. Requests run concurrently."
 		required:    false
 		type: uint: {
 			default: 15
 			unit:    "seconds"
 		}
 	}
+	scrape_timeout: {
+		description: "The timeout for each scrape request."
+		required:    false
+		type: float: {
+			default: 5.0
+			unit:    "seconds"
+		}
+	}
 	tls: {
 		description: "TLS configuration."
 		required:    false

From 71fb84aa87dbfe6050a9cb79f0625a24d7001be1 Mon Sep 17 00:00:00 2001
From: Renning Bruns
Date: Thu, 20 Jul 2023 11:34:50 -0700
Subject: [PATCH 10/16] rename to have _secs suffix for consistency with
 interval

---
 src/sources/http_client/client.rs                               | 2 +-
 src/sources/prometheus/scrape.rs                                | 2 +-
 website/cue/reference/components/sources/base/http_client.cue   | 2 +-
 .../cue/reference/components/sources/base/prometheus_scrape.cue | 2 +-
 4 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/src/sources/http_client/client.rs b/src/sources/http_client/client.rs
index c23bcff627a81..452a3d62c6f3c 100644
--- a/src/sources/http_client/client.rs
+++ b/src/sources/http_client/client.rs
@@ -62,7 +62,7 @@ pub struct HttpClientConfig {
     /// The timeout for each scrape request.
     #[serde(default = "default_target_timeout")]
     #[serde_as(as = "serde_with::DurationSecondsWithFrac")]
-    #[serde(rename = "scrape_timeout")]
+    #[serde(rename = "scrape_timeout_secs")]
     #[configurable(metadata(docs::human_name = "Scrape Timeout"))]
     pub target_timeout: Duration,
 
diff --git a/src/sources/prometheus/scrape.rs b/src/sources/prometheus/scrape.rs
index 9dfcf88f1bab2..33a4faeb9a963 100644
--- a/src/sources/prometheus/scrape.rs
+++ b/src/sources/prometheus/scrape.rs
@@ -64,7 +64,7 @@ pub struct PrometheusScrapeConfig {
     /// The timeout for each scrape request.
     #[serde(default = "default_target_timeout")]
     #[serde_as(as = "serde_with::DurationSecondsWithFrac")]
-    #[serde(rename = "scrape_timeout")]
+    #[serde(rename = "scrape_timeout_secs")]
     #[configurable(metadata(docs::human_name = "Scrape Timeout"))]
     target_timeout: Duration,
 
diff --git a/website/cue/reference/components/sources/base/http_client.cue b/website/cue/reference/components/sources/base/http_client.cue
index e78984aca8c2b..7613c8ddd4170 100644
--- a/website/cue/reference/components/sources/base/http_client.cue
+++ b/website/cue/reference/components/sources/base/http_client.cue
@@ -316,7 +316,7 @@ base: components: sources: http_client: configuration: {
 			unit:    "seconds"
 		}
 	}
-	scrape_timeout: {
+	scrape_timeout_secs: {
 		description: "The timeout for each scrape request."
 		required:    false
 		type: float: {
diff --git a/website/cue/reference/components/sources/base/prometheus_scrape.cue b/website/cue/reference/components/sources/base/prometheus_scrape.cue
index 12caae1450aa4..ad479e4313c83 100644
--- a/website/cue/reference/components/sources/base/prometheus_scrape.cue
+++ b/website/cue/reference/components/sources/base/prometheus_scrape.cue
@@ -111,7 +111,7 @@ base: components: sources: prometheus_scrape: configuration: {
 			unit:    "seconds"
 		}
 	}
-	scrape_timeout: {
+	scrape_timeout_secs: {
 		description: "The timeout for each scrape request."
 		required:    false
 		type: float: {

From ca322e779ca047529fcef990d3bf595861bcb2c8 Mon Sep 17 00:00:00 2001
From: Renning Bruns
Date: Thu, 20 Jul 2023 15:00:41 -0700
Subject: [PATCH 11/16] DRY up interval warning

---
 src/sources/http_client/client.rs | 12 +++---------
 src/sources/prometheus/scrape.rs  | 10 ++--------
 src/sources/util/http_client.rs   | 11 +++++++++++
 3 files changed, 16 insertions(+), 17 deletions(-)

diff --git a/src/sources/http_client/client.rs b/src/sources/http_client/client.rs
index 452a3d62c6f3c..f8cebf739c32f 100644
--- a/src/sources/http_client/client.rs
+++ b/src/sources/http_client/client.rs
@@ -20,8 +20,8 @@ use crate::{
     sources::util::{
         http::HttpMethod,
         http_client::{
-            build_url, call, default_interval, default_target_timeout, GenericHttpClientInputs,
-            HttpClientBuilder,
+            build_url, call, default_interval, default_target_timeout, warn_if_interval_too_low,
+            GenericHttpClientInputs, HttpClientBuilder,
         },
     },
     tls::{TlsConfig, TlsSettings},
@@ -202,13 +202,7 @@ impl SourceConfig for HttpClientConfig {
             log_namespace,
         };
 
-        if self.target_timeout > self.interval {
-            warn!(
-                interval_secs = %self.interval.as_secs_f64(),
-                target_timeout_secs = %self.target_timeout.as_secs_f64(),
-                message = "Having a scrape timeout that exceeds the scrape interval can lead to excessive resource consumption.",
-            );
-        }
+        warn_if_interval_too_low(self.target_timeout, self.interval);
 
         let inputs = GenericHttpClientInputs {
             urls,
diff --git a/src/sources/prometheus/scrape.rs b/src/sources/prometheus/scrape.rs
index 33a4faeb9a963..6d5c6fb60b9fb 100644
--- a/src/sources/prometheus/scrape.rs
+++ b/src/sources/prometheus/scrape.rs
@@ -11,7 +11,7 @@ use vector_core::{config::LogNamespace, event::Event};
 
 use super::parser;
 use crate::sources::util::http::HttpMethod;
-use crate::sources::util::http_client::default_target_timeout;
+use crate::sources::util::http_client::{default_target_timeout, warn_if_interval_too_low};
 use crate::{
     config::{GenerateConfig, SourceConfig, SourceContext, SourceOutput},
     http::Auth,
@@ -152,13 +152,7 @@ impl SourceConfig for PrometheusScrapeConfig {
             endpoint_tag: self.endpoint_tag.clone(),
         };
 
-        if self.target_timeout > self.interval {
-            warn!(
-                interval_secs = %self.interval.as_secs_f64(),
-                target_timeout_secs = %self.target_timeout.as_secs_f64(),
-                message = "Having a scrape timeout that exceeds the scrape interval can lead to excessive resource consumption.",
-            );
-        }
+        warn_if_interval_too_low(self.target_timeout, self.interval);
 
         let inputs = GenericHttpClientInputs {
             urls,
diff --git a/src/sources/util/http_client.rs b/src/sources/util/http_client.rs
index fbe92ccbeeac7..126c941a34243 100644
--- a/src/sources/util/http_client.rs
+++ b/src/sources/util/http_client.rs
@@ -108,6 +108,17 @@ pub(crate) fn build_url(uri: &Uri, query: &HashMap<String, Vec<String>>) -> Uri
     .expect("Failed to build URI from parsed arguments")
 }
 
+/// Warns if the scrape timeout is greater than the scrape interval.
+pub(crate) fn warn_if_interval_too_low(timeout: Duration, interval: Duration) {
+    if timeout > interval {
+        warn!(
+            interval_secs = %interval.as_secs_f64(),
+            timeout_secs = %timeout.as_secs_f64(),
+            message = "Having a scrape timeout that exceeds the scrape interval can lead to excessive resource consumption.",
+        );
+    }
+}
+
 /// Calls one or more urls at an interval.
 /// - The HTTP request is built per the options in provided generic inputs.
 /// - The HTTP response is decoded/parsed into events by the specific context.

From 3a110ee35d3a7711102df5d2d7031561ec834d29 Mon Sep 17 00:00:00 2001
From: Renning Bruns
Date: Thu, 20 Jul 2023 15:04:49 -0700
Subject: [PATCH 12/16] rename target_timeout to timeout

---
 src/sources/http_client/client.rs | 12 ++++++------
 src/sources/http_client/tests.rs  | 14 +++++++-------
 src/sources/prometheus/scrape.rs  | 24 ++++++++++++------------
 src/sources/util/http_client.rs   |  8 ++++----
 4 files changed, 29 insertions(+), 29 deletions(-)

diff --git a/src/sources/http_client/client.rs b/src/sources/http_client/client.rs
index f8cebf739c32f..3b852458b8b2d 100644
--- a/src/sources/http_client/client.rs
+++ b/src/sources/http_client/client.rs
@@ -20,7 +20,7 @@ use crate::{
     sources::util::{
         http::HttpMethod,
         http_client::{
-            build_url, call, default_interval, default_target_timeout, warn_if_interval_too_low,
+            build_url, call, default_interval, default_timeout, warn_if_interval_too_low,
             GenericHttpClientInputs, HttpClientBuilder,
         },
     },
@@ -60,11 +60,11 @@ pub struct HttpClientConfig {
     pub interval: Duration,
 
     /// The timeout for each scrape request.
-    #[serde(default = "default_target_timeout")]
+    #[serde(default = "default_timeout")]
     #[serde_as(as = "serde_with::DurationSecondsWithFrac")]
     #[serde(rename = "scrape_timeout_secs")]
     #[configurable(metadata(docs::human_name = "Scrape Timeout"))]
-    pub target_timeout: Duration,
+    pub timeout: Duration,
 
     /// Custom parameters for the HTTP request query string.
     ///
@@ -202,12 +202,12 @@ impl SourceConfig for HttpClientConfig {
             log_namespace,
         };
 
-        warn_if_interval_too_low(self.target_timeout, self.interval);
+        warn_if_interval_too_low(self.timeout, self.interval);
 
         let inputs = GenericHttpClientInputs {
             urls,
             interval: self.interval,
-            target_timeout: self.target_timeout,
+            timeout: self.timeout,
             headers: self.headers.clone(),
             content_type,
             auth: self.auth.clone(),
diff --git a/src/sources/http_client/tests.rs b/src/sources/http_client/tests.rs
index 16a01ec76ee0b..bb97289807fbd 100644
--- a/src/sources/http_client/tests.rs
+++ b/src/sources/http_client/tests.rs
@@ -16,7 +16,7 @@ use crate::test_util::{
 
 pub(crate) const INTERVAL: Duration = Duration::from_secs(1);
 
-pub(crate) const TARGET_TIMEOUT: Duration = Duration::from_secs(1);
+pub(crate) const TIMEOUT: Duration = Duration::from_secs(1);
 
 /// The happy path should yield at least one event and must emit the required internal events for sources.
 pub(crate) async fn run_compliance(config: HttpClientConfig) -> Vec<Event> {
@@ -49,7 +49,7 @@ async fn bytes_decoding() {
     run_compliance(HttpClientConfig {
         endpoint: format!("http://{}/endpoint", in_addr),
         interval: INTERVAL,
-        target_timeout: TARGET_TIMEOUT,
+        timeout: TIMEOUT,
         query: HashMap::new(),
         decoding: default_decoding(),
         framing: default_framing_message_based(),
@@ -78,7 +78,7 @@ async fn json_decoding_newline_delimited() {
     run_compliance(HttpClientConfig {
         endpoint: format!("http://{}/endpoint", in_addr),
         interval: INTERVAL,
-        target_timeout: TARGET_TIMEOUT,
+        timeout: TIMEOUT,
         query: HashMap::new(),
         decoding: DeserializerConfig::Json(Default::default()),
         framing: FramingConfig::NewlineDelimited(Default::default()),
@@ -107,7 +107,7 @@ async fn json_decoding_character_delimited() {
     run_compliance(HttpClientConfig {
         endpoint: format!("http://{}/endpoint", in_addr),
         interval: INTERVAL,
-        target_timeout: TARGET_TIMEOUT,
+        timeout: TIMEOUT,
         query: HashMap::new(),
         decoding: DeserializerConfig::Json(Default::default()),
         framing: FramingConfig::CharacterDelimited(CharacterDelimitedDecoderConfig {
@@ -140,7 +140,7 @@ async fn request_query_applied() {
     let events = run_compliance(HttpClientConfig {
         endpoint: format!("http://{}/endpoint?key1=val1", in_addr),
         interval: INTERVAL,
-        target_timeout: TARGET_TIMEOUT,
+        timeout: TIMEOUT,
         query: HashMap::from([
             ("key1".to_string(), vec!["val2".to_string()]),
             (
@@ -209,7 +209,7 @@ async fn headers_applied() {
     run_compliance(HttpClientConfig {
         endpoint: format!("http://{}/endpoint", in_addr),
         interval: INTERVAL,
-        target_timeout: TARGET_TIMEOUT,
+        timeout: TIMEOUT,
         query: HashMap::new(),
         decoding: default_decoding(),
         framing: default_framing_message_based(),
@@ -241,7 +241,7 @@ async fn accept_header_override() {
     run_compliance(HttpClientConfig {
         endpoint: format!("http://{}/endpoint", in_addr),
         interval: INTERVAL,
-        target_timeout: TARGET_TIMEOUT,
+        timeout: TIMEOUT,
         query: HashMap::new(),
         decoding: DeserializerConfig::Bytes,
         framing: default_framing_message_based(),
diff --git a/src/sources/prometheus/scrape.rs b/src/sources/prometheus/scrape.rs
index 6d5c6fb60b9fb..747ba9649f1d7 100644
--- a/src/sources/prometheus/scrape.rs
+++ b/src/sources/prometheus/scrape.rs
@@ -11,7 +11,7 @@ use vector_core::{config::LogNamespace, event::Event};
 
 use super::parser;
 use crate::sources::util::http::HttpMethod;
-use crate::sources::util::http_client::{default_target_timeout, warn_if_interval_too_low};
+use crate::sources::util::http_client::{default_timeout, warn_if_interval_too_low};
 use crate::{
     config::{GenerateConfig, SourceConfig, SourceContext, SourceOutput},
     http::Auth,
@@ -62,11 +62,11 @@ pub struct PrometheusScrapeConfig {
     interval: Duration,
 
     /// The timeout for each scrape request.
-    #[serde(default = "default_target_timeout")]
+    #[serde(default = "default_timeout")]
     #[serde_as(as = "serde_with::DurationSecondsWithFrac")]
     #[serde(rename = "scrape_timeout_secs")]
     #[configurable(metadata(docs::human_name = "Scrape Timeout"))]
-    target_timeout: Duration,
+    timeout: Duration,
 
     /// The tag name added to each event representing the scraped instance's `host:port`.
     ///
@@ -122,7 +122,7 @@ impl GenerateConfig for PrometheusScrapeConfig {
         toml::Value::try_from(Self {
             endpoints: vec!["http://localhost:9090/metrics".to_string()],
             interval: default_interval(),
-            target_timeout: default_target_timeout(),
+            timeout: default_timeout(),
             instance_tag: Some("instance".to_string()),
             endpoint_tag: Some("endpoint".to_string()),
             honor_labels: false,
@@ -154,12 +154,12 @@ impl SourceConfig for PrometheusScrapeConfig {
             endpoint_tag: self.endpoint_tag.clone(),
         };
 
-        warn_if_interval_too_low(self.target_timeout, self.interval);
+        warn_if_interval_too_low(self.timeout, self.interval);
 
         let inputs = GenericHttpClientInputs {
             urls,
             interval: self.interval,
-            target_timeout: self.target_timeout,
+            timeout: self.timeout,
             headers: HashMap::new(),
             content_type: "text/plain".to_string(),
             auth: self.auth.clone(),
@@ -363,7 +363,7 @@ mod test {
         let config = PrometheusScrapeConfig {
             endpoints: vec![format!("http://{}/metrics", in_addr)],
             interval: Duration::from_secs(1),
-            target_timeout: default_target_timeout(),
+            timeout: default_timeout(),
             instance_tag: Some("instance".to_string()),
             endpoint_tag: Some("endpoint".to_string()),
             honor_labels: true,
@@ -397,7 +397,7 @@ mod test {
         let config = PrometheusScrapeConfig {
             endpoints: vec![format!("http://{}/metrics", in_addr)],
             interval: Duration::from_secs(1),
-            target_timeout: default_target_timeout(),
+            timeout: default_timeout(),
             instance_tag: Some("instance".to_string()),
             endpoint_tag: Some("endpoint".to_string()),
             honor_labels: true,
@@ -449,7 +449,7 @@ mod test {
         let config = PrometheusScrapeConfig {
             endpoints: vec![format!("http://{}/metrics", in_addr)],
             interval: Duration::from_secs(1),
-            target_timeout: default_target_timeout(),
+            timeout: default_timeout(),
             instance_tag: Some("instance".to_string()),
             endpoint_tag: Some("endpoint".to_string()),
             honor_labels: false,
@@ -515,7 +515,7 @@ mod test {
         let config = PrometheusScrapeConfig {
             endpoints: vec![format!("http://{}/metrics", in_addr)],
             interval: Duration::from_secs(1),
-            target_timeout: default_target_timeout(),
+            timeout: default_timeout(),
             instance_tag: Some("instance".to_string()),
             endpoint_tag: Some("endpoint".to_string()),
             honor_labels: true,
@@ -571,7 +571,7 @@ mod test {
         let config = PrometheusScrapeConfig {
             endpoints: vec![format!("http://{}/metrics?key1=val1", in_addr)],
             interval: Duration::from_secs(1),
-            target_timeout: default_target_timeout(),
+            timeout: default_timeout(),
             instance_tag: Some("instance".to_string()),
             endpoint_tag: Some("endpoint".to_string()),
             honor_labels: false,
@@ -685,7 +685,7 @@ mod test {
                 honor_labels: false,
                 query: HashMap::new(),
                 interval: Duration::from_secs(1),
-                target_timeout: default_target_timeout(),
+                timeout: default_timeout(),
                 tls: None,
                 auth: None,
             },
diff --git a/src/sources/util/http_client.rs b/src/sources/util/http_client.rs
index 126c941a34243..25678a90ae344 100644
--- a/src/sources/util/http_client.rs
+++ b/src/sources/util/http_client.rs
@@ -37,7 +37,7 @@ pub(crate) struct GenericHttpClientInputs {
     /// Interval between calls.
     pub interval: Duration,
     /// Timeout for the HTTP request.
-    pub target_timeout: Duration,
+    pub timeout: Duration,
     /// Map of Header+Value to apply to HTTP request.
     pub headers: HashMap<String, Vec<String>>,
     /// Content type of the HTTP request, determined by the source.
@@ -54,7 +54,7 @@ pub(crate) const fn default_interval() -> Duration {
 }
 
 /// The default timeout for the HTTP request if none is configured.
-pub(crate) const fn default_target_timeout() -> Duration {
+pub(crate) const fn default_timeout() -> Duration {
     Duration::from_secs(5)
 }
 
@@ -176,14 +176,14 @@ pub(crate) async fn call<
         }
 
         let start = Instant::now();
-        tokio::time::timeout(inputs.target_timeout, client.send(request))
+        tokio::time::timeout(inputs.timeout, client.send(request))
             .then(move |result| async move {
                 match result {
                     Ok(Ok(response)) => Ok(response),
                     Ok(Err(error)) => Err(error.into()),
                     Err(_) => Err(format!(
                         "Timeout error: request exceeded {}s",
-                        inputs.target_timeout.as_secs_f64()
+                        inputs.timeout.as_secs_f64()
                     )
                     .into()),
                 }

From 2955e582adf9b1eed2967b0e5ab71c03adcda392 Mon Sep 17 00:00:00 2001
From: Renning Bruns
Date: Fri, 21 Jul 2023 11:08:14 -0700
Subject: [PATCH 13/16] Update src/sources/prometheus/scrape.rs

Co-authored-by: Stephen Wakely
---
 src/sources/prometheus/scrape.rs | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/src/sources/prometheus/scrape.rs b/src/sources/prometheus/scrape.rs
index 747ba9649f1d7..35aa5f5f75d6e 100644
--- a/src/sources/prometheus/scrape.rs
+++ b/src/sources/prometheus/scrape.rs
@@ -54,7 +54,9 @@ pub struct PrometheusScrapeConfig {
     #[serde(alias = "hosts")]
     endpoints: Vec<String>,
 
-    /// The interval between scrapes. Requests run concurrently.
+    /// The interval between scrapes. Requests are run concurrently, so if a scrape takes longer
+    /// than the interval, a new scrape will be started. This can consume extra resources; set the
+    /// timeout to a value lower than the scrape interval to prevent this from happening.
     #[serde(default = "default_interval")]
     #[serde_as(as = "serde_with::DurationSeconds")]
     #[serde(rename = "scrape_interval_secs")]

From ca0869e2209f5790d748551e269804f73269b03b Mon Sep 17 00:00:00 2001
From: Renning Bruns
Date: Fri, 21 Jul 2023 11:11:48 -0700
Subject: [PATCH 14/16] similar comment in http_client

---
 src/sources/http_client/client.rs | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/src/sources/http_client/client.rs b/src/sources/http_client/client.rs
index 3b852458b8b2d..481f1c697af84 100644
--- a/src/sources/http_client/client.rs
+++ b/src/sources/http_client/client.rs
@@ -52,7 +52,9 @@ pub struct HttpClientConfig {
     #[configurable(metadata(docs::examples = "http://127.0.0.1:9898/logs"))]
     pub endpoint: String,
 
-    /// The interval between scrapes. Requests run concurrently.
+    /// The interval between scrapes. Requests are run concurrently, so if a scrape takes longer
+    /// than the interval, a new scrape will be started. This can consume extra resources; set the
+    /// timeout to a value lower than the scrape interval to prevent this from happening.
#[serde(default = "default_interval")] #[serde_as(as = "serde_with::DurationSeconds")] #[serde(rename = "scrape_interval_secs")] From 57fd7f268f7ee6540d9348ec4b3e2c6ef16d9f96 Mon Sep 17 00:00:00 2001 From: Renning Bruns Date: Fri, 21 Jul 2023 11:13:43 -0700 Subject: [PATCH 15/16] regenerate docs --- .../cue/reference/components/sources/base/http_client.cue | 8 ++++++-- .../components/sources/base/prometheus_scrape.cue | 8 ++++++-- 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/website/cue/reference/components/sources/base/http_client.cue b/website/cue/reference/components/sources/base/http_client.cue index 7613c8ddd4170..a7e0fa5f37d23 100644 --- a/website/cue/reference/components/sources/base/http_client.cue +++ b/website/cue/reference/components/sources/base/http_client.cue @@ -309,8 +309,12 @@ base: components: sources: http_client: configuration: { } } scrape_interval_secs: { - description: "The interval between scrapes. Requests run concurrently." - required: false + description: """ + The interval between scrapes. Requests are run concurrently so if a scrape takes longer + than the interval a new scrape will be started. This can take extra resources, set the timeout + to a value lower than the scrape interval to prevent this from happening. + """ + required: false type: uint: { default: 15 unit: "seconds" diff --git a/website/cue/reference/components/sources/base/prometheus_scrape.cue b/website/cue/reference/components/sources/base/prometheus_scrape.cue index ad479e4313c83..9e2aa806ce989 100644 --- a/website/cue/reference/components/sources/base/prometheus_scrape.cue +++ b/website/cue/reference/components/sources/base/prometheus_scrape.cue @@ -104,8 +104,12 @@ base: components: sources: prometheus_scrape: configuration: { } } scrape_interval_secs: { - description: "The interval between scrapes. Requests run concurrently." - required: false + description: """ + The interval between scrapes. Requests are run concurrently so if a scrape takes longer + than the interval a new scrape will be started. This can take extra resources, set the timeout + to a value lower than the scrape interval to prevent this from happening. 
+ """ + required: false type: uint: { default: 15 unit: "seconds" From bb01ed0efe5b6410fcecd5b33ae159d9653e3f92 Mon Sep 17 00:00:00 2001 From: Renning Bruns Date: Fri, 21 Jul 2023 16:51:30 -0700 Subject: [PATCH 16/16] make `make check-clippy` happy --- src/sources/http_client/integration_tests.rs | 13 ++++++++++++- src/sources/prometheus/scrape.rs | 1 + 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/src/sources/http_client/integration_tests.rs b/src/sources/http_client/integration_tests.rs index 4bd5552ccaf67..495941ed25d9e 100644 --- a/src/sources/http_client/integration_tests.rs +++ b/src/sources/http_client/integration_tests.rs @@ -19,7 +19,7 @@ use codecs::decoding::DeserializerConfig; use vector_core::config::log_schema; use super::{ - tests::{run_compliance, INTERVAL}, + tests::{run_compliance, INTERVAL, TIMEOUT}, HttpClientConfig, }; @@ -53,6 +53,7 @@ async fn invalid_endpoint() { run_error(HttpClientConfig { endpoint: "http://nope".to_string(), interval: INTERVAL, + timeout: TIMEOUT, query: HashMap::new(), decoding: default_decoding(), framing: default_framing_message_based(), @@ -71,6 +72,7 @@ async fn collected_logs_bytes() { let events = run_compliance(HttpClientConfig { endpoint: format!("{}/logs/bytes", dufs_address()), interval: INTERVAL, + timeout: TIMEOUT, query: HashMap::new(), decoding: DeserializerConfig::Bytes, framing: default_framing_message_based(), @@ -95,6 +97,7 @@ async fn collected_logs_json() { let events = run_compliance(HttpClientConfig { endpoint: format!("{}/logs/json.json", dufs_address()), interval: INTERVAL, + timeout: TIMEOUT, query: HashMap::new(), decoding: DeserializerConfig::Json(Default::default()), framing: default_framing_message_based(), @@ -119,6 +122,7 @@ async fn collected_metrics_native_json() { let events = run_compliance(HttpClientConfig { endpoint: format!("{}/metrics/native.json", dufs_address()), interval: INTERVAL, + timeout: TIMEOUT, query: HashMap::new(), decoding: DeserializerConfig::NativeJson(Default::default()), framing: default_framing_message_based(), @@ -148,6 +152,7 @@ async fn collected_trace_native_json() { let events = run_compliance(HttpClientConfig { endpoint: format!("{}/traces/native.json", dufs_address()), interval: INTERVAL, + timeout: TIMEOUT, query: HashMap::new(), decoding: DeserializerConfig::NativeJson(Default::default()), framing: default_framing_message_based(), @@ -172,6 +177,7 @@ async fn unauthorized_no_auth() { run_error(HttpClientConfig { endpoint: format!("{}/logs/json.json", dufs_auth_address()), interval: INTERVAL, + timeout: TIMEOUT, query: HashMap::new(), decoding: DeserializerConfig::Json(Default::default()), framing: default_framing_message_based(), @@ -190,6 +196,7 @@ async fn unauthorized_wrong_auth() { run_error(HttpClientConfig { endpoint: format!("{}/logs/json.json", dufs_auth_address()), interval: INTERVAL, + timeout: TIMEOUT, query: HashMap::new(), decoding: DeserializerConfig::Json(Default::default()), framing: default_framing_message_based(), @@ -211,6 +218,7 @@ async fn authorized() { run_compliance(HttpClientConfig { endpoint: format!("{}/logs/json.json", dufs_auth_address()), interval: INTERVAL, + timeout: TIMEOUT, query: HashMap::new(), decoding: DeserializerConfig::Json(Default::default()), framing: default_framing_message_based(), @@ -232,6 +240,7 @@ async fn tls_invalid_ca() { run_error(HttpClientConfig { endpoint: format!("{}/logs/json.json", dufs_https_address()), interval: INTERVAL, + timeout: TIMEOUT, query: HashMap::new(), decoding: 
DeserializerConfig::Json(Default::default()), framing: default_framing_message_based(), @@ -253,6 +262,7 @@ async fn tls_valid() { run_compliance(HttpClientConfig { endpoint: format!("{}/logs/json.json", dufs_https_address()), interval: INTERVAL, + timeout: TIMEOUT, query: HashMap::new(), decoding: DeserializerConfig::Json(Default::default()), framing: default_framing_message_based(), @@ -275,6 +285,7 @@ async fn shutdown() { let source = HttpClientConfig { endpoint: format!("{}/logs/json.json", dufs_address()), interval: INTERVAL, + timeout: TIMEOUT, query: HashMap::new(), decoding: DeserializerConfig::Json(Default::default()), framing: default_framing_message_based(), diff --git a/src/sources/prometheus/scrape.rs b/src/sources/prometheus/scrape.rs index 35aa5f5f75d6e..408cde50097e9 100644 --- a/src/sources/prometheus/scrape.rs +++ b/src/sources/prometheus/scrape.rs @@ -773,6 +773,7 @@ mod integration_tests { let config = PrometheusScrapeConfig { endpoints: vec!["http://prometheus:9090/metrics".into()], interval: Duration::from_secs(1), + timeout: Duration::from_secs(1), instance_tag: Some("instance".to_string()), endpoint_tag: Some("endpoint".to_string()), honor_labels: false,
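
A note on the final shape of the change: the series settles on wrapping the in-flight `client.send(request)` future in `tokio::time::timeout` and surfacing the elapsed case as an ordinary error string. Below is a minimal, self-contained sketch of that pattern; the `fake_request` helper and the concrete durations are hypothetical stand-ins, not code from the patches.

```rust
use std::time::Duration;

// Hypothetical stand-in for `client.send(request)`: a request that takes 2s.
async fn fake_request() -> Result<&'static str, String> {
    tokio::time::sleep(Duration::from_secs(2)).await;
    Ok("response body")
}

// Mirrors the match in the final version of `call`: the outer `Result` comes
// from the timeout, the inner one from the request itself.
async fn send_with_timeout(timeout: Duration) -> Result<&'static str, String> {
    match tokio::time::timeout(timeout, fake_request()).await {
        Ok(Ok(response)) => Ok(response),
        Ok(Err(error)) => Err(error),
        // `tokio::time::timeout` yields `Err(Elapsed)` when the deadline passes first.
        Err(_) => Err(format!(
            "Timeout error: request exceeded {}s",
            timeout.as_secs_f64()
        )),
    }
}

#[tokio::main]
async fn main() {
    // A 1s deadline against a 2s request takes the `Elapsed` branch.
    println!("{:?}", send_with_timeout(Duration::from_secs(1)).await);
}
```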
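The other behavioral change worth calling out is in PATCH 01: the outer stream switches from `.flatten()` to `.flatten_unordered(None)`, so one slow scrape no longer serializes the others, which is also why the doc comments now warn that requests run concurrently. A rough sketch of the difference, again with hypothetical streams standing in for the per-endpoint scrape streams, using only `tokio` and the `futures` combinators:

```rust
use std::time::Duration;

use futures::stream::{self, StreamExt};

#[tokio::main]
async fn main() {
    // Two stand-in "scrape" streams: the first is slow, the second is fast.
    let slow = stream::once(async {
        tokio::time::sleep(Duration::from_millis(200)).await;
        "slow"
    })
    .boxed();
    let fast = stream::once(async { "fast" }).boxed();

    // `.flatten()` polls the inner streams one at a time, so the slow stream
    // would gate the fast one. `.flatten_unordered(None)` polls them all
    // concurrently (no limit) and yields items as they become ready.
    let results: Vec<_> = stream::iter(vec![slow, fast])
        .flatten_unordered(None)
        .collect()
        .await;

    assert_eq!(results, vec!["fast", "slow"]);
}
```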