Remove now-unnecessary reqwest check (#23139)
Added a regression test.

Reqwest fixed the issue upstream (seanmonstar/reqwest#668), so the explicit pre-check is no longer needed.

GitOrigin-RevId: c58c994841934fc15530a0eea612aa9dea130b3d
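
As a rough standalone illustration of why the explicit check can go away (a sketch against plain reqwest and tokio, not code from this repository, and assuming a reqwest release that includes the fix for issue 668): a URL that the url crate accepts but that cannot be converted to an http::Uri now comes back from reqwest as an ordinary error instead of a panic, which is what the new regression test below asserts.

// Sketch: an invalid-but-parseable URL should yield an Err, not a panic.
// Assumed dependencies: reqwest (with the #668 fix) and tokio.
#[tokio::main]
async fn main() {
    let client = reqwest::Client::new();
    // http://" parses as a url::Url but is not a valid http::Uri.
    let result = client.get("http://\"").send().await;
    assert!(result.is_err(), "expected an error, not a panic");
    println!("fetch failed as expected: {}", result.unwrap_err());
}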
nipunn1313 authored and Convex, Inc. committed Mar 8, 2024
1 parent 51ecc6d commit 2653b58
Showing 3 changed files with 32 additions and 12 deletions.
37 changes: 31 additions & 6 deletions crates/common/src/http/fetch.rs
@@ -1,6 +1,3 @@
-use std::str::FromStr;
-
-use anyhow::Context;
 use async_trait::async_trait;
 use futures::{
     StreamExt,
@@ -51,9 +48,6 @@ impl ProxiedFetchClient {
 #[async_trait]
 impl FetchClient for ProxiedFetchClient {
     async fn fetch(&self, request: HttpRequestStream) -> anyhow::Result<HttpResponseStream> {
-        // reqwest has a bug https://github.com/seanmonstar/reqwest/issues/668
-        // where it panics on invalid urls. Workaround by adding an explicit check.
-        http::Uri::from_str(request.url.as_str()).context("Invalid URL")?;
         let mut request_builder = self.0.request(request.method, request.url.as_str());
         let body = Body::wrap_stream(request.body);
         request_builder = request_builder.body(body);
@@ -83,3 +77,34 @@ impl FetchClient for ProxiedFetchClient {
 pub enum InternalFetchPurpose {
     UsageTracking,
 }
+
+#[cfg(test)]
+mod tests {
+    use http::Method;
+
+    use super::ProxiedFetchClient;
+    use crate::http::{
+        fetch::FetchClient,
+        HttpRequest,
+    };
+
+    #[tokio::test]
+    async fn test_fetch_bad_url() -> anyhow::Result<()> {
+        let client = ProxiedFetchClient::new(None);
+        let request = HttpRequest {
+            headers: Default::default(),
+            url: "http://\"".parse()?,
+            method: Method::GET,
+            body: None,
+        };
+        let Err(err) = client.fetch(request.into()).await else {
+            panic!("Expected Invalid URL error");
+        };
+
+        // Ensure it doesn't panic. Regression test for
+        // https://github.com/seanmonstar/reqwest/issues/668.
+        assert!(err.to_string().contains("Parsed Url is not a valid Uri"));
+
+        Ok(())
+    }
+}
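
The new test can be run on its own with cargo's name filter, e.g. cargo test -p common test_fetch_bad_url (assuming the crate under crates/common is named common in the workspace; adjust the -p argument otherwise).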
5 changes: 0 additions & 5 deletions crates/runtime/src/prod.rs
@@ -9,7 +9,6 @@ use std::{
         Sub,
     },
     pin::Pin,
-    str::FromStr,
     sync::LazyLock,
     thread,
     time::{
@@ -19,7 +18,6 @@ use std::{
 };

 use ::metrics::CONVEX_METRICS_REGISTRY;
-use anyhow::Context;
 use async_trait::async_trait;
 use common::{
     heap_size::HeapSize,
@@ -277,9 +275,6 @@ impl Runtime for ProdRuntime {
         request: HttpRequestStream,
         _purpose: InternalFetchPurpose,
     ) -> anyhow::Result<HttpResponseStream> {
-        // reqwest has a bug https://github.com/seanmonstar/reqwest/issues/668
-        // where it panics on invalid urls. Workaround by adding an explicit check.
-        http::Uri::from_str(request.url.as_str()).context("Invalid URL")?;
         let mut request_builder = self
             .internal_http_client
             .request(request.method, request.url.as_str());
2 changes: 1 addition & 1 deletion npm-packages/udf-tests/convex/fetch.ts
@@ -140,7 +140,7 @@ async function fetchInvalidUriError() {

 async function fetchMalformedUriError() {
   const url = new URL("http://{{google/");
-  await expect(fetch(url)).to.be.rejectedWith(/Invalid URL/);
+  await expect(fetch(url)).to.be.rejectedWith(/Parsed Url is not a valid Uri/);
 }

 async function fetchJson() {
