feat: range request support #330

Merged · 5 commits · Oct 14, 2022
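For context, the new behavior can be exercised end to end with a single ranged GET against a running gateway. The sketch below is illustrative only: the gateway address, port, and CID are placeholders, and reqwest is used purely as a convenient HTTP client; none of it is code from this PR. A valid single-range request should come back as 206 Partial Content with a Content-Range header and a range-suffixed ETag.

// Illustrative only: issue a ranged request against a locally running gateway.
// The address, port, and CID below are placeholders, not values from this PR.
// Assumes reqwest and tokio (with "macros" and "rt-multi-thread") as dependencies.
use reqwest::header::{CONTENT_RANGE, ETAG, RANGE};

#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    let url = "http://127.0.0.1:9050/ipfs/<cid>"; // hypothetical gateway endpoint
    let resp = reqwest::Client::new()
        .get(url)
        .header(RANGE, "bytes=0-1024")
        .send()
        .await?;

    // With this change, a valid single range yields 206 Partial Content,
    // a Content-Range header, and an ETag carrying a range suffix.
    println!("status: {}", resp.status());
    println!("content-range: {:?}", resp.headers().get(CONTENT_RANGE));
    println!("etag: {:?}", resp.headers().get(ETAG));
    Ok(())
}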
2 changes: 1 addition & 1 deletion iroh-api/src/api.rs
@@ -117,7 +117,7 @@ impl Api for Iroh {
if out.is_dir() {
yield (relative_path, OutType::Dir);
} else {
let reader = out.pretty(resolver.clone(), Default::default())?;
let reader = out.pretty(resolver.clone(), Default::default(), iroh_resolver::resolver::ResponseClip::NoClip)?;
yield (relative_path, OutType::Reader(Box::new(reader)));
}
}
33 changes: 28 additions & 5 deletions iroh-gateway/src/client.rs
@@ -1,3 +1,4 @@
use std::ops::Range;
use std::pin::Pin;
use std::task::Poll;

@@ -14,10 +15,10 @@ use iroh_metrics::{
};
use iroh_resolver::resolver::{
CidOrDomain, ContentLoader, Metadata, Out, OutMetrics, OutPrettyReader, OutType, Resolver,
Source,
ResponseClip, Source,
};
use mime::Mime;
use tokio::io::{AsyncBufReadExt, AsyncReadExt, AsyncWrite};
use tokio::io::{AsyncBufReadExt, AsyncReadExt, AsyncSeekExt, AsyncWrite};
use tokio_util::io::ReaderStream;
use tracing::{info, warn};

@@ -46,6 +47,10 @@ impl<T: ContentLoader> PrettyStreamBody<T> {
pub fn get_mime(&self) -> Option<Mime> {
self.2.clone()
}

pub fn get_size(&self) -> Option<u64> {
self.1
}
}

impl<T: ContentLoader + std::marker::Unpin> http_body::Body for PrettyStreamBody<T> {
@@ -93,6 +98,7 @@ impl<T: ContentLoader + std::marker::Unpin> Client<T> {
&self,
path: iroh_resolver::resolver::Path,
start_time: std::time::Instant,
range: Option<Range<u64>>,
) -> Result<(FileResult<T>, Metadata), String> {
info!("get file {}", path);
let res = self
@@ -107,13 +113,27 @@ impl<T: ContentLoader + std::marker::Unpin> Client<T> {
let body = FileResult::Directory(res);
Ok((body, metadata))
} else {
let mut clip = 0;
if let Some(range) = &range {
clip = range.end as usize;
}
let reader = res
.pretty(self.resolver.clone(), OutMetrics { start: start_time })
.pretty(
self.resolver.clone(),
OutMetrics { start: start_time },
ResponseClip::from(clip),
)
.map_err(|e| e.to_string())?;

let mut buf_reader = tokio::io::BufReader::with_capacity(1024 * 1024, reader);
let body_sample = buf_reader.fill_buf().await.map_err(|e| e.to_string())?;
let mime = sniff_content_type(body_sample);
if let Some(range) = range {
buf_reader
.seek(tokio::io::SeekFrom::Start(range.start))
.await
.map_err(|e| e.to_string())?;
}
let stream = ReaderStream::new(buf_reader);

let body = PrettyStreamBody(stream, metadata.size, Some(mime));
@@ -164,8 +184,11 @@ impl<T: ContentLoader + std::marker::Unpin> Client<T> {
Ok(res) => {
let metadata = res.metadata().clone();
record_ttfb_metrics(start_time, &metadata.source);
let reader =
res.pretty(self.resolver.clone(), OutMetrics { start: start_time });
let reader = res.pretty(
self.resolver.clone(),
OutMetrics { start: start_time },
ResponseClip::NoClip,
);
match reader {
Ok(mut reader) => {
let mut bytes = Vec::new();
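A note on the ordering inside `get_file` above: the reader is buffered so the first bytes can be sniffed for a content type, and only afterwards is it seeked to `range.start` before streaming. The standalone sketch below reproduces that sniff-then-seek pattern with an in-memory cursor in place of the resolver's `OutPrettyReader`; it assumes only tokio and is not code from this PR.

use std::io::Cursor;
use tokio::io::{AsyncBufReadExt, AsyncReadExt, AsyncSeekExt, BufReader, SeekFrom};

#[tokio::main]
async fn main() -> std::io::Result<()> {
    // Stand-in for the resolved content; the real code wraps an OutPrettyReader.
    let data = Cursor::new(b"hello, ranged world".to_vec());
    let mut reader = BufReader::with_capacity(1024, data);

    // Fill the buffer once so the content type can be sniffed from the first bytes.
    let sample = reader.fill_buf().await?;
    println!("sniffed {} bytes", sample.len());

    // Then seek to the start of the requested range before streaming the body.
    // The upper bound of the range is what ResponseClip enforces in the real code.
    let range = 7u64..19u64;
    reader.seek(SeekFrom::Start(range.start)).await?;

    let mut body = Vec::new();
    reader.read_to_end(&mut body).await?;
    assert_eq!(&body[..], b"ranged world");
    Ok(())
}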
61 changes: 50 additions & 11 deletions iroh-gateway/src/handlers.rs
@@ -3,7 +3,7 @@ use axum::{
body::{self, Body, HttpBody},
error_handling::HandleErrorLayer,
extract::{Extension, Path, Query},
http::{header::*, StatusCode},
http::{header::*, Request as HttpRequest, StatusCode},
response::IntoResponse,
routing::get,
BoxError, Router,
@@ -28,6 +28,7 @@ use std::{
collections::HashMap,
error::Error,
fmt::Write,
ops::Range,
sync::Arc,
time::{self, Duration},
};
@@ -121,6 +122,7 @@ pub async fn get_handler<T: ContentLoader + std::marker::Unpin>(
Path(params): Path<HashMap<String, String>>,
Query(query_params): Query<GetParams>,
method: http::Method,
http_req: HttpRequest<Body>,
request_headers: HeaderMap,
) -> Result<GatewayResponse, GatewayError> {
inc!(GatewayMetrics::Requests);
@@ -159,6 +161,7 @@ pub async fn get_handler<T: ContentLoader + std::marker::Unpin>(
.parse()
.map_err(|e: anyhow::Error| e.to_string())
.map_err(|e| error(StatusCode::BAD_REQUEST, &e, &state))?;
// TODO: handle 404 or error
let resolved_cid = resolved_path.root();

if handle_only_if_cached(&request_headers, &state, resolved_cid).await? {
@@ -220,9 +223,9 @@ pub async fn get_handler<T: ContentLoader + std::marker::Unpin>(
serve_car_recursive(&req, state, headers, start_time).await
} else {
match req.format {
ResponseFormat::Raw => serve_raw(&req, state, headers, start_time).await,
ResponseFormat::Raw => serve_raw(&req, state, headers, &http_req, start_time).await,
ResponseFormat::Car => serve_car(&req, state, headers, start_time).await,
ResponseFormat::Fs(_) => serve_fs(&req, state, headers, start_time).await,
ResponseFormat::Fs(_) => serve_fs(&req, state, headers, &http_req, start_time).await,
}
}
}
@@ -404,12 +407,18 @@ async fn serve_raw<T: ContentLoader + std::marker::Unpin>(
req: &Request,
state: Arc<State<T>>,
mut headers: HeaderMap,
http_req: &HttpRequest<Body>,
start_time: std::time::Instant,
) -> Result<GatewayResponse, GatewayError> {
let range: Option<Range<u64>> = if http_req.headers().contains_key(RANGE) {
parse_range_header(http_req.headers().get(RANGE).unwrap())
} else {
None
};
// FIXME: we currently only retrieve full cids
let (body, metadata) = state
.client
.get_file(req.resolved_path.clone(), start_time)
.get_file(req.resolved_path.clone(), start_time, range.clone())
.await
.map_err(|e| error(StatusCode::INTERNAL_SERVER_ERROR, &e, &state))?;

@@ -423,8 +432,18 @@
set_content_disposition_headers(&mut headers, &file_name, DISPOSITION_ATTACHMENT);
set_etag_headers(&mut headers, get_etag(&req.cid, Some(req.format.clone())));
add_cache_control_headers(&mut headers, metadata.clone());
add_ipfs_roots_headers(&mut headers, metadata);
response(StatusCode::OK, body, headers)
add_ipfs_roots_headers(&mut headers, metadata.clone());

if let Some(mut capped_range) = range {
if let Some(size) = metadata.size {
capped_range.end = std::cmp::min(capped_range.end, size);
}
add_etag_range(&mut headers, capped_range.clone());
add_content_range_headers(&mut headers, capped_range, metadata.size);
response(StatusCode::PARTIAL_CONTENT, body, headers)
} else {
response(StatusCode::OK, body, headers)
}
}
FileResult::Directory(_) => Err(error(
StatusCode::INTERNAL_SERVER_ERROR,
@@ -445,7 +464,7 @@ async fn serve_car<T: ContentLoader + std::marker::Unpin>(
// FIXME: we currently only retrieve full cids
let (body, metadata) = state
.client
.get_file(req.resolved_path.clone(), start_time)
.get_file(req.resolved_path.clone(), start_time, None)
.await
.map_err(|e| error(StatusCode::INTERNAL_SERVER_ERROR, &e, &state))?;

@@ -509,12 +528,19 @@ async fn serve_fs<T: ContentLoader + std::marker::Unpin>(
req: &Request,
state: Arc<State<T>>,
mut headers: HeaderMap,
http_req: &HttpRequest<Body>,
start_time: std::time::Instant,
) -> Result<GatewayResponse, GatewayError> {
let range: Option<Range<u64>> = if http_req.headers().contains_key(RANGE) {
parse_range_header(http_req.headers().get(RANGE).unwrap())
} else {
None
};

// FIXME: we currently only retrieve full cids
let (body, metadata) = state
.client
.get_file(req.resolved_path.clone(), start_time)
.get_file(req.resolved_path.clone(), start_time, range.clone())
.await
.map_err(|e| error(StatusCode::INTERNAL_SERVER_ERROR, &e, &state))?;

@@ -528,7 +554,9 @@
.try_collect()
.await;
match dir_list {
Ok(dir_list) => serve_fs_dir(&dir_list, req, state, headers, start_time).await,
Ok(dir_list) => {
serve_fs_dir(&dir_list, req, state, headers, http_req, start_time).await
}
Err(e) => {
tracing::warn!("failed to read dir: {:?}", e);
Err(error(
@@ -561,7 +589,17 @@
let content_sniffed_mime = body.get_mime();
add_content_type_headers(&mut headers, &name, content_sniffed_mime);
}
response(StatusCode::OK, body, headers)

if let Some(mut capped_range) = range {
if let Some(size) = metadata.size {
capped_range.end = std::cmp::min(capped_range.end, size);
}
add_etag_range(&mut headers, capped_range.clone());
add_content_range_headers(&mut headers, capped_range, metadata.size);
response(StatusCode::PARTIAL_CONTENT, body, headers)
} else {
response(StatusCode::OK, body, headers)
}
}
None => Err(error(
StatusCode::BAD_REQUEST,
@@ -594,6 +632,7 @@ async fn serve_fs_dir<T: ContentLoader + std::marker::Unpin>(
req: &Request,
state: Arc<State<T>>,
mut headers: HeaderMap,
http_req: &HttpRequest<Body>,
start_time: std::time::Instant,
) -> Result<GatewayResponse, GatewayError> {
let force_dir = req.query_params.force_dir.unwrap_or(false);
@@ -614,7 +653,7 @@
}
let mut new_req = req.clone();
new_req.resolved_path.push("index.html");
return serve_fs(&new_req, state, headers, start_time).await;
return serve_fs(&new_req, state, headers, http_req, start_time).await;
}

headers.insert(CONTENT_TYPE, HeaderValue::from_str("text/html").unwrap());
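To make the 206 path above concrete, here is a small self-contained sketch of the logic the handlers now share: cap the requested range to the file size, then derive the Content-Range value and a range-suffixed ETag from it. The numbers and the ETag value are invented for illustration; the actual header formatting lives in `add_content_range_headers` and `add_etag_range` in `headers.rs` below.

use std::ops::Range;

// Cap the requested end to the known file size, as serve_raw/serve_fs do.
fn cap_range(mut range: Range<u64>, size: Option<u64>) -> Range<u64> {
    if let Some(size) = size {
        range.end = std::cmp::min(range.end, size);
    }
    range
}

fn main() {
    // Hypothetical request: `Range: bytes=100-200` against a 150-byte file.
    let capped = cap_range(Range { start: 100, end: 200 }, Some(150));

    // Content-Range reports an inclusive last byte, hence `end - 1`.
    let content_range = format!("bytes {}-{}/{}", capped.start, capped.end - 1, 150);
    assert_eq!(content_range, "bytes 100-149/150");

    // The ETag gains a range suffix so a cached full response is not reused.
    let etag = String::from("\"abc123\"");
    let ranged_etag = format!(
        "{}.{}-{}\"",
        etag.trim_end_matches('"'),
        capped.start,
        capped.end - 1
    );
    assert_eq!(ranged_etag, "\"abc123.100-149\"");
}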
84 changes: 83 additions & 1 deletion iroh-gateway/src/headers.rs
@@ -3,7 +3,7 @@ use ::time::OffsetDateTime;
use axum::http::header::*;
use iroh_resolver::resolver::{CidOrDomain, Metadata, PathType};
use mime::Mime;
use std::{fmt::Write, time};
use std::{fmt::Write, ops::Range, time};

#[tracing::instrument()]
pub fn add_user_headers(headers: &mut HeaderMap, user_headers: HeaderMap) {
@@ -63,12 +63,47 @@ pub fn add_content_disposition_headers(

#[tracing::instrument()]
pub fn set_content_disposition_headers(headers: &mut HeaderMap, filename: &str, disposition: &str) {
// TODO: handle non-ascii filenames https://github.com/ipfs/specs/blob/main/http-gateways/PATH_GATEWAY.md#content-disposition-response-header
headers.insert(
CONTENT_DISPOSITION,
HeaderValue::from_str(&format!("{}; filename={}", disposition, filename)).unwrap(),
);
}

#[tracing::instrument()]
pub fn add_content_range_headers(headers: &mut HeaderMap, range: Range<u64>, size: Option<u64>) {
if range.end == 0 {
// this should never happen as it is checked for in parse_range_header
// but just to avoid any footguns
return;
}
let content_range = if let Some(size) = size {
format!("bytes {}-{}/{}", range.start, range.end - 1, size)
} else {
format!("bytes {}-{}/{}", range.start, range.end - 1, "*")
};
headers.insert(
CONTENT_RANGE,
HeaderValue::from_str(&content_range).unwrap(),
);
}

pub fn parse_range_header(range: &HeaderValue) -> Option<Range<u64>> {
// TODO: potentially support multiple ranges ie bytes=0-100,200-300
let range = range.to_str().ok()?;
let mut parts = range.splitn(2, '=');
if parts.next() != Some("bytes") {
return None;
}
let mut range = parts.next()?.splitn(2, '-');
let start = range.next()?.parse().ok()?;
let end = range.next()?.parse().ok()?;
if start >= end || end == 0 {
return None;
}
Some(Range { start, end })
}

#[tracing::instrument()]
pub fn add_cache_control_headers(headers: &mut HeaderMap, metadata: Metadata) {
if metadata.path.typ() == PathType::Ipns {
@@ -98,6 +133,16 @@ pub fn set_etag_headers(headers: &mut HeaderMap, etag: String) {
headers.insert(ETAG, HeaderValue::from_str(&etag).unwrap());
}

#[tracing::instrument()]
pub fn add_etag_range(headers: &mut HeaderMap, range: Range<u64>) {
if headers.contains_key(ETAG) {
let etag = headers.get(ETAG).unwrap().to_str().unwrap();
let etag = etag.trim_end_matches('"');
let etag = format!("{}.{}-{}\"", etag, range.start, range.end - 1);
headers.insert(ETAG, HeaderValue::from_str(&etag).unwrap());
}
}

#[tracing::instrument()]
pub fn get_etag(cid: &CidOrDomain, response_format: Option<ResponseFormat>) -> String {
match cid {
@@ -231,6 +276,43 @@ mod tests {
);
}

#[test]
fn parse_range_header_test() {
let range = HeaderValue::from_str("bytes=0-10").unwrap();
let r = parse_range_header(&range);
assert_eq!(r, Some(Range { start: 0, end: 10 }));

let range = HeaderValue::from_str("byts=0-10").unwrap();
let r = parse_range_header(&range);
assert_eq!(r, None);

let range = HeaderValue::from_str("bytes=0-").unwrap();
let r = parse_range_header(&range);
assert_eq!(r, None);

let range = HeaderValue::from_str("bytes=10-1").unwrap();
let r = parse_range_header(&range);
assert_eq!(r, None);

let range = HeaderValue::from_str("bytes=0-0").unwrap();
let r = parse_range_header(&range);
assert_eq!(r, None);

let range = HeaderValue::from_str("bytes=100-200").unwrap();
let r = parse_range_header(&range);
assert_eq!(
r,
Some(Range {
start: 100,
end: 200
})
);

let range = HeaderValue::from_str("bytes=0-10,20-30").unwrap();
let r = parse_range_header(&range);
assert_eq!(r, None);
}

#[test]
fn add_content_disposition_headers_test() {
// inline