Commit 35dc714

Fix uploads of large crates to S3 breaking the client
Currently we reuse a single S3 client for every file uploaded in one call to add_path_into_database. Unfortunately, S3 does not allow connections to stay open indefinitely (the exact timeout is unknown, but we hit it relatively rarely). This change retries each S3 upload up to 3 times, replacing the client after every failed attempt.
1 parent 635e91e commit 35dc714

1 file changed: +30 / -17 lines

src/db/file.rs

Lines changed: 30 additions & 17 deletions
@@ -121,7 +121,7 @@ pub fn add_path_into_database<P: AsRef<Path>>(conn: &Connection,
     try!(cookie.load::<&str>(&[]));
 
     let trans = try!(conn.transaction());
-    let client = s3_client();
+    let mut client = s3_client();
     let mut file_list_with_mimes: Vec<(String, PathBuf)> = Vec::new();
 
     for file_path in try!(get_file_list(&path)) {
@@ -158,22 +158,35 @@ pub fn add_path_into_database<P: AsRef<Path>>(conn: &Connection,
             }
         };
 
-        let content: Option<Vec<u8>> = if let Some(client) = &client {
-            let s3_res = client.put_object(PutObjectRequest {
-                bucket: "rust-docs-rs".into(),
-                key: bucket_path.clone(),
-                body: Some(content.clone().into()),
-                content_type: Some(mime.clone()),
-                ..Default::default()
-            }).sync();
-            match s3_res {
-                // we've successfully uploaded the content, so steal it;
-                // we don't want to put it in the DB
-                Ok(_) => None,
-                // Since s3 was configured, we want to panic on failure to upload.
-                Err(e) => {
-                    panic!("failed to upload to {}: {:?}", bucket_path, e)
-                },
+        let content: Option<Vec<u8>> = if let Some(client) = &mut client {
+            let mut attempts = 0;
+            loop {
+                let s3_res = client.put_object(PutObjectRequest {
+                    bucket: "rust-docs-rs".into(),
+                    key: bucket_path.clone(),
+                    body: Some(content.clone().into()),
+                    content_type: Some(mime.clone()),
+                    ..Default::default()
+                }).sync();
+                attempts += 1;
+                match s3_res {
+                    // we've successfully uploaded the content, so steal it;
+                    // we don't want to put it in the DB
+                    Ok(_) => break None,
+                    // Since s3 was configured, we want to panic on failure to upload.
+                    Err(e) => {
+                        log::error!("failed to upload to {}: {:?}", bucket_path, e);
+                        // Get a new client, in case the old one's connection is stale.
+                        // AWS will kill our connection if it's alive for too long; this avoids
+                        // that preventing us from building the crate entirely.
+                        *client = s3_client().unwrap();
+                        if attempts > 3 {
+                            panic!("failed to upload 3 times, exiting");
+                        } else {
+                            continue;
+                        }
+                    },
+                }
             }
         } else {
             Some(content.clone().into())
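
As a reading aid, the retry-and-replace logic from the diff can be pictured as a standalone helper. The sketch below is illustrative only: it assumes the rusoto S3Client / PutObjectRequest / .sync() API and the crate's s3_client() constructor seen in the diff, and the upload_with_retry function itself is hypothetical, not part of this commit.

use rusoto_s3::{PutObjectRequest, S3, S3Client};

// Hypothetical helper, not in the commit: upload `body` to `key`, retrying up to
// `max_attempts` times and rebuilding the client after every failure so that a
// connection S3 has silently closed cannot block the whole crate build.
fn upload_with_retry(
    client: &mut S3Client,
    bucket: &str,
    key: &str,
    body: Vec<u8>,
    content_type: &str,
    max_attempts: u32,
) -> Result<(), String> {
    for attempt in 1..=max_attempts {
        let res = client.put_object(PutObjectRequest {
            bucket: bucket.to_string(),
            key: key.to_string(),
            body: Some(body.clone().into()),
            content_type: Some(content_type.to_string()),
            ..Default::default()
        }).sync();

        match res {
            // Success: the object is in S3, nothing more to do.
            Ok(_) => return Ok(()),
            Err(e) => {
                log::error!("attempt {} failed to upload {}: {:?}", attempt, key, e);
                // Replace the client before the next attempt, in case AWS closed
                // the previous connection for being open too long. As in the diff,
                // s3_client() is assumed to return Option<S3Client>.
                *client = s3_client().expect("could not rebuild the S3 client");
            }
        }
    }
    Err(format!("failed to upload {} after {} attempts", key, max_attempts))
}

Rebuilding the client on every failure is a blunt but simple remedy; tuning connection keep-alive on the underlying HTTP client might also work, but replacing the client keeps the change local to the upload path.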
