Skip to content

Commit

Permalink
Merge pull request #38 from MassivDash/9.6
Browse files Browse the repository at this point in the history
  • Loading branch information
MassivDash authored Jul 26, 2023
2 parents 7381dbb + 0d78fe1 commit 2962559
Show file tree
Hide file tree
Showing 10 changed files with 269 additions and 43 deletions.
1 change: 1 addition & 0 deletions src-tauri/Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions src-tauri/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@ cached = "0.44.0"
thiserror = "1.0.37"
itertools = "*"
walkdir = "*"
aws-smithy-http = "*"

[target.'cfg(unix)'.dependencies]
fork = "0.1"
Expand Down
155 changes: 142 additions & 13 deletions src-tauri/src/libs/s3/put/files.rs
Original file line number Diff line number Diff line change
@@ -1,28 +1,155 @@
use crate::libs::s3::utils::get_file_name::get_file_name;
use crate::libs::s3::utils::response_error::create_error;
use crate::libs::s3::{client::client::create_client, utils::response_error::ResponseError};
use aws_sdk_s3::primitives::ByteStream;
use crate::libs::tauri::operations::get_file_size::get_file_size;

use aws_sdk_s3::types::{CompletedMultipartUpload, CompletedPart};
use aws_sdk_s3::Client;
use aws_smithy_http::byte_stream::{ByteStream, Length};
use std::{error::Error, fs, path::Path};
use tauri::Window;
use walkdir::WalkDir;

// AWS recommends using multipart uploads for objects larger than 100 MB.
// Files at or below this threshold go through a single put_object call.
const MULTIPART_THRESHOLD: u64 = 1024 * 1024 * 100;

// Size of each multipart chunk (10 MB).
//
// AWS recommends part sizes between 5 MB and 15 MB for most scenarios:
// smaller parts produce many requests, which increases the likelihood of
// errors and slows the upload down; larger parts also raise error rates
// and can be slower on poor networks or when the client machine lacks
// the memory to buffer them.

const CHUNK_SIZE: u64 = 1024 * 1024 * 10;

// Maximum number of chunks allowed per upload.
// Caps resource usage (the original note cites stack overflows on
// lower-end systems) — with 10 MB chunks this limits uploads to ~100 GB.
const MAX_CHUNKS: u64 = 10000;

/// Upload one local file to S3, picking a simple `put_object` for small
/// files and a chunked multipart upload for files larger than
/// `MULTIPART_THRESHOLD`.
///
/// Progress is reported to the frontend via `window` events:
/// - `event-upload-file` with the bare file name when an upload starts;
/// - `event-multipart-upload-file` with a "name ... currently at N%"
///   message after each completed part (multipart path only), and an
///   empty payload once the multipart upload finishes.
///
/// # Errors
/// Propagates S3/SDK failures, `ByteStream` construction failures,
/// failed window emits, a missing upload id, and files that would need
/// more than `MAX_CHUNKS` parts (instead of panicking).
pub async fn put_file(
    window: &Window,
    client: &Client,
    bucket_name: String,
    file_name: String,
    key: String,
) -> Result<(), Box<dyn Error>> {
    // Bare file name (no directory part) used in progress messages.
    let actual_file_name = get_file_name(&file_name);
    // An unreadable size defaults to 0, which routes the file through the
    // simple single-request path below.
    let file_size = get_file_size(Path::new(&file_name)).unwrap_or_default();

    if file_size > MULTIPART_THRESHOLD {
        // Announce the file and seed the multipart progress bar at 0%.
        window.emit("event-upload-file", &actual_file_name)?;
        window.emit(
            "event-multipart-upload-file",
            format!("{} ... currently at 0%", actual_file_name),
        )?;

        let multipart_upload = client
            .create_multipart_upload()
            .bucket(bucket_name.clone())
            .key(key.clone())
            .send()
            .await?;

        // The upload id ties every part (and the final completion call)
        // to this multipart upload.
        let upload_id = multipart_upload
            .upload_id()
            .ok_or("S3 did not return a multipart upload id")?;

        // Split the file into CHUNK_SIZE parts; the final part carries the
        // remainder (or a full CHUNK_SIZE when the size divides evenly).
        let mut chunk_count = (file_size / CHUNK_SIZE) + 1;
        let mut size_of_last_chunk = file_size % CHUNK_SIZE;
        if size_of_last_chunk == 0 {
            size_of_last_chunk = CHUNK_SIZE;
            chunk_count -= 1;
        }

        // Surface an error instead of panicking when the part cap is hit.
        if chunk_count > MAX_CHUNKS {
            return Err("Too many chunks! Try increasing your chunk size.".into());
        }

        let mut parts = Vec::with_capacity(chunk_count as usize);

        for chunk_index in 0..chunk_count {
            let this_chunk = if chunk_count - 1 == chunk_index {
                size_of_last_chunk
            } else {
                CHUNK_SIZE
            };

            // Stream only this part's byte range from disk.
            let stream = ByteStream::read_from()
                .path(&file_name)
                .offset(chunk_index * CHUNK_SIZE)
                .length(Length::Exact(this_chunk))
                .build()
                .await?;

            // Chunk indices start at 0, but S3 part numbers start at 1.
            let part_number = (chunk_index as i32) + 1;

            let upload_part_res = client
                .upload_part()
                .key(&key)
                .bucket(&bucket_name)
                .upload_id(upload_id)
                .body(stream)
                .part_number(part_number)
                .send()
                .await?;

            // Report rounded progress after each completed part
            // (chunk_index + 1 because the index is 0-based).
            let percentage = ((chunk_index + 1) as f64 / chunk_count as f64) * 100.0;
            window.emit(
                "event-multipart-upload-file",
                format!("{} ... currently at {}%", actual_file_name, percentage.round()),
            )?;

            // Collect each part's ETag; S3 needs the full list to assemble
            // the object when the upload is completed.
            parts.push(
                CompletedPart::builder()
                    .e_tag(upload_part_res.e_tag.unwrap_or_default())
                    .part_number(part_number)
                    .build(),
            );
        }

        let completed_multipart_upload: CompletedMultipartUpload =
            CompletedMultipartUpload::builder()
                .set_parts(Some(parts))
                .build();

        client
            .complete_multipart_upload()
            .bucket(bucket_name.clone())
            .key(key.clone())
            .upload_id(upload_id)
            .multipart_upload(completed_multipart_upload)
            .send()
            .await?;

        // An empty payload clears the multipart progress indicator.
        window.emit("event-multipart-upload-file", "")?;

        Ok(())
    } else {
        window.emit("event-upload-file", &actual_file_name)?;

        // Small file: a single put_object streams the whole file at once.
        let body = ByteStream::from_path(Path::new(&file_name)).await?;
        client
            .put_object()
            .bucket(bucket_name)
            .key(key)
            .body(body)
            .send()
            .await?;

        Ok(())
    }
}

#[tauri::command]
Expand Down Expand Up @@ -55,6 +182,7 @@ pub async fn put_files(

if !filename.starts_with(".") {
match put_file(
&window,
&client,
bucket_name.to_string(),
fil.path().display().to_string(),
Expand All @@ -63,7 +191,7 @@ pub async fn put_files(
.await
{
Ok(_) => {
window.emit("event-upload-file", &filename).unwrap();
println!("uploaded {}", filename);
}
Err(err) => {
return Err(create_error(
Expand All @@ -80,6 +208,7 @@ pub async fn put_files(

let key = folder_name.to_string() + "/" + filename;
match put_file(
&window,
&client,
bucket_name.to_string(),
file.clone(),
Expand All @@ -88,7 +217,7 @@ pub async fn put_files(
.await
{
Ok(_) => {
window.emit("event-upload-file", &filename).unwrap();
println!("uploaded {}", filename);
}
Err(err) => {
return Err(create_error("Error uploading file".into(), err.to_string()));
Expand Down
9 changes: 9 additions & 0 deletions src-tauri/src/libs/tauri/operations/get_file_size.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
use std::fs::metadata;
use std::path::Path;

/// Return the size in bytes of the file at `path`, or `None` if its
/// metadata cannot be read (e.g. the file does not exist or permission
/// is denied).
///
/// Generalized to accept anything path-like (`&Path`, `&str`, `String`,
/// `PathBuf`, ...); existing `&Path` callers are unaffected.
pub fn get_file_size(path: impl AsRef<Path>) -> Option<u64> {
    metadata(path).ok().map(|meta| meta.len())
}
1 change: 1 addition & 0 deletions src-tauri/src/libs/tauri/operations/mod.rs
Original file line number Diff line number Diff line change
@@ -1,2 +1,3 @@
// Helper operations exposed to the tauri layer — presumably one
// submodule per operation; verify against src-tauri/src/libs/tauri.
pub mod close_splashscreen;
pub mod get_file_size;
pub mod show_folder;
6 changes: 3 additions & 3 deletions src/components/circularProgress/circularProgress.svelte
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
<script lang="ts">
export let progress = 0;
$: angle = 360 * progress;
$: procentage = (progress * 100).toFixed(0) + "%";
$: angle = (360 * progress) / 100;
$: percentage = `${progress}%`;
// Adapt the logic according to the approach
$: background = `radial-gradient(white 50%, transparent 51%),
conic-gradient(transparent 0deg ${angle}deg, gainsboro ${angle}deg 360deg),
Expand All @@ -14,7 +14,7 @@
<div
class="flex items-center justify-center rounded-full w-20 h-20 text-lg dark:bg-slate-800 dark:text-white gap-2 border-0 outline-orange-500 bg-none transition-all hover:bg-gray-50 hover:dark:bg-slate-700 hover:dark:text-orange-50 hover:text-gray-800 active:bg-gray-200"
>
{procentage}
{percentage}
</div>
</div>

Expand Down
4 changes: 2 additions & 2 deletions src/components/circularProgress/circularProgress.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,12 +11,12 @@ describe('CircularProgress', () => {
});

// Progress is now expressed on a 0-100 scale (matching the component's
// `percentage = `${progress}%`` binding), not the old 0-1 fraction.
// The diff rendering had left both the old (0.5 / 1) and new (50 / 100)
// render lines in place, which would be duplicate `getByText`
// declarations in the same scope — only the new calls are kept.
it('should render with 50% progress', () => {
    const { getByText } = render(CircularProgress, { progress: 50 });
    expect(getByText('50%')).toBeInTheDocument();
});

it('should render with 100% progress', () => {
    const { getByText } = render(CircularProgress, { progress: 100 });
    expect(getByText('100%')).toBeInTheDocument();
});
});
Loading

0 comments on commit 2962559

Please sign in to comment.