@@ -587,7 +587,7 @@ impl ComposeObjectInternal {
587587 size -= o;
588588 }
589589
590- let mut offset = source. offset . unwrap_or_default ( ) ;
590+ let offset = source. offset . unwrap_or_default ( ) ;
591591
592592 let mut headers = source. get_headers ( ) ;
593593 headers. add_multimap ( ssec_headers. clone ( ) ) ;
@@ -631,19 +631,15 @@ impl ComposeObjectInternal {
631631 size,
632632 } ) ;
633633 } else {
634- while size > 0 {
634+ let part_ranges = calculate_part_ranges ( offset, size, MAX_PART_SIZE ) ;
635+ for ( part_offset, length) in part_ranges {
635636 part_number += 1 ;
636-
637- let mut length = size;
638- if length > MAX_PART_SIZE {
639- length = MAX_PART_SIZE ;
640- }
641- let end_bytes = offset + length - 1 ;
637+ let end_bytes = part_offset + length - 1 ;
642638
643639 let mut headers_copy = headers. clone ( ) ;
644640 headers_copy. add (
645641 X_AMZ_COPY_SOURCE_RANGE ,
646- format ! ( "bytes={offset }-{end_bytes}" ) ,
642+ format ! ( "bytes={part_offset }-{end_bytes}" ) ,
647643 ) ;
648644
649645 let resp: UploadPartCopyResponse = match self
@@ -668,11 +664,8 @@ impl ComposeObjectInternal {
668664 parts. push ( PartInfo {
669665 number : part_number,
670666 etag,
671- size,
667+ size : length ,
672668 } ) ;
673-
674- offset += length;
675- size -= length;
676669 }
677670 }
678671 }
@@ -796,8 +789,8 @@ impl ComposeObject {
796789
797790// region: misc
798791
792+ /// Source object information for [`compose_object`](MinioClient::compose_object).
799793#[ derive( Clone , Debug , Default ) ]
800- /// Source object information for [compose_object](MinioClient::compose_object)
801794pub struct ComposeSource {
802795 pub extra_headers : Option < Multimap > ,
803796 pub extra_query_params : Option < Multimap > ,
@@ -818,7 +811,7 @@ pub struct ComposeSource {
818811}
819812
820813impl ComposeSource {
821- /// Returns a compose source with given bucket name and object name
814+ /// Returns a compose source with given bucket name and object name.
822815 ///
823816 /// # Examples
824817 ///
@@ -927,8 +920,8 @@ impl ComposeSource {
927920 }
928921}
929922
923+ /// Base argument for object conditional read APIs.
930924#[ derive( Clone , Debug , TypedBuilder ) ]
931- /// Base argument for object conditional read APIs
932925pub struct CopySource {
933926 #[ builder( default , setter( into) ) ]
934927 pub extra_headers : Option < Multimap > ,
@@ -1036,4 +1029,122 @@ fn into_headers_copy_object(
10361029
10371030 map
10381031}
1032+
/// Calculates part ranges (offset, length) for multipart copy operations.
///
/// Given a starting offset, total size, and maximum part size, returns a vector of
/// `(offset, length)` tuples for each part. This is extracted as a separate function
/// to enable unit testing without requiring actual S3 operations or multi-gigabyte files.
///
/// # Arguments
/// * `start_offset` - Starting byte offset
/// * `total_size` - Total bytes to copy
/// * `max_part_size` - Maximum size per part (typically `MAX_PART_SIZE` = 5 GiB)
///
/// # Returns
/// Vector of `(offset, length)` tuples for each part; empty when `total_size` is zero.
///
/// # Panics
/// Panics if `max_part_size` is zero, since the split could never terminate.
fn calculate_part_ranges(
    start_offset: u64,
    total_size: u64,
    max_part_size: u64,
) -> Vec<(u64, u64)> {
    // A zero part size would make `remaining` never shrink below; fail fast
    // with a clear message instead of hanging in an infinite loop.
    assert!(max_part_size > 0, "max_part_size must be non-zero");

    // Exactly ceil(total_size / max_part_size) parts will be produced;
    // preallocate so the loop never reallocates.
    let part_count = total_size.div_ceil(max_part_size) as usize;
    let mut ranges = Vec::with_capacity(part_count);

    let mut offset = start_offset;
    let mut remaining = total_size;
    while remaining > 0 {
        // Every part is full-sized except possibly the final one.
        let length = remaining.min(max_part_size);
        ranges.push((offset, length));
        offset += length;
        remaining -= length;
    }

    ranges
}
1064+
10391065// endregion: misc
1066+
#[cfg(test)]
mod tests {
    use super::*;

    /// Sizes at or below the limit yield exactly one part.
    #[test]
    fn test_calculate_part_ranges_single_part() {
        assert_eq!(calculate_part_ranges(0, 1000, 5000), [(0, 1000)]);
    }

    /// A size that divides evenly produces only full-sized parts.
    #[test]
    fn test_calculate_part_ranges_exact_multiple() {
        assert_eq!(calculate_part_ranges(0, 10000, 5000), [(0, 5000), (5000, 5000)]);
    }

    /// A trailing remainder becomes a short final part.
    #[test]
    fn test_calculate_part_ranges_with_remainder() {
        assert_eq!(
            calculate_part_ranges(0, 12000, 5000),
            [(0, 5000), (5000, 5000), (10000, 2000)]
        );
    }

    /// A non-zero starting offset shifts every part offset accordingly.
    #[test]
    fn test_calculate_part_ranges_with_start_offset() {
        assert_eq!(
            calculate_part_ranges(1000, 12000, 5000),
            [(1000, 5000), (6000, 5000), (11000, 2000)]
        );
    }

    /// Copying zero bytes produces no parts at all.
    #[test]
    fn test_calculate_part_ranges_zero_size() {
        assert!(calculate_part_ranges(0, 0, 5000).is_empty());
    }

    /// Simulates a 12 GiB copy with a 5 GiB part limit and verifies the
    /// parts cover the whole object contiguously.
    #[test]
    fn test_calculate_part_ranges_realistic() {
        const GIB: u64 = 1024 * 1024 * 1024;
        let total_size = 12 * GIB;
        let max_part_size = 5 * GIB;

        let ranges = calculate_part_ranges(0, total_size, max_part_size);

        assert_eq!(ranges.len(), 3);
        assert_eq!(ranges[0], (0, max_part_size)); // 0-5GB
        assert_eq!(ranges[1], (max_part_size, max_part_size)); // 5GB-10GB
        assert_eq!(ranges[2], (2 * max_part_size, 2 * GIB)); // 10GB-12GB

        // The part lengths must add back up to the full size...
        let covered: u64 = ranges.iter().map(|&(_, len)| len).sum();
        assert_eq!(covered, total_size);

        // ...and each part must begin exactly where the previous one ended.
        let mut next_offset = 0;
        for &(offset, length) in &ranges {
            assert_eq!(offset, next_offset);
            next_offset = offset + length;
        }
    }

    /// Regression test for the bug where the remaining size (`size`) was
    /// recorded instead of the per-part `length`.
    #[test]
    fn test_calculate_part_ranges_each_part_correct_length() {
        let ranges = calculate_part_ranges(0, 17000, 5000);

        // Should be [(0,5000), (5000,5000), (10000,5000), (15000,2000)]
        // NOT [(0,17000), (17000,12000), ...] which the buggy code would produce
        assert_eq!(
            ranges,
            [(0, 5000), (5000, 5000), (10000, 5000), (15000, 2000)]
        );

        // Every part except the final one is exactly max_part_size long.
        for (i, &(_, length)) in ranges[..ranges.len() - 1].iter().enumerate() {
            assert_eq!(length, 5000, "Part {} should be max_part_size", i);
        }
    }
}
0 commit comments