diff --git a/rust/arrow/src/array/builder.rs b/rust/arrow/src/array/builder.rs
index f1037c735fb05..c0300eeb510d5 100644
--- a/rust/arrow/src/array/builder.rs
+++ b/rust/arrow/src/array/builder.rs
@@ -189,7 +189,7 @@ pub trait BufferBuilderTrait<T: ArrowPrimitiveType> {
     ///
     /// assert!(builder.capacity() >= 20);
     /// ```
-    fn reserve(&mut self, n: usize) -> Result<()>;
+    fn reserve(&mut self, n: usize) -> ();
 
     /// Appends a value of type `T` into the builder,
     /// growing the internal buffer as needed.
@@ -292,32 +292,31 @@ impl<T: ArrowPrimitiveType> BufferBuilderTrait<T> for BufferBuilder<T> {
         } else {
             (self.len + i) * mem::size_of::<T::Native>()
         };
-        self.buffer.resize(new_buffer_len)?;
+        self.buffer.resize(new_buffer_len);
         self.len += i;
         Ok(())
     }
 
     #[inline]
-    fn reserve(&mut self, n: usize) -> Result<()> {
+    fn reserve(&mut self, n: usize) {
         let new_capacity = self.len + n;
         if T::DATA_TYPE == DataType::Boolean {
             if new_capacity > self.capacity() {
                 let new_byte_capacity = bit_util::ceil(new_capacity, 8);
                 let existing_capacity = self.buffer.capacity();
-                let new_capacity = self.buffer.reserve(new_byte_capacity)?;
+                let new_capacity = self.buffer.reserve(new_byte_capacity);
                 self.buffer
                     .set_null_bits(existing_capacity, new_capacity - existing_capacity);
             }
         } else {
             let byte_capacity = mem::size_of::<T::Native>() * new_capacity;
-            self.buffer.reserve(byte_capacity)?;
+            self.buffer.reserve(byte_capacity);
         }
-        Ok(())
     }
 
     #[inline]
     fn append(&mut self, v: T::Native) -> Result<()> {
-        self.reserve(1)?;
+        self.reserve(1);
         if T::DATA_TYPE == DataType::Boolean {
             if v != T::default_value() {
                 unsafe {
@@ -333,7 +332,7 @@ impl<T: ArrowPrimitiveType> BufferBuilderTrait<T> for BufferBuilder<T> {
 
     #[inline]
     fn append_n(&mut self, n: usize, v: T::Native) -> Result<()> {
-        self.reserve(n)?;
+        self.reserve(n);
         if T::DATA_TYPE == DataType::Boolean {
             if n != 0 && v != T::default_value() {
                 unsafe {
@@ -356,7 +355,7 @@ impl<T: ArrowPrimitiveType> BufferBuilderTrait<T> for BufferBuilder<T> {
     #[inline]
     fn append_slice(&mut self, slice: &[T::Native]) -> Result<()> {
         let array_slots = slice.len();
-        self.reserve(array_slots)?;
+        self.reserve(array_slots);
 
         if T::DATA_TYPE == DataType::Boolean {
             for v in slice {
@@ -384,7 +383,7 @@ impl<T: ArrowPrimitiveType> BufferBuilderTrait<T> for BufferBuilder<T> {
             debug_assert!(new_buffer_len >= self.buffer.len());
             let mut buf = std::mem::replace(&mut self.buffer, MutableBuffer::new(0));
             self.len = 0;
-            buf.resize(new_buffer_len).unwrap();
+            buf.resize(new_buffer_len);
             buf.freeze()
         } else {
             let buf = std::mem::replace(&mut self.buffer, MutableBuffer::new(0));
@@ -506,8 +505,8 @@ impl<T: ArrowPrimitiveType> ArrayBuilder for PrimitiveBuilder<T> {
             total_len += array.len();
         }
         // reserve memory
-        self.values_builder.reserve(total_len)?;
-        self.bitmap_builder.reserve(total_len)?;
+        self.values_builder.reserve(total_len);
+        self.bitmap_builder.reserve(total_len);
 
         let mul = T::get_bit_width() / 8;
         for array in data {
@@ -713,8 +712,8 @@ where
             total_len += array.len();
         }
         // reserve memory
-        self.offsets_builder.reserve(total_len)?;
-        self.bitmap_builder.reserve(total_len)?;
+        self.offsets_builder.reserve(total_len);
+        self.bitmap_builder.reserve(total_len);
         // values_builder is allocated by the relevant builder, and is not allocated here
 
         // determine the latest offset on the builder
@@ -925,8 +924,8 @@ where
             total_len += array.len();
         }
         // reserve memory
-        self.offsets_builder.reserve(total_len)?;
-        self.bitmap_builder.reserve(total_len)?;
+        self.offsets_builder.reserve(total_len);
+        self.bitmap_builder.reserve(total_len);
         // values_builder is allocated by the relevant builder, and is not allocated here
 
         // determine the latest offset on the builder
@@ -1134,7 +1133,7 @@ where
             total_len += array.len();
         }
         // reserve memory
-        self.bitmap_builder.reserve(total_len)?;
+        self.bitmap_builder.reserve(total_len);
 
         // determine the latest offset on the builder
         for array in data {
@@ -1941,7 +1940,7 @@ impl ArrayBuilder for StructBuilder {
             }
             total_len += array.len();
         }
-        self.bitmap_builder.reserve(total_len)?;
+        self.bitmap_builder.reserve(total_len);
 
         for array in data {
             let len = array.len();
@@ -2524,16 +2523,16 @@ mod tests {
     fn test_reserve() {
        let mut b = UInt8BufferBuilder::new(2);
         assert_eq!(64, b.capacity());
-        b.reserve(64).unwrap();
+        b.reserve(64);
         assert_eq!(64, b.capacity());
-        b.reserve(65).unwrap();
+        b.reserve(65);
         assert_eq!(128, b.capacity());
 
         let mut b = Int32BufferBuilder::new(2);
         assert_eq!(16, b.capacity());
-        b.reserve(16).unwrap();
+        b.reserve(16);
         assert_eq!(16, b.capacity());
-        b.reserve(17).unwrap();
+        b.reserve(17);
         assert_eq!(32, b.capacity());
     }
 
diff --git a/rust/arrow/src/buffer.rs b/rust/arrow/src/buffer.rs
index a1c87297936f4..68d07fc74d4a8 100644
--- a/rust/arrow/src/buffer.rs
+++ b/rust/arrow/src/buffer.rs
@@ -657,7 +657,7 @@ impl MutableBuffer {
     /// also ensure the new capacity will be a multiple of 64 bytes.
     ///
     /// Returns the new capacity for this buffer.
-    pub fn reserve(&mut self, capacity: usize) -> Result<usize> {
+    pub fn reserve(&mut self, capacity: usize) -> usize {
         if capacity > self.capacity {
             let new_capacity = bit_util::round_upto_multiple_of_64(capacity);
             let new_capacity = cmp::max(new_capacity, self.capacity * 2);
@@ -666,7 +666,7 @@ impl MutableBuffer {
             self.data = new_data as *mut u8;
             self.capacity = new_capacity;
         }
-        Ok(self.capacity)
+        self.capacity
     }
 
     /// Resizes the buffer so that the `len` will equal to the `new_len`.
@@ -676,9 +676,9 @@ impl MutableBuffer {
     /// `new_len` will be zeroed out.
     ///
     /// If `new_len` is less than `len`, the buffer will be truncated.
-    pub fn resize(&mut self, new_len: usize) -> Result<()> {
+    pub fn resize(&mut self, new_len: usize) -> () {
         if new_len > self.len {
-            self.reserve(new_len)?;
+            self.reserve(new_len);
         } else {
             let new_capacity = bit_util::round_upto_multiple_of_64(new_len);
             if new_capacity < self.capacity {
@@ -689,7 +689,6 @@ impl MutableBuffer {
             }
         }
         self.len = new_len;
-        Ok(())
     }
 
     /// Returns whether this buffer is empty or not.
@@ -1028,11 +1027,11 @@ mod tests {
         assert_eq!(64, buf.capacity());
 
         // Reserving a smaller capacity should have no effect.
-        let mut new_cap = buf.reserve(10).expect("reserve should be OK");
+        let mut new_cap = buf.reserve(10);
         assert_eq!(64, new_cap);
         assert_eq!(64, buf.capacity());
 
-        new_cap = buf.reserve(100).expect("reserve should be OK");
+        new_cap = buf.reserve(100);
         assert_eq!(128, new_cap);
         assert_eq!(128, buf.capacity());
     }
@@ -1043,23 +1042,23 @@ mod tests {
         assert_eq!(64, buf.capacity());
         assert_eq!(0, buf.len());
 
-        buf.resize(20).expect("resize should be OK");
+        buf.resize(20);
         assert_eq!(64, buf.capacity());
         assert_eq!(20, buf.len());
 
-        buf.resize(10).expect("resize should be OK");
+        buf.resize(10);
         assert_eq!(64, buf.capacity());
         assert_eq!(10, buf.len());
 
-        buf.resize(100).expect("resize should be OK");
+        buf.resize(100);
         assert_eq!(128, buf.capacity());
         assert_eq!(100, buf.len());
 
-        buf.resize(30).expect("resize should be OK");
+        buf.resize(30);
         assert_eq!(64, buf.capacity());
         assert_eq!(30, buf.len());
 
-        buf.resize(0).expect("resize should be OK");
+        buf.resize(0);
         assert_eq!(0, buf.capacity());
         assert_eq!(0, buf.len());
     }
@@ -1091,7 +1090,7 @@ mod tests {
         buf.write_all(&[0xbb])?;
         assert_eq!(buf, buf2);
 
-        buf2.reserve(65)?;
+        buf2.reserve(65);
         assert!(buf != buf2);
 
         Ok(())
diff --git a/rust/arrow/src/compute/kernels/filter.rs b/rust/arrow/src/compute/kernels/filter.rs
index 0d19443f6895f..3870f9ab665ab 100644
--- a/rust/arrow/src/compute/kernels/filter.rs
+++ b/rust/arrow/src/compute/kernels/filter.rs
@@ -112,7 +112,7 @@ impl<'a> CopyNullBit for NullBitSetter<'a> {
     }
 
     fn null_buffer(&mut self) -> Buffer {
-        self.target_buffer.resize(self.target_index).unwrap();
+        self.target_buffer.resize(self.target_index);
         // use mem::replace to detach self.target_buffer from self so that it can be returned
         let target_buffer = mem::replace(&mut self.target_buffer, MutableBuffer::new(0));
         target_buffer.freeze()
     }
@@ -149,7 +149,7 @@ fn filter_array_impl(
     let filter_u64 = &filter_context.filter_u64;
     let data_bytes = data_array.data_ref().buffers()[0].data();
     let mut target_buffer = MutableBuffer::new(filtered_count * value_size);
-    target_buffer.resize(filtered_count * value_size)?;
+    target_buffer.resize(filtered_count * value_size);
     let target_bytes = target_buffer.data_mut();
     let mut target_byte_index: usize = 0;
     let mut null_bit_setter = get_null_bit_setter(data_array);
diff --git a/rust/arrow/src/compute/kernels/sort.rs b/rust/arrow/src/compute/kernels/sort.rs
index da570ef3241aa..eb9e56e54fac6 100644
--- a/rust/arrow/src/compute/kernels/sort.rs
+++ b/rust/arrow/src/compute/kernels/sort.rs
@@ -262,7 +262,7 @@ where
     // collect results directly into a buffer instead of a vec to avoid another aligned allocation
     let mut result = MutableBuffer::new(values.len() * std::mem::size_of::<u32>());
     // sets len to capacity so we can access the whole buffer as a typed slice
-    result.resize(values.len() * std::mem::size_of::<u32>())?;
+    result.resize(values.len() * std::mem::size_of::<u32>());
     let result_slice: &mut [u32] = result.typed_data_mut();
 
     debug_assert_eq!(result_slice.len(), nulls_len + nans_len + valids_len);
diff --git a/rust/parquet/src/arrow/array_reader.rs b/rust/parquet/src/arrow/array_reader.rs
index a455e48abb696..00e7c74147fca 100644
--- a/rust/parquet/src/arrow/array_reader.rs
+++ b/rust/parquet/src/arrow/array_reader.rs
@@ -1039,7 +1039,7 @@ impl ArrayReader for StructArrayReader {
         // calculate struct def level data
         let buffer_size = children_array_len * size_of::<i16>();
         let mut def_level_data_buffer = MutableBuffer::new(buffer_size);
-        def_level_data_buffer.resize(buffer_size)?;
+        def_level_data_buffer.resize(buffer_size);
 
         let def_level_data = def_level_data_buffer.typed_data_mut();
diff --git a/rust/parquet/src/arrow/record_reader.rs b/rust/parquet/src/arrow/record_reader.rs
index 519bd15fb0c2f..8d648c53a9312 100644
--- a/rust/parquet/src/arrow/record_reader.rs
+++ b/rust/parquet/src/arrow/record_reader.rs
@@ -202,7 +202,7 @@ impl<T: DataType> RecordReader<T> {
             let mut new_buffer = MutableBuffer::new(
                 size_of::<i16>() * max(MIN_BATCH_SIZE, num_left_values),
             );
-            new_buffer.resize(num_left_values * size_of::<i16>())?;
+            new_buffer.resize(num_left_values * size_of::<i16>());
 
             let mut new_def_levels = FatPtr::<i16>::with_offset(&mut new_buffer, 0);
             let new_def_levels = new_def_levels.to_slice_mut();
@@ -213,7 +213,7 @@ impl<T: DataType> RecordReader<T> {
             new_def_levels[0..num_left_values]
                 .copy_from_slice(&left_def_levels[0..num_left_values]);
 
-            def_levels_buf.resize(self.num_values * size_of::<i16>())?;
+            def_levels_buf.resize(self.num_values * size_of::<i16>());
             Some(new_buffer)
         } else {
             None
@@ -231,7 +231,7 @@ impl<T: DataType> RecordReader<T> {
             let mut new_buffer = MutableBuffer::new(
                 size_of::<i16>() * max(MIN_BATCH_SIZE, num_left_values),
             );
-            new_buffer.resize(num_left_values * size_of::<i16>())?;
+            new_buffer.resize(num_left_values * size_of::<i16>());
 
             let mut new_rep_levels = FatPtr::<i16>::with_offset(&mut new_buffer, 0);
             let new_rep_levels = new_rep_levels.to_slice_mut();
@@ -242,7 +242,7 @@ impl<T: DataType> RecordReader<T> {
             new_rep_levels[0..num_left_values]
                 .copy_from_slice(&left_rep_levels[0..num_left_values]);
 
-            rep_levels_buf.resize(self.num_values * size_of::<i16>())?;
+            rep_levels_buf.resize(self.num_values * size_of::<i16>());
 
             Some(new_buffer)
         } else {
@@ -258,7 +258,7 @@ impl<T: DataType> RecordReader<T> {
         // TODO: Optimize to reduce the copy
         let num_left_values = self.values_written - self.num_values;
         let mut new_buffer = MutableBuffer::new(max(MIN_BATCH_SIZE, num_left_values));
-        new_buffer.resize(num_left_values * T::get_type_size())?;
+        new_buffer.resize(num_left_values * T::get_type_size());
 
         let mut new_records =
             FatPtr::<T::T>::with_offset_and_size(&mut new_buffer, 0, T::get_type_size());
@@ -274,7 +274,7 @@ impl<T: DataType> RecordReader<T> {
             swap(&mut new_records[idx], &mut left_records[idx]);
         }
 
-        self.records.resize(self.num_values * T::get_type_size())?;
+        self.records.resize(self.num_values * T::get_type_size());
 
         Ok(replace(&mut self.records, new_buffer).freeze())
     }
@@ -331,14 +331,12 @@ impl<T: DataType> RecordReader<T> {
     fn read_one_batch(&mut self, batch_size: usize) -> Result<usize> {
         // Reserve spaces
         self.records
-            .reserve(self.records.len() + batch_size * T::get_type_size())?;
+            .reserve(self.records.len() + batch_size * T::get_type_size());
         if let Some(ref mut buf) = self.rep_levels {
-            buf.reserve(buf.len() + batch_size * size_of::<i16>())
-                .map(|_| ())?;
+            buf.reserve(buf.len() + batch_size * size_of::<i16>());
         }
         if let Some(ref mut buf) = self.def_levels {
-            buf.reserve(buf.len() + batch_size * size_of::<i16>())
-                .map(|_| ())?;
+            buf.reserve(buf.len() + batch_size * size_of::<i16>());
         }
 
         // Convert mutable buffer spaces to mutable slices
@@ -468,16 +466,16 @@ impl<T: DataType> RecordReader<T> {
     fn set_values_written(&mut self, new_values_written: usize) -> Result<()> {
         self.values_written = new_values_written;
         self.records
-            .resize(self.values_written * T::get_type_size())?;
+            .resize(self.values_written * T::get_type_size());
 
         let new_levels_len = self.values_written * size_of::<i16>();
         if let Some(ref mut buf) = self.rep_levels {
-            buf.resize(new_levels_len)?
+            buf.resize(new_levels_len)
         };
         if let Some(ref mut buf) = self.def_levels {
-            buf.resize(new_levels_len)?
+            buf.resize(new_levels_len)
         };
         Ok(())