ARROW-10552: [Rust] Removed un-used Result
This PR is a minor simplification of the code base around `Buffer::reserve`. The idea is that this function is infallible (it always just returns `Ok(...)`) and therefore does not need to return a `Result`. Since this function sits at a very low level, the change removes the `Result` from a number of call sites as well.

Closes apache#8636 from jorgecarleitao/clean_result

Authored-by: Jorge C. Leitao <jorgecarleitao@gmail.com>
Signed-off-by: alamb <andrew@nerdnetworks.org>
jorgecarleitao authored and yordan-pavlov committed Nov 14, 2020
1 parent 3e0ca7e commit 1605cfb
Showing 6 changed files with 49 additions and 53 deletions.
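
To make the intent concrete, here is a small self-contained sketch. The `ToyBuffer` type and the `round_upto_multiple_of_64` helper below are simplified stand-ins, not the actual Arrow `MutableBuffer` code; they only illustrate why an infallible `reserve`/`resize` lets callers drop `?` and `.unwrap()`:

```rust
use std::cmp;

/// Round `n` up to the next multiple of 64 (stand-in for Arrow's helper).
fn round_upto_multiple_of_64(n: usize) -> usize {
    (n + 63) & !63
}

/// Simplified, illustrative buffer. NOT the real arrow `MutableBuffer`.
struct ToyBuffer {
    data: Vec<u8>,
    len: usize,
}

impl ToyBuffer {
    fn new(capacity: usize) -> Self {
        ToyBuffer {
            data: Vec::with_capacity(round_upto_multiple_of_64(capacity)),
            len: 0,
        }
    }

    fn capacity(&self) -> usize {
        self.data.capacity()
    }

    /// Infallible reserve: grows the allocation if needed and returns the new
    /// capacity. Before this change, the corresponding Arrow method returned
    /// `Result<usize>` even though it could only ever be `Ok`.
    fn reserve(&mut self, capacity: usize) -> usize {
        if capacity > self.data.capacity() {
            let new_capacity = round_upto_multiple_of_64(capacity);
            let new_capacity = cmp::max(new_capacity, self.data.capacity() * 2);
            self.data.reserve(new_capacity - self.data.len());
        }
        self.data.capacity()
    }

    /// Infallible resize: zero-fills when growing, truncates when shrinking.
    fn resize(&mut self, new_len: usize) {
        if new_len > self.len {
            self.reserve(new_len);
        }
        self.data.resize(new_len, 0);
        self.len = new_len;
    }
}

fn main() {
    let mut buf = ToyBuffer::new(2);
    // No `?` or `.unwrap()` at the call sites: these operations cannot fail here.
    let new_cap = buf.reserve(100);
    assert!(new_cap >= 100);
    buf.resize(20);
    assert_eq!(buf.len, 20);
    println!("capacity = {}, len = {}", buf.capacity(), buf.len);
}
```

With signatures like these, call sites such as `self.buffer.resize(new_buffer_len)` need no error plumbing, which is exactly what the diffs below remove.
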
43 changes: 21 additions & 22 deletions rust/arrow/src/array/builder.rs
@@ -189,7 +189,7 @@ pub trait BufferBuilderTrait<T: ArrowPrimitiveType> {
///
/// assert!(builder.capacity() >= 20);
/// ```
fn reserve(&mut self, n: usize) -> Result<()>;
fn reserve(&mut self, n: usize) -> ();

/// Appends a value of type `T` into the builder,
/// growing the internal buffer as needed.
@@ -292,32 +292,31 @@ impl<T: ArrowPrimitiveType> BufferBuilderTrait<T> for BufferBuilder<T> {
} else {
(self.len + i) * mem::size_of::<T::Native>()
};
self.buffer.resize(new_buffer_len)?;
self.buffer.resize(new_buffer_len);
self.len += i;
Ok(())
}

#[inline]
fn reserve(&mut self, n: usize) -> Result<()> {
fn reserve(&mut self, n: usize) {
let new_capacity = self.len + n;
if T::DATA_TYPE == DataType::Boolean {
if new_capacity > self.capacity() {
let new_byte_capacity = bit_util::ceil(new_capacity, 8);
let existing_capacity = self.buffer.capacity();
let new_capacity = self.buffer.reserve(new_byte_capacity)?;
let new_capacity = self.buffer.reserve(new_byte_capacity);
self.buffer
.set_null_bits(existing_capacity, new_capacity - existing_capacity);
}
} else {
let byte_capacity = mem::size_of::<T::Native>() * new_capacity;
self.buffer.reserve(byte_capacity)?;
self.buffer.reserve(byte_capacity);
}
Ok(())
}

#[inline]
fn append(&mut self, v: T::Native) -> Result<()> {
self.reserve(1)?;
self.reserve(1);
if T::DATA_TYPE == DataType::Boolean {
if v != T::default_value() {
unsafe {
@@ -333,7 +332,7 @@ impl<T: ArrowPrimitiveType> BufferBuilderTrait<T> for BufferBuilder<T> {

#[inline]
fn append_n(&mut self, n: usize, v: T::Native) -> Result<()> {
self.reserve(n)?;
self.reserve(n);
if T::DATA_TYPE == DataType::Boolean {
if n != 0 && v != T::default_value() {
unsafe {
@@ -356,7 +355,7 @@ impl<T: ArrowPrimitiveType> BufferBuilderTrait<T> for BufferBuilder<T> {
#[inline]
fn append_slice(&mut self, slice: &[T::Native]) -> Result<()> {
let array_slots = slice.len();
self.reserve(array_slots)?;
self.reserve(array_slots);

if T::DATA_TYPE == DataType::Boolean {
for v in slice {
@@ -384,7 +383,7 @@ impl<T: ArrowPrimitiveType> BufferBuilderTrait<T> for BufferBuilder<T> {
debug_assert!(new_buffer_len >= self.buffer.len());
let mut buf = std::mem::replace(&mut self.buffer, MutableBuffer::new(0));
self.len = 0;
buf.resize(new_buffer_len).unwrap();
buf.resize(new_buffer_len);
buf.freeze()
} else {
let buf = std::mem::replace(&mut self.buffer, MutableBuffer::new(0));
@@ -506,8 +505,8 @@ impl<T: ArrowPrimitiveType> ArrayBuilder for PrimitiveBuilder<T> {
total_len += array.len();
}
// reserve memory
self.values_builder.reserve(total_len)?;
self.bitmap_builder.reserve(total_len)?;
self.values_builder.reserve(total_len);
self.bitmap_builder.reserve(total_len);

let mul = T::get_bit_width() / 8;
for array in data {
@@ -713,8 +712,8 @@ where
total_len += array.len();
}
// reserve memory
self.offsets_builder.reserve(total_len)?;
self.bitmap_builder.reserve(total_len)?;
self.offsets_builder.reserve(total_len);
self.bitmap_builder.reserve(total_len);
// values_builder is allocated by the relevant builder, and is not allocated here

// determine the latest offset on the builder
@@ -925,8 +924,8 @@ where
total_len += array.len();
}
// reserve memory
self.offsets_builder.reserve(total_len)?;
self.bitmap_builder.reserve(total_len)?;
self.offsets_builder.reserve(total_len);
self.bitmap_builder.reserve(total_len);
// values_builder is allocated by the relevant builder, and is not allocated here

// determine the latest offset on the builder
@@ -1134,7 +1133,7 @@ where
total_len += array.len();
}
// reserve memory
self.bitmap_builder.reserve(total_len)?;
self.bitmap_builder.reserve(total_len);

// determine the latest offset on the builder
for array in data {
@@ -1941,7 +1940,7 @@ impl ArrayBuilder for StructBuilder {
}
total_len += array.len();
}
self.bitmap_builder.reserve(total_len)?;
self.bitmap_builder.reserve(total_len);

for array in data {
let len = array.len();
@@ -2524,16 +2523,16 @@ mod tests {
fn test_reserve() {
let mut b = UInt8BufferBuilder::new(2);
assert_eq!(64, b.capacity());
b.reserve(64).unwrap();
b.reserve(64);
assert_eq!(64, b.capacity());
b.reserve(65).unwrap();
b.reserve(65);
assert_eq!(128, b.capacity());

let mut b = Int32BufferBuilder::new(2);
assert_eq!(16, b.capacity());
b.reserve(16).unwrap();
b.reserve(16);
assert_eq!(16, b.capacity());
b.reserve(17).unwrap();
b.reserve(17);
assert_eq!(32, b.capacity());
}

25 changes: 12 additions & 13 deletions rust/arrow/src/buffer.rs
@@ -657,7 +657,7 @@ impl MutableBuffer {
/// also ensure the new capacity will be a multiple of 64 bytes.
///
/// Returns the new capacity for this buffer.
pub fn reserve(&mut self, capacity: usize) -> Result<usize> {
pub fn reserve(&mut self, capacity: usize) -> usize {
if capacity > self.capacity {
let new_capacity = bit_util::round_upto_multiple_of_64(capacity);
let new_capacity = cmp::max(new_capacity, self.capacity * 2);
@@ -666,7 +666,7 @@
self.data = new_data as *mut u8;
self.capacity = new_capacity;
}
Ok(self.capacity)
self.capacity
}

/// Resizes the buffer so that the `len` will equal to the `new_len`.
@@ -676,9 +676,9 @@
/// `new_len` will be zeroed out.
///
/// If `new_len` is less than `len`, the buffer will be truncated.
pub fn resize(&mut self, new_len: usize) -> Result<()> {
pub fn resize(&mut self, new_len: usize) -> () {
if new_len > self.len {
self.reserve(new_len)?;
self.reserve(new_len);
} else {
let new_capacity = bit_util::round_upto_multiple_of_64(new_len);
if new_capacity < self.capacity {
@@ -689,7 +689,6 @@
}
}
self.len = new_len;
Ok(())
}

/// Returns whether this buffer is empty or not.
@@ -1028,11 +1027,11 @@ mod tests {
assert_eq!(64, buf.capacity());

// Reserving a smaller capacity should have no effect.
let mut new_cap = buf.reserve(10).expect("reserve should be OK");
let mut new_cap = buf.reserve(10);
assert_eq!(64, new_cap);
assert_eq!(64, buf.capacity());

new_cap = buf.reserve(100).expect("reserve should be OK");
new_cap = buf.reserve(100);
assert_eq!(128, new_cap);
assert_eq!(128, buf.capacity());
}
@@ -1043,23 +1042,23 @@
assert_eq!(64, buf.capacity());
assert_eq!(0, buf.len());

buf.resize(20).expect("resize should be OK");
buf.resize(20);
assert_eq!(64, buf.capacity());
assert_eq!(20, buf.len());

buf.resize(10).expect("resize should be OK");
buf.resize(10);
assert_eq!(64, buf.capacity());
assert_eq!(10, buf.len());

buf.resize(100).expect("resize should be OK");
buf.resize(100);
assert_eq!(128, buf.capacity());
assert_eq!(100, buf.len());

buf.resize(30).expect("resize should be OK");
buf.resize(30);
assert_eq!(64, buf.capacity());
assert_eq!(30, buf.len());

buf.resize(0).expect("resize should be OK");
buf.resize(0);
assert_eq!(0, buf.capacity());
assert_eq!(0, buf.len());
}
@@ -1091,7 +1090,7 @@ mod tests {
buf.write_all(&[0xbb])?;
assert_eq!(buf, buf2);

buf2.reserve(65)?;
buf2.reserve(65);
assert!(buf != buf2);

Ok(())
4 changes: 2 additions & 2 deletions rust/arrow/src/compute/kernels/filter.rs
@@ -112,7 +112,7 @@ impl<'a> CopyNullBit for NullBitSetter<'a> {
}

fn null_buffer(&mut self) -> Buffer {
self.target_buffer.resize(self.target_index).unwrap();
self.target_buffer.resize(self.target_index);
// use mem::replace to detach self.target_buffer from self so that it can be returned
let target_buffer = mem::replace(&mut self.target_buffer, MutableBuffer::new(0));
target_buffer.freeze()
@@ -149,7 +149,7 @@ fn filter_array_impl(
let filter_u64 = &filter_context.filter_u64;
let data_bytes = data_array.data_ref().buffers()[0].data();
let mut target_buffer = MutableBuffer::new(filtered_count * value_size);
target_buffer.resize(filtered_count * value_size)?;
target_buffer.resize(filtered_count * value_size);
let target_bytes = target_buffer.data_mut();
let mut target_byte_index: usize = 0;
let mut null_bit_setter = get_null_bit_setter(data_array);
2 changes: 1 addition & 1 deletion rust/arrow/src/compute/kernels/sort.rs
@@ -262,7 +262,7 @@
// collect results directly into a buffer instead of a vec to avoid another aligned allocation
let mut result = MutableBuffer::new(values.len() * std::mem::size_of::<u32>());
// sets len to capacity so we can access the whole buffer as a typed slice
result.resize(values.len() * std::mem::size_of::<u32>())?;
result.resize(values.len() * std::mem::size_of::<u32>());
let result_slice: &mut [u32] = result.typed_data_mut();

debug_assert_eq!(result_slice.len(), nulls_len + nans_len + valids_len);
2 changes: 1 addition & 1 deletion rust/parquet/src/arrow/array_reader.rs
@@ -1039,7 +1039,7 @@ impl ArrayReader for StructArrayReader {
// calculate struct def level data
let buffer_size = children_array_len * size_of::<i16>();
let mut def_level_data_buffer = MutableBuffer::new(buffer_size);
def_level_data_buffer.resize(buffer_size)?;
def_level_data_buffer.resize(buffer_size);

let def_level_data = def_level_data_buffer.typed_data_mut();

26 changes: 12 additions & 14 deletions rust/parquet/src/arrow/record_reader.rs
@@ -202,7 +202,7 @@ impl<T: DataType> RecordReader<T> {
let mut new_buffer = MutableBuffer::new(
size_of::<i16>() * max(MIN_BATCH_SIZE, num_left_values),
);
new_buffer.resize(num_left_values * size_of::<i16>())?;
new_buffer.resize(num_left_values * size_of::<i16>());

let mut new_def_levels = FatPtr::<i16>::with_offset(&mut new_buffer, 0);
let new_def_levels = new_def_levels.to_slice_mut();
@@ -213,7 +213,7 @@ impl<T: DataType> RecordReader<T> {
new_def_levels[0..num_left_values]
.copy_from_slice(&left_def_levels[0..num_left_values]);

def_levels_buf.resize(self.num_values * size_of::<i16>())?;
def_levels_buf.resize(self.num_values * size_of::<i16>());
Some(new_buffer)
} else {
None
@@ -231,7 +231,7 @@ impl<T: DataType> RecordReader<T> {
let mut new_buffer = MutableBuffer::new(
size_of::<i16>() * max(MIN_BATCH_SIZE, num_left_values),
);
new_buffer.resize(num_left_values * size_of::<i16>())?;
new_buffer.resize(num_left_values * size_of::<i16>());

let mut new_rep_levels = FatPtr::<i16>::with_offset(&mut new_buffer, 0);
let new_rep_levels = new_rep_levels.to_slice_mut();
@@ -242,7 +242,7 @@ impl<T: DataType> RecordReader<T> {
new_rep_levels[0..num_left_values]
.copy_from_slice(&left_rep_levels[0..num_left_values]);

rep_levels_buf.resize(self.num_values * size_of::<i16>())?;
rep_levels_buf.resize(self.num_values * size_of::<i16>());

Some(new_buffer)
} else {
@@ -258,7 +258,7 @@ impl<T: DataType> RecordReader<T> {
// TODO: Optimize to reduce the copy
let num_left_values = self.values_written - self.num_values;
let mut new_buffer = MutableBuffer::new(max(MIN_BATCH_SIZE, num_left_values));
new_buffer.resize(num_left_values * T::get_type_size())?;
new_buffer.resize(num_left_values * T::get_type_size());

let mut new_records =
FatPtr::<T::T>::with_offset_and_size(&mut new_buffer, 0, T::get_type_size());
@@ -274,7 +274,7 @@ impl<T: DataType> RecordReader<T> {
swap(&mut new_records[idx], &mut left_records[idx]);
}

self.records.resize(self.num_values * T::get_type_size())?;
self.records.resize(self.num_values * T::get_type_size());

Ok(replace(&mut self.records, new_buffer).freeze())
}
@@ -331,14 +331,12 @@ impl<T: DataType> RecordReader<T> {
fn read_one_batch(&mut self, batch_size: usize) -> Result<usize> {
// Reserve spaces
self.records
.reserve(self.records.len() + batch_size * T::get_type_size())?;
.reserve(self.records.len() + batch_size * T::get_type_size());
if let Some(ref mut buf) = self.rep_levels {
buf.reserve(buf.len() + batch_size * size_of::<i16>())
.map(|_| ())?;
buf.reserve(buf.len() + batch_size * size_of::<i16>());
}
if let Some(ref mut buf) = self.def_levels {
buf.reserve(buf.len() + batch_size * size_of::<i16>())
.map(|_| ())?;
buf.reserve(buf.len() + batch_size * size_of::<i16>());
}

// Convert mutable buffer spaces to mutable slices
@@ -468,16 +466,16 @@ impl<T: DataType> RecordReader<T> {
fn set_values_written(&mut self, new_values_written: usize) -> Result<()> {
self.values_written = new_values_written;
self.records
.resize(self.values_written * T::get_type_size())?;
.resize(self.values_written * T::get_type_size());

let new_levels_len = self.values_written * size_of::<i16>();

if let Some(ref mut buf) = self.rep_levels {
buf.resize(new_levels_len)?
buf.resize(new_levels_len)
};

if let Some(ref mut buf) = self.def_levels {
buf.resize(new_levels_len)?
buf.resize(new_levels_len)
};

Ok(())
