Remove from_slice and use upstream
alamb committed Jun 7, 2023
1 parent 1d3860d commit 2a41306
Showing 43 changed files with 211 additions and 363 deletions.
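Every file touched by this commit follows the same mechanical substitution: call sites of DataFusion's own FromSlice helper become the From<Vec<_>> impls that arrow-rs already provides upstream. A minimal before/after sketch of the pattern (a hedged illustration; the function name, values, and the direct `arrow` crate path are assumptions, not lines taken from any one file):

use std::sync::Arc;

use arrow::array::{ArrayRef, Int32Array, StringArray};

// Build two columns the "new" way. The commented-out lines show the
// equivalent calls through the removed from_slice helper.
fn example_columns() -> Vec<ArrayRef> {
    // Before this commit (helper removed here):
    //   Arc::new(Int32Array::from_slice([1, 10, 100]))
    //   Arc::new(StringArray::from_slice(["a", "b", "c"]))
    // After: the From<Vec<_>> impls provided upstream by arrow-rs.
    vec![
        Arc::new(Int32Array::from(vec![1, 10, 100])) as ArrayRef,
        Arc::new(StringArray::from(vec!["a", "b", "c"])) as ArrayRef,
    ]
}

Inside DataFusion itself the same array types are reachable through the datafusion::arrow re-export, as the diffs below show.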
5 changes: 2 additions & 3 deletions datafusion-examples/examples/dataframe_in_memory.rs
@@ -21,7 +21,6 @@ use datafusion::arrow::array::{Int32Array, StringArray};
use datafusion::arrow::datatypes::{DataType, Field, Schema};
use datafusion::arrow::record_batch::RecordBatch;
use datafusion::error::Result;
-use datafusion::from_slice::FromSlice;
use datafusion::prelude::*;

/// This example demonstrates how to use the DataFrame API against in-memory data.
@@ -37,8 +36,8 @@ async fn main() -> Result<()> {
let batch = RecordBatch::try_new(
schema,
vec![
-Arc::new(StringArray::from_slice(["a", "b", "c", "d"])),
-Arc::new(Int32Array::from_slice([1, 10, 10, 100])),
+Arc::new(StringArray::from(vec!["a", "b", "c", "d"])),
+Arc::new(Int32Array::from(vec![1, 10, 10, 100])),
],
)?;

5 changes: 2 additions & 3 deletions datafusion-examples/examples/simple_udaf.rs
@@ -20,7 +20,6 @@
use datafusion::arrow::{
array::ArrayRef, array::Float32Array, datatypes::DataType, record_batch::RecordBatch,
};
-use datafusion::from_slice::FromSlice;
use datafusion::{error::Result, physical_plan::Accumulator};
use datafusion::{logical_expr::Volatility, prelude::*, scalar::ScalarValue};
use datafusion_common::cast::as_float64_array;
@@ -37,11 +36,11 @@ fn create_context() -> Result<SessionContext> {
// define data in two partitions
let batch1 = RecordBatch::try_new(
schema.clone(),
-vec![Arc::new(Float32Array::from_slice([2.0, 4.0, 8.0]))],
+vec![Arc::new(Float32Array::from(vec![2.0, 4.0, 8.0]))],
)?;
let batch2 = RecordBatch::try_new(
schema.clone(),
-vec![Arc::new(Float32Array::from_slice([64.0]))],
+vec![Arc::new(Float32Array::from(vec![64.0]))],
)?;

// declare a new context. In spark API, this corresponds to a new spark SQLsession
5 changes: 2 additions & 3 deletions datafusion-examples/examples/simple_udf.rs
@@ -24,7 +24,6 @@ use datafusion::{
logical_expr::Volatility,
};

-use datafusion::from_slice::FromSlice;
use datafusion::prelude::*;
use datafusion::{error::Result, physical_plan::functions::make_scalar_function};
use datafusion_common::cast::as_float64_array;
@@ -43,8 +42,8 @@ fn create_context() -> Result<SessionContext> {
let batch = RecordBatch::try_new(
schema,
vec![
-Arc::new(Float32Array::from_slice([2.1, 3.1, 4.1, 5.1])),
-Arc::new(Float64Array::from_slice([1.0, 2.0, 3.0, 4.0])),
+Arc::new(Float32Array::from(vec![2.1, 3.1, 4.1, 5.1])),
+Arc::new(Float64Array::from(vec![1.0, 2.0, 3.0, 4.0])),
],
)?;

116 changes: 0 additions & 116 deletions datafusion/common/src/from_slice.rs

This file was deleted.

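The module removed here defined the FromSlice convenience trait used at the call sites above. As a rough sketch only — the deleted source is not shown in this diff and its bounds and impls may have differed — the trait presumably looked something like:

// Hypothetical reconstruction, NOT the deleted source: a helper trait for
// building an array directly from a slice of values, e.g.
// Int32Array::from_slice([1, 2, 3]).
pub trait FromSlice<S, E>
where
    S: AsRef<[E]>,
{
    fn from_slice(slice: S) -> Self;
}

With arrow-rs now providing equivalent From<Vec<_>> constructors upstream, the trait and its per-array impls are no longer needed.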
1 change: 0 additions & 1 deletion datafusion/common/src/lib.rs
@@ -21,7 +21,6 @@ pub mod config;
pub mod delta;
mod dfschema;
mod error;
-pub mod from_slice;
mod join_type;
pub mod parsers;
#[cfg(feature = "pyarrow")]
25 changes: 11 additions & 14 deletions datafusion/common/src/scalar.rs
@@ -3851,7 +3851,6 @@ mod tests {
use rand::Rng;

use crate::cast::{as_string_array, as_uint32_array, as_uint64_array};
-use crate::from_slice::FromSlice;

use super::*;

@@ -4827,26 +4826,26 @@ mod tests {
let expected = Arc::new(StructArray::from(vec![
(
field_a.clone(),
-Arc::new(Int32Array::from_slice([23, 23])) as ArrayRef,
+Arc::new(Int32Array::from(vec![23, 23])) as ArrayRef,
),
(
field_b.clone(),
-Arc::new(BooleanArray::from_slice([false, false])) as ArrayRef,
+Arc::new(BooleanArray::from(vec![false, false])) as ArrayRef,
),
(
field_c.clone(),
-Arc::new(StringArray::from_slice(["Hello", "Hello"])) as ArrayRef,
+Arc::new(StringArray::from(vec!["Hello", "Hello"])) as ArrayRef,
),
(
field_d.clone(),
Arc::new(StructArray::from(vec![
(
field_e.clone(),
-Arc::new(Int16Array::from_slice([2, 2])) as ArrayRef,
+Arc::new(Int16Array::from(vec![2, 2])) as ArrayRef,
),
(
field_f.clone(),
-Arc::new(Int64Array::from_slice([3, 3])) as ArrayRef,
+Arc::new(Int64Array::from(vec![3, 3])) as ArrayRef,
),
])) as ArrayRef,
),
@@ -4922,27 +4921,26 @@ mod tests {
let expected = Arc::new(StructArray::from(vec![
(
field_a,
-Arc::new(Int32Array::from_slice([23, 7, -1000])) as ArrayRef,
+Arc::new(Int32Array::from(vec![23, 7, -1000])) as ArrayRef,
),
(
field_b,
-Arc::new(BooleanArray::from_slice([false, true, true])) as ArrayRef,
+Arc::new(BooleanArray::from(vec![false, true, true])) as ArrayRef,
),
(
field_c,
-Arc::new(StringArray::from_slice(["Hello", "World", "!!!!!"]))
-    as ArrayRef,
+Arc::new(StringArray::from(vec!["Hello", "World", "!!!!!"])) as ArrayRef,
),
(
field_d,
Arc::new(StructArray::from(vec![
(
field_e,
-Arc::new(Int16Array::from_slice([2, 4, 6])) as ArrayRef,
+Arc::new(Int16Array::from(vec![2, 4, 6])) as ArrayRef,
),
(
field_f,
-Arc::new(Int64Array::from_slice([3, 5, 7])) as ArrayRef,
+Arc::new(Int64Array::from(vec![3, 5, 7])) as ArrayRef,
),
])) as ArrayRef,
),
@@ -5003,8 +5001,7 @@ mod tests {
let expected = StructArray::from(vec![
(
field_a.clone(),
-Arc::new(StringArray::from_slice(["First", "Second", "Third"]))
-    as ArrayRef,
+Arc::new(StringArray::from(vec!["First", "Second", "Third"])) as ArrayRef,
),
(
field_primitive_list.clone(),
44 changes: 20 additions & 24 deletions datafusion/common/src/utils.rs
@@ -388,24 +388,22 @@ pub mod datafusion_strsim {

#[cfg(test)]
mod tests {
+use crate::ScalarValue;
+use crate::ScalarValue::Null;
use arrow::array::Float64Array;
use arrow_array::Array;
use std::ops::Range;
use std::sync::Arc;

-use crate::from_slice::FromSlice;
-use crate::ScalarValue;
-use crate::ScalarValue::Null;

use super::*;

#[test]
fn test_bisect_linear_left_and_right() -> Result<()> {
let arrays: Vec<ArrayRef> = vec![
-Arc::new(Float64Array::from_slice([5.0, 7.0, 8.0, 9., 10.])),
-Arc::new(Float64Array::from_slice([2.0, 3.0, 3.0, 4.0, 5.0])),
-Arc::new(Float64Array::from_slice([5.0, 7.0, 8.0, 10., 11.0])),
-Arc::new(Float64Array::from_slice([15.0, 13.0, 8.0, 5., 0.0])),
+Arc::new(Float64Array::from(vec![5.0, 7.0, 8.0, 9., 10.])),
+Arc::new(Float64Array::from(vec![2.0, 3.0, 3.0, 4.0, 5.0])),
+Arc::new(Float64Array::from(vec![5.0, 7.0, 8.0, 10., 11.0])),
+Arc::new(Float64Array::from(vec![15.0, 13.0, 8.0, 5., 0.0])),
];
let search_tuple: Vec<ScalarValue> = vec![
ScalarValue::Float64(Some(8.0)),
@@ -478,9 +476,8 @@ mod tests {
#[test]
fn test_bisect_linear_left_and_right_diff_sort() -> Result<()> {
// Descending, left
-let arrays: Vec<ArrayRef> = vec![Arc::new(Float64Array::from_slice([
-    4.0, 3.0, 2.0, 1.0, 0.0,
-]))];
+let arrays: Vec<ArrayRef> =
+    vec![Arc::new(Float64Array::from(vec![4.0, 3.0, 2.0, 1.0, 0.0]))];
let search_tuple: Vec<ScalarValue> = vec![ScalarValue::Float64(Some(4.0))];
let ords = [SortOptions {
descending: true,
@@ -492,9 +489,8 @@ mod tests {
assert_eq!(res, 0);

// Descending, right
-let arrays: Vec<ArrayRef> = vec![Arc::new(Float64Array::from_slice([
-    4.0, 3.0, 2.0, 1.0, 0.0,
-]))];
+let arrays: Vec<ArrayRef> =
+    vec![Arc::new(Float64Array::from(vec![4.0, 3.0, 2.0, 1.0, 0.0]))];
let search_tuple: Vec<ScalarValue> = vec![ScalarValue::Float64(Some(4.0))];
let ords = [SortOptions {
descending: true,
@@ -507,7 +503,7 @@

// Ascending, left
let arrays: Vec<ArrayRef> =
-vec![Arc::new(Float64Array::from_slice([5.0, 7.0, 8.0, 9., 10.]))];
+vec![Arc::new(Float64Array::from(vec![5.0, 7.0, 8.0, 9., 10.]))];
let search_tuple: Vec<ScalarValue> = vec![ScalarValue::Float64(Some(7.0))];
let ords = [SortOptions {
descending: false,
@@ -520,7 +516,7 @@

// Ascending, right
let arrays: Vec<ArrayRef> =
-vec![Arc::new(Float64Array::from_slice([5.0, 7.0, 8.0, 9., 10.]))];
+vec![Arc::new(Float64Array::from(vec![5.0, 7.0, 8.0, 9., 10.]))];
let search_tuple: Vec<ScalarValue> = vec![ScalarValue::Float64(Some(7.0))];
let ords = [SortOptions {
descending: false,
@@ -532,8 +528,8 @@
assert_eq!(res, 2);

let arrays: Vec<ArrayRef> = vec![
-Arc::new(Float64Array::from_slice([5.0, 7.0, 8.0, 8.0, 9., 10.])),
-Arc::new(Float64Array::from_slice([10.0, 9.0, 8.0, 7.5, 7., 6.])),
+Arc::new(Float64Array::from(vec![5.0, 7.0, 8.0, 8.0, 9., 10.])),
+Arc::new(Float64Array::from(vec![10.0, 9.0, 8.0, 7.5, 7., 6.])),
];
let search_tuple: Vec<ScalarValue> = vec![
ScalarValue::Float64(Some(8.0)),
@@ -564,8 +560,8 @@
#[test]
fn test_evaluate_partition_ranges() -> Result<()> {
let arrays: Vec<ArrayRef> = vec![
-Arc::new(Float64Array::from_slice([1.0, 1.0, 1.0, 2.0, 2.0, 2.0])),
-Arc::new(Float64Array::from_slice([4.0, 4.0, 3.0, 2.0, 1.0, 1.0])),
+Arc::new(Float64Array::from(vec![1.0, 1.0, 1.0, 2.0, 2.0, 2.0])),
+Arc::new(Float64Array::from(vec![4.0, 4.0, 3.0, 2.0, 1.0, 1.0])),
];
let n_row = arrays[0].len();
let options: Vec<SortOptions> = vec![
@@ -641,10 +637,10 @@
#[test]
fn test_get_arrayref_at_indices() -> Result<()> {
let arrays: Vec<ArrayRef> = vec![
-Arc::new(Float64Array::from_slice([5.0, 7.0, 8.0, 9., 10.])),
-Arc::new(Float64Array::from_slice([2.0, 3.0, 3.0, 4.0, 5.0])),
-Arc::new(Float64Array::from_slice([5.0, 7.0, 8.0, 10., 11.0])),
-Arc::new(Float64Array::from_slice([15.0, 13.0, 8.0, 5., 0.0])),
+Arc::new(Float64Array::from(vec![5.0, 7.0, 8.0, 9., 10.])),
+Arc::new(Float64Array::from(vec![2.0, 3.0, 3.0, 4.0, 5.0])),
+Arc::new(Float64Array::from(vec![5.0, 7.0, 8.0, 10., 11.0])),
+Arc::new(Float64Array::from(vec![15.0, 13.0, 8.0, 5., 0.0])),
];

let row_indices_vec: Vec<Vec<u32>> = vec![
3 changes: 1 addition & 2 deletions datafusion/core/benches/data_utils/mod.rs
@@ -27,7 +27,6 @@ use arrow::{
};
use datafusion::datasource::MemTable;
use datafusion::error::Result;
-use datafusion::from_slice::FromSlice;
use rand::rngs::StdRng;
use rand::seq::SliceRandom;
use rand::{Rng, SeedableRng};
@@ -131,7 +130,7 @@ fn create_record_batch(
schema,
vec![
Arc::new(StringArray::from(keys)),
-Arc::new(Float32Array::from_slice(vec![i as f32; batch_size])),
+Arc::new(Float32Array::from(vec![i as f32; batch_size])),
Arc::new(Float64Array::from(values)),
Arc::new(UInt64Array::from(integer_values_wide)),
Arc::new(UInt64Array::from(integer_values_narrow)),
