
Commit de2a6ef

Merge 1ddbbf4 into 713ba10 (2 parents: 713ba10 + 1ddbbf4)

13 files changed: 38 additions & 36 deletions
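
Every change in this merge follows one pattern: the benchmarks and examples now reach arrow and parquet through DataFusion's re-exports (datafusion::arrow and datafusion::parquet), so the git-pinned arrow and parquet dependencies can be dropped from their Cargo manifests. Below is a minimal sketch of the resulting import style, assuming a DataFusion build that re-exports arrow as these crates do after this merge; the helper name single_column_batch and the column "a" are illustrative, not part of the diff:

use std::sync::Arc;

use datafusion::arrow::array::Int32Array;
use datafusion::arrow::datatypes::{DataType, Field, Schema};
use datafusion::arrow::record_batch::RecordBatch;
use datafusion::error::Result;

// Build a RecordBatch entirely through DataFusion's arrow re-export;
// the calling crate no longer needs its own `arrow` dependency for this.
fn single_column_batch() -> Result<RecordBatch> {
    let schema = Arc::new(Schema::new(vec![Field::new("a", DataType::Int32, false)]));
    let batch = RecordBatch::try_new(schema, vec![Arc::new(Int32Array::from(vec![1, 2, 3]))])?;
    Ok(batch)
}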

benchmarks/Cargo.toml

Lines changed: 0 additions & 2 deletions

@@ -31,8 +31,6 @@ simd = ["datafusion/simd"]
 snmalloc = ["snmalloc-rs"]

 [dependencies]
-arrow = { git = "https://github.com/apache/arrow-rs", rev = "c3fe3bab9905739fdda75301dab07a18c91731bd" }
-parquet = { git = "https://github.com/apache/arrow-rs", rev = "c3fe3bab9905739fdda75301dab07a18c91731bd" }
 datafusion = { path = "../datafusion" }
 structopt = { version = "0.3", default-features = false }
 tokio = { version = "^1.0", features = ["macros", "rt", "rt-multi-thread"] }

benchmarks/src/bin/nyctaxi.rs

Lines changed: 3 additions & 2 deletions

@@ -22,8 +22,9 @@ use std::path::PathBuf;
 use std::process;
 use std::time::Instant;

-use arrow::datatypes::{DataType, Field, Schema};
-use arrow::util::pretty;
+use datafusion::arrow::datatypes::{DataType, Field, Schema};
+use datafusion::arrow::util::pretty;
+
 use datafusion::error::Result;
 use datafusion::execution::context::{ExecutionConfig, ExecutionContext};

benchmarks/src/bin/tpch.rs

Lines changed: 11 additions & 10 deletions

@@ -23,17 +23,19 @@ use std::{
     sync::Arc,
 };

-use arrow::datatypes::{DataType, Field, Schema};
-use arrow::util::pretty;
+use datafusion::arrow::datatypes::{DataType, Field, Schema};
+use datafusion::arrow::record_batch::RecordBatch;
+use datafusion::arrow::util::pretty;
+
 use datafusion::datasource::parquet::ParquetTable;
 use datafusion::datasource::{CsvFile, MemTable, TableProvider};
 use datafusion::error::{DataFusionError, Result};
 use datafusion::logical_plan::LogicalPlan;
 use datafusion::physical_plan::collect;
 use datafusion::prelude::*;

-use parquet::basic::Compression;
-use parquet::file::properties::WriterProperties;
+use datafusion::parquet::basic::Compression;
+use datafusion::parquet::file::properties::WriterProperties;
 use structopt::StructOpt;

 #[cfg(feature = "snmalloc")]

@@ -130,7 +132,7 @@ async fn main() -> Result<()> {
     }
 }

-async fn benchmark(opt: BenchmarkOpt) -> Result<Vec<arrow::record_batch::RecordBatch>> {
+async fn benchmark(opt: BenchmarkOpt) -> Result<Vec<RecordBatch>> {
     println!("Running benchmarks with the following options: {:?}", opt);
     let config = ExecutionConfig::new()
         .with_concurrency(opt.concurrency)

@@ -165,7 +167,7 @@ async fn benchmark(opt: BenchmarkOpt) -> Result<Vec<arrow::record_batch::RecordBatch>>

     let mut millis = vec![];
     // run benchmark
-    let mut result: Vec<arrow::record_batch::RecordBatch> = Vec::with_capacity(1);
+    let mut result: Vec<RecordBatch> = Vec::with_capacity(1);
     for i in 0..opt.iterations {
         let start = Instant::now();
         let plan = create_logical_plan(&mut ctx, opt.query)?;

@@ -1013,7 +1015,7 @@ async fn execute_query(
     ctx: &mut ExecutionContext,
     plan: &LogicalPlan,
     debug: bool,
-) -> Result<Vec<arrow::record_batch::RecordBatch>> {
+) -> Result<Vec<RecordBatch>> {
     if debug {
         println!("Logical plan:\n{:?}", plan);
     }

@@ -1237,9 +1239,8 @@ mod tests {
     use std::env;
     use std::sync::Arc;

-    use arrow::array::*;
-    use arrow::record_batch::RecordBatch;
-    use arrow::util::display::array_value_to_string;
+    use datafusion::arrow::array::*;
+    use datafusion::arrow::util::display::array_value_to_string;

     use datafusion::logical_plan::Expr;
     use datafusion::logical_plan::Expr::Cast;
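
The tpch.rs hunks above also route the Parquet writer settings through the datafusion::parquet re-export. A short sketch of that builder usage under the same re-export assumption; the snappy_writer_props helper is illustrative and not part of the benchmark:

use datafusion::parquet::basic::Compression;
use datafusion::parquet::file::properties::WriterProperties;

// Same WriterProperties builder as the upstream parquet crate;
// only the import path changes to go through the re-export.
fn snappy_writer_props() -> WriterProperties {
    WriterProperties::builder()
        .set_compression(Compression::SNAPPY)
        .build()
}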

datafusion-examples/Cargo.toml

Lines changed: 0 additions & 1 deletion

@@ -29,7 +29,6 @@ publish = false


 [dev-dependencies]
-arrow = { git = "https://github.com/apache/arrow-rs", rev = "c3fe3bab9905739fdda75301dab07a18c91731bd" }
 arrow-flight = { git = "https://github.com/apache/arrow-rs", rev = "c3fe3bab9905739fdda75301dab07a18c91731bd" }
 datafusion = { path = "../datafusion" }
 prost = "0.7"

datafusion-examples/examples/csv_sql.rs

Lines changed: 2 additions & 2 deletions

@@ -15,7 +15,7 @@
 // specific language governing permissions and limitations
 // under the License.

-use arrow::util::pretty;
+use datafusion::arrow::util::pretty;

 use datafusion::error::Result;
 use datafusion::prelude::*;

@@ -27,7 +27,7 @@ async fn main() -> Result<()> {
     // create local execution context
     let mut ctx = ExecutionContext::new();

-    let testdata = arrow::util::test_util::arrow_test_data();
+    let testdata = datafusion::arrow::util::test_util::arrow_test_data();

     // register csv file with the execution context
     ctx.register_csv(

datafusion-examples/examples/dataframe.rs

Lines changed: 2 additions & 2 deletions

@@ -15,7 +15,7 @@
 // specific language governing permissions and limitations
 // under the License.

-use arrow::util::pretty;
+use datafusion::arrow::util::pretty;

 use datafusion::error::Result;
 use datafusion::prelude::*;

@@ -27,7 +27,7 @@ async fn main() -> Result<()> {
     // create local execution context
     let mut ctx = ExecutionContext::new();

-    let testdata = arrow::util::test_util::parquet_test_data();
+    let testdata = datafusion::arrow::util::test_util::parquet_test_data();

     let filename = &format!("{}/alltypes_plain.parquet", testdata);

datafusion-examples/examples/dataframe_in_memory.rs

Lines changed: 4 additions & 4 deletions

@@ -17,10 +17,10 @@

 use std::sync::Arc;

-use arrow::array::{Int32Array, StringArray};
-use arrow::datatypes::{DataType, Field, Schema};
-use arrow::record_batch::RecordBatch;
-use arrow::util::pretty;
+use datafusion::arrow::array::{Int32Array, StringArray};
+use datafusion::arrow::datatypes::{DataType, Field, Schema};
+use datafusion::arrow::record_batch::RecordBatch;
+use datafusion::arrow::util::pretty;

 use datafusion::datasource::MemTable;
 use datafusion::error::Result;

datafusion-examples/examples/flight_client.rs

Lines changed: 3 additions & 3 deletions

@@ -18,8 +18,8 @@
 use std::convert::TryFrom;
 use std::sync::Arc;

-use arrow::datatypes::Schema;
-use arrow::util::pretty;
+use datafusion::arrow::datatypes::Schema;
+use datafusion::arrow::util::pretty;

 use arrow_flight::flight_descriptor;
 use arrow_flight::flight_service_client::FlightServiceClient;

@@ -31,7 +31,7 @@ use arrow_flight::{FlightDescriptor, Ticket};
 /// This example is run along-side the example `flight_server`.
 #[tokio::main]
 async fn main() -> Result<(), Box<dyn std::error::Error>> {
-    let testdata = arrow::util::test_util::parquet_test_data();
+    let testdata = datafusion::arrow::util::test_util::parquet_test_data();

     // Create Flight client
     let mut client = FlightServiceClient::connect("http://localhost:50051").await?;

datafusion-examples/examples/flight_server.rs

Lines changed: 3 additions & 3 deletions

@@ -66,7 +66,7 @@ impl FlightService for FlightServiceImpl {

         let table = ParquetTable::try_new(&request.path[0], num_cpus::get()).unwrap();

-        let options = arrow::ipc::writer::IpcWriteOptions::default();
+        let options = datafusion::arrow::ipc::writer::IpcWriteOptions::default();
         let schema_result = arrow_flight::utils::flight_schema_from_arrow_schema(
             table.schema().as_ref(),
             &options,

@@ -87,7 +87,7 @@ impl FlightService for FlightServiceImpl {
         // create local execution context
         let mut ctx = ExecutionContext::new();

-        let testdata = arrow::util::test_util::parquet_test_data();
+        let testdata = datafusion::arrow::util::test_util::parquet_test_data();

         // register parquet file with the execution context
         ctx.register_parquet(

@@ -106,7 +106,7 @@ impl FlightService for FlightServiceImpl {
         }

         // add an initial FlightData message that sends schema
-        let options = arrow::ipc::writer::IpcWriteOptions::default();
+        let options = datafusion::arrow::ipc::writer::IpcWriteOptions::default();
         let schema_flight_data =
             arrow_flight::utils::flight_data_from_arrow_schema(
                 &df.schema().clone().into(),

datafusion-examples/examples/parquet_sql.rs

Lines changed: 2 additions & 2 deletions

@@ -15,7 +15,7 @@
 // specific language governing permissions and limitations
 // under the License.

-use arrow::util::pretty;
+use datafusion::arrow::util::pretty;

 use datafusion::error::Result;
 use datafusion::prelude::*;

@@ -27,7 +27,7 @@ async fn main() -> Result<()> {
     // create local execution context
     let mut ctx = ExecutionContext::new();

-    let testdata = arrow::util::test_util::parquet_test_data();
+    let testdata = datafusion::arrow::util::test_util::parquet_test_data();

     // register parquet file with the execution context
     ctx.register_parquet(
