Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

chore: df 35 upgrade #2599

Merged
merged 6 commits into from
Feb 5, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
537 changes: 370 additions & 167 deletions Cargo.lock

Large diffs are not rendered by default.

14 changes: 6 additions & 8 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -18,9 +18,9 @@ wildcard_imports = "deny"

[workspace.dependencies]
clap = { version = "4.4.18", features = ["derive"] }
datafusion = { version = "34.0", features = ["avro"] }
arrow-flight = { version = "49.0.0", features = ["flight-sql-experimental"] }
datafusion-proto = { version = "34.0" }
datafusion = { version = "35.0.0", features = ["avro"] }
arrow-flight = { version = "50.0.0", features = ["flight-sql-experimental"] }
datafusion-proto = { version = "35.0.0" }
reqwest = { version = "0.11.24", default-features = false, features = [
"json",
"rustls-tls",
Expand All @@ -32,7 +32,7 @@ anyhow = "1.0.79"
async-trait = "0.1.77"
chrono = "0.4.33"
futures = "0.3.30"
object_store = "0.8"
object_store = "0.9"
prost = "0.12"
prost-build = "0.12"
prost-types = "0.12"
Expand All @@ -44,8 +44,6 @@ url = "2.5.0"

[workspace.dependencies.deltalake]

git = "https://github.com/GlareDB/delta-rs.git"
rev = "4c4bd7d5d4ce47f47f348c2a7d52c009048f841e"
git = "https://github.com/delta-io/delta-rs.git"
rev = "993e2c202936719855f8831513bcbab1b9930b94"
features = ["s3", "gcs", "azure", "datafusion"]


41 changes: 31 additions & 10 deletions crates/datafusion_ext/src/planner/expr/function.rs
Original file line number Diff line number Diff line change
Expand Up @@ -23,19 +23,19 @@ use datafusion::common::{
plan_err,
DFSchema,
DataFusionError,
Dependency,
Result,
};
use datafusion::logical_expr::expr::ScalarFunction;
use datafusion::logical_expr::expr::{find_df_window_func, ScalarFunction};
use datafusion::logical_expr::function::suggest_valid_function;
use datafusion::logical_expr::window_frame::{check_window_frame, regularize_window_order_by};
use datafusion::logical_expr::{
expr,
window_function,
AggregateFunction,
BuiltinScalarFunction,
Expr,
WindowFrame,
WindowFunction,
WindowFunctionDefinition,
};
use datafusion::sql::planner::PlannerContext;
use datafusion::sql::sqlparser::ast::{
Expand Down Expand Up @@ -119,6 +119,7 @@ impl<'a, S: AsyncContextProvider> SqlQueryPlanner<'a, S> {
}
partition_by
};

let mut order_by = self
.order_by_to_sort_expr(
&window.order_by,
Expand All @@ -128,6 +129,21 @@ impl<'a, S: AsyncContextProvider> SqlQueryPlanner<'a, S> {
false,
)
.await?;

let func_deps = schema.functional_dependencies();
// Find whether ties are possible in the given ordering:
let is_ordering_strict = order_by.iter().any(|orderby_expr| {
if let Expr::Sort(sort_expr) = orderby_expr {
if let Expr::Column(col) = sort_expr.expr.as_ref() {
let idx = schema.index_of_column(col).unwrap();
return func_deps.iter().any(|dep| {
dep.source_indices == vec![idx] && dep.mode == Dependency::Single
});
}
}
false
});

let window_frame = window
.window_frame
.as_ref()
Expand All @@ -140,15 +156,17 @@ impl<'a, S: AsyncContextProvider> SqlQueryPlanner<'a, S> {
let window_frame = if let Some(window_frame) = window_frame {
regularize_window_order_by(&window_frame, &mut order_by)?;
window_frame
} else if is_ordering_strict {
WindowFrame::new(Some(true))
} else {
WindowFrame::new(!order_by.is_empty())
WindowFrame::new((!order_by.is_empty()).then_some(false))
};

if let Ok(fun) = self.find_window_func(&name).await {
let expr = match fun {
WindowFunction::AggregateFunction(aggregate_fun) => {
WindowFunctionDefinition::AggregateFunction(aggregate_fun) => {
Expr::WindowFunction(expr::WindowFunction::new(
WindowFunction::AggregateFunction(aggregate_fun),
WindowFunctionDefinition::AggregateFunction(aggregate_fun),
args,
partition_by,
order_by,
Expand Down Expand Up @@ -220,17 +238,20 @@ impl<'a, S: AsyncContextProvider> SqlQueryPlanner<'a, S> {
Ok(Expr::ScalarFunction(ScalarFunction::new(fun, args)))
}

pub(super) async fn find_window_func(&mut self, name: &str) -> Result<WindowFunction> {
if let Some(func) = window_function::find_df_window_func(name) {
pub(super) async fn find_window_func(
&mut self,
name: &str,
) -> Result<WindowFunctionDefinition> {
if let Some(func) = find_df_window_func(name) {
return Ok(func);
}

if let Some(agg) = self.context_provider.get_aggregate_meta(name).await {
return Ok(WindowFunction::AggregateUDF(agg));
return Ok(expr::WindowFunctionDefinition::AggregateUDF(agg));
}

if let Some(win) = self.context_provider.get_window_meta(name).await {
return Ok(WindowFunction::WindowUDF(win));
return Ok(WindowFunctionDefinition::WindowUDF(win));
}

Err(plan_datafusion_err!(
Expand Down
2 changes: 1 addition & 1 deletion crates/datafusion_ext/src/planner/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -144,7 +144,7 @@ impl<'a, S: AsyncContextProvider> SqlQueryPlanner<'a, S> {
alias: TableAlias,
) -> Result<LogicalPlan> {
let apply_name_plan = LogicalPlan::SubqueryAlias(SubqueryAlias::try_new(
plan,
Arc::new(plan),
self.normalizer.normalize(alias.name),
)?);

Expand Down
9 changes: 6 additions & 3 deletions crates/datafusion_ext/src/planner/query.rs
Original file line number Diff line number Diff line change
Expand Up @@ -68,9 +68,12 @@ impl<'a, S: AsyncContextProvider> SqlQueryPlanner<'a, S> {
// A `WITH` block can't use the same name more than once
let cte_name = self.normalizer.normalize(cte.alias.name.clone());
if planner_context.contains_cte(&cte_name) {
return Err(DataFusionError::SQL(ParserError(format!(
"WITH query name {cte_name:?} specified more than once"
))));
return Err(DataFusionError::SQL(
ParserError(format!(
"WITH query name {cte_name:?} specified more than once"
)),
None,
));
}
// create logical plan & pass backreferencing CTEs
// CTE expr don't need extend outer_query_schema
Expand Down
1 change: 1 addition & 0 deletions crates/datafusion_ext/src/planner/statement.rs
Original file line number Diff line number Diff line change
Expand Up @@ -109,6 +109,7 @@ impl<'a, S: AsyncContextProvider> SqlQueryPlanner<'a, S> {
datafusion::common::SchemaError::DuplicateUnqualifiedField {
name: c.clone(),
},
Box::new(None),
));
} else {
value_indices[column_index] = Some(i);
Expand Down
2 changes: 1 addition & 1 deletion crates/datasources/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,7 @@ tiberius = { version = "0.12.2", default-features = false, features = [
"rustls",
"chrono",
] }
lance = { git = "https://github.com/universalmind303/lance", rev = "ffd4ac6ee2c61b3792a904b2e12152b246e837e6" }
lance = { git = "https://github.com/lancedb/lance", rev = "310d79eccf93f3c6a48c162c575918cdba13faec" }
bson = "2.9.0"
scylla = { version = "0.11.1" }
glob = "0.3.1"
Expand Down
12 changes: 8 additions & 4 deletions crates/datasources/src/bigquery/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -400,11 +400,15 @@ impl ExecutionPlan for BigQueryExec {

fn with_new_children(
self: Arc<Self>,
_children: Vec<Arc<dyn ExecutionPlan>>,
children: Vec<Arc<dyn ExecutionPlan>>,
) -> DatafusionResult<Arc<dyn ExecutionPlan>> {
Err(DataFusionError::Execution(
"cannot replace children for BigQueryExec".to_string(),
))
if children.is_empty() {
Ok(self)
} else {
Err(DataFusionError::Execution(
"cannot replace children for BigQueryExec".to_string(),
))
}
}

fn execute(
Expand Down
13 changes: 9 additions & 4 deletions crates/datasources/src/cassandra/exec.rs
Original file line number Diff line number Diff line change
Expand Up @@ -72,13 +72,18 @@ impl ExecutionPlan for CassandraExec {
fn children(&self) -> Vec<Arc<dyn ExecutionPlan>> {
vec![]
}

fn with_new_children(
self: Arc<Self>,
_children: Vec<Arc<dyn ExecutionPlan>>,
children: Vec<Arc<dyn ExecutionPlan>>,
) -> DatafusionResult<Arc<dyn ExecutionPlan>> {
Err(DataFusionError::Execution(
"cannot replace children for ScyllaExec".to_string(),
))
if children.is_empty() {
Ok(self)
} else {
Err(DataFusionError::Execution(
"cannot replace children for ScyllaExec".to_string(),
))
}
}
fn execute(
&self,
Expand Down
12 changes: 8 additions & 4 deletions crates/datasources/src/clickhouse/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -426,11 +426,15 @@ impl ExecutionPlan for ClickhouseExec {

fn with_new_children(
self: Arc<Self>,
_children: Vec<Arc<dyn ExecutionPlan>>,
children: Vec<Arc<dyn ExecutionPlan>>,
) -> DatafusionResult<Arc<dyn ExecutionPlan>> {
Err(DataFusionError::Execution(
"cannot replace children for ClickhouseExec".to_string(),
))
if children.is_empty() {
Ok(self)
} else {
Err(DataFusionError::Execution(
"cannot replace children for ClickhouseExec".to_string(),
))
}
}

fn execute(
Expand Down
3 changes: 1 addition & 2 deletions crates/datasources/src/common/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -24,8 +24,7 @@ pub(crate) fn exprs_to_phys_exprs(
) -> Result<Option<Arc<dyn PhysicalExpr>>> {
if let Some(expr) = conjunction(exprs.to_vec()) {
let table_df_schema = schema.clone().to_dfschema()?;
let filters =
create_physical_expr(&expr, &table_df_schema, schema, state.execution_props())?;
let filters = create_physical_expr(&expr, &table_df_schema, state.execution_props())?;
Ok(Some(filters))
} else {
Ok(None)
Expand Down
12 changes: 8 additions & 4 deletions crates/datasources/src/debug/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -263,11 +263,15 @@ impl ExecutionPlan for DebugTableExec {

fn with_new_children(
self: Arc<Self>,
_children: Vec<Arc<dyn ExecutionPlan>>,
children: Vec<Arc<dyn ExecutionPlan>>,
) -> DatafusionResult<Arc<dyn ExecutionPlan>> {
Err(DataFusionError::Execution(
"cannot replace children for DebugTableExec".to_string(),
))
if children.is_empty() {
Ok(self)
} else {
Err(DataFusionError::Execution(
"cannot replace children for DebugTableExec".to_string(),
))
}
}

fn execute(
Expand Down
2 changes: 1 addition & 1 deletion crates/datasources/src/lake/iceberg/table.rs
Original file line number Diff line number Diff line change
Expand Up @@ -280,6 +280,7 @@ impl TableProvider for IcebergTableReader {
) -> DataFusionResult<Arc<dyn ExecutionPlan>> {
// Create the datafusion specific url, and register the object store.
let object_url = datasource_url_to_unique_url(&self.state.location);

ctx.runtime_env()
.object_store_registry
.register_store(object_url.as_ref(), self.state.store.clone());
Expand Down Expand Up @@ -348,7 +349,6 @@ impl TableProvider for IcebergTableReader {
limit,
table_partition_cols: Vec::new(),
output_ordering: Vec::new(),
infinite_source: false,
};

let plan = ParquetFormat::new()
Expand Down
12 changes: 8 additions & 4 deletions crates/datasources/src/mongodb/exec.rs
Original file line number Diff line number Diff line change
Expand Up @@ -79,11 +79,15 @@ impl ExecutionPlan for MongoDbBsonExec {

fn with_new_children(
self: Arc<Self>,
_children: Vec<Arc<dyn ExecutionPlan>>,
children: Vec<Arc<dyn ExecutionPlan>>,
) -> DatafusionResult<Arc<dyn ExecutionPlan>> {
Err(DataFusionError::Execution(
"cannot replace children for MongoDB Exec".to_string(),
))
if children.is_empty() {
Ok(self)
} else {
Err(DataFusionError::Execution(
"cannot replace children for MongoDB Exec".to_string(),
))
}
}

fn execute(
Expand Down
12 changes: 8 additions & 4 deletions crates/datasources/src/mysql/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -495,11 +495,15 @@ impl ExecutionPlan for MysqlExec {

fn with_new_children(
self: Arc<Self>,
_children: Vec<Arc<dyn ExecutionPlan>>,
children: Vec<Arc<dyn ExecutionPlan>>,
) -> DatafusionResult<Arc<dyn ExecutionPlan>> {
Err(DataFusionError::Execution(
"cannot replace children for MysqlExec".to_string(),
))
if children.is_empty() {
Ok(self)
} else {
Err(DataFusionError::Execution(
"cannot replace children for MysqlExec".to_string(),
))
}
}

fn execute(
Expand Down
1 change: 0 additions & 1 deletion crates/datasources/src/object_store/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -335,7 +335,6 @@ impl TableProvider for ObjStoreTableProvider {
limit,
table_partition_cols: Vec::new(),
output_ordering: Vec::new(),
infinite_source: false,
};
let filters = exprs_to_phys_exprs(filters, ctx, &self.arrow_schema)?;

Expand Down
12 changes: 8 additions & 4 deletions crates/datasources/src/postgres/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -864,11 +864,15 @@ impl ExecutionPlan for PostgresBinaryCopyExec {

fn with_new_children(
self: Arc<Self>,
_children: Vec<Arc<dyn ExecutionPlan>>,
children: Vec<Arc<dyn ExecutionPlan>>,
) -> DatafusionResult<Arc<dyn ExecutionPlan>> {
Err(DataFusionError::Execution(
"cannot replace children for PostgresBinaryCopyExec".to_string(),
))
if children.is_empty() {
Ok(self)
} else {
Err(DataFusionError::Execution(
"cannot replace children for PostgresBinaryCopyExec".to_string(),
))
}
}

fn execute(
Expand Down
12 changes: 8 additions & 4 deletions crates/datasources/src/postgres/query_exec.rs
Original file line number Diff line number Diff line change
Expand Up @@ -66,11 +66,15 @@ impl ExecutionPlan for PostgresQueryExec {

fn with_new_children(
self: Arc<Self>,
_children: Vec<Arc<dyn ExecutionPlan>>,
children: Vec<Arc<dyn ExecutionPlan>>,
) -> DataFusionResult<Arc<dyn ExecutionPlan>> {
Err(DataFusionError::Execution(
"cannot replace children for PostgresQueryExec".to_string(),
))
if children.is_empty() {
Ok(self)
} else {
Err(DataFusionError::Execution(
"cannot replace children for PostgresQueryExec".to_string(),
))
}
}

fn execute(
Expand Down
12 changes: 8 additions & 4 deletions crates/datasources/src/snowflake/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -425,11 +425,15 @@ impl ExecutionPlan for SnowflakeExec {

fn with_new_children(
self: Arc<Self>,
_children: Vec<Arc<dyn ExecutionPlan>>,
children: Vec<Arc<dyn ExecutionPlan>>,
) -> DatafusionResult<Arc<dyn ExecutionPlan>> {
Err(DataFusionError::Execution(
"cannot replace children for Snowflake exec".to_string(),
))
if children.is_empty() {
Ok(self)
} else {
Err(DataFusionError::Execution(
"cannot replace children for Snowflake exec".to_string(),
))
}
}

fn execute(
Expand Down
Loading
Loading