Merged
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default.

1 change: 1 addition & 0 deletions Cargo.toml
@@ -101,6 +101,7 @@ datafusion-common-runtime = { version = "50" }
datafusion-datasource = { version = "50", default-features = false }
datafusion-execution = { version = "50" }
datafusion-expr = { version = "50" }
datafusion-functions = { version = "50" }
Contributor Author

The only reason to add this dependency was to be able to check that a scalar function was a get_field function. The other option is to check that fn.name() == 'get_field', but that seemed brittle.

Contributor

I'm all for delegating to DF in these cases
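
A minimal sketch of the two alternatives discussed in the comment above, assuming only APIs this diff already uses (`ScalarFunctionExpr::try_downcast_func` and `ScalarFunctionExpr::name`); both helper functions are hypothetical and not part of the PR:

```rust
// Sketch only: contrasts the two ways of detecting a get_field call.
// The PR itself uses the type-based check.
use datafusion_functions::core::getfield::GetFieldFunc;
use datafusion_physical_expr::ScalarFunctionExpr;

/// Type-based check: requires the `datafusion-functions` dependency, but keeps
/// working even if the registered function name ever changes.
fn is_get_field_by_type(expr: &ScalarFunctionExpr) -> bool {
    ScalarFunctionExpr::try_downcast_func::<GetFieldFunc>(expr).is_some()
}

/// Name-based check: avoids the extra dependency, but silently stops matching
/// if DataFusion renames the function.
fn is_get_field_by_name(expr: &ScalarFunctionExpr) -> bool {
    expr.name() == "get_field"
}
```

The new `datafusion-functions` dependency only buys the first variant; the trade-off is one more workspace crate versus a stringly-typed comparison.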

datafusion-physical-expr = { version = "50" }
datafusion-physical-expr-adapter = { version = "50" }
datafusion-physical-expr-common = { version = "50" }
1 change: 1 addition & 0 deletions vortex-datafusion/Cargo.toml
@@ -23,6 +23,7 @@ datafusion-common-runtime = { workspace = true }
datafusion-datasource = { workspace = true, default-features = false }
datafusion-execution = { workspace = true }
datafusion-expr = { workspace = true }
datafusion-functions = { workspace = true }
datafusion-physical-expr = { workspace = true }
datafusion-physical-expr-adapter = { workspace = true }
datafusion-physical-expr-common = { workspace = true }
120 changes: 117 additions & 3 deletions vortex-datafusion/src/convert/exprs.rs
@@ -5,7 +5,8 @@ use std::sync::Arc;

use arrow_schema::{DataType, Schema};
use datafusion_expr::Operator as DFOperator;
use datafusion_physical_expr::{PhysicalExpr, PhysicalExprRef};
use datafusion_functions::core::getfield::GetFieldFunc;
use datafusion_physical_expr::{PhysicalExpr, PhysicalExprRef, ScalarFunctionExpr};
use datafusion_physical_expr_common::physical_expr::is_dynamic_physical_expr;
use datafusion_physical_plan::expressions as df_expr;
use itertools::Itertools;
@@ -104,10 +105,47 @@ impl TryFromDataFusion<dyn PhysicalExpr> for ExprRef {
return Ok(if in_list.negated() { not(expr) } else { expr });
}

if let Some(scalar_fn) = df.as_any().downcast_ref::<ScalarFunctionExpr>() {
return try_convert_scalar_function(scalar_fn);
}

vortex_bail!("Couldn't convert DataFusion physical {df} expression to a vortex expression")
}
}

/// Attempts to convert a DataFusion ScalarFunctionExpr to a Vortex expression.
fn try_convert_scalar_function(scalar_fn: &ScalarFunctionExpr) -> VortexResult<ExprRef> {
if let Some(get_field_fn) = ScalarFunctionExpr::try_downcast_func::<GetFieldFunc>(scalar_fn) {
let source_expr = get_field_fn
.args()
.first()
.ok_or_else(|| vortex_err!("get_field missing source expression"))?
.as_ref();
let field_name_expr = get_field_fn
.args()
.get(1)
.ok_or_else(|| vortex_err!("get_field missing field name argument"))?;
let field_name = field_name_expr
.as_any()
.downcast_ref::<df_expr::Literal>()
.ok_or_else(|| vortex_err!("get_field field name must be a literal"))?
.value()
.try_as_str()
.flatten()
.ok_or_else(|| vortex_err!("get_field field name must be a UTF-8 string"))?;
return Ok(get_item(
field_name.to_string(),
ExprRef::try_from_df(source_expr)?,
));
}

tracing::debug!(
function_name = scalar_fn.name(),
"Unsupported ScalarFunctionExpr"
);
vortex_bail!("Unsupported ScalarFunctionExpr: {}", scalar_fn.name())
}

impl TryFromDataFusion<DFOperator> for Operator {
fn try_from_df(value: &DFOperator) -> VortexResult<Self> {
match value {
Expand Down Expand Up @@ -188,6 +226,9 @@ pub(crate) fn can_be_pushed_down(df_expr: &PhysicalExprRef, schema: &Schema) ->
} else if let Some(in_list) = expr.downcast_ref::<df_expr::InListExpr>() {
can_be_pushed_down(in_list.expr(), schema)
&& in_list.list().iter().all(|e| can_be_pushed_down(e, schema))
} else if let Some(scalar_fn) = expr.downcast_ref::<ScalarFunctionExpr>() {
// Only get_field pushdown is supported.
ScalarFunctionExpr::try_downcast_func::<GetFieldFunc>(scalar_fn).is_some()
} else {
tracing::debug!(%df_expr, "DataFusion expression can't be pushed down");
false
@@ -203,6 +244,12 @@ fn can_binary_be_pushed_down(binary: &df_expr::BinaryExpr, schema: &Schema) -> bool {

fn supported_data_types(dt: &DataType) -> bool {
use DataType::*;

// For dictionary types, check if the value type is supported.
if let Dictionary(_, value_type) = dt {
return supported_data_types(value_type.as_ref());
}

let is_supported = dt.is_null()
|| dt.is_numeric()
|| matches!(
@@ -232,9 +279,11 @@ fn supported_data_types(dt: &DataType) -> bool {
mod tests {
use std::sync::Arc;

use arrow_schema::{DataType, Field, Schema, TimeUnit as ArrowTimeUnit};
use arrow_schema::{DataType, Field, Fields, Schema, TimeUnit as ArrowTimeUnit};
use datafusion::functions::core::getfield::GetFieldFunc;
use datafusion_common::ScalarValue;
use datafusion_expr::Operator as DFOperator;
use datafusion_common::config::ConfigOptions;
use datafusion_expr::{Operator as DFOperator, ScalarUDF};
use datafusion_physical_expr::PhysicalExpr;
use datafusion_physical_plan::expressions as df_expr;
use insta::assert_snapshot;
@@ -415,6 +464,22 @@ mod tests {
false
)]
#[case::struct_type(DataType::Struct(vec![Field::new("field", DataType::Int32, true)].into()), false)]
// Dictionary types - should be supported if value type is supported
#[case::dict_utf8(
DataType::Dictionary(Box::new(DataType::UInt32), Box::new(DataType::Utf8)),
true
)]
#[case::dict_int32(
DataType::Dictionary(Box::new(DataType::UInt32), Box::new(DataType::Int32)),
true
)]
#[case::dict_unsupported(
DataType::Dictionary(
Box::new(DataType::UInt32),
Box::new(DataType::List(Arc::new(Field::new("item", DataType::Int32, true))))
),
false
)]
fn test_supported_data_types(#[case] data_type: DataType, #[case] expected: bool) {
assert_eq!(supported_data_types(&data_type), expected);
}
@@ -518,4 +583,53 @@ mod tests {

assert!(!can_be_pushed_down(&like_expr, &test_schema));
}

#[test]
fn test_expr_from_df_get_field() {
let struct_col = Arc::new(df_expr::Column::new("my_struct", 0)) as Arc<dyn PhysicalExpr>;
let field_name = Arc::new(df_expr::Literal::new(ScalarValue::Utf8(Some(
"field1".to_string(),
)))) as Arc<dyn PhysicalExpr>;
let get_field_expr = ScalarFunctionExpr::new(
"get_field",
Arc::new(ScalarUDF::from(GetFieldFunc::new())),
vec![struct_col, field_name],
Arc::new(Field::new("field1", DataType::Utf8, true)),
Arc::new(ConfigOptions::new()),
);
let result = ExprRef::try_from_df(&get_field_expr).unwrap();
assert_snapshot!(result.display_tree().to_string(), @r"
GetItem(field1)
└── GetItem(my_struct)
└── Root
");
}

#[test]
fn test_can_be_pushed_down_get_field() {
let struct_fields = Fields::from(vec![
Field::new("field1", DataType::Utf8, true),
Field::new("field2", DataType::Int32, true),
]);
let schema = Schema::new(vec![Field::new(
"my_struct",
DataType::Struct(struct_fields),
true,
)]);

let struct_col = Arc::new(df_expr::Column::new("my_struct", 0)) as Arc<dyn PhysicalExpr>;
let field_name = Arc::new(df_expr::Literal::new(ScalarValue::Utf8(Some(
"field1".to_string(),
)))) as Arc<dyn PhysicalExpr>;

let get_field_expr = Arc::new(ScalarFunctionExpr::new(
"get_field",
Arc::new(ScalarUDF::from(GetFieldFunc::new())),
vec![struct_col, field_name],
Arc::new(Field::new("field1", DataType::Utf8, true)),
Arc::new(ConfigOptions::new()),
)) as Arc<dyn PhysicalExpr>;

assert!(can_be_pushed_down(&get_field_expr, &schema));
}
}
1 change: 1 addition & 0 deletions vortex-datafusion/src/convert/scalars.rs
@@ -236,6 +236,7 @@ impl FromDataFusion<ScalarValue> for Scalar {
Scalar::null(DType::Decimal(decimal_dtype, nullable))
}
}
ScalarValue::Dictionary(_, v) => Scalar::from_df(v.as_ref()),
_ => unimplemented!("Can't convert {value:?} value to a Vortex scalar"),
}
}