
Commit 8e85e9d

Add timezone to date_trunc fast path (apache#18596)
## Which issue does this PR close?

- Closes apache#18597

## Rationale for this change

A check was recently added to `invoke_with_args` that compares the output type of the result against the expected output type from the UDF (apache#17515). Because the `date_trunc` fast path misses adding the timezone, the assertion added in that PR fails.

## What changes are included in this PR?

Include timezone information in the fast path.

## Are these changes tested?

Yes, a unit test was added.

## Are there any user-facing changes?

No
1 parent 49e347b commit 8e85e9d
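
For context, here is a minimal, hypothetical sketch (using the `arrow` crate's `PrimitiveArray` API; the literal value and timezone are illustrative, not taken from the PR) of the mismatch the new output-type check catches: an array built without attaching the timezone reports a different `DataType` than the UDF's declared return type.

```rust
use std::sync::Arc;

use arrow::array::{Array, TimestampNanosecondArray};
use arrow::datatypes::{DataType, TimeUnit};

fn main() {
    // Hypothetical timezone and value, for illustration only.
    let tz: Option<Arc<str>> = Some(Arc::from("Asia/Kolkata"));

    // Built without the timezone: what the old fast path effectively produced.
    let without_tz = TimestampNanosecondArray::from(vec![1_599_572_549_190_855_000_i64]);
    assert_eq!(
        without_tz.data_type(),
        &DataType::Timestamp(TimeUnit::Nanosecond, None)
    );

    // Built with the timezone attached: this matches a declared
    // Timestamp(Nanosecond, Some(tz)) return type.
    let with_tz = without_tz.with_timezone_opt(tz.clone());
    assert_eq!(
        with_tz.data_type(),
        &DataType::Timestamp(TimeUnit::Nanosecond, tz)
    );
}
```

The one-line fix in the diff below threads `tz_opt` through to `with_timezone_opt` so the fast path's output type matches the declared one.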

2 files changed: +210 additions, -22 deletions


datafusion/functions/src/datetime/date_trunc.rs

Lines changed: 176 additions & 1 deletion
```diff
@@ -276,6 +276,7 @@ impl ScalarUDFImpl for DateTruncFunc {
                 T::UNIT,
                 array,
                 granularity,
+                tz_opt.clone(),
             )?;
             return Ok(ColumnarValue::Array(result));
         }
@@ -522,6 +523,7 @@ fn general_date_trunc_array_fine_granularity<T: ArrowTimestampType>(
     tu: TimeUnit,
     array: &PrimitiveArray<T>,
     granularity: DateTruncGranularity,
+    tz_opt: Option<Arc<str>>,
 ) -> Result<ArrayRef> {
     let unit = match (tu, granularity) {
         (Second, DateTruncGranularity::Minute) => NonZeroI64::new(60),
@@ -556,7 +558,8 @@
                 .iter()
                 .map(|v| *v - i64::rem_euclid(*v, unit)),
             array.nulls().cloned(),
-        );
+        )
+        .with_timezone_opt(tz_opt);
         Ok(Arc::new(array))
     } else {
         // truncate to the same or smaller unit
@@ -1094,4 +1097,176 @@ mod tests {
             }
         });
     }
+
+    #[test]
+    fn test_date_trunc_fine_granularity_timezones() {
+        let cases = [
+            // Test "second" granularity
+            (
+                vec![
+                    "2020-09-08T13:42:29.190855Z",
+                    "2020-09-08T13:42:30.500000Z",
+                    "2020-09-08T13:42:31.999999Z",
+                ],
+                Some("+00".into()),
+                "second",
+                vec![
+                    "2020-09-08T13:42:29.000000Z",
+                    "2020-09-08T13:42:30.000000Z",
+                    "2020-09-08T13:42:31.000000Z",
+                ],
+            ),
+            (
+                vec![
+                    "2020-09-08T13:42:29.190855+05",
+                    "2020-09-08T13:42:30.500000+05",
+                    "2020-09-08T13:42:31.999999+05",
+                ],
+                Some("+05".into()),
+                "second",
+                vec![
+                    "2020-09-08T13:42:29.000000+05",
+                    "2020-09-08T13:42:30.000000+05",
+                    "2020-09-08T13:42:31.000000+05",
+                ],
+            ),
+            (
+                vec![
+                    "2020-09-08T13:42:29.190855Z",
+                    "2020-09-08T13:42:30.500000Z",
+                    "2020-09-08T13:42:31.999999Z",
+                ],
+                Some("Europe/Berlin".into()),
+                "second",
+                vec![
+                    "2020-09-08T13:42:29.000000Z",
+                    "2020-09-08T13:42:30.000000Z",
+                    "2020-09-08T13:42:31.000000Z",
+                ],
+            ),
+            // Test "minute" granularity
+            (
+                vec![
+                    "2020-09-08T13:42:29.190855Z",
+                    "2020-09-08T13:43:30.500000Z",
+                    "2020-09-08T13:44:31.999999Z",
+                ],
+                Some("+00".into()),
+                "minute",
+                vec![
+                    "2020-09-08T13:42:00.000000Z",
+                    "2020-09-08T13:43:00.000000Z",
+                    "2020-09-08T13:44:00.000000Z",
+                ],
+            ),
+            (
+                vec![
+                    "2020-09-08T13:42:29.190855+08",
+                    "2020-09-08T13:43:30.500000+08",
+                    "2020-09-08T13:44:31.999999+08",
+                ],
+                Some("+08".into()),
+                "minute",
+                vec![
+                    "2020-09-08T13:42:00.000000+08",
+                    "2020-09-08T13:43:00.000000+08",
+                    "2020-09-08T13:44:00.000000+08",
+                ],
+            ),
+            (
+                vec![
+                    "2020-09-08T13:42:29.190855Z",
+                    "2020-09-08T13:43:30.500000Z",
+                    "2020-09-08T13:44:31.999999Z",
+                ],
+                Some("America/Sao_Paulo".into()),
+                "minute",
+                vec![
+                    "2020-09-08T13:42:00.000000Z",
+                    "2020-09-08T13:43:00.000000Z",
+                    "2020-09-08T13:44:00.000000Z",
+                ],
+            ),
+            // Test with None (no timezone)
+            (
+                vec![
+                    "2020-09-08T13:42:29.190855Z",
+                    "2020-09-08T13:43:30.500000Z",
+                    "2020-09-08T13:44:31.999999Z",
+                ],
+                None,
+                "minute",
+                vec![
+                    "2020-09-08T13:42:00.000000Z",
+                    "2020-09-08T13:43:00.000000Z",
+                    "2020-09-08T13:44:00.000000Z",
+                ],
+            ),
+            // Test millisecond granularity
+            (
+                vec![
+                    "2020-09-08T13:42:29.190855Z",
+                    "2020-09-08T13:42:29.191999Z",
+                    "2020-09-08T13:42:29.192500Z",
+                ],
+                Some("Asia/Kolkata".into()),
+                "millisecond",
+                vec![
+                    "2020-09-08T19:12:29.190000+05:30",
+                    "2020-09-08T19:12:29.191000+05:30",
+                    "2020-09-08T19:12:29.192000+05:30",
+                ],
+            ),
+        ];
+
+        cases
+            .iter()
+            .for_each(|(original, tz_opt, granularity, expected)| {
+                let input = original
+                    .iter()
+                    .map(|s| Some(string_to_timestamp_nanos(s).unwrap()))
+                    .collect::<TimestampNanosecondArray>()
+                    .with_timezone_opt(tz_opt.clone());
+                let right = expected
+                    .iter()
+                    .map(|s| Some(string_to_timestamp_nanos(s).unwrap()))
+                    .collect::<TimestampNanosecondArray>()
+                    .with_timezone_opt(tz_opt.clone());
+                let batch_len = input.len();
+                let arg_fields = vec![
+                    Field::new("a", DataType::Utf8, false).into(),
+                    Field::new("b", input.data_type().clone(), false).into(),
+                ];
+                let args = datafusion_expr::ScalarFunctionArgs {
+                    args: vec![
+                        ColumnarValue::Scalar(ScalarValue::from(*granularity)),
+                        ColumnarValue::Array(Arc::new(input)),
+                    ],
+                    arg_fields,
+                    number_rows: batch_len,
+                    return_field: Field::new(
+                        "f",
+                        DataType::Timestamp(TimeUnit::Nanosecond, tz_opt.clone()),
+                        true,
+                    )
+                    .into(),
+                    config_options: Arc::new(ConfigOptions::default()),
+                };
+                let result = DateTruncFunc::new().invoke_with_args(args).unwrap();
+                if let ColumnarValue::Array(result) = result {
+                    assert_eq!(
+                        result.data_type(),
+                        &DataType::Timestamp(TimeUnit::Nanosecond, tz_opt.clone()),
+                        "Failed for granularity: {granularity}, timezone: {tz_opt:?}"
+                    );
+                    let left = as_primitive_array::<TimestampNanosecondType>(&result);
+                    assert_eq!(
+                        left, &right,
+                        "Failed for granularity: {granularity}, timezone: {tz_opt:?}"
+                    );
+                } else {
+                    panic!("unexpected column type");
+                }
+            });
+    }
 }
```
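
As a rough illustration of what the fixed fast path does, the standalone sketch below (not DataFusion's actual `general_date_trunc_array_fine_granularity`; the helper `trunc_to_unit` and its arguments are hypothetical) rounds each value down to a whole number of units and then re-attaches the input's timezone to the output array.

```rust
use std::sync::Arc;

use arrow::array::{Array, ArrayRef, TimestampNanosecondArray};

/// Truncate every value to a multiple of `unit` nanoseconds, preserving the
/// caller-supplied timezone on the output array.
fn trunc_to_unit(
    array: &TimestampNanosecondArray,
    unit: i64,
    tz_opt: Option<Arc<str>>,
) -> ArrayRef {
    let truncated = array
        .iter()
        .map(|v| v.map(|v| v - v.rem_euclid(unit)))
        .collect::<TimestampNanosecondArray>()
        // Without this call the result would be Timestamp(Nanosecond, None)
        // even when the input carried a timezone.
        .with_timezone_opt(tz_opt);
    Arc::new(truncated)
}

fn main() {
    let tz: Option<Arc<str>> = Some(Arc::from("+05:30"));
    let input = TimestampNanosecondArray::from(vec![Some(1_190_855_000_i64), None])
        .with_timezone_opt(tz.clone());

    // Truncate to whole milliseconds (1_000_000 ns); the data type is preserved.
    let out = trunc_to_unit(&input, 1_000_000, tz);
    assert_eq!(out.data_type(), input.data_type());
}
```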

datafusion/sqllogictest/test_files/timestamps.slt

Lines changed: 34 additions & 21 deletions
```diff
@@ -45,6 +45,9 @@ create table ts_data_millis as select arrow_cast(ts / 1000000, 'Timestamp(Millis
 statement ok
 create table ts_data_secs as select arrow_cast(ts / 1000000000, 'Timestamp(Second, None)') as ts, value from ts_data;
 
+statement ok
+create table ts_data_micros_kolkata as select arrow_cast(ts / 1000, 'Timestamp(Microsecond, Some("Asia/Kolkata"))') as ts, value from ts_data;
+
 
 ##########
 ## Current date Tests
@@ -1873,27 +1876,6 @@ true false true true
 
 
 
-##########
-## Common timestamp data
-##########
-
-statement ok
-drop table ts_data
-
-statement ok
-drop table ts_data_nanos
-
-statement ok
-drop table ts_data_micros
-
-statement ok
-drop table ts_data_millis
-
-statement ok
-drop table ts_data_secs
-
-
-
 ##########
 ## Timezone impact on scalar functions
 #
@@ -3703,3 +3685,34 @@ SELECT
 FROM (SELECT CAST('2005-09-10 13:31:00 +02:00' AS timestamp with time zone) AS a)
 ----
 Timestamp(ns, "+00") 2005-09-10T11:31:00Z 2005-09-10T11:31:00Z 2005-09-10T11:31:00Z 2005-09-10T11:31:00Z
+
+query P
+SELECT
+  date_trunc('millisecond', ts)
+FROM ts_data_micros_kolkata
+----
+2020-09-08T19:12:29.190+05:30
+2020-09-08T18:12:29.190+05:30
+2020-09-08T17:12:29.190+05:30
+
+##########
+## Common timestamp data
+##########
+
+statement ok
+drop table ts_data
+
+statement ok
+drop table ts_data_nanos
+
+statement ok
+drop table ts_data_micros
+
+statement ok
+drop table ts_data_millis
+
+statement ok
+drop table ts_data_secs
+
+statement ok
+drop table ts_data_micros_kolkata
```
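
The same behaviour can also be exercised end to end through the SQL interface. A hedged sketch (assuming a standalone binary with `datafusion` and `tokio` as dependencies; the literal timestamp is illustrative and mirrors, but is not copied from, the new sqllogictest case):

```rust
use datafusion::error::Result;
use datafusion::prelude::*;

// Hypothetical reproduction of the sqllogictest scenario above.
#[tokio::main]
async fn main() -> Result<()> {
    let ctx = SessionContext::new();
    // Cast a literal to a Kolkata-zoned microsecond timestamp, then truncate it.
    let sql = r#"SELECT date_trunc('millisecond', arrow_cast(TIMESTAMP '2020-09-08 13:42:29.190855', 'Timestamp(Microsecond, Some("Asia/Kolkata"))')) AS ts"#;
    let df = ctx.sql(sql).await?;
    // The printed column should retain the +05:30 offset after truncation.
    df.show().await?;
    Ok(())
}
```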
