expression, tests: move integration tests in expression pkg to tests/integration #46891

Merged: 4 commits, Sep 12, 2023
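This change moves the remaining integration tests in the expression package out of their Go test files and into the mysql-tester based tests/integrationtest suite. As a reading aid only (not part of the diff), the sketch below shows how one of the migrated cases, TestIssue29513, maps between the two formats in this PR: the Go testkit call tk.MustQuery(...).Sort().Check(...) becomes a bare SQL statement preceded by a -- sorted_result directive in the .test file, while the expected output is recorded in the matching .result file.

In tests/integrationtest/t/expression/issues.test:

# TestIssue29513
-- sorted_result
select '123' union select cast(45678 as char);

In tests/integrationtest/r/expression/issues.result (query echo, column header, then rows):

select '123' union select cast(45678 as char);
123
123
45678

The -- sorted_result directive plays the role of .Sort() in the removed Go tests: it has the test driver sort the returned rows before comparing them with the recorded result, so queries without a deterministic ORDER BY still verify reproducibly.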
2 changes: 1 addition & 1 deletion .gitignore
@@ -9,7 +9,7 @@ coverage.out
*.test.bin
tags
profile.coverprofile
integration_test
mysql_tester
tests/integrationtest/integration-test.out
tests/integrationtest/integrationtest_tidb-server
tests/integrationtest/portgenerator
2 changes: 1 addition & 1 deletion expression/integration_serial_test/BUILD.bazel
@@ -8,7 +8,7 @@ go_test(
"main_test.go",
],
flaky = True,
shard_count = 50,
shard_count = 49,
deps = [
"//config",
"//expression",
16 changes: 0 additions & 16 deletions expression/integration_serial_test/integration_serial_test.go
@@ -3715,22 +3715,6 @@ func TestNoopFunctions(t *testing.T) {
}
}

func TestIssue18674(t *testing.T) {
store := testkit.CreateMockStore(t)

tk := testkit.NewTestKit(t, store)
tk.MustQuery("select -1.0 % -1.0").Check(testkit.Rows("0.0"))
tk.MustExec("use test")
tk.MustExec("drop table if exists t1")
tk.MustExec("create table t1(`pk` int primary key,`col_float_key_signed` float ,key (`col_float_key_signed`))")
tk.MustExec("insert into t1 values (0, null), (1, 0), (2, -0), (3, 1), (-1,-1)")
tk.MustQuery("select * from t1 where ( `col_float_key_signed` % `col_float_key_signed`) IS FALSE").Sort().Check(testkit.Rows("-1 -1", "3 1"))
tk.MustQuery("select `col_float_key_signed` , `col_float_key_signed` % `col_float_key_signed` from t1").Sort().Check(testkit.Rows(
"-1 -0", "0 <nil>", "0 <nil>", "1 0", "<nil> <nil>"))
tk.MustQuery("select `col_float_key_signed` , (`col_float_key_signed` % `col_float_key_signed`) IS FALSE from t1").Sort().Check(testkit.Rows(
"-1 1", "0 0", "0 0", "1 1", "<nil> 0"))
}

func TestJsonObjectCompare(t *testing.T) {
store := testkit.CreateMockStore(t)

121 changes: 0 additions & 121 deletions expression/integration_test/integration_test.go
@@ -3254,24 +3254,6 @@ func TestValuesEnum(t *testing.T) {
tk.MustQuery(`select * from t;`).Check(testkit.Rows(`1 b`))
}

func TestIssue9325(t *testing.T) {
store := testkit.CreateMockStore(t)

tk := testkit.NewTestKit(t, store)
tk.MustExec("use test")
tk.MustExec("drop table if exists t")
tk.MustExec("create table t(a timestamp) partition by range(unix_timestamp(a)) (partition p0 values less than(unix_timestamp('2019-02-16 14:20:00')), partition p1 values less than (maxvalue))")
tk.MustExec("insert into t values('2019-02-16 14:19:59'), ('2019-02-16 14:20:01')")
result := tk.MustQuery("select * from t where a between timestamp'2019-02-16 14:19:00' and timestamp'2019-02-16 14:21:00'")
require.Len(t, result.Rows(), 2)

tk.MustExec("drop table if exists t")
tk.MustExec("create table t(a timestamp)")
tk.MustExec("insert into t values('2019-02-16 14:19:59'), ('2019-02-16 14:20:01')")
result = tk.MustQuery("select * from t where a < timestamp'2019-02-16 14:21:00'")
result.Check(testkit.Rows("2019-02-16 14:19:59", "2019-02-16 14:20:01"))
}

func TestIssue9710(t *testing.T) {
store := testkit.CreateMockStore(t)

@@ -4026,20 +4008,6 @@ func TestCTEWithDML(t *testing.T) {
tk.MustQuery("select * from t1").Check(testkit.Rows("1 1", "2 2", "3 3", "4 4", "5 5"))
}

func TestIssue16505(t *testing.T) {
store := testkit.CreateMockStore(t)

tk := testkit.NewTestKit(t, store)
tk.MustExec("use test;")
tk.MustExec("drop table if exists t;")
tk.MustExec("CREATE TABLE t(c varchar(100), index idx(c(100)));")
tk.MustExec("INSERT INTO t VALUES (NULL),('1'),('0'),(''),('aaabbb'),('0abc'),('123e456'),('0.0001deadsfeww');")
tk.MustQuery("select * from t where c;").Sort().Check(testkit.Rows("0.0001deadsfeww", "1", "123e456"))
tk.MustQuery("select /*+ USE_INDEX(t, idx) */ * from t where c;").Sort().Check(testkit.Rows("0.0001deadsfeww", "1", "123e456"))
tk.MustQuery("select /*+ IGNORE_INDEX(t, idx) */* from t where c;").Sort().Check(testkit.Rows("0.0001deadsfeww", "1", "123e456"))
tk.MustExec("drop table t;")
}

func TestIssue16697(t *testing.T) {
store := testkit.CreateMockStore(t)

@@ -4362,20 +4330,6 @@ func TestIssue11333(t *testing.T) {
tk.MustQuery(`select 0.000000000000000000000000000000000000000000000000000000000000000000000001;`).Check(testkit.Rows("0.000000000000000000000000000000000000000000000000000000000000000000000001"))
}

// The actual results do not agree with the test results, It should be modified after the test suite is updated
func TestIssue17726(t *testing.T) {
store := testkit.CreateMockStore(t)

tk := testkit.NewTestKit(t, store)
tk.MustExec("use test")
tk.MustExec("drop table if exists t0")
tk.MustExec("create table t0 (c1 DATE, c2 TIME, c3 DATETIME, c4 TIMESTAMP)")
tk.MustExec("insert into t0 values ('1000-01-01', '-838:59:59', '1000-01-01 00:00:00', '1970-01-01 08:00:01')")
tk.MustExec("insert into t0 values ('9999-12-31', '838:59:59', '9999-12-31 23:59:59', '2038-01-19 11:14:07')")
result := tk.MustQuery("select avg(c1), avg(c2), avg(c3), avg(c4) from t0")
result.Check(testkit.Rows("54995666 0 54995666117979.5 20040110095704"))
}

func TestDatetimeUserVariable(t *testing.T) {
store := testkit.CreateMockStore(t)

@@ -4526,19 +4480,6 @@ func TestApproximatePercentile(t *testing.T) {
tk.MustQuery("select approx_percentile(a, 10) from t").Check(testkit.Rows("<nil>"))
}

func TestIssue24429(t *testing.T) {
store := testkit.CreateMockStore(t)

tk := testkit.NewTestKit(t, store)

tk.MustExec("set @@sql_mode = ANSI_QUOTES;")
tk.MustExec("use test")
tk.MustExec("drop table if exists t;")
tk.MustExec("create table t (a int);")
tk.MustQuery(`select t."a"=10 from t;`).Check(testkit.Rows())
tk.MustExec("drop table if exists t;")
}

func TestVitessHash(t *testing.T) {
store := testkit.CreateMockStore(t)

@@ -5376,53 +5317,6 @@ func TestIdentity(t *testing.T) {
tk.MustQuery("SELECT @@identity, LAST_INSERT_ID()").Check(testkit.Rows("3 3"))
}

func TestIssue29417(t *testing.T) {
store := testkit.CreateMockStore(t)

tk := testkit.NewTestKit(t, store)
tk.MustExec("use test")
tk.MustExec("drop table if exists t1;")
tk.MustExec("create table t1 (f1 decimal(5,5));")
tk.MustExec("insert into t1 values (-0.12345);")
tk.MustQuery("select concat(f1) from t1;").Check(testkit.Rows("-0.12345"))
}

func TestIssue29513(t *testing.T) {
store := testkit.CreateMockStore(t)

tk := testkit.NewTestKit(t, store)
tk.MustExec("use test")
tk.MustQuery("select '123' union select cast(45678 as char);").Sort().Check(testkit.Rows("123", "45678"))
tk.MustQuery("select '123' union select cast(45678 as char(2));").Sort().Check(testkit.Rows("123", "45"))

tk.MustExec("drop table if exists t")
tk.MustExec("create table t(a int);")
tk.MustExec("insert into t values(45678);")
tk.MustQuery("select '123' union select cast(a as char) from t;").Sort().Check(testkit.Rows("123", "45678"))
tk.MustQuery("select '123' union select cast(a as char(2)) from t;").Sort().Check(testkit.Rows("123", "45"))
}

func TestIssue28739(t *testing.T) {
store := testkit.CreateMockStore(t)

tk := testkit.NewTestKit(t, store)
tk.MustExec(`USE test`)
tk.MustExec("SET time_zone = 'Europe/Vilnius'")
tk.MustQuery("SELECT UNIX_TIMESTAMP('2020-03-29 03:45:00')").Check(testkit.Rows("1585443600"))
tk.MustQuery("SELECT FROM_UNIXTIME(UNIX_TIMESTAMP('2020-03-29 03:45:00'))").Check(testkit.Rows("2020-03-29 04:00:00"))
tk.MustExec(`DROP TABLE IF EXISTS t`)
tk.MustExec(`CREATE TABLE t (dt DATETIME NULL)`)
defer tk.MustExec(`DROP TABLE t`)
// Test the vector implememtation
tk.MustExec(`INSERT INTO t VALUES ('2021-10-31 02:30:00'), ('2021-03-28 02:30:00'), ('2020-10-04 02:15:00'), ('2020-03-29 03:45:00'), (NULL)`)
tk.MustQuery(`SELECT dt, UNIX_TIMESTAMP(dt) FROM t`).Sort().Check(testkit.Rows(
"2020-03-29 03:45:00 1585443600",
"2020-10-04 02:15:00 1601766900",
"2021-03-28 02:30:00 1616891400",
"2021-10-31 02:30:00 1635636600",
"<nil> <nil>"))
}

func TestTimestampAddWithFractionalSecond(t *testing.T) {
store := testkit.CreateMockStore(t)
tk := testkit.NewTestKit(t, store)
@@ -5690,18 +5584,3 @@ func TestIfNullParamMarker(t *testing.T) {
// Should not report 'Data too long for column' error.
tk.MustExec(`execute pr1 using @a,@b;`)
}

func TestIssue40015(t *testing.T) {
store := testkit.CreateMockStore(t)
tk := testkit.NewTestKit(t, store)
tk.MustExec("use test")
tk.MustExec("CREATE TABLE test ( c1 varchar(20));")
tk.MustExec("INSERT INTO test VALUES (101111),(11100),(101111),(101111);")
tk.MustExec("set tidb_enable_vectorized_expression = true;")
tk.MustQuery("SELECT DATE_ADD(c1, INTERVAL 1 DAY_HOUR) from test;").Sort().Check(testkit.Rows(
"2010-11-11 01:00:00",
"2010-11-11 01:00:00",
"2010-11-11 01:00:00",
"<nil>",
))
}
111 changes: 111 additions & 0 deletions tests/integrationtest/r/expression/issues.result
@@ -2517,3 +2517,114 @@ INSERT INTO t1 VALUES (0);
SELECT c1>=CAST('-787360724' AS TIME) FROM t1;
c1>=CAST('-787360724' AS TIME)
1
select '123' union select cast(45678 as char);
123
123
45678
select '123' union select cast(45678 as char(2));
123
123
45
drop table if exists t;
create table t(a int);
insert into t values(45678);
select '123' union select cast(a as char) from t;
123
123
45678
select '123' union select cast(a as char(2)) from t;
123
123
45
SET time_zone='Europe/Vilnius';
SELECT UNIX_TIMESTAMP('2020-03-29 03:45:00');
UNIX_TIMESTAMP('2020-03-29 03:45:00')
1585443600
SELECT FROM_UNIXTIME(UNIX_TIMESTAMP('2020-03-29 03:45:00'));
FROM_UNIXTIME(UNIX_TIMESTAMP('2020-03-29 03:45:00'))
2020-03-29 04:00:00
DROP TABLE IF EXISTS t;
CREATE TABLE t (dt DATETIME NULL);
INSERT INTO t VALUES ('2021-10-31 02:30:00'), ('2021-03-28 02:30:00'), ('2020-10-04 02:15:00'), ('2020-03-29 03:45:00'), (NULL);
SELECT dt, UNIX_TIMESTAMP(dt) FROM t;
dt UNIX_TIMESTAMP(dt)
NULL NULL
2020-03-29 03:45:00 1585443600
2020-10-04 02:15:00 1601766900
2021-03-28 02:30:00 1616891400
2021-10-31 02:30:00 1635636600
SET time_zone=default;
DROP TABLE IF EXISTS test;
CREATE TABLE test ( c1 varchar(20));
INSERT INTO test VALUES (101111),(11100),(101111),(101111);
set tidb_enable_vectorized_expression = true;
SELECT DATE_ADD(c1, INTERVAL 1 DAY_HOUR) from test;
DATE_ADD(c1, INTERVAL 1 DAY_HOUR)
NULL
2010-11-11 01:00:00
2010-11-11 01:00:00
2010-11-11 01:00:00
set tidb_enable_vectorized_expression = default;
drop table if exists t;
create table t(a timestamp) partition by range(unix_timestamp(a)) (partition p0 values less than(unix_timestamp('2019-02-16 14:20:00')), partition p1 values less than (maxvalue));
insert into t values('2019-02-16 14:19:59'), ('2019-02-16 14:20:01');
select * from t where a between timestamp'2019-02-16 14:19:00' and timestamp'2019-02-16 14:21:00';
a
2019-02-16 14:19:59
2019-02-16 14:20:01
drop table if exists t;
create table t(a timestamp);
insert into t values('2019-02-16 14:19:59'), ('2019-02-16 14:20:01');
select * from t where a < timestamp'2019-02-16 14:21:00';
a
2019-02-16 14:19:59
2019-02-16 14:20:01
drop table if exists t;
CREATE TABLE t(c varchar(100), index idx(c(100)));
INSERT INTO t VALUES (NULL),('1'),('0'),(''),('aaabbb'),('0abc'),('123e456'),('0.0001deadsfeww');
select * from t where c;
c
0.0001deadsfeww
1
123e456
select /*+ USE_INDEX(t, idx) */ * from t where c;
c
0.0001deadsfeww
1
123e456
select /*+ IGNORE_INDEX(t, idx) */* from t where c;
c
0.0001deadsfeww
1
123e456
drop table if exists t0;
create table t0 (c1 DATE, c2 TIME, c3 DATETIME, c4 TIMESTAMP);
insert into t0 values ('1000-01-01', '-838:59:59', '1000-01-01 00:00:00', '1970-01-01 08:00:01');
insert into t0 values ('9999-12-31', '838:59:59', '9999-12-31 23:59:59', '2038-01-19 11:14:07');
select avg(c1), avg(c2), avg(c3), avg(c4) from t0;
avg(c1) avg(c2) avg(c3) avg(c4)
54995666.0000 0.0000 54995666117979.5000 20040110095704.0000
select -1.0 % -1.0;
-1.0 % -1.0
0.0
drop table if exists t1;
create table t1(`pk` int primary key,`col_float_key_signed` float ,key (`col_float_key_signed`));
insert into t1 values (0, null), (1, 0), (2, -0), (3, 1), (-1,-1);
select * from t1 where ( `col_float_key_signed` % `col_float_key_signed`) IS FALSE;
pk col_float_key_signed
-1 -1
3 1
select `col_float_key_signed` , `col_float_key_signed` % `col_float_key_signed` from t1;
col_float_key_signed `col_float_key_signed` % `col_float_key_signed`
NULL NULL
-1 -0
0 NULL
0 NULL
1 0
select `col_float_key_signed` , (`col_float_key_signed` % `col_float_key_signed`) IS FALSE from t1;
col_float_key_signed (`col_float_key_signed` % `col_float_key_signed`) IS FALSE
NULL 0
-1 1
0 0
0 0
1 1
76 changes: 76 additions & 0 deletions tests/integrationtest/t/expression/issues.test
@@ -1691,3 +1691,79 @@ drop table if exists t1;
CREATE TABLE t1 (c1 TINYINT(1) UNSIGNED NOT NULL);
INSERT INTO t1 VALUES (0);
SELECT c1>=CAST('-787360724' AS TIME) FROM t1;

# TestIssue29513
-- sorted_result
select '123' union select cast(45678 as char);
-- sorted_result
select '123' union select cast(45678 as char(2));
drop table if exists t;
create table t(a int);
insert into t values(45678);
-- sorted_result
select '123' union select cast(a as char) from t;
-- sorted_result
select '123' union select cast(a as char(2)) from t;

# TestIssue28739
SET time_zone='Europe/Vilnius';
SELECT UNIX_TIMESTAMP('2020-03-29 03:45:00');
SELECT FROM_UNIXTIME(UNIX_TIMESTAMP('2020-03-29 03:45:00'));
DROP TABLE IF EXISTS t;
CREATE TABLE t (dt DATETIME NULL);
INSERT INTO t VALUES ('2021-10-31 02:30:00'), ('2021-03-28 02:30:00'), ('2020-10-04 02:15:00'), ('2020-03-29 03:45:00'), (NULL);
-- sorted_result
SELECT dt, UNIX_TIMESTAMP(dt) FROM t;
SET time_zone=default;

# TestIssue40015
DROP TABLE IF EXISTS test;
CREATE TABLE test ( c1 varchar(20));
INSERT INTO test VALUES (101111),(11100),(101111),(101111);
set tidb_enable_vectorized_expression = true;
-- sorted_result
SELECT DATE_ADD(c1, INTERVAL 1 DAY_HOUR) from test;
set tidb_enable_vectorized_expression = default;

# TestIssue9325
drop table if exists t;
create table t(a timestamp) partition by range(unix_timestamp(a)) (partition p0 values less than(unix_timestamp('2019-02-16 14:20:00')), partition p1 values less than (maxvalue));
insert into t values('2019-02-16 14:19:59'), ('2019-02-16 14:20:01');
-- sorted_result
select * from t where a between timestamp'2019-02-16 14:19:00' and timestamp'2019-02-16 14:21:00';
drop table if exists t;
create table t(a timestamp);
insert into t values('2019-02-16 14:19:59'), ('2019-02-16 14:20:01');
-- sorted_result
select * from t where a < timestamp'2019-02-16 14:21:00';

# TestIssue16505
drop table if exists t;
CREATE TABLE t(c varchar(100), index idx(c(100)));
INSERT INTO t VALUES (NULL),('1'),('0'),(''),('aaabbb'),('0abc'),('123e456'),('0.0001deadsfeww');
-- sorted_result
select * from t where c;
-- sorted_result
select /*+ USE_INDEX(t, idx) */ * from t where c;
-- sorted_result
select /*+ IGNORE_INDEX(t, idx) */* from t where c;

# TestIssue17726
drop table if exists t0;
create table t0 (c1 DATE, c2 TIME, c3 DATETIME, c4 TIMESTAMP);
insert into t0 values ('1000-01-01', '-838:59:59', '1000-01-01 00:00:00', '1970-01-01 08:00:01');
insert into t0 values ('9999-12-31', '838:59:59', '9999-12-31 23:59:59', '2038-01-19 11:14:07');
select avg(c1), avg(c2), avg(c3), avg(c4) from t0;

# TestIssue18674
select -1.0 % -1.0;
drop table if exists t1;
create table t1(`pk` int primary key,`col_float_key_signed` float ,key (`col_float_key_signed`));
insert into t1 values (0, null), (1, 0), (2, -0), (3, 1), (-1,-1);
-- sorted_result
select * from t1 where ( `col_float_key_signed` % `col_float_key_signed`) IS FALSE;
-- sorted_result
select `col_float_key_signed` , `col_float_key_signed` % `col_float_key_signed` from t1;
-- sorted_result
select `col_float_key_signed` , (`col_float_key_signed` % `col_float_key_signed`) IS FALSE from t1;