Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 14 additions & 2 deletions fe/fe-core/src/main/java/org/apache/doris/catalog/JdbcTable.java
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
Expand Down Expand Up @@ -100,6 +101,9 @@ public class JdbcTable extends Table {
private boolean connectionPoolKeepAlive;

private ExternalFunctionRules functionRules;
// This is used for edit log
@SerializedName("frs")
private String functionRulesString;

static {
Map<String, TOdbcTableType> tempMap = new CaseInsensitiveMap();
Expand Down Expand Up @@ -381,8 +385,9 @@ private void validate(Map<String, String> properties) throws DdlException {

/**
 * Validates and installs the external function pushdown rules from table properties.
 *
 * @param properties table properties; the {@code JdbcResource.FUNCTION_RULES} entry
 *                   is optional — absence means "no extra rules" (empty string)
 * @throws DdlException if the rules specification fails validation
 */
private void checkAndSetExternalFunctionRules(Map<String, String> properties) throws DdlException {
    // Read the property exactly once instead of calling getOrDefault twice,
    // and avoid shadowing the functionRulesString field with a same-named local.
    String rules = properties.getOrDefault(JdbcResource.FUNCTION_RULES, "");
    // Validate first so an invalid spec fails the DDL before anything is set.
    ExternalFunctionRules.check(rules);
    this.functionRules = ExternalFunctionRules.create(jdbcTypeName, rules);
    // Persist the raw string as well: the parsed object is rebuilt from it
    // after deserialization in gsonPostProcess().
    this.functionRulesString = rules;
}

/**
Expand Down Expand Up @@ -491,4 +496,11 @@ public void setExternalFunctionRules(ExternalFunctionRules functionRules) {
/**
 * Returns the parsed external function pushdown rules for this JDBC table.
 * May be stale until gsonPostProcess() rebuilds it after deserialization.
 */
public ExternalFunctionRules getExternalFunctionRules() {
    return functionRules;
}

@Override
public void gsonPostProcess() throws IOException {
    super.gsonPostProcess();
    // The functionRules object carries no @SerializedName, so only the raw
    // functionRulesString ("frs") survives persistence (e.g. edit-log replay).
    // Rebuild the parsed rules from it here; nullToEmpty guards metadata
    // written before the functionRulesString field existed.
    functionRules = ExternalFunctionRules.create(jdbcTypeName, Strings.nullToEmpty(functionRulesString));
}
}

Original file line number Diff line number Diff line change
Expand Up @@ -339,6 +339,8 @@ public void afterDropTable(String dbName, String tblName) {
if (db.isPresent()) {
db.get().setUnInitialized(true);
}
LOG.info("after drop table {}.{}.{}. is db present: {}",
dorisCatalog.getName(), dbName, tblName, db.isPresent());
}

private void performDropTable(String remoteDbName, String remoteTblName, boolean ifExists) throws DdlException {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -248,7 +248,7 @@ public synchronized long write(short op, Writable writable) throws IOException {
MetricRepo.COUNTER_CURRENT_EDIT_LOG_SIZE_BYTES.increase((long) theData.getSize());
}
if (LOG.isDebugEnabled() || theData.getSize() > (1 << 20)) {
LOG.info("opCode = {}, journal size = {}", op, theData.getSize());
LOG.info("opCode = {}, journal size = {}, log id: {}", op, theData.getSize(), id);
if (MetricRepo.isInit) {
MetricRepo.COUNTER_LARGE_EDIT_LOG.increase(1L);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,9 @@ test_iceberg_case_sensibility_hadoop ICEBERG_HADOOP_CASE_DB2 case_tbl21 k1 1 \N
-- !sql18 --
4

-- !sql_show --
case_tbl22

-- !sql21 --
5

Expand Down Expand Up @@ -159,6 +162,9 @@ test_iceberg_case_sensibility_hadoop ICEBERG_HADOOP_CASE_DB2 case_tbl21 k1 1 \N
-- !sql18 --
5

-- !sql_show --
case_tbl22

-- !sql21 --
5

Expand Down Expand Up @@ -254,6 +260,9 @@ test_iceberg_case_sensibility_hadoop ICEBERG_HADOOP_CASE_DB2 case_tbl21 k1 1 \N
-- !sql18 --
5

-- !sql_show --
case_tbl22

-- !sql21 --
5

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,9 @@ test_iceberg_case_sensibility_rest ICEBERG_REST_CASE_DB2 case_tbl21 k1 1 \N YES
-- !sql18 --
4

-- !sql_show --
case_tbl22

-- !sql21 --
5

Expand Down Expand Up @@ -159,6 +162,9 @@ test_iceberg_case_sensibility_rest ICEBERG_REST_CASE_DB2 case_tbl21 k1 1 \N YES
-- !sql18 --
5

-- !sql_show --
case_tbl22

-- !sql21 --
5

Expand Down Expand Up @@ -254,6 +260,9 @@ test_iceberg_case_sensibility_rest ICEBERG_REST_CASE_DB2 case_tbl21 k1 1 \N YES
-- !sql18 --
5

-- !sql_show --
case_tbl22

-- !sql21 --
5

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -278,7 +278,7 @@ suite("test_iceberg_hadoop_case_sensibility", "p0,external,doris,external_docker
exception "Table [case_tbl22] does not exist in database [iceberg_hadoop_case_db2]"
}
sql """create table iceberg_hadoop_case_db2.case_tbl22 (k1 int);""" // recreate
sql """show tables from iceberg_hadoop_case_db2 like "%case_tbl22%""""
qt_sql_show """show tables from iceberg_hadoop_case_db2 like "%case_tbl22%""""

sql """insert into iceberg_hadoop_case_db2.case_tbl22 values(5);"""
order_qt_sql21 """select * from iceberg_hadoop_case_db2.case_tbl22;"""
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -279,7 +279,7 @@ suite("test_iceberg_rest_case_sensibility", "p0,external,doris,external_docker,e
exception "Table [case_tbl22] does not exist in database [iceberg_rest_case_db2]"
}
sql """create table iceberg_rest_case_db2.case_tbl22 (k1 int);""" // recreate
sql """show tables from iceberg_rest_case_db2 like "%case_tbl22%""""
qt_sql_show """show tables from iceberg_rest_case_db2 like "%case_tbl22%""""

sql """insert into iceberg_rest_case_db2.case_tbl22 values(5);"""
order_qt_sql21 """select * from iceberg_rest_case_db2.case_tbl22;"""
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -52,9 +52,7 @@ suite("test_iceberg_create_table", "p0,external,doris,external_docker,external_d
String tb1 = db1 + "_tb1"
String tb2 = db1 + "_tb2"

sql """ drop table if exists ${db1}.${tb1} """
sql """ drop table if exists ${db1}.${tb2} """
sql """ drop database if exists ${db1} """
sql """ drop database if exists ${db1} force"""

sql """ create database ${db1} """

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -333,28 +333,28 @@ suite("test_file_tvf_hdfs","external,hive,tvf,external_docker") {
// test create view from tvf and alter view from tvf
uri = "${defaultFS}" + "/user/doris/preinstalled_data/csv_format_test/all_types.csv"
format = "csv"
sql """ DROP VIEW IF EXISTS test_hdfs_tvf_create_view;"""
sql """ DROP VIEW IF EXISTS test_file_tvf_hdfs_create_view;"""
sql """
create view test_hdfs_tvf_create_view as
create view test_file_tvf_hdfs_create_view as
select * from FILE(
"uri" = "${uri}",
"hadoop.username" = "${hdfsUserName}",
"column_separator" = ",",
"format" = "${format}") order by c1;
"""

order_qt_create_view """ select * from test_hdfs_tvf_create_view order by c1 limit 20; """
order_qt_create_view """ select * from test_file_tvf_hdfs_create_view order by c1 limit 20; """

sql """
alter view test_hdfs_tvf_create_view as
alter view test_file_tvf_hdfs_create_view as
select c1 from FILE(
"uri" = "${uri}",
"hadoop.username" = "${hdfsUserName}",
"column_separator" = ",",
"format" = "${format}") order by c1;
"""

order_qt_alter_view """ select * from test_hdfs_tvf_create_view order by c1 limit 20; """
order_qt_alter_view """ select * from test_file_tvf_hdfs_create_view order by c1 limit 20; """
} finally {
}
}
Expand Down
12 changes: 6 additions & 6 deletions regression-test/suites/external_table_p0/tvf/upgrade/load.groovy
Original file line number Diff line number Diff line change
Expand Up @@ -30,26 +30,26 @@ suite("test_tvf_upgrade_load", "p0,external,hive,external_docker,external_docker
// test create view from tvf and alter view from tvf
uri = "${defaultFS}" + "/user/doris/preinstalled_data/csv_format_test/all_types.csv"
format = "csv"
sql """ DROP VIEW IF EXISTS test_hdfs_tvf_create_view;"""
sql """ DROP VIEW IF EXISTS test_tvf_upgrade_load_create_view;"""
sql """
create view test_hdfs_tvf_create_view as
create view test_tvf_upgrade_load_create_view as
select * from HDFS(
"uri" = "${uri}",
"hadoop.username" = "${hdfsUserName}",
"column_separator" = ",",
"format" = "${format}") order by c1;
"""
logger.info("View test_hdfs_tvf_create_view created")
logger.info("View test_tvf_upgrade_load_create_view created")


sql """
alter view test_hdfs_tvf_create_view as
alter view test_tvf_upgrade_load_create_view as
select c1 from HDFS(
"uri" = "${uri}",
"hadoop.username" = "${hdfsUserName}",
"column_separator" = ",",
"format" = "${format}") order by c1;
"""
logger.info("View test_hdfs_tvf_create_view altered")
logger.info("View test_tvf_upgrade_load_create_view altered")
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -27,8 +27,8 @@ suite("test_tvf_upgrade_test", "p0,external,hive,external_docker,external_docker

String enabled = context.config.otherConfigs.get("enableHiveTest")
if (enabled != null && enabled.equalsIgnoreCase("true")) {
order_qt_create_view """ select * from test_hdfs_tvf_create_view order by c1 limit 20; """
order_qt_create_view """ select * from test_tvf_upgrade_load_create_view order by c1 limit 20; """

order_qt_alter_view """ select * from test_hdfs_tvf_create_view order by c1 limit 20; """
order_qt_alter_view """ select * from test_tvf_upgrade_load_create_view order by c1 limit 20; """
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -38,8 +38,7 @@ suite("part_partition_invalid", "p0,external,external_docker") {
);"""

sql """switch ${hive_catalog_name};"""
sql """drop table if exists ${hive_catalog_name}.${hive_database}.${hive_table}"""
sql """ drop database if exists ${hive_database}"""
sql """ drop database if exists ${hive_database} force"""
sql """ create database ${hive_database}"""
sql """use ${hive_database}"""
sql """
Expand Down Expand Up @@ -203,9 +202,7 @@ suite("part_partition_invalid", "p0,external,external_docker") {
mv_rewrite_success(query_sql, mv_name)
order_qt_after_add_data_and_refresh_catalog_and_mv """ ${query_sql}"""

sql """drop table if exists ${hive_catalog_name}.${hive_database}.${hive_table}"""
sql """drop table if exists ${internal_catalog}.${olap_db}.${olap_table}"""
sql """drop database if exists ${hive_catalog_name}.${hive_database}"""
sql """drop database if exists ${hive_catalog_name}.${hive_database} force"""
sql """drop materialized view if exists ${internal_catalog}.${olap_db}.${mv_name};"""
sql """drop catalog if exists ${hive_catalog_name}"""
}
Original file line number Diff line number Diff line change
Expand Up @@ -38,8 +38,7 @@ suite("single_external_table", "p0,external,hive") {
);"""

sql """switch ${hive_catalog_name};"""
sql """drop table if exists ${hive_catalog_name}.${hive_database}.${hive_table}"""
sql """ drop database if exists ${hive_database}"""
sql """ drop database if exists ${hive_database} force"""
sql """ create database ${hive_database}"""
sql """use ${hive_database}"""
sql """
Expand Down Expand Up @@ -192,7 +191,6 @@ suite("single_external_table", "p0,external,hive") {
sql """ DROP MATERIALIZED VIEW IF EXISTS mv1_5"""


sql """drop table if exists ${hive_catalog_name}.${hive_database}.${hive_table}"""
sql """drop database if exists ${hive_catalog_name}.${hive_database}"""
sql """drop database if exists ${hive_catalog_name}.${hive_database} force"""
sql """drop catalog if exists ${hive_catalog_name}"""
}
Loading