Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[Feature] support kyuubi as a datasource #3375

Draft
wants to merge 1 commit into
base: dev
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -237,7 +237,6 @@ public Result<Void> checkHeartBeatByDataSourceId(@RequestParam Integer id) {
* @param id {@link Integer}
* @return {@link Result}< {@link List}< {@link Schema}>>
*/
@Cacheable(cacheNames = "metadata_schema", key = "#id")
@GetMapping("/getSchemasAndTables")
@ApiOperation("Get All Schemas And Tables")
@ApiImplicitParam(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,8 @@
Dialect.DORIS,
Dialect.PHOENIX,
Dialect.STAR_ROCKS,
Dialect.PRESTO
Dialect.PRESTO,
Dialect.KYUUBI
})
public class CommonSqlTask extends BaseTask {

Expand Down
3 changes: 3 additions & 0 deletions dinky-admin/src/main/java/org/dinky/utils/PaimonUtil.java
Original file line number Diff line number Diff line change
Expand Up @@ -135,6 +135,9 @@ public static <T> void write(String table, List<T> dataList, Class<?> clazz) {
DataType type = dataField.type();
String fieldName = StrUtil.toCamelCase(dataField.name());
Object fieldValue = ReflectUtil.getFieldValue(t, fieldName);
if (fieldValue == null){
continue;
}
try {
// TODO BinaryWriter.write已被废弃,后续可以考虑改成这种方式
// BinaryWriter.createValueSetter(type).setValue(writer, i, fieldValue);
Expand Down
8 changes: 8 additions & 0 deletions dinky-assembly/src/main/assembly/package.xml
Original file line number Diff line number Diff line change
Expand Up @@ -210,6 +210,14 @@
<include>dinky-metadata-presto-${project.version}.jar</include>
</includes>
</fileSet>
<fileSet>
<!-- Bundle the Kyuubi metadata driver jar into the distribution's lib/ directory,
     mirroring the fileSet entries of the other dinky-metadata-* modules (e.g. presto above). -->
<directory>${project.parent.basedir}/dinky-metadata/dinky-metadata-kyuubi/target
</directory>
<outputDirectory>lib</outputDirectory>
<includes>
<include>dinky-metadata-kyuubi-${project.version}.jar</include>
</includes>
</fileSet>
<fileSet>
<directory>${project.parent.basedir}/dinky-alert/dinky-alert-dingtalk/target
</directory>
Expand Down
1 change: 1 addition & 0 deletions dinky-common/src/main/java/org/dinky/config/Dialect.java
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@ public enum Dialect {
HIVE("Hive"),
STAR_ROCKS("StarRocks"),
PRESTO("Presto"),
KYUUBI("Kyuubi"),
KUBERNETES_APPLICATION("KubernetesApplication");

private String value;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -102,6 +102,10 @@ public String test() {
return CommonConstant.HEALTHY;
}

/**
 * Returns the SQL dialect identifier that Druid's {@code SQLUtils} should use when
 * parsing statements for this data source.
 *
 * <p>Defaults to {@code "hive"}. Drivers such as Kyuubi, whose JDBC driver name
 * differs from the underlying SQL dialect, rely on this hook: {@code executeSql}
 * substitutes this value because Druid's SQLUtils does not recognize "kyuubi" as a
 * DB type (see the kyuubi special-case in {@code executeSql}).
 *
 * @return the Druid-compatible DB type name ({@code "hive"} by default)
 */
public String getCustomDbType(){
return "hive";
}

public DruidDataSource createDataSource() throws SQLException {
if (null == dataSource) {
synchronized (this.getClass()) {
Expand Down Expand Up @@ -621,8 +625,15 @@ public JdbcSelectResult query(String sql, Integer limit) {
public JdbcSelectResult executeSql(String sql, Integer limit) {
// TODO 改为ProcessStep注释
log.info("Start parse sql...");
String dbType = config.getType().toLowerCase();

// TODO: Not sure of the best way to handle this yet. Druid SQLUtils does not recognize
// "kyuubi" (nor "spark") as a DB type, but config.getType() is used in two places: here,
// and when initializing the driver, where the driver name must be passed. For every other
// data source the driver name equals the DB type that Druid SQLUtils supports; Kyuubi is
// the exception — its driver name differs from its underlying DB type, hence the override.
if (config.getType().equalsIgnoreCase("kyuubi")){
dbType = this.getCustomDbType();
}
List<SQLStatement> stmtList =
SQLUtils.parseStatements(sql, config.getType().toLowerCase());
SQLUtils.parseStatements(sql, dbType);
log.info(CharSequenceUtil.format("A total of {} statement have been Parsed.", stmtList.size()));
List<Object> resList = new ArrayList<>();
JdbcSelectResult result = JdbcSelectResult.buildResult();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,8 +40,11 @@ public enum DriverType {
PHOENIX("Phoenix"),
GREENPLUM("Greenplum"),
HIVE("Hive"),
KYUUBI("Kyuubi"),
PRESTO("Presto");



public final String value;

DriverType(String value) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -46,8 +46,11 @@
import java.util.List;
import java.util.Map;

public class HiveDriver extends AbstractJdbcDriver implements Driver {
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class HiveDriver extends AbstractJdbcDriver implements Driver {
protected static final Logger logger = LoggerFactory.getLogger(HiveDriver.class);
@Override
public Table getTable(String schemaName, String tableName) {
List<Table> tables = listTables(schemaName);
Expand Down Expand Up @@ -75,6 +78,14 @@ public List<Table> listTables(String schemaName) {
execute(String.format(HiveConstant.USE_DB, schemaName));
preparedStatement = conn.get().prepareStatement(sql);
results = preparedStatement.executeQuery();

ResultSetMetaData rsmd = results.getMetaData();
int columnCount = rsmd.getColumnCount();
for (int i = 1; i <= columnCount; i++ ) {
String name = rsmd.getColumnName(i);
logger.info("listTables {}",name);
}

ResultSetMetaData metaData = results.getMetaData();
List<String> columnList = new ArrayList<>();
for (int i = 1; i <= metaData.getColumnCount(); i++) {
Expand Down Expand Up @@ -124,7 +135,15 @@ public List<Schema> listSchemas() {
try {
preparedStatement = conn.get().prepareStatement(schemasSql);
results = preparedStatement.executeQuery();
ResultSetMetaData rsmd = results.getMetaData();
int columnCount = rsmd.getColumnCount();
for (int i = 1; i <= columnCount; i++ ) {
String name = rsmd.getColumnName(i);
logger.info("xxxlistSchemas {}",name);
}

while (results.next()) {

String schemaName = results.getString(getDBQuery().schemaName());
if (Asserts.isNotNullString(schemaName)) {
Schema schema = new Schema(schemaName);
Expand Down Expand Up @@ -153,6 +172,15 @@ public List<Column> listColumns(String schemaName, String tableName) {
preparedStatement = conn.get().prepareStatement(tableFieldsSql);
results = preparedStatement.executeQuery();
ResultSetMetaData metaData = results.getMetaData();

ResultSetMetaData rsmd = results.getMetaData();
int columnCount = rsmd.getColumnCount();
for (int i = 1; i <= columnCount; i++ ) {
String name = rsmd.getColumnName(i);
logger.info("listColumns {}",name);
}


List<String> columnList = new ArrayList<>();
for (int i = 1; i <= metaData.getColumnCount(); i++) {
columnList.add(metaData.getColumnLabel(i));
Expand Down
102 changes: 102 additions & 0 deletions dinky-metadata/dinky-metadata-kyuubi/pom.xml
Original file line number Diff line number Diff line change
@@ -0,0 +1,102 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
 ~ Licensed to the Apache Software Foundation (ASF) under one or more
 ~ contributor license agreements. See the NOTICE file distributed with
 ~ this work for additional information regarding copyright ownership.
 ~ The ASF licenses this file to You under the Apache License, Version 2.0
 ~ (the "License"); you may not use this file except in compliance with
 ~ the License. You may obtain a copy of the License at
 ~
 ~ http://www.apache.org/licenses/LICENSE-2.0
 ~
 ~ Unless required by applicable law or agreed to in writing, software
 ~ distributed under the License is distributed on an "AS IS" BASIS,
 ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 ~ See the License for the specific language governing permissions and
 ~ limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>org.dinky</groupId>
        <artifactId>dinky-metadata</artifactId>
        <version>${revision}</version>
        <relativePath>../pom.xml</relativePath>
    </parent>
    <artifactId>dinky-metadata-kyuubi</artifactId>

    <packaging>jar</packaging>

    <name>Dinky : Metadata : Kyuubi</name>

    <properties>
        <hive.version>2.3.9</hive.version>
    </properties>

    <dependencies>
        <dependency>
            <groupId>org.dinky</groupId>
            <artifactId>dinky-metadata-base</artifactId>
        </dependency>
        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>druid-spring-boot-starter</artifactId>
            <scope>provided</scope>
        </dependency>

        <!-- TODO(review): slf4j-nop silently discards ALL logging from this module and 1.6.1
             is ancient; it will also conflict with the slf4j binding provided by dinky-admin
             at runtime. Consider removing it or narrowing it to test scope. -->
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-nop</artifactId>
            <version>1.6.1</version>
        </dependency>

        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <scope>provided</scope>
        </dependency>
        <!-- TODO(review): commons-lang3 3.4 (2015) is pinned here; prefer inheriting the
             version managed by the parent pom to avoid duplicate versions on the classpath. -->
        <dependency>
            <groupId>org.apache.commons</groupId>
            <artifactId>commons-lang3</artifactId>
            <version>3.4</version>
        </dependency>
        <!-- TODO(review): Kyuubi ships its own JDBC driver (org.apache.kyuubi:kyuubi-hive-jdbc),
             derived from Hive's but with zero transitive dependencies and many Hive bugs fixed;
             consider depending on it instead of hive-jdbc + hive-service. -->
        <dependency>
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-jdbc</artifactId>
            <version>${hive.version}</version>
            <!-- If you use CDH Hive, comment out the previous line, open the next line, and enable the repositories below -->
            <!-- <version>2.1.1-cdh6.3.2</version>-->
            <scope>${scope.runtime}</scope>
            <exclusions>
                <exclusion>
                    <groupId>jdk.tools</groupId>
                    <artifactId>jdk.tools</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
        <dependency>
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-service</artifactId>
            <version>${hive.version}</version>
            <!-- If you use CDH Hive, comment out the previous line, open the next line, and enable the repositories below -->
            <!-- <version>2.1.1-cdh6.3.2</version>-->
            <scope>${scope.runtime}</scope>
            <exclusions>
                <exclusion>
                    <groupId>org.apache.hive</groupId>
                    <artifactId>hive-exec</artifactId>
                </exclusion>
            </exclusions>
        </dependency>

    </dependencies>

    <!-- <repositories>-->
    <!-- <repository>-->
    <!-- <id>cloudera</id>-->
    <!-- <url>https://repository.cloudera.com/artifactory/cloudera-repos/</url>-->
    <!-- </repository>-->
    <!-- </repositories>-->

</project>
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
/*
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.dinky.metadata.constant;

/**
 * SQL statement templates used by the Kyuubi metadata driver to enumerate
 * databases, tables and columns through Kyuubi's Hive-compatible dialect.
 *
 * <p>NOTE(review): where possible, prefer the standard JDBC
 * {@code DatabaseMetaData} API over dialect-specific SQL for this kind of
 * metadata retrieval.
 */
public interface KyuubiConstant {

    /** Lists all databases. */
    String QUERY_ALL_DATABASE = " show databases";
    /** Lists all tables in the current schema (issued after {@link #USE_DB}). */
    String QUERY_ALL_TABLES_BY_SCHEMA = "show tables";
    /** Row key under which DESCRIBE EXTENDED reports the table's extended info. */
    String DETAILED_TABLE_INFO = "Detailed Table Information";
    /** Extended description of the given schema.table. */
    String QUERY_TABLE_SCHEMA_EXTENED_INFOS = " describe extended `%s`.`%s`";
    /** Column name, type and comment of the given schema.table. */
    String QUERY_TABLE_SCHEMA = " describe `%s`.`%s`";
    /** Switches the session to the given database. */
    String USE_DB = "use `%s`";
    /** Column names only for the given schema.table. */
    String QUERY_TABLE_COLUMNS_ONLY = "show columns in `%s`.`%s`";
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
/*
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/

package org.dinky.metadata.constant;

public interface TrinoEngineConstant {

/** 查询所有database */
String QUERY_ALL_DATABASE = "show catalogs";
/** 查询某个schema下的所有表 */
String QUERY_ALL_TABLES_BY_SCHEMA = "show tables from %s";
/** 查询指定schema.table的信息 列 列类型 列注释 */
String QUERY_TABLE_SCHEMA = " describe %s.%s";
/** 只查询指定schema.table的列名 */
String QUERY_TABLE_COLUMNS_ONLY = "show schemas from %s";
/** 查询schema列名 */
String SCHEMA = "SCHEMA";
/** 需要排除的catalog */
String EXTRA_SCHEMA = "system";
/** 需要排除的schema */
String EXTRA_DB = "information_schema";
}
Loading
Loading