diff --git a/bin/hplsql.sh b/bin/hplsql.sh new file mode 100755 index 00000000000000..163cfdc1a9a15a --- /dev/null +++ b/bin/hplsql.sh @@ -0,0 +1,104 @@ +#!/usr/bin/env bash +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +set -eo pipefail + +curdir="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)" + +if [[ "$(uname -s)" == 'Darwin' ]] && command -v brew &>/dev/null; then + PATH="$(brew --prefix)/opt/gnu-getopt/bin:${PATH}" + export PATH +fi + +DORIS_HOME="$( + cd "${curdir}/.." + pwd +)" +export DORIS_HOME + +# JAVA_OPTS +# LOG_DIR +# PID_DIR +export JAVA_OPTS="-Xmx4096m" +PID_DIR="$( + cd "${curdir}" + pwd +)" +export PID_DIR +if [[ -z "${JAVA_HOME}" ]]; then + JAVA="$(command -v java)" +else + JAVA="${JAVA_HOME}/bin/java" +fi + +if [[ ! -x "${JAVA}" ]]; then + echo "The JAVA_HOME environment variable is not defined correctly" + echo "This environment variable is needed to run this program" + echo "NB: JAVA_HOME should point to a JDK not a JRE" + exit 1 +fi + +# get jdk version, return version as an Integer. 
+# 1.8 => 8, 13.0 => 13 +jdk_version() { + local java_cmd="${1}" + local result + local IFS=$'\n' + + if [[ -z "${java_cmd}" ]]; then + result=no_java + return 1 + else + local version + # remove \r for Cygwin + version="$("${java_cmd}" -Xms32M -Xmx32M -version 2>&1 | tr '\r' '\n' | grep version | awk '{print $3}')" + version="${version//\"/}" + if [[ "${version}" =~ ^1\. ]]; then + result="$(echo "${version}" | awk -F '.' '{print $2}')" + else + result="$(echo "${version}" | awk -F '.' '{print $1}')" + fi + fi + echo "${result}" + return 0 +} + +final_java_opt="${JAVA_OPTS}" + +# add libs to CLASSPATH +DORIS_FE_JAR= +for f in "${DORIS_HOME}/lib"/*.jar; do + if [[ "${f}" == *"doris-fe.jar" ]]; then + DORIS_FE_JAR="${f}" + continue + fi + CLASSPATH="${f}:${CLASSPATH}" +done + +# make sure the doris-fe.jar is at first order, so that some classed +# with same qualified name can be loaded priority from doris-fe.jar +CLASSPATH="${DORIS_FE_JAR}:${CLASSPATH}" +export CLASSPATH="${CLASSPATH}:${DORIS_HOME}/lib:${DORIS_HOME}/conf" + +if [[ ! 
-f "/bin/limit" ]]; then + LIMIT='' +else + LIMIT=/bin/limit +fi + +${LIMIT:+${LIMIT}} "${JAVA}" ${final_java_opt:+${final_java_opt}} -XX:-OmitStackTraceInFastThrow -XX:OnOutOfMemoryError="kill -9 %p" org.apache.doris.hplsql.Hplsql ${HELPER:+${HELPER}} ${OPT_VERSION:+${OPT_VERSION}} "$@" + + + + + + hplsql.conn.default + dorisconn + The default connection profile + + + hplsql.conn.dorisconn + com.mysql.cj.jdbc.Driver;jdbc:mysql://fehost:queryport/information_schema;user;password + Doris jdbc connection + + + hplsql.dual.table + + Single row, single column table for internal operations + + + hplsql.insert.values + native + How to execute INSERT VALUES statement: native (default) and select + + + hplsql.onerror + exception + Error handling behavior: exception (default), seterror and stop + + + hplsql.temp.tables + native + Temporary tables: native (default) and managed + + + hplsql.temp.tables.schema + + Schema for managed temporary tables + + + hplsql.temp.tables.location + /tmp/plhql + LOcation for managed temporary tables in HDFS + + + diff --git a/fe/fe-common/src/main/java/org/apache/doris/catalog/MysqlColType.java b/fe/fe-common/src/main/java/org/apache/doris/catalog/MysqlColType.java index 0bdf85384d4c98..0fe7d17b6340f3 100644 --- a/fe/fe-common/src/main/java/org/apache/doris/catalog/MysqlColType.java +++ b/fe/fe-common/src/main/java/org/apache/doris/catalog/MysqlColType.java @@ -21,53 +21,62 @@ // TYPE codes are defined in the file 'mysql/include/mysql_com.h' enum enum_field_types // which is also demostrated in // http://dev.mysql.com/doc/internals/en/com-query-response.html +// typeName from https://dev.mysql.com/doc/connector-j/8.0/en/connector-j-reference-type-conversions.html public enum MysqlColType { - MYSQL_TYPE_DECIMAL(0, "DECIMAL"), - MYSQL_TYPE_TINY(1, "TINY INT"), - MYSQL_TYPE_SHORT(2, "SMALL INT"), - MYSQL_TYPE_LONG(3, "INT"), - MYSQL_TYPE_FLOAT(4, "FLOAT"), - MYSQL_TYPE_DOUBLE(5, "DOUBLE"), - MYSQL_TYPE_NULL(6, "NULL"), - MYSQL_TYPE_TIMESTAMP(7, 
"TIMESTAMP"), - MYSQL_TYPE_LONGLONG(8, "LONGLONG"), - MYSQL_TYPE_INT24(9, "INT24"), - MYSQL_TYPE_DATE(10, "DATE"), - MYSQL_TYPE_TIME(11, "TIME"), - MYSQL_TYPE_DATETIME(12, "DATETIME"), - MYSQL_TYPE_YEAR(13, "YEAR"), - MYSQL_TYPE_NEWDATE(14, "NEWDATE"), - MYSQL_TYPE_VARCHAR(15, "VARCHAR"), - MYSQL_TYPE_BIT(16, "BIT"), - MYSQL_TYPE_TIMESTAMP2(17, "TIMESTAMP2"), - MYSQL_TYPE_DATETIME2(18, "DATETIME2"), - MYSQL_TYPE_TIME2(19, "TIME2"), - MYSQL_TYPE_JSON(245, "JSON"), - MYSQL_TYPE_NEWDECIMAL(246, "NEW DECIMAL"), - MYSQL_TYPE_ENUM(247, "ENUM"), - MYSQL_TYPE_SET(248, "SET"), - MYSQL_TYPE_TINY_BLOB(249, "TINY BLOB"), - MYSQL_TYPE_MEDIUM_BLOB(250, "MEDIUM BLOB"), - MYSQL_TYPE_LONG_BLOB(251, "LONG BLOB"), - MYSQL_TYPE_BLOB(252, "BLOB"), - MYSQL_TYPE_VARSTRING(253, "VAR STRING"), - MYSQL_TYPE_STRING(254, "STRING"), - MYSQL_TYPE_GEOMETRY(255, "GEOMETRY"), - MYSQL_TYPE_MAP(400, "MAP"); + MYSQL_TYPE_DECIMAL(0, "DECIMAL", "DECIMAL"), + MYSQL_TYPE_TINY(1, "TINYINT", "TINY INT"), + MYSQL_TYPE_SHORT(2, "SMALLINT", "SMALL INT"), + MYSQL_TYPE_LONG(3, "INTEGER", "INT"), + MYSQL_TYPE_FLOAT(4, "FLOAT", "FLOAT"), + MYSQL_TYPE_DOUBLE(5, "DOUBLE", "DOUBLE"), + MYSQL_TYPE_NULL(6, "NULL", "NULL"), + MYSQL_TYPE_TIMESTAMP(7, "TIMESTAMP", "TIMESTAMP"), + MYSQL_TYPE_LONGLONG(8, "BIGINT", "LONGLONG"), + MYSQL_TYPE_INT24(9, "INT24", "INT24"), + MYSQL_TYPE_DATE(10, "DATE", "DATE"), + MYSQL_TYPE_TIME(11, "TIME", "TIME"), + MYSQL_TYPE_DATETIME(12, "DATETIME", "DATETIME"), + MYSQL_TYPE_YEAR(13, "YEAR", "YEAR"), + MYSQL_TYPE_NEWDATE(14, "NEWDATE", "NEWDATE"), + MYSQL_TYPE_VARCHAR(15, "VARCHAR", "VARCHAR"), + MYSQL_TYPE_BIT(16, "BIT", "BIT"), + MYSQL_TYPE_TIMESTAMP2(17, "TIMESTAMP2", "TIMESTAMP2"), + MYSQL_TYPE_DATETIME2(18, "DATETIME2", "DATETIME2"), + MYSQL_TYPE_TIME2(19, "TIME2", "TIME2"), + MYSQL_TYPE_JSON(245, "JSON", "JSON"), + MYSQL_TYPE_NEWDECIMAL(246, "NEWDECIMAL", "NEW DECIMAL"), + MYSQL_TYPE_ENUM(247, "CHAR", "ENUM"), + MYSQL_TYPE_SET(248, "CHAR", "SET"), + MYSQL_TYPE_TINY_BLOB(249, 
"TINYBLOB", "TINY BLOB"), + MYSQL_TYPE_MEDIUM_BLOB(250, "MEDIUMBLOB", "MEDIUM BLOB"), + MYSQL_TYPE_LONG_BLOB(251, "LONGBLOB", "LONG BLOB"), + MYSQL_TYPE_BLOB(252, "BLOB", "BLOB"), + MYSQL_TYPE_VARSTRING(253, "VARSTRING", "VAR STRING"), + MYSQL_TYPE_STRING(254, "CHAR", "STRING"), + MYSQL_TYPE_GEOMETRY(255, "GEOMETRY", "GEOMETRY"), + MYSQL_TYPE_MAP(400, "MAP", "MAP"); - private MysqlColType(int code, String desc) { + private MysqlColType(int code, String typeName, String desc) { this.code = code; + this.typeName = typeName; this.desc = desc; } // used in network private int code; + + private String typeName; + private String desc; public int getCode() { return code; } + public String getTypeName() { + return typeName; + } + @Override public String toString() { return desc; diff --git a/fe/fe-common/src/main/java/org/apache/doris/catalog/PrimitiveType.java b/fe/fe-common/src/main/java/org/apache/doris/catalog/PrimitiveType.java index a7d2475630d558..761b95ed9b79d1 100644 --- a/fe/fe-common/src/main/java/org/apache/doris/catalog/PrimitiveType.java +++ b/fe/fe-common/src/main/java/org/apache/doris/catalog/PrimitiveType.java @@ -24,6 +24,7 @@ import com.google.common.collect.ImmutableSetMultimap; import com.google.common.collect.Lists; +import java.sql.Types; import java.util.ArrayList; import java.util.List; @@ -1214,6 +1215,52 @@ public MysqlColType toMysqlType() { } } + public int toJavaSqlType() { + switch (this) { + case BOOLEAN: + return Types.BOOLEAN; + case TINYINT: + return Types.TINYINT; + case SMALLINT: + return Types.SMALLINT; + case INT: + return Types.INTEGER; + case BIGINT: + return Types.BIGINT; + case FLOAT: + return Types.FLOAT; + case DOUBLE: + return Types.DOUBLE; + case TIME: + case TIMEV2: + return Types.TIME; + case DATE: + case DATEV2: + return Types.DATE; + case DATETIME: + case DATETIMEV2: { + if (isTimeType) { + return Types.TIME; + } else { + return Types.DATE; + } + } + case DECIMALV2: + case DECIMAL32: + case DECIMAL64: + case DECIMAL128: + 
return Types.DECIMAL; + case VARCHAR: + return Types.VARCHAR; + case ARRAY: + return Types.ARRAY; + case STRUCT: + return Types.STRUCT; + default: + return Types.CHAR; + } + } + public int getOlapColumnIndexSize() { switch (this) { case DATE: diff --git a/fe/fe-core/pom.xml b/fe/fe-core/pom.xml index 1b77adb24d748c..584579d746ac6b 100644 --- a/fe/fe-core/pom.xml +++ b/fe/fe-core/pom.xml @@ -624,6 +624,10 @@ under the License. org.mariadb.jdbc mariadb-java-client + + com.mysql + mysql-connector-j + diff --git a/fe/fe-core/src/main/antlr4/org/apache/doris/hplsql/Hplsql.g4 b/fe/fe-core/src/main/antlr4/org/apache/doris/hplsql/Hplsql.g4 new file mode 100644 index 00000000000000..ac3c0f9c97f0fd --- /dev/null +++ b/fe/fe-core/src/main/antlr4/org/apache/doris/hplsql/Hplsql.g4 @@ -0,0 +1,2037 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/antlr4/org/apache/hive/hplsql/Hplsql.g4 +// and modified by Doris + +// HPL/SQL Procedural SQL Extension Grammar +grammar Hplsql; + +program : block EOF; + +block : ((begin_end_block | stmt) T_GO?)+ ; // Multiple consecutive blocks/statements + +begin_end_block : + declare_block? 
T_BEGIN block exception_block? block_end + ; + +single_block_stmt : // Single BEGIN END block (but nested blocks are possible) or single statement + T_BEGIN block exception_block? block_end + | stmt T_SEMICOLON? + ; + +block_end : + {!_input.LT(2).getText().equalsIgnoreCase("TRANSACTION")}? T_END + ; + +proc_block : + begin_end_block + | stmt+ T_GO? + ; + +stmt : + assignment_stmt + | allocate_cursor_stmt + | alter_table_stmt + | associate_locator_stmt + | begin_transaction_stmt + | break_stmt + | call_stmt + | collect_stats_stmt + | close_stmt + | cmp_stmt + | copy_from_local_stmt + | copy_stmt + | commit_stmt + | create_database_stmt + | create_function_stmt + | create_index_stmt + | create_local_temp_table_stmt + | create_package_stmt + | create_package_body_stmt + | create_procedure_stmt + | create_table_stmt + | create_table_type_stmt + | declare_stmt + | delete_stmt + | describe_stmt + | drop_stmt + | end_transaction_stmt + | exec_stmt + | exit_stmt + | fetch_stmt + | for_cursor_stmt + | for_range_stmt + | if_stmt + | include_stmt + | insert_stmt + | insert_directory_stmt + | get_diag_stmt + | grant_stmt + | leave_stmt + | map_object_stmt + | merge_stmt + | open_stmt + | print_stmt + | quit_stmt + | raise_stmt + | resignal_stmt + | return_stmt + | rollback_stmt + | select_stmt + | signal_stmt + | summary_stmt + | update_stmt + | use_stmt + | truncate_stmt + | values_into_stmt + | while_stmt + | unconditional_loop_stmt + | label + | hive + | host + | null_stmt + | expr_stmt + | semicolon_stmt // Placed here to allow null statements ;;... + ; + +semicolon_stmt : + T_SEMICOLON + | '@' | '#' | '/' + ; + +exception_block : // Exception block + T_EXCEPTION exception_block_item+ + ; + +exception_block_item : + T_WHEN L_ID T_THEN block ~(T_WHEN | T_END) + ; + +null_stmt : // NULL statement (no operation) + T_NULL + ; + +expr_stmt : // Standalone expression + {!_input.LT(1).getText().equalsIgnoreCase("GO")}? 
expr + ; + +assignment_stmt : // Assignment statement + T_SET set_session_option + | T_SET? assignment_stmt_item (T_COMMA assignment_stmt_item)* + ; + +assignment_stmt_item : + assignment_stmt_single_item + | assignment_stmt_multiple_item + | assignment_stmt_select_item + | assignment_stmt_collection_item + ; + +assignment_stmt_single_item : + ident T_COLON? T_EQUAL expr + | T_OPEN_P ident T_CLOSE_P T_COLON? T_EQUAL expr + ; + +assignment_stmt_collection_item : + expr_func T_COLON T_EQUAL expr + ; + +assignment_stmt_multiple_item : + T_OPEN_P ident (T_COMMA ident)* T_CLOSE_P T_COLON? T_EQUAL T_OPEN_P expr (T_COMMA expr)* T_CLOSE_P + ; + +assignment_stmt_select_item : + (ident | (T_OPEN_P ident (T_COMMA ident)* T_CLOSE_P)) T_COLON? T_EQUAL T_OPEN_P select_stmt T_CLOSE_P + ; + +allocate_cursor_stmt: + T_ALLOCATE ident T_CURSOR T_FOR ((T_RESULT T_SET) | T_PROCEDURE) ident + ; + +associate_locator_stmt : + T_ASSOCIATE (T_RESULT T_SET)? (T_LOCATOR | T_LOCATORS) T_OPEN_P ident (T_COMMA ident)* T_CLOSE_P T_WITH T_PROCEDURE ident + ; + +begin_transaction_stmt : + T_BEGIN T_TRANSACTION + ; + +break_stmt : + T_BREAK + ; + +call_stmt : + T_CALL (expr_dot | expr_func | ident) + ; + +declare_stmt : // Declaration statement + T_DECLARE declare_stmt_item (T_COMMA declare_stmt_item)* + ; + +declare_block : // Declaration block + T_DECLARE declare_stmt_item T_SEMICOLON (declare_stmt_item T_SEMICOLON)* + ; + +declare_block_inplace : + declare_stmt_item T_SEMICOLON (declare_stmt_item T_SEMICOLON)* + ; + +declare_stmt_item : + declare_cursor_item + | declare_condition_item + | declare_handler_item + | declare_var_item + | declare_temporary_table_item + ; + +declare_var_item : + ident (T_COMMA ident)* T_AS? dtype dtype_len? dtype_attr* dtype_default? + | ident T_CONSTANT T_AS? dtype dtype_len? 
dtype_default + ; + +declare_condition_item : // Condition declaration + ident T_CONDITION + ; + +declare_cursor_item : // Cursor declaration + (T_CURSOR ident | ident T_CURSOR) (cursor_with_return | cursor_without_return)? (T_IS | T_AS | T_FOR) (select_stmt | expr ) + ; + +cursor_with_return : + T_WITH T_RETURN T_ONLY? (T_TO (T_CALLER | T_CLIENT))? + ; + +cursor_without_return : + T_WITHOUT T_RETURN + ; + +declare_handler_item : // Condition handler declaration + (T_CONTINUE | T_EXIT) T_HANDLER T_FOR (T_SQLEXCEPTION | T_SQLWARNING | T_NOT T_FOUND | ident) single_block_stmt + ; + +declare_temporary_table_item : // DECLARE TEMPORARY TABLE statement + T_GLOBAL? T_TEMPORARY T_TABLE qident create_table_preoptions? create_table_definition + ; + +create_table_stmt : + T_CREATE T_TABLE (T_IF T_NOT T_EXISTS)? table_name create_table_preoptions? create_table_definition + ; + +create_local_temp_table_stmt : + T_CREATE (T_LOCAL T_TEMPORARY | (T_SET | T_MULTISET)? T_VOLATILE) T_TABLE qident create_table_preoptions? create_table_definition + ; + +create_table_definition : + (T_AS? T_OPEN_P select_stmt T_CLOSE_P | T_AS? select_stmt | T_OPEN_P create_table_columns T_CLOSE_P | T_LIKE table_name) create_table_options? + ; + +create_table_columns : + create_table_columns_item (T_COMMA create_table_columns_item)* + ; + +create_table_columns_item : + column_name dtype dtype_len? dtype_attr* create_table_column_inline_cons* + | (T_CONSTRAINT qident)? create_table_column_cons + ; + +create_table_type_stmt : + T_TYPE ident T_IS T_TABLE T_OF tbl_type (T_NOT T_NULL)? T_INDEX T_BY dtype + ; + +tbl_type : + sql_type | dtype + ; + +sql_type : + qident '%' (T_TYPE | T_ROWTYPE) + ; + +column_name : + qident + ; + +create_table_column_inline_cons : + dtype_default + | T_NOT? 
T_NULL + | T_PRIMARY T_KEY + | T_UNIQUE + | T_REFERENCES table_name T_OPEN_P qident T_CLOSE_P create_table_fk_action* + | T_IDENTITY T_OPEN_P L_INT (T_COMMA L_INT)* T_CLOSE_P + | T_AUTO_INCREMENT + | T_ENABLE + ; + +create_table_column_cons : + T_PRIMARY T_KEY T_CLUSTERED? T_OPEN_P qident (T_ASC | T_DESC)? (T_COMMA qident (T_ASC | T_DESC)?)* T_CLOSE_P T_ENABLE? index_storage_clause? + | T_FOREIGN T_KEY T_OPEN_P qident (T_COMMA qident)* T_CLOSE_P T_REFERENCES table_name T_OPEN_P qident (T_COMMA qident)* T_CLOSE_P create_table_fk_action* + ; + +create_table_fk_action : + T_ON (T_UPDATE | T_DELETE) (T_NO T_ACTION | T_RESTRICT | T_SET T_NULL | T_SET T_DEFAULT | T_CASCADE) + ; + +create_table_preoptions : + create_table_preoptions_item+ + ; + +create_table_preoptions_item : + T_COMMA create_table_preoptions_td_item + | create_table_options_hive_item + ; + +create_table_preoptions_td_item : + T_NO? (T_LOG | T_FALLBACK) + ; + +create_table_options : + create_table_options_item+ + ; + +create_table_options_item : + T_ON T_COMMIT (T_DELETE | T_PRESERVE) T_ROWS + | create_table_options_ora_item + | create_table_options_db2_item + | create_table_options_td_item + | create_table_options_hive_item + | create_table_options_mssql_item + | create_table_options_mysql_item + ; + +create_table_options_ora_item : + T_SEGMENT T_CREATION (T_IMMEDIATE | T_DEFERRED) + | (T_PCTFREE | T_PCTUSED | T_INITRANS | T_MAXTRANS) L_INT + | T_NOCOMPRESS + | (T_LOGGING | T_NOLOGGING) + | T_STORAGE T_OPEN_P (qident | L_INT)+ T_CLOSE_P + | T_TABLESPACE qident + ; + +create_table_options_db2_item : + T_INDEX? T_IN qident + | T_WITH T_REPLACE + | T_DISTRIBUTE T_BY T_HASH T_OPEN_P qident (T_COMMA qident)* T_CLOSE_P + | T_NOT? T_LOGGED + | T_COMPRESS (T_YES | T_NO) + | T_DEFINITION T_ONLY + | T_WITH T_RESTRICT T_ON T_DROP + ; + +create_table_options_td_item : + T_UNIQUE? 
T_PRIMARY T_INDEX T_OPEN_P qident (T_COMMA qident)* T_CLOSE_P + | T_WITH T_DATA + ; + +create_table_options_hive_item : + create_table_hive_row_format + | T_STORED T_AS qident + ; + +create_table_hive_row_format : + T_ROW T_FORMAT T_DELIMITED create_table_hive_row_format_fields* + ; + +create_table_hive_row_format_fields : + T_FIELDS T_TERMINATED T_BY expr (T_ESCAPED T_BY expr)? + | T_COLLECTION T_ITEMS T_TERMINATED T_BY expr + | T_MAP T_KEYS T_TERMINATED T_BY expr + | T_LINES T_TERMINATED T_BY expr + | T_NULL T_DEFINED T_AS expr + ; + +create_table_options_mssql_item : + T_ON qident + | T_TEXTIMAGE_ON qident + ; + +create_table_options_mysql_item : + T_AUTO_INCREMENT T_EQUAL? expr + | T_COMMENT T_EQUAL? expr + | T_DEFAULT? (T_CHARACTER T_SET | T_CHARSET) T_EQUAL? expr + | T_ENGINE T_EQUAL? expr + ; + +alter_table_stmt : + T_ALTER T_TABLE table_name alter_table_item + ; + +alter_table_item : + alter_table_add_constraint + ; + +alter_table_add_constraint : + T_ADD2 (T_CONSTRAINT qident)? alter_table_add_constraint_item + ; + +alter_table_add_constraint_item : + T_PRIMARY T_KEY T_CLUSTERED? T_OPEN_P qident (T_ASC | T_DESC)? (T_COMMA qident (T_ASC | T_DESC)?)* T_CLOSE_P T_ENABLE? index_storage_clause? + | T_FOREIGN T_KEY T_OPEN_P qident (T_COMMA qident)* T_CLOSE_P T_REFERENCES table_name T_OPEN_P qident (T_COMMA qident)* T_CLOSE_P create_table_fk_action* + | T_DEFAULT expr T_FOR qident + ; + +dtype : // Data types + T_CHAR + | T_CHARACTER + | T_BIGINT + | T_BINARY_DOUBLE + | T_BINARY_FLOAT + | T_BINARY_INTEGER + | T_BIT + | T_DATE + | T_DATETIME + | T_DEC + | T_DECIMAL + | T_DOUBLE T_PRECISION? 
+ | T_FLOAT + | T_INT + | T_INT2 + | T_INT4 + | T_INT8 + | T_INTEGER + | T_NCHAR + | T_NVARCHAR + | T_NUMBER + | T_NUMERIC + | T_PLS_INTEGER + | T_REAL + | T_RESULT_SET_LOCATOR T_VARYING + | T_SIMPLE_FLOAT + | T_SIMPLE_DOUBLE + | T_SIMPLE_INTEGER + | T_SMALLINT + | T_SMALLDATETIME + | T_STRING + | T_SYS_REFCURSOR + | T_TIMESTAMP + | T_TINYINT + | T_VARCHAR + | T_VARCHAR2 + | T_XML + | qident ('%' (T_TYPE | T_ROWTYPE))? // User-defined or derived data type + ; + +dtype_len : // Data type length or size specification + T_OPEN_P (L_INT | T_MAX) (T_CHAR | T_BYTE)? (T_COMMA L_INT)? T_CLOSE_P + ; + +dtype_attr : + T_NOT? T_NULL + | T_CHARACTER T_SET ident + | T_NOT? (T_CASESPECIFIC | T_CS) + ; + +dtype_default : + T_COLON? T_EQUAL expr + | T_WITH? T_DEFAULT expr? + ; + +create_database_stmt : + T_CREATE (T_DATABASE | T_SCHEMA) (T_IF T_NOT T_EXISTS)? expr create_database_option* + ; + +create_database_option : + T_COMMENT expr + | T_LOCATION expr + ; + +create_function_stmt : + (T_ALTER | T_CREATE (T_OR T_REPLACE)? | T_REPLACE)? T_FUNCTION ident create_routine_params? create_function_return (T_AS | T_IS)? declare_block_inplace? single_block_stmt + ; + +create_function_return : + (T_RETURN | T_RETURNS) dtype dtype_len? + ; + +create_package_stmt : + (T_ALTER | T_CREATE (T_OR T_REPLACE)? | T_REPLACE)? T_PACKAGE ident (T_AS | T_IS) package_spec T_END (ident T_SEMICOLON)? + ; + +package_spec : + package_spec_item T_SEMICOLON (package_spec_item T_SEMICOLON)* + ; + +package_spec_item : + declare_stmt_item + | T_FUNCTION ident create_routine_params? create_function_return + | (T_PROCEDURE | T_PROC) ident create_routine_params? + ; + +create_package_body_stmt : + (T_ALTER | T_CREATE (T_OR T_REPLACE)? | T_REPLACE)? T_PACKAGE T_BODY ident (T_AS | T_IS) package_body T_END (ident T_SEMICOLON)? 
+ ; + +package_body : + package_body_item T_SEMICOLON (package_body_item T_SEMICOLON)* + ; + +package_body_item : + declare_stmt_item + | create_function_stmt + | create_procedure_stmt + ; + +create_procedure_stmt : + (T_ALTER | T_CREATE (T_OR T_REPLACE)? | T_REPLACE)? (T_PROCEDURE | T_PROC) ident create_routine_params? create_routine_options? (T_AS | T_IS)? declare_block_inplace? label? proc_block (ident T_SEMICOLON)? + ; + +create_routine_params : + T_OPEN_P T_CLOSE_P + | T_OPEN_P create_routine_param_item (T_COMMA create_routine_param_item)* T_CLOSE_P + | {!_input.LT(1).getText().equalsIgnoreCase("IS") && + !_input.LT(1).getText().equalsIgnoreCase("AS") && + !(_input.LT(1).getText().equalsIgnoreCase("DYNAMIC") && _input.LT(2).getText().equalsIgnoreCase("RESULT")) + }? + create_routine_param_item (T_COMMA create_routine_param_item)* + ; + +create_routine_param_item : + (T_IN | T_OUT | T_INOUT | T_IN T_OUT)? ident dtype dtype_len? dtype_attr* dtype_default? + | ident (T_IN | T_OUT | T_INOUT | T_IN T_OUT)? dtype dtype_len? dtype_attr* dtype_default? + ; + +create_routine_options : + create_routine_option+ + ; +create_routine_option : + T_LANGUAGE T_SQL + | T_SQL T_SECURITY (T_CREATOR | T_DEFINER | T_INVOKER | T_OWNER) + | T_DYNAMIC? T_RESULT T_SETS L_INT + ; + +drop_stmt : // DROP statement + T_DROP T_TABLE (T_IF T_EXISTS)? table_name + | T_DROP T_PACKAGE (T_IF T_EXISTS)? ident + | T_DROP (T_PROCEDURE | T_FUNCTION) (T_IF T_EXISTS)? ident + | T_DROP (T_DATABASE | T_SCHEMA) (T_IF T_EXISTS)? expr + ; + +end_transaction_stmt : + T_END T_TRANSACTION + ; + +exec_stmt : // EXEC, EXECUTE IMMEDIATE statement + (T_EXEC | T_EXECUTE) T_IMMEDIATE? expr (T_OPEN_P expr_func_params T_CLOSE_P | expr_func_params)? (T_INTO L_ID (T_COMMA L_ID)*)? using_clause? + ; + +if_stmt : // IF statement + if_plsql_stmt + | if_tsql_stmt + | if_bteq_stmt + ; + +if_plsql_stmt : + T_IF bool_expr T_THEN block elseif_block* else_block? 
T_END T_IF + ; + +if_tsql_stmt : + T_IF bool_expr single_block_stmt (T_ELSE single_block_stmt)? + ; + +if_bteq_stmt : + '.' T_IF bool_expr T_THEN single_block_stmt + ; + +elseif_block : + (T_ELSIF | T_ELSEIF) bool_expr T_THEN block + ; + +else_block : + T_ELSE block + ; + +include_stmt : // INCLUDE statement + T_INCLUDE (file_name | expr) + ; + +insert_stmt : // INSERT statement + T_INSERT (T_OVERWRITE T_TABLE | T_INTO T_TABLE?) table_name insert_stmt_cols? (select_stmt | insert_stmt_rows) + ; + +insert_stmt_cols : + T_OPEN_P qident (T_COMMA qident)* T_CLOSE_P + ; + +insert_stmt_rows : + T_VALUES insert_stmt_row (T_COMMA insert_stmt_row)* + ; + +insert_stmt_row: + T_OPEN_P expr (T_COMMA expr)* T_CLOSE_P + ; + +insert_directory_stmt : + T_INSERT T_OVERWRITE T_LOCAL? T_DIRECTORY expr_file expr_select + ; + +exit_stmt : + T_EXIT L_ID? (T_WHEN bool_expr)? + ; + +get_diag_stmt : // GET DIAGNOSTICS statement + T_GET T_DIAGNOSTICS get_diag_stmt_item + ; + +get_diag_stmt_item : + get_diag_stmt_exception_item + | get_diag_stmt_rowcount_item + ; + +get_diag_stmt_exception_item : + T_EXCEPTION L_INT qident T_EQUAL T_MESSAGE_TEXT + ; + +get_diag_stmt_rowcount_item : + qident T_EQUAL T_ROW_COUNT + ; + +grant_stmt : + T_GRANT grant_stmt_item (T_COMMA grant_stmt_item)* T_TO T_ROLE qident + ; + +grant_stmt_item : + T_EXECUTE T_ON T_PROCEDURE qident + ; + +leave_stmt : + T_LEAVE L_ID? + ; + +map_object_stmt : + T_MAP T_OBJECT ident (T_TO ident)? (T_AT ident)? + ; + +open_stmt : // OPEN cursor statement + T_OPEN ident (T_FOR (select_stmt | expr))? + ; + +fetch_stmt : // FETCH cursor statement + T_FETCH T_FROM? ident bulk_collect_clause? T_INTO ident (T_COMMA ident)* fetch_limit? + ; + +fetch_limit: + T_LIMIT expr + ; + +collect_stats_stmt : + T_COLLECT (T_STATISTICS | T_STATS) T_ON table_name collect_stats_clause? 
+ ; + +collect_stats_clause : + T_COLUMN T_OPEN_P qident (T_COMMA qident)* T_CLOSE_P + ; + +close_stmt : // CLOSE cursor statement + T_CLOSE L_ID + ; + +cmp_stmt : // CMP statement + T_CMP (T_ROW_COUNT | T_SUM) cmp_source T_COMMA cmp_source + ; + +cmp_source : + (table_name where_clause? | T_OPEN_P select_stmt T_CLOSE_P) (T_AT qident)? + ; + +copy_from_local_stmt : // COPY FROM LOCAL statement + T_COPY T_FROM T_LOCAL copy_source (T_COMMA copy_source)* T_TO copy_target copy_file_option* + ; + +copy_stmt : // COPY statement + T_COPY (table_name | T_OPEN_P select_stmt T_CLOSE_P) T_TO T_HDFS? copy_target copy_option* + ; + +copy_source : + (file_name | expr) + ; + +copy_target : + (file_name | expr) + ; + +copy_option : + T_AT qident + | T_BATCHSIZE expr + | T_DELIMITER expr + | T_SQLINSERT qident + ; + +copy_file_option : + T_DELETE + | T_IGNORE + | T_OVERWRITE + ; + +commit_stmt : // COMMIT statement + T_COMMIT T_WORK? + ; + +create_index_stmt : // CREATE INDEX statement + T_CREATE T_UNIQUE? T_INDEX qident T_ON table_name T_OPEN_P create_index_col (T_COMMA create_index_col)* T_CLOSE_P + ; + +create_index_col : + qident (T_ASC | T_DESC)? + ; + +index_storage_clause : + index_mssql_storage_clause + ; + +index_mssql_storage_clause : + T_WITH T_OPEN_P qident T_EQUAL qident (T_COMMA qident T_EQUAL qident)* T_CLOSE_P create_table_options_mssql_item* + ; + +print_stmt : // PRINT statement + T_PRINT expr + | T_PRINT T_OPEN_P expr T_CLOSE_P + ; + +quit_stmt : + '.'? T_QUIT expr? + ; + +raise_stmt : + T_RAISE + ; + +resignal_stmt : // RESIGNAL statement + T_RESIGNAL (T_SQLSTATE T_VALUE? expr (T_SET T_MESSAGE_TEXT T_EQUAL expr)? )? + ; + +return_stmt : // RETURN statement + T_RETURN expr? + ; + +rollback_stmt : // ROLLBACK statement + T_ROLLBACK T_WORK? + ; + +set_session_option : + set_doris_session_option + | set_current_schema_option + | set_mssql_session_option + | set_teradata_session_option + ; + +set_doris_session_option : + (T_GLOBAL | T_LOCAL | T_SESSION)? 
ident T_EQUAL ident + ; + +set_current_schema_option : + ((T_CURRENT? T_SCHEMA) | T_CURRENT_SCHEMA) T_EQUAL? expr + ; + +set_mssql_session_option : + ( T_ANSI_NULLS + | T_ANSI_PADDING + | T_NOCOUNT + | T_QUOTED_IDENTIFIER + | T_XACT_ABORT ) + (T_ON | T_OFF) + ; + +set_teradata_session_option : + T_QUERY_BAND T_EQUAL (expr | T_NONE) T_UPDATE? T_FOR (T_TRANSACTION | T_SESSION) + ; + +signal_stmt : // SIGNAL statement + T_SIGNAL ident + ; + +summary_stmt : // SUMMARY statement + T_SUMMARY (T_TOP expr)? T_FOR (select_stmt | table_name where_clause? (T_LIMIT expr)?) + ; + +truncate_stmt : + T_TRUNCATE T_TABLE? table_name + ; + +use_stmt : // USE statement + T_USE expr + ; + +values_into_stmt : // VALUES INTO statement + T_VALUES T_OPEN_P? expr (T_COMMA expr)* T_CLOSE_P? T_INTO T_OPEN_P? ident (T_COMMA ident)* T_CLOSE_P? + ; + +while_stmt : // WHILE loop statement + T_WHILE bool_expr (T_DO | T_LOOP | T_THEN | T_BEGIN) block T_END (T_WHILE | T_LOOP)? + ; + +unconditional_loop_stmt : // LOOP .. END LOOP + T_LOOP block T_END T_LOOP + ; + +for_cursor_stmt : // FOR (cursor) statement + T_FOR L_ID T_IN T_OPEN_P? select_stmt T_CLOSE_P? T_LOOP block T_END T_LOOP + ; + +for_range_stmt : // FOR (Integer range) statement + T_FOR L_ID T_IN T_REVERSE? expr T_DOT2 expr ((T_BY | T_STEP) expr)? T_LOOP block T_END T_LOOP + ; + +label : + L_LABEL + | T_LESS T_LESS L_ID T_GREATER T_GREATER + ; + +using_clause : // USING var,... clause + T_USING expr (T_COMMA expr)* + ; + +select_stmt : // SELECT statement + cte_select_stmt? fullselect_stmt + ; + +cte_select_stmt : + T_WITH cte_select_stmt_item (T_COMMA cte_select_stmt_item)* + ; + +cte_select_stmt_item : + qident cte_select_cols? 
T_AS T_OPEN_P fullselect_stmt T_CLOSE_P + ; + +cte_select_cols : + T_OPEN_P qident (T_COMMA qident)* T_CLOSE_P + ; + +fullselect_stmt : + fullselect_stmt_item (fullselect_set_clause fullselect_stmt_item)* + ; + +fullselect_stmt_item : + subselect_stmt + | T_OPEN_P fullselect_stmt T_CLOSE_P + ; + +fullselect_set_clause : + T_UNION T_ALL? + | T_EXCEPT T_ALL? + | T_INTERSECT T_ALL? + ; + +subselect_stmt : + (T_SELECT | T_SEL) select_list into_clause? from_clause? where_clause? group_by_clause? (having_clause | qualify_clause)? order_by_clause? select_options? + ; + +select_list : + select_list_set? select_list_limit? select_list_item (T_COMMA select_list_item)* + ; + +select_list_set : + T_ALL + | T_DISTINCT + ; + +select_list_limit : + T_TOP expr + ; + +select_list_item : + ((qident T_EQUAL)? expr select_list_alias? | select_list_asterisk) + ; + +select_list_alias : + {!_input.LT(1).getText().equalsIgnoreCase("INTO") && !_input.LT(1).getText().equalsIgnoreCase("FROM")}? T_AS? qident + | T_OPEN_P T_TITLE L_S_STRING T_CLOSE_P + ; + +select_list_asterisk : + (L_ID '.')? '*' + ; + +table_row : + ident T_OPEN_P L_INT T_CLOSE_P + ; + +into_clause : + bulk_collect_clause? T_INTO (table_row | ident) (T_COMMA (table_row | ident))* + ; + +bulk_collect_clause : + T_BULK T_COLLECT + ; + +from_clause : + T_FROM from_table_clause (from_join_clause)* + ; + +from_table_clause : + from_table_name_clause + | from_subselect_clause + | from_table_values_clause + ; + +from_table_name_clause : + table_name from_alias_clause? + ; + +from_subselect_clause : + T_OPEN_P select_stmt T_CLOSE_P from_alias_clause? + ; + +from_join_clause : + T_COMMA from_table_clause + | from_join_type_clause from_table_clause T_ON bool_expr + ; + +from_join_type_clause : + T_INNER? T_JOIN + | (T_LEFT | T_RIGHT | T_FULL) T_OUTER? T_JOIN + ; + +from_table_values_clause: + T_TABLE T_OPEN_P T_VALUES from_table_values_row (T_COMMA from_table_values_row)* T_CLOSE_P from_alias_clause? 
+ ; + +from_table_values_row: + expr + | T_OPEN_P expr (T_COMMA expr)* T_CLOSE_P + ; + +from_alias_clause : + {!_input.LT(1).getText().equalsIgnoreCase("EXEC") && + !_input.LT(1).getText().equalsIgnoreCase("EXECUTE") && + !_input.LT(1).getText().equalsIgnoreCase("INNER") && + !_input.LT(1).getText().equalsIgnoreCase("LEFT") && + !_input.LT(1).getText().equalsIgnoreCase("GROUP") && + !_input.LT(1).getText().equalsIgnoreCase("ORDER") && + !_input.LT(1).getText().equalsIgnoreCase("LIMIT") && + !_input.LT(1).getText().equalsIgnoreCase("WITH")}? + T_AS? qident (T_OPEN_P L_ID (T_COMMA L_ID)* T_CLOSE_P)? + ; + +table_name : + qident + ; + +where_clause : + T_WHERE bool_expr + ; + +group_by_clause : + T_GROUP T_BY expr (T_COMMA expr)* + ; + +having_clause : + T_HAVING bool_expr + ; + +qualify_clause : + T_QUALIFY bool_expr + ; + +order_by_clause : + T_ORDER T_BY expr (T_ASC | T_DESC)? (T_COMMA expr (T_ASC | T_DESC)?)* + ; + +select_options : + select_options_item+ + ; + +select_options_item : + T_LIMIT expr + | T_WITH (T_RR | T_RS | T_CS | T_UR) (T_USE T_AND T_KEEP (T_EXCLUSIVE | T_UPDATE | T_SHARE) T_LOCKS)? + ; + +update_stmt : // UPDATE statement + T_UPDATE update_table T_SET update_assignment where_clause? update_upsert? + ; + +update_assignment : + assignment_stmt_item (T_COMMA assignment_stmt_item)* + ; + +update_table : + (table_name from_clause? | T_OPEN_P select_stmt T_CLOSE_P) (T_AS? qident)? + ; + +update_upsert : + T_ELSE insert_stmt + ; + +merge_stmt : // MERGE statement + T_MERGE T_INTO merge_table T_USING merge_table T_ON bool_expr merge_condition+ + ; + +merge_table : + (table_name | (T_OPEN_P select_stmt T_CLOSE_P)) (T_AS? qident)? + ; + +merge_condition : + T_WHEN T_NOT? T_MATCHED (T_AND bool_expr)? T_THEN merge_action + | T_ELSE T_IGNORE + ; + +merge_action : + T_INSERT insert_stmt_cols? T_VALUES insert_stmt_row + | T_UPDATE T_SET assignment_stmt_item (T_COMMA assignment_stmt_item)* where_clause? + | T_DELETE + ; + +delete_stmt : + T_DELETE T_FROM? 
table_name delete_alias? (where_clause | T_ALL)? + ; + +delete_alias : + {!_input.LT(1).getText().equalsIgnoreCase("ALL")}? + T_AS? qident + ; + +describe_stmt : + (T_DESCRIBE | T_DESC) T_TABLE? table_name + ; + +bool_expr : // Boolean condition + T_NOT? T_OPEN_P bool_expr T_CLOSE_P + | bool_expr bool_expr_logical_operator bool_expr + | bool_expr_atom + ; + +bool_expr_atom : + bool_expr_unary + | bool_expr_binary + | expr + ; + +bool_expr_unary : + expr T_IS T_NOT? T_NULL + | expr T_BETWEEN expr T_AND expr + | T_NOT? T_EXISTS T_OPEN_P select_stmt T_CLOSE_P + | bool_expr_single_in + | bool_expr_multi_in + ; + +bool_expr_single_in : + expr T_NOT? T_IN T_OPEN_P ((expr (T_COMMA expr)*) | select_stmt) T_CLOSE_P + ; + +bool_expr_multi_in : + T_OPEN_P expr (T_COMMA expr)* T_CLOSE_P T_NOT? T_IN T_OPEN_P select_stmt T_CLOSE_P + ; + +bool_expr_binary : + expr bool_expr_binary_operator expr + ; + +bool_expr_logical_operator : + T_AND + | T_OR + ; + +bool_expr_binary_operator : + T_EQUAL + | T_EQUAL2 + | T_NOTEQUAL + | T_NOTEQUAL2 + | T_LESS + | T_LESSEQUAL + | T_GREATER + | T_GREATEREQUAL + | T_NOT? 
(T_LIKE | T_RLIKE | T_REGEXP) + ; + +expr : + expr interval_item + | expr (T_MUL | T_DIV) expr + | expr (T_ADD | T_SUB) expr + | T_OPEN_P select_stmt T_CLOSE_P + | T_OPEN_P expr T_CLOSE_P + | expr_interval + | expr_concat + | expr_dot + | expr_case + | expr_cursor_attribute + | expr_agg_window_func + | expr_spec_func + | expr_func + | expr_atom + ; + +expr_atom : + date_literal + | timestamp_literal + | bool_literal + | qident + | string + | dec_number + | int_number + | null_const + ; + +expr_interval : + T_INTERVAL expr interval_item + ; +interval_item : + T_DAY + | T_DAYS + | T_MICROSECOND + | T_MICROSECONDS + | T_SECOND + | T_SECONDS + ; + +expr_concat : // String concatenation operator + expr_concat_item (T_PIPE | T_CONCAT) expr_concat_item ((T_PIPE | T_CONCAT) expr_concat_item)* + ; + +expr_concat_item : + T_OPEN_P expr T_CLOSE_P + | expr_case + | expr_agg_window_func + | expr_spec_func + | expr_dot + | expr_func + | expr_atom + ; + +expr_case : // CASE expression + expr_case_simple + | expr_case_searched + ; + +expr_case_simple : + T_CASE expr (T_WHEN expr T_THEN expr)+ (T_ELSE expr)? T_END + ; + +expr_case_searched : + T_CASE (T_WHEN bool_expr T_THEN expr)+ (T_ELSE expr)? T_END + ; + +expr_cursor_attribute : + ident '%' (T_ISOPEN | T_FOUND | T_NOTFOUND) + ; + +expr_agg_window_func : + T_AVG T_OPEN_P expr_func_all_distinct? expr T_CLOSE_P expr_func_over_clause? + | T_COUNT T_OPEN_P ((expr_func_all_distinct? expr) | '*') T_CLOSE_P expr_func_over_clause? + | T_COUNT_BIG T_OPEN_P ((expr_func_all_distinct? expr) | '*') T_CLOSE_P expr_func_over_clause? + | T_CUME_DIST T_OPEN_P T_CLOSE_P expr_func_over_clause + | T_DENSE_RANK T_OPEN_P T_CLOSE_P expr_func_over_clause + | T_FIRST_VALUE T_OPEN_P expr T_CLOSE_P expr_func_over_clause + | T_LAG T_OPEN_P expr (T_COMMA expr (T_COMMA expr)?)? T_CLOSE_P expr_func_over_clause + | T_LAST_VALUE T_OPEN_P expr T_CLOSE_P expr_func_over_clause + | T_LEAD T_OPEN_P expr (T_COMMA expr (T_COMMA expr)?)? 
T_CLOSE_P expr_func_over_clause + | T_MAX T_OPEN_P expr_func_all_distinct? expr T_CLOSE_P expr_func_over_clause? + | T_MIN T_OPEN_P expr_func_all_distinct? expr T_CLOSE_P expr_func_over_clause? + | T_RANK T_OPEN_P T_CLOSE_P expr_func_over_clause + | T_ROW_NUMBER T_OPEN_P T_CLOSE_P expr_func_over_clause + | T_STDEV T_OPEN_P expr_func_all_distinct? expr T_CLOSE_P expr_func_over_clause? + | T_SUM T_OPEN_P expr_func_all_distinct? expr T_CLOSE_P expr_func_over_clause? + | T_VAR T_OPEN_P expr_func_all_distinct? expr T_CLOSE_P expr_func_over_clause? + | T_VARIANCE T_OPEN_P expr_func_all_distinct? expr T_CLOSE_P expr_func_over_clause? + ; + +expr_func_all_distinct : + T_ALL + | T_DISTINCT + ; + +expr_func_over_clause : + T_OVER T_OPEN_P expr_func_partition_by_clause? order_by_clause? T_CLOSE_P + ; + +expr_func_partition_by_clause : + T_PARTITION T_BY expr (T_COMMA expr)* + ; + +expr_spec_func : + T_ACTIVITY_COUNT + | T_CAST T_OPEN_P expr T_AS dtype dtype_len? T_CLOSE_P + | T_COUNT T_OPEN_P (expr | '*') T_CLOSE_P + | T_CURRENT_DATE | T_CURRENT T_DATE + | (T_CURRENT_TIMESTAMP | T_CURRENT T_TIMESTAMP) (T_OPEN_P expr T_CLOSE_P)? + | T_CURRENT_USER | T_CURRENT T_USER + | T_MAX_PART_STRING T_OPEN_P expr (T_COMMA expr (T_COMMA expr T_EQUAL expr)*)? T_CLOSE_P + | T_MIN_PART_STRING T_OPEN_P expr (T_COMMA expr (T_COMMA expr T_EQUAL expr)*)? T_CLOSE_P + | T_MAX_PART_INT T_OPEN_P expr (T_COMMA expr (T_COMMA expr T_EQUAL expr)*)? T_CLOSE_P + | T_MIN_PART_INT T_OPEN_P expr (T_COMMA expr (T_COMMA expr T_EQUAL expr)*)? T_CLOSE_P + | T_MAX_PART_DATE T_OPEN_P expr (T_COMMA expr (T_COMMA expr T_EQUAL expr)*)? T_CLOSE_P + | T_MIN_PART_DATE T_OPEN_P expr (T_COMMA expr (T_COMMA expr T_EQUAL expr)*)? T_CLOSE_P + | T_PART_COUNT T_OPEN_P expr (T_COMMA expr T_EQUAL expr)* T_CLOSE_P + | T_PART_LOC T_OPEN_P expr (T_COMMA expr T_EQUAL expr)+ (T_COMMA expr)? T_CLOSE_P + | T_TRIM T_OPEN_P expr T_CLOSE_P + | T_SUBSTRING T_OPEN_P expr T_FROM expr (T_FOR expr)? 
T_CLOSE_P + | T_SYSDATE + | T_USER + ; + +expr_func : + ident T_OPEN_P expr_func_params? T_CLOSE_P + ; + +expr_dot : + expr_dot_method_call | expr_dot_property_access + ; + +expr_dot_method_call : + (ident | expr_func) T_DOT expr_func + ; + +expr_dot_property_access : + (ident | expr_func) T_DOT ident + ; + +expr_func_params : + func_param (T_COMMA func_param)* + ; + +func_param : + {!_input.LT(1).getText().equalsIgnoreCase("INTO")}? (ident T_EQUAL T_GREATER?)? expr + ; + +expr_select : + select_stmt + | expr + ; + +expr_file : + file_name + | expr + ; + +hive : + T_HIVE hive_item* + ; + +hive_item : + T_SUB qident expr + | T_SUB qident L_ID T_EQUAL expr + | T_SUB qident + ; + +host : + '!' host_cmd ';' // OS command + | host_stmt + ; + +host_cmd : + .*? + ; + +host_stmt : + T_HOST expr + ; + +file_name : + L_FILE | ('/' | '.' '/')? qident ('/' qident)* + ; + +date_literal : // DATE 'YYYY-MM-DD' literal + T_DATE string + ; + +timestamp_literal : // TIMESTAMP 'YYYY-MM-DD HH:MI:SS.FFF' literal + T_TIMESTAMP string + ; + +ident : + '-'? (L_ID | non_reserved_words) + ; + +qident : // qualified identifier e.g: table_name.col_name or db_name._table_name + ident ('.'ident)* + ; + +string : // String literal (single or double quoted) + L_S_STRING # single_quotedString + | L_D_STRING # double_quotedString + ; + +int_number : // Integer (positive or negative) + ('-' | '+')? L_INT + ; + +dec_number : // Decimal number (positive or negative) + ('-' | '+')? 
L_DEC + ; + +bool_literal : // Boolean literal + T_TRUE + | T_FALSE + ; + +null_const : // NULL constant + T_NULL + ; + +non_reserved_words : // Tokens that are not reserved words and can be used as identifiers + T_ACTION + | T_ACTIVITY_COUNT + | T_ADD2 + | T_ALL + | T_ALLOCATE + | T_ALTER + | T_AND + | T_ANSI_NULLS + | T_ANSI_PADDING + | T_AS + | T_ASC + | T_ASSOCIATE + | T_AT + | T_AUTO_INCREMENT + | T_AVG + | T_BATCHSIZE + | T_BEGIN + | T_BETWEEN + | T_BIGINT + | T_BINARY_DOUBLE + | T_BINARY_FLOAT + | T_BIT + | T_BODY + | T_BREAK + | T_BULK + | T_BY + | T_BYTE + | T_CALL + | T_CALLER + | T_CASCADE + | T_CASE + | T_CASESPECIFIC + | T_CAST + | T_CHAR + | T_CHARACTER + | T_CHARSET + | T_CLIENT + | T_CLOSE + | T_CLUSTERED + | T_CMP + | T_COLLECT + | T_COLLECTION + | T_COLUMN + | T_COMMENT + | T_COMPRESS + | T_CONSTANT + | T_COPY + | T_COMMIT + | T_CONCAT + | T_CONDITION + | T_CONSTRAINT + | T_CONTINUE + | T_COUNT + | T_COUNT_BIG + | T_CREATE + | T_CREATION + | T_CREATOR + | T_CS + | T_CUME_DIST + | T_CURRENT + | T_CURRENT_DATE + | T_CURRENT_SCHEMA + | T_CURRENT_TIMESTAMP + | T_CURRENT_USER + | T_CURSOR + | T_DATA + | T_DATABASE + | T_DATE + | T_DATETIME + | T_DAY + | T_DAYS + | T_DEC + | T_DECIMAL + | T_DECLARE + | T_DEFAULT + | T_DEFERRED + | T_DEFINED + | T_DEFINER + | T_DEFINITION + | T_DELETE + | T_DELIMITED + | T_DELIMITER + | T_DENSE_RANK + | T_DESC + | T_DESCRIBE + | T_DIAGNOSTICS + | T_DIR + | T_DIRECTORY + | T_DISTINCT + | T_DISTRIBUTE + | T_DO + | T_DOUBLE + | T_DROP + | T_DYNAMIC + // T_ELSE reserved word + // T_ELSEIF reserved word + // T_ELSIF reserved word + // T_END reserved word + | T_ENABLE + | T_ENGINE + | T_ESCAPED + | T_EXCEPT + | T_EXEC + | T_EXECUTE + | T_EXCEPTION + | T_EXCLUSIVE + | T_EXISTS + | T_EXIT + | T_FALLBACK + | T_FALSE + | T_FETCH + | T_FIELDS + | T_FILE + | T_FILES + | T_FIRST_VALUE + | T_FLOAT + | T_FOR + | T_FOREIGN + | T_FORMAT + | T_FOUND + | T_FROM + | T_FULL + | T_FUNCTION + | T_GET + | T_GLOBAL + | T_GO + | T_GRANT + | 
T_GROUP + | T_HANDLER + | T_HASH + | T_HAVING + | T_HDFS + | T_HIVE + | T_HOST + | T_IDENTITY + | T_IF + | T_IGNORE + | T_IMMEDIATE + | T_IN + | T_INCLUDE + | T_INDEX + | T_INITRANS + | T_INNER + | T_INOUT + | T_INSERT + | T_INT + | T_INT2 + | T_INT4 + | T_INT8 + | T_INTEGER + | T_INTERSECT + | T_INTERVAL + | T_INTO + | T_INVOKER + | T_ITEMS + | T_IS + | T_ISOPEN + | T_JOIN + | T_KEEP + | T_KEY + | T_KEYS + | T_LAG + | T_LANGUAGE + | T_LAST_VALUE + | T_LEAD + | T_LEAVE + | T_LEFT + | T_LIKE + | T_LIMIT + | T_LINES + | T_LOCAL + | T_LOCATION + | T_LOCATOR + | T_LOCATORS + | T_LOCKS + | T_LOG + | T_LOGGED + | T_LOGGING + | T_LOOP + | T_MAP + | T_MATCHED + | T_MAX + | T_MAXTRANS + | T_MERGE + | T_MESSAGE_TEXT + | T_MICROSECOND + | T_MICROSECONDS + | T_MIN + | T_MULTISET + | T_NCHAR + | T_NEW + | T_NVARCHAR + | T_NO + | T_NOCOMPRESS + | T_NOCOUNT + | T_NOLOGGING + | T_NONE + | T_NOT + | T_NOTFOUND + // T_NULL reserved word + | T_NUMERIC + | T_NUMBER + | T_OBJECT + | T_OFF + | T_ON + | T_ONLY + | T_OPEN + | T_OR + | T_ORDER + | T_OUT + | T_OUTER + | T_OVER + | T_OVERWRITE + | T_OWNER + | T_PACKAGE + | T_PART_COUNT + | T_PART_LOC + | T_PARTITION + | T_PCTFREE + | T_PCTUSED + | T_PRECISION + | T_PRESERVE + | T_PRIMARY + | T_PRINT + | T_PROC + | T_PROCEDURE + | T_PWD + | T_QUALIFY + | T_QUERY_BAND + | T_QUIT + | T_QUOTED_IDENTIFIER + | T_RAISE + | T_RANK + | T_REAL + | T_REFERENCES + | T_REGEXP + | T_RR + | T_REPLACE + | T_RESIGNAL + | T_RESTRICT + | T_RESULT + | T_RESULT_SET_LOCATOR + | T_RETURN + | T_RETURNS + | T_REVERSE + | T_RIGHT + | T_RLIKE + | T_RS + | T_ROLE + | T_ROLLBACK + | T_ROW + | T_ROWS + | T_ROW_COUNT + | T_ROW_NUMBER + | T_SCHEMA + | T_SECOND + | T_SECONDS + | T_SECURITY + | T_SEGMENT + | T_SEL + | T_SELECT + | T_SESSION + | T_SESSIONS + | T_SET + | T_SETS + | T_SHARE + | T_SIGNAL + | T_SIMPLE_DOUBLE + | T_SIMPLE_FLOAT + | T_SMALLDATETIME + | T_SMALLINT + | T_SQL + | T_SQLEXCEPTION + | T_SQLINSERT + | T_SQLSTATE + | T_SQLWARNING + | T_STATS + | 
T_STATISTICS + | T_STEP + | T_STDEV + | T_STORAGE + | T_STORED + | T_STRING + | T_SUBDIR + | T_SUBSTRING + | T_SUM + | T_SUMMARY + | T_SYSDATE + | T_SYS_REFCURSOR + | T_TABLE + | T_TABLESPACE + | T_TEMPORARY + | T_TERMINATED + | T_TEXTIMAGE_ON + | T_THEN + | T_TIMESTAMP + | T_TITLE + | T_TO + | T_TOP + | T_TRANSACTION + | T_TRIM + | T_TRUE + | T_TRUNCATE + // T_UNION reserved word + | T_UNIQUE + | T_UPDATE + | T_UR + | T_USE + | T_USER + | T_USING + | T_VALUE + | T_VALUES + | T_VAR + | T_VARCHAR + | T_VARCHAR2 + | T_VARYING + | T_VARIANCE + | T_VOLATILE + // T_WHEN reserved word + // T_WHERE reserved word + | T_WHILE + | T_WITH + | T_WITHOUT + | T_WORK + | T_XACT_ABORT + | T_XML + | T_YES + ; + +// Lexer rules +T_ACTION : A C T I O N ; +T_ADD2 : A D D ; +T_ALL : A L L ; +T_ALLOCATE : A L L O C A T E ; +T_ALTER : A L T E R ; +T_AND : A N D ; +T_ANSI_NULLS : A N S I '_' N U L L S ; +T_ANSI_PADDING : A N S I '_' P A D D I N G ; +T_AS : A S ; +T_ASC : A S C ; +T_ASSOCIATE : A S S O C I A T E ; +T_AT : A T ; +T_AUTO_INCREMENT : A U T O '_' I N C R E M E N T ; +T_AVG : A V G ; +T_BATCHSIZE : B A T C H S I Z E ; +T_BEGIN : B E G I N ; +T_BETWEEN : B E T W E E N ; +T_BIGINT : B I G I N T ; +T_BINARY_DOUBLE : B I N A R Y '_' D O U B L E ; +T_BINARY_FLOAT : B I N A R Y '_' F L O A T ; +T_BINARY_INTEGER : B I N A R Y '_' I N T E G E R ; +T_BIT : B I T ; +T_BODY : B O D Y ; +T_BREAK : B R E A K ; +T_BULK : B U L K ; +T_BY : B Y ; +T_BYTE : B Y T E ; +T_CALL : C A L L ; +T_CALLER : C A L L E R ; +T_CASCADE : C A S C A D E ; +T_CASE : C A S E ; +T_CASESPECIFIC : C A S E S P E C I F I C ; +T_CAST : C A S T ; +T_CHAR : C H A R ; +T_CHARACTER : C H A R A C T E R ; +T_CHARSET : C H A R S E T ; +T_CLIENT : C L I E N T ; +T_CLOSE : C L O S E ; +T_CLUSTERED : C L U S T E R E D; +T_CMP : C M P ; +T_COLLECT : C O L L E C T ; +T_COLLECTION : C O L L E C T I O N ; +T_COLUMN : C O L U M N ; +T_COMMENT : C O M M E N T; +T_CONSTANT : C O N S T A N T ; +T_COMMIT : C O M M I T ; +T_COMPRESS : C 
O M P R E S S ; +T_CONCAT : C O N C A T; +T_CONDITION : C O N D I T I O N ; +T_CONSTRAINT : C O N S T R A I N T ; +T_CONTINUE : C O N T I N U E ; +T_COPY : C O P Y ; +T_COUNT : C O U N T ; +T_COUNT_BIG : C O U N T '_' B I G; +T_CREATE : C R E A T E ; +T_CREATION : C R E A T I O N ; +T_CREATOR : C R E A T O R ; +T_CS : C S; +T_CURRENT : C U R R E N T ; +T_CURRENT_SCHEMA : C U R R E N T '_' S C H E M A ; +T_CURSOR : C U R S O R ; +T_DATABASE : D A T A B A S E ; +T_DATA : D A T A ; +T_DATE : D A T E ; +T_DATETIME : D A T E T I M E ; +T_DAY : D A Y ; +T_DAYS : D A Y S ; +T_DEC : D E C ; +T_DECIMAL : D E C I M A L ; +T_DECLARE : D E C L A R E ; +T_DEFAULT : D E F A U L T ; +T_DEFERRED : D E F E R R E D ; +T_DEFINED : D E F I N E D ; +T_DEFINER : D E F I N E R ; +T_DEFINITION : D E F I N I T I O N ; +T_DELETE : D E L E T E ; +T_DELIMITED : D E L I M I T E D ; +T_DELIMITER : D E L I M I T E R ; +T_DESC : D E S C ; +T_DESCRIBE : D E S C R I B E ; +T_DIAGNOSTICS : D I A G N O S T I C S ; +T_DIR : D I R ; +T_DIRECTORY : D I R E C T O R Y ; +T_DISTINCT : D I S T I N C T ; +T_DISTRIBUTE : D I S T R I B U T E ; +T_DO : D O ; +T_DOUBLE : D O U B L E ; +T_DROP : D R O P ; +T_DYNAMIC : D Y N A M I C ; +T_ELSE : E L S E ; +T_ELSEIF : E L S E I F ; +T_ELSIF : E L S I F ; +T_ENABLE : E N A B L E ; +T_END : E N D ; +T_ENGINE : E N G I N E ; +T_ESCAPED : E S C A P E D ; +T_EXCEPT : E X C E P T ; +T_EXEC : E X E C ; +T_EXECUTE : E X E C U T E ; +T_EXCEPTION : E X C E P T I O N ; +T_EXCLUSIVE : E X C L U S I V E ; +T_EXISTS : E X I S T S ; +T_EXIT : E X I T ; +T_FALLBACK : F A L L B A C K ; +T_FALSE : F A L S E ; +T_FETCH : F E T C H ; +T_FIELDS : F I E L D S ; +T_FILE : F I L E ; +T_FILES : F I L E S ; +T_FLOAT : F L O A T ; +T_FOR : F O R ; +T_FOREIGN : F O R E I G N ; +T_FORMAT : F O R M A T ; +T_FOUND : F O U N D ; +T_FROM : F R O M ; +T_FULL : F U L L ; +T_FUNCTION : F U N C T I O N ; +T_GET : G E T ; +T_GLOBAL : G L O B A L ; +T_GO : G O ; +T_GRANT : G R A N T ; +T_GROUP : G R O U 
P ; +T_HANDLER : H A N D L E R ; +T_HASH : H A S H ; +T_HAVING : H A V I N G ; +T_HDFS : H D F S ; +T_HIVE : H I V E ; +T_HOST : H O S T ; +T_IDENTITY : I D E N T I T Y ; +T_IF : I F ; +T_IGNORE : I G N O R E ; +T_IMMEDIATE : I M M E D I A T E ; +T_IN : I N ; +T_INCLUDE : I N C L U D E ; +T_INDEX : I N D E X ; +T_INITRANS : I N I T R A N S ; +T_INNER : I N N E R ; +T_INOUT : I N O U T; +T_INSERT : I N S E R T ; +T_INT : I N T ; +T_INT2 : I N T '2'; +T_INT4 : I N T '4'; +T_INT8 : I N T '8'; +T_INTEGER : I N T E G E R ; +T_INTERSECT : I N T E R S E C T ; +T_INTERVAL : I N T E R V A L ; +T_INTO : I N T O ; +T_INVOKER : I N V O K E R ; +T_IS : I S ; +T_ISOPEN : I S O P E N ; +T_ITEMS : I T E M S ; +T_JOIN : J O I N ; +T_KEEP : K E E P; +T_KEY : K E Y ; +T_KEYS : K E Y S ; +T_LANGUAGE : L A N G U A G E ; +T_LEAVE : L E A V E ; +T_LEFT : L E F T ; +T_LIKE : L I K E ; +T_LIMIT : L I M I T ; +T_LINES : L I N E S ; +T_LOCAL : L O C A L ; +T_LOCATION : L O C A T I O N ; +T_LOCATOR : L O C A T O R ; +T_LOCATORS : L O C A T O R S ; +T_LOCKS : L O C K S ; +T_LOG : L O G ; +T_LOGGED : L O G G E D ; +T_LOGGING : L O G G I N G ; +T_LOOP : L O O P ; +T_MAP : M A P ; +T_MATCHED : M A T C H E D ; +T_MAX : M A X ; +T_MAXTRANS : M A X T R A N S ; +T_MERGE : M E R G E ; +T_MESSAGE_TEXT : M E S S A G E '_' T E X T ; +T_MICROSECOND : M I C R O S E C O N D ; +T_MICROSECONDS : M I C R O S E C O N D S; +T_MIN : M I N ; +T_MULTISET : M U L T I S E T ; +T_NCHAR : N C H A R ; +T_NEW : N E W ; +T_NVARCHAR : N V A R C H A R ; +T_NO : N O ; +T_NOCOUNT : N O C O U N T ; +T_NOCOMPRESS : N O C O M P R E S S ; +T_NOLOGGING : N O L O G G I N G ; +T_NONE : N O N E ; +T_NOT : N O T ; +T_NOTFOUND : N O T F O U N D ; +T_NULL : N U L L ; +T_NUMERIC : N U M E R I C ; +T_NUMBER : N U M B E R ; +T_OBJECT : O B J E C T ; +T_OFF : O F F ; +T_OF : O F ; +T_ON : O N ; +T_ONLY : O N L Y ; +T_OPEN : O P E N ; +T_OR : O R ; +T_ORDER : O R D E R; +T_OUT : O U T ; +T_OUTER : O U T E R ; +T_OVER : O V E R ; +T_OVERWRITE 
: O V E R W R I T E ; +T_OWNER : O W N E R ; +T_PACKAGE : P A C K A G E ; +T_PARTITION : P A R T I T I O N ; +T_PCTFREE : P C T F R E E ; +T_PCTUSED : P C T U S E D ; +T_PLS_INTEGER : P L S '_' I N T E G E R ; +T_PRECISION : P R E C I S I O N ; +T_PRESERVE : P R E S E R V E ; +T_PRIMARY : P R I M A R Y ; +T_PRINT : P R I N T ; +T_PROC : P R O C ; +T_PROCEDURE : P R O C E D U R E ; +T_QUALIFY : Q U A L I F Y ; +T_QUERY_BAND : Q U E R Y '_' B A N D ; +T_QUIT : Q U I T ; +T_QUOTED_IDENTIFIER : Q U O T E D '_' I D E N T I F I E R ; +T_RAISE : R A I S E ; +T_REAL : R E A L ; +T_REFERENCES : R E F E R E N C E S ; +T_REGEXP : R E G E X P ; +T_REPLACE : R E P L A C E ; +T_RESIGNAL : R E S I G N A L ; +T_RESTRICT : R E S T R I C T ; +T_RESULT : R E S U L T ; +T_RESULT_SET_LOCATOR : R E S U L T '_' S E T '_' L O C A T O R ; +T_RETURN : R E T U R N ; +T_RETURNS : R E T U R N S ; +T_REVERSE : R E V E R S E ; +T_RIGHT : R I G H T ; +T_RLIKE : R L I K E ; +T_ROLE : R O L E ; +T_ROLLBACK : R O L L B A C K ; +T_ROW : R O W ; +T_ROWS : R O W S ; +T_ROWTYPE : R O W T Y P E ; +T_ROW_COUNT : R O W '_' C O U N T ; +T_RR : R R; +T_RS : R S ; +T_PWD : P W D ; +T_TRIM : T R I M ; +T_SCHEMA : S C H E M A ; +T_SECOND : S E C O N D ; +T_SECONDS : S E C O N D S; +T_SECURITY : S E C U R I T Y ; +T_SEGMENT : S E G M E N T ; +T_SEL : S E L ; +T_SELECT : S E L E C T ; +T_SET : S E T ; +T_SESSION : S E S S I O N ; +T_SESSIONS : S E S S I O N S ; +T_SETS : S E T S; +T_SHARE : S H A R E ; +T_SIGNAL : S I G N A L ; +T_SIMPLE_DOUBLE : S I M P L E '_' D O U B L E ; +T_SIMPLE_FLOAT : S I M P L E '_' F L O A T ; +T_SIMPLE_INTEGER : S I M P L E '_' I N T E G E R ; +T_SMALLDATETIME : S M A L L D A T E T I M E ; +T_SMALLINT : S M A L L I N T ; +T_SQL : S Q L ; +T_SQLEXCEPTION : S Q L E X C E P T I O N ; +T_SQLINSERT : S Q L I N S E R T ; +T_SQLSTATE : S Q L S T A T E ; +T_SQLWARNING : S Q L W A R N I N G ; +T_STATS : S T A T S ; +T_STATISTICS : S T A T I S T I C S ; +T_STEP : S T E P ; +T_STORAGE : S T O R 
A G E ; +T_STORED : S T O R E D ; +T_STRING : S T R I N G ; +T_SUBDIR : S U B D I R ; +T_SUBSTRING : S U B S T R I N G ; +T_SUM : S U M ; +T_SUMMARY : S U M M A R Y ; +T_SYS_REFCURSOR : S Y S '_' R E F C U R S O R ; +T_TABLE : T A B L E ; +T_TABLESPACE : T A B L E S P A C E ; +T_TEMPORARY : T E M P O R A R Y ; +T_TERMINATED : T E R M I N A T E D ; +T_TEXTIMAGE_ON : T E X T I M A G E '_' O N ; +T_THEN : T H E N ; +T_TIMESTAMP : T I M E S T A M P ; +T_TINYINT : T I N Y I N T ; +T_TITLE : T I T L E ; +T_TO : T O ; +T_TOP : T O P ; +T_TRANSACTION : T R A N S A C T I O N ; +T_TRUE : T R U E ; +T_TRUNCATE : T R U N C A T E; +T_TYPE : T Y P E ; +T_UNION : U N I O N ; +T_UNIQUE : U N I Q U E ; +T_UPDATE : U P D A T E ; +T_UR : U R ; +T_USE : U S E ; +T_USING : U S I N G ; +T_VALUE : V A L U E ; +T_VALUES : V A L U E S ; +T_VAR : V A R ; +T_VARCHAR : V A R C H A R ; +T_VARCHAR2 : V A R C H A R '2' ; +T_VARYING : V A R Y I N G ; +T_VOLATILE : V O L A T I L E ; +T_WHEN : W H E N ; +T_WHERE : W H E R E ; +T_WHILE : W H I L E ; +T_WITH : W I T H ; +T_WITHOUT : W I T H O U T ; +T_WORK : W O R K ; +T_XACT_ABORT : X A C T '_' A B O R T ; +T_XML : X M L ; +T_YES : Y E S ; + +// Functions with specific syntax +T_ACTIVITY_COUNT : A C T I V I T Y '_' C O U N T ; +T_CUME_DIST : C U M E '_' D I S T ; +T_CURRENT_DATE : C U R R E N T '_' D A T E ; +T_CURRENT_TIMESTAMP : C U R R E N T '_' T I M E S T A M P ; +T_CURRENT_USER : C U R R E N T '_' U S E R ; +T_DENSE_RANK : D E N S E '_' R A N K ; +T_FIRST_VALUE : F I R S T '_' V A L U E; +T_LAG : L A G ; +T_LAST_VALUE : L A S T '_' V A L U E; +T_LEAD : L E A D ; +T_MAX_PART_STRING : M A X '_' P A R T '_' S T R I N G ; +T_MIN_PART_STRING : M I N '_' P A R T '_' S T R I N G ; +T_MAX_PART_INT : M A X '_' P A R T '_' I N T ; +T_MIN_PART_INT : M I N '_' P A R T '_' I N T ; +T_MAX_PART_DATE : M A X '_' P A R T '_' D A T E ; +T_MIN_PART_DATE : M I N '_' P A R T '_' D A T E ; +T_PART_COUNT : P A R T '_' C O U N T ; +T_PART_LOC : P A R T '_' L O C ; 
+T_RANK : R A N K ; +T_ROW_NUMBER : R O W '_' N U M B E R; +T_STDEV : S T D E V ; +T_SYSDATE : S Y S D A T E ; +T_VARIANCE : V A R I A N C E ; +T_USER : U S E R; + +T_ADD : '+' ; +T_COLON : ':' ; +T_COMMA : ',' ; +T_PIPE : '||' ; +T_DIV : '/' ; +T_DOT : '.' ; +T_DOT2 : '..' ; +T_EQUAL : '=' ; +T_EQUAL2 : '==' ; +T_NOTEQUAL : '<>' ; +T_NOTEQUAL2 : '!=' ; +T_GREATER : '>' ; +T_GREATEREQUAL : '>=' ; +T_LESS : '<' ; +T_LESSEQUAL : '<=' ; +T_MUL : '*' ; +T_OPEN_B : '{' ; +T_OPEN_P : '(' ; +T_OPEN_SB : '[' ; +T_CLOSE_B : '}' ; +T_CLOSE_P : ')' ; +T_CLOSE_SB : ']' ; +T_SEMICOLON : ';' ; +T_SUB : '-' ; + +L_ID : L_ID_PART // Identifier + ; +L_S_STRING : '\'' (('\'' '\'') | ('\\' '\'') | ~('\''))* '\'' // Single quoted string literal + ; +L_D_STRING : '"' (L_STR_ESC_D | .)*? '"' // Double quoted string literal + ; +L_INT : L_DIGIT+ ; // Integer +L_DEC : L_DIGIT+ '.' ~'.' L_DIGIT* // Decimal number + | '.' L_DIGIT+ + ; +L_WS : L_BLANK+ -> skip ; // Whitespace +L_M_COMMENT : '/*' .*? '*/' -> channel(HIDDEN) ; // Multiline comment +L_S_COMMENT : ('--' | '//') .*? '\r'? '\n' -> channel(HIDDEN) ; // Single line comment + +L_FILE : ([a-zA-Z] ':' '\\'?)? L_ID ('\\' L_ID)* // File path (a/b/c Linux path causes conflicts with division operator and handled at parser level) + ; + +L_LABEL : ([a-zA-Z] | L_DIGIT | '_')* ':' + ; + +fragment +L_ID_PART : + [a-zA-Z] ([a-zA-Z] | L_DIGIT | '_')* // Identifier part + | '$' '{' .*? '}' + | ('_' | '@' | ':' | '#' | '$') ([a-zA-Z] | L_DIGIT | '_' | '@' | ':' | '#' | '$')+ // (at least one char must follow special char) + | '"' .*? '"' // Quoted identifiers + | '[' .*? ']' + | '`' .*? 
'`' + ; +fragment +L_STR_ESC_D : // Double quoted string escape sequence + '""' | '\\"' + ; +fragment +L_DIGIT : [0-9] // Digit + ; +fragment +L_BLANK : (' ' | '\t' | '\r' | '\n') + ; + +// Support case-insensitive keywords and allowing case-sensitive identifiers +fragment A : ('a'|'A') ; +fragment B : ('b'|'B') ; +fragment C : ('c'|'C') ; +fragment D : ('d'|'D') ; +fragment E : ('e'|'E') ; +fragment F : ('f'|'F') ; +fragment G : ('g'|'G') ; +fragment H : ('h'|'H') ; +fragment I : ('i'|'I') ; +fragment J : ('j'|'J') ; +fragment K : ('k'|'K') ; +fragment L : ('l'|'L') ; +fragment M : ('m'|'M') ; +fragment N : ('n'|'N') ; +fragment O : ('o'|'O') ; +fragment P : ('p'|'P') ; +fragment Q : ('q'|'Q') ; +fragment R : ('r'|'R') ; +fragment S : ('s'|'S') ; +fragment T : ('t'|'T') ; +fragment U : ('u'|'U') ; +fragment V : ('v'|'V') ; +fragment W : ('w'|'W') ; +fragment X : ('x'|'X') ; +fragment Y : ('y'|'Y') ; +fragment Z : ('z'|'Z') ; diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java index 8434270fcfd1d0..97a1c1721fb78d 100755 --- a/fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java +++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java @@ -140,6 +140,7 @@ import org.apache.doris.ha.FrontendNodeType; import org.apache.doris.ha.HAProtocol; import org.apache.doris.ha.MasterInfo; +import org.apache.doris.hplsql.store.HplsqlManager; import org.apache.doris.httpv2.meta.MetaBaseAction; import org.apache.doris.journal.JournalCursor; import org.apache.doris.journal.JournalEntity; @@ -449,6 +450,8 @@ public class Env { private StatisticsCleaner statisticsCleaner; + private HplsqlManager hplsqlManager; + private BinlogManager binlogManager; private BinlogGcer binlogGcer; @@ -670,6 +673,7 @@ private Env(boolean isCheckpointCatalog) { this.queryStats = new QueryStats(); this.loadManagerAdapter = new LoadManagerAdapter(); this.hiveTransactionMgr = new HiveTransactionMgr(); + 
this.hplsqlManager = new HplsqlManager(); this.binlogManager = new BinlogManager(); this.binlogGcer = new BinlogGcer(); } @@ -743,6 +747,10 @@ public WorkloadGroupMgr getWorkloadGroupMgr() { return workloadGroupMgr; } + public HplsqlManager getHplsqlManager() { + return hplsqlManager; + } + // use this to get correct ClusterInfoService instance public static SystemInfoService getCurrentSystemInfo() { return getCurrentEnv().getClusterInfo(); @@ -1954,6 +1962,12 @@ public long loadWorkloadGroups(DataInputStream in, long checksum) throws IOExcep return checksum; } + public long loadHplsqlStored(DataInputStream in, long checksum) throws IOException { + hplsqlManager = HplsqlManager.read(in); + LOG.info("finished replay hplsql stored from image"); + return checksum; + } + public long loadSmallFiles(DataInputStream in, long checksum) throws IOException { smallFileMgr.readFields(in); LOG.info("finished replay smallFiles from image"); @@ -2230,6 +2244,11 @@ public long saveWorkloadGroups(CountingDataOutputStream dos, long checksum) thro return checksum; } + public long saveHplsqlStored(CountingDataOutputStream dos, long checksum) throws IOException { + Env.getCurrentEnv().getHplsqlManager().write(dos); + return checksum; + } + public long saveSmallFiles(CountingDataOutputStream dos, long checksum) throws IOException { smallFileMgr.write(dos); return checksum; @@ -2674,6 +2693,10 @@ public Frontend checkFeExist(String host, int port) { return null; } + public boolean checkFeHost(String host) { + return frontends.values().stream().anyMatch(fe -> fe.getHost().equals(host)); + } + public Frontend getFeByName(String name) { for (Frontend fe : frontends.values()) { if (fe.getNodeName().equals(name)) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/Arguments.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/Arguments.java new file mode 100644 index 00000000000000..efe289d752e495 --- /dev/null +++ 
b/fe/fe-core/src/main/java/org/apache/doris/hplsql/Arguments.java @@ -0,0 +1,215 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/Arguments.java +// and modified by Doris + +package org.apache.doris.hplsql; + +import org.apache.commons.cli.CommandLine; +import org.apache.commons.cli.GnuParser; +import org.apache.commons.cli.HelpFormatter; +import org.apache.commons.cli.Option; +import org.apache.commons.cli.OptionBuilder; +import org.apache.commons.cli.Options; +import org.apache.commons.cli.ParseException; + +import java.util.HashMap; +import java.util.Map; +import java.util.Properties; + +public class Arguments { + private CommandLine commandLine; + private Options options = new Options(); + + String execString; + String fileName; + String main; + Map vars = new HashMap(); + + public static Arguments script(String str) { + Arguments arguments = new Arguments(); + arguments.parse(new String[] {"-e", str}); + return arguments; + } + + @SuppressWarnings("static-access") + public Arguments() { + // -e 'query' + options.addOption(OptionBuilder + .hasArg() + .withArgName("quoted-query-string") + 
.withDescription("HPL/SQL from command line") + .create('e')); + + // -f + options.addOption(OptionBuilder + .hasArg() + .withArgName("filename") + .withDescription("HPL/SQL from a file") + .create('f')); + + // -main entry_point_name + options.addOption(OptionBuilder + .hasArg() + .withArgName("procname") + .withDescription("Entry point (procedure or function name)") + .create("main")); + + // -hiveconf x=y + options.addOption(OptionBuilder + .withValueSeparator() + .hasArgs(2) + .withArgName("property=value") + .withLongOpt("hiveconf") + .withDescription("Value for given property") + .create()); + + // Substitution option -d, --define + options.addOption(OptionBuilder + .withValueSeparator() + .hasArgs(2) + .withArgName("key=value") + .withLongOpt("define") + .withDescription("Variable substitution e.g. -d A=B or --define A=B") + .create('d')); + + // Substitution option --hivevar + options.addOption(OptionBuilder + .withValueSeparator() + .hasArgs(2) + .withArgName("key=value") + .withLongOpt("hivevar") + .withDescription("Variable substitution e.g. 
--hivevar A=B") + .create()); + + // [-version|--version] + options.addOption(new Option("version", "version", false, "Print HPL/SQL version")); + + // [-trace|--trace] + options.addOption(new Option("trace", "trace", false, "Print debug information")); + + // [-offline|--offline] + options.addOption(new Option("offline", "offline", false, "Offline mode - skip SQL execution")); + + // [-H|--help] + options.addOption(new Option("H", "help", false, "Print help information")); + } + + /** + * Parse the command line arguments + */ + public boolean parse(String[] args) { + try { + commandLine = new GnuParser().parse(options, args); + execString = commandLine.getOptionValue('e'); + fileName = commandLine.getOptionValue('f'); + main = commandLine.getOptionValue("main"); + Properties p = commandLine.getOptionProperties("hiveconf"); + for (String key : p.stringPropertyNames()) { + vars.put(key, p.getProperty(key)); + } + p = commandLine.getOptionProperties("hivevar"); + for (String key : p.stringPropertyNames()) { + vars.put(key, p.getProperty(key)); + } + p = commandLine.getOptionProperties("define"); + for (String key : p.stringPropertyNames()) { + vars.put(key, p.getProperty(key)); + } + } catch (ParseException e) { + System.err.println(e.getMessage()); + return false; + } + return true; + } + + /** + * Get the value of execution option -e + */ + public String getExecString() { + return execString; + } + + /** + * Get the value of file option -f + */ + public String getFileName() { + return fileName; + } + + /** + * Get the value of -main option + */ + public String getMain() { + return main; + } + + /** + * Get the variables + */ + public Map getVars() { + return vars; + } + + /** + * Test whether version option is set + */ + public boolean hasVersionOption() { + if (commandLine.hasOption("version")) { + return true; + } + return false; + } + + /** + * Test whether debug option is set + */ + public boolean hasTraceOption() { + if (commandLine.hasOption("trace")) { + 
return true; + } + return false; + } + + /** + * Test whether offline option is set + */ + public boolean hasOfflineOption() { + if (commandLine.hasOption("offline")) { + return true; + } + return false; + } + + /** + * Test whether help option is set + */ + public boolean hasHelpOption() { + if (commandLine.hasOption('H')) { + return true; + } + return false; + } + + /** + * Print help information + */ + public void printHelp() { + new HelpFormatter().printHelp("hplsql", options); + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/Cmp.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/Cmp.java new file mode 100644 index 00000000000000..5013256fbe04af --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/Cmp.java @@ -0,0 +1,311 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/Cmp.java +// and modified by Doris + +package org.apache.doris.hplsql; + +import org.apache.doris.hplsql.executor.Metadata; +import org.apache.doris.hplsql.executor.QueryExecutor; +import org.apache.doris.hplsql.executor.QueryResult; + +import org.antlr.v4.runtime.ParserRuleContext; + +import java.math.BigDecimal; +import java.util.List; + +public class Cmp implements Runnable { + + Exec exec; + private QueryExecutor queryExecutor; + Timer timer = new Timer(); + boolean trace = false; + boolean info = false; + + String query; + String conn; + org.apache.doris.hplsql.HplsqlParser.Cmp_stmtContext ctx; + + int tests = 0; + int failedTests = 0; + int failedTestsHighDiff = 0; + int failedTestsHighDiff10 = 0; + private QueryResult result; + + Cmp(Exec e, QueryExecutor queryExecutor) { + exec = e; + trace = exec.getTrace(); + info = exec.getInfo(); + this.queryExecutor = queryExecutor; + } + + Cmp(Exec e, org.apache.doris.hplsql.HplsqlParser.Cmp_stmtContext c, String q, String cn, + QueryExecutor queryExecutor) { + exec = e; + trace = exec.getTrace(); + info = exec.getInfo(); + ctx = c; + query = q; + conn = cn; + this.queryExecutor = queryExecutor; + } + + /** + * Run CMP command + */ + Integer run(org.apache.doris.hplsql.HplsqlParser.Cmp_stmtContext ctx) { + trace(ctx, "CMP"); + this.ctx = ctx; + timer.start(); + StringBuilder conn1 = new StringBuilder(); + StringBuilder conn2 = new StringBuilder(); + Boolean equal = null; + Cmp cmp1 = null; + Cmp cmp2 = null; + try { + String sql1 = getSql(ctx, conn1, 0); + String sql2 = getSql(ctx, conn2, 1); + if (trace) { + trace(ctx, "Query 1: " + sql1); + trace(ctx, "Query 2: " + sql2); + } + cmp1 = new Cmp(exec, ctx, sql1, conn1.toString(), queryExecutor); + cmp2 = new Cmp(exec, ctx, sql2, conn2.toString(), queryExecutor); + cmp1.run(); + cmp2.run(); + equal = compare(cmp1.result, cmp2.result); + } catch 
(Exception e) { + exec.signal(e); + return -1; + } finally { + timer.stop(); + if (info) { + String message = "CMP "; + if (equal != null) { + if (equal) { + message += "Equal, " + tests + " tests"; + } else { + message += "Not Equal, " + failedTests + " of " + tests + " tests failed"; + message += ", " + failedTestsHighDiff + " tests with more than 0.01% difference"; + message += ", " + failedTestsHighDiff10 + " tests with more than 10% difference"; + } + } else { + message += "Failed"; + } + info(ctx, message + ", " + timer.format()); + } + cmp1.closeQuery(); + cmp2.closeQuery(); + } + return 0; + } + + private void closeQuery() { + if (result != null) { + result.close(); + } + } + + /** + * Get data for comparison from the source + */ + @Override + public void run() { + result = queryExecutor.executeQuery(query, ctx); + } + + /** + * Compare the results + */ + Boolean compare(QueryResult query1, QueryResult query2) { + if (query1.error()) { + exec.signal(query1); + return null; + } else if (query2.error()) { + exec.signal(query2); + return null; + } + boolean equal = true; + tests = 0; + failedTests = 0; + try { + Metadata rm1 = query1.metadata(); + Metadata rm2 = query2.metadata(); + int cnt1 = rm1.columnCount(); + int cnt2 = rm2.columnCount(); + tests = cnt1; + while (query1.next() && query2.next()) { + for (int i = 0; i < tests; i++) { + Var v1 = new Var(org.apache.doris.hplsql.Var.Type.DERIVED_TYPE); + Var v2 = new Var(org.apache.doris.hplsql.Var.Type.DERIVED_TYPE); + v1.setValue(query1, i); + if (i < cnt2) { + v2.setValue(query2, i); + } + boolean e = true; + if (!(v1.isNull() && v2.isNull()) && !v1.equals(v2)) { + equal = false; + e = false; + failedTests++; + } + if (trace || info) { + String m = rm1.columnName(i) + "\t" + v1 + "\t" + v2; + if (!e) { + m += "\tNot equal"; + BigDecimal diff = v1.percentDiff(v2); + if (diff != null) { + if (diff.compareTo(BigDecimal.ZERO) != 0) { + m += ", " + diff + "% difference"; + failedTestsHighDiff++; + if 
(diff.compareTo(BigDecimal.TEN) > 0) { + failedTestsHighDiff10++; + } + } else { + m += ", less then 0.01% difference"; + } + } else { + failedTestsHighDiff++; + failedTestsHighDiff10++; + } + } + if (trace) { + trace(null, m); + } else { + info(null, m); + } + } + } + if (equal) { + exec.setSqlSuccess(); + } else { + exec.setSqlCode(1); + } + } + } catch (Exception e) { + exec.signal(e); + return null; + } + return Boolean.valueOf(equal); + } + + /** + * Define the SQL query to access data + */ + private String getSql(org.apache.doris.hplsql.HplsqlParser.Cmp_stmtContext ctx, StringBuilder conn, int idx) + throws Exception { + StringBuilder sql = new StringBuilder(); + String table = null; + String query = null; + if (ctx.cmp_source(idx).table_name() != null) { + table = evalPop(ctx.cmp_source(idx).table_name()).toString(); + } else { + query = evalPop(ctx.cmp_source(idx).select_stmt()).toString(); + } + if (ctx.cmp_source(idx).T_AT() != null) { + conn.append(ctx.cmp_source(idx).qident().getText()); + } else if (table != null) { + conn.append(exec.getObjectConnection(ctx.cmp_source(idx).table_name().getText())); + } else { + conn.append(exec.getStatementConnection()); + } + sql.append("SELECT "); + sql.append(getSelectList(ctx, conn.toString(), table)); + sql.append(" FROM "); + if (table != null) { + sql.append(table); + if (ctx.cmp_source(idx).where_clause() != null) { + sql.append(" " + evalPop(ctx.cmp_source(idx).where_clause()).toString()); + } + } else { + sql.append("("); + sql.append(query); + sql.append(") t"); + } + return sql.toString(); + } + + /** + * Define SELECT listto access data + */ + private String getSelectList(org.apache.doris.hplsql.HplsqlParser.Cmp_stmtContext ctx, String conn, String table) { + StringBuilder sql = new StringBuilder(); + sql.append("COUNT(1) AS row_count"); + if (ctx.T_SUM() != null && table != null) { + Row row = exec.meta.getRowDataType(ctx, conn, table); + if (row != null) { + List cols = row.getColumns(); + int cnt = 
row.size(); + sql.append(",\n"); + for (int i = 0; i < cnt; i++) { + Column col = cols.get(i); + String name = col.getName(); + org.apache.doris.hplsql.Var.Type type + = org.apache.doris.hplsql.Var.defineType(col.getType()); + sql.append("COUNT(" + name + ") AS " + name + "_COUNT_NOT_NULL"); + if (type == org.apache.doris.hplsql.Var.Type.STRING) { + sql.append(",\n"); + sql.append("SUM(LENGTH(" + name + ")) AS " + name + "_SUM_LENGTH,\n"); + sql.append("MIN(LENGTH(" + name + ")) AS " + name + "_MIN_LENGTH,\n"); + sql.append("MAX(LENGTH(" + name + ")) AS " + name + "_MAX_LENGTH"); + } else if (type == org.apache.doris.hplsql.Var.Type.BIGINT + || type == org.apache.doris.hplsql.Var.Type.DECIMAL + || type == org.apache.doris.hplsql.Var.Type.DOUBLE) { + sql.append(",\n"); + sql.append("SUM(" + name + ") AS " + name + "_SUM,\n"); + sql.append("MIN(" + name + ") AS " + name + "_MIN,\n"); + sql.append("MAX(" + name + ") AS " + name + "_MAX"); + } else if (type == org.apache.doris.hplsql.Var.Type.DATE + || type == org.apache.doris.hplsql.Var.Type.TIMESTAMP) { + sql.append(",\n"); + sql.append("SUM(YEAR(" + name + ")) AS " + name + "_SUM_YEAR,\n"); + sql.append("SUM(MONTH(" + name + ")) AS " + name + "_SUM_MONTH,\n"); + sql.append("SUM(DAY(" + name + ")) AS " + name + "_SUM_DAY,\n"); + sql.append("MIN(" + name + ") AS " + name + "_MIN,\n"); + sql.append("MAX(" + name + ") AS " + name + "_MAX"); + } + if (i + 1 < cnt) { + sql.append(",\n"); + } + } + } + } + return sql.toString(); + } + + /** + * Evaluate the expression and pop value from the stack + */ + private Var evalPop(ParserRuleContext ctx) { + exec.visit(ctx); + if (!exec.stack.isEmpty()) { + return exec.stackPop(); + } + return org.apache.doris.hplsql.Var.Empty; + } + + /** + * Trace and information + */ + private void trace(ParserRuleContext ctx, String message) { + exec.trace(ctx, message); + } + + private void info(ParserRuleContext ctx, String message) { + exec.info(ctx, message); + } +} diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/hplsql/Column.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/Column.java new file mode 100644 index 00000000000000..f857d16b33b5be --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/Column.java @@ -0,0 +1,70 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/Column.java +// and modified by Doris + +package org.apache.doris.hplsql; + +/** + * Table column + */ +public class Column { + private org.apache.doris.hplsql.ColumnDefinition definition; + private Var value; + + public Column(String name, String type, Var value) { + this.definition = new org.apache.doris.hplsql.ColumnDefinition(name, + org.apache.doris.hplsql.ColumnType.parse(type)); + this.value = value; + } + + /** + * Set the column value + */ + public void setValue(Var value) { + this.value = value; + } + + /** + * Get the column name + */ + public String getName() { + return definition.columnName(); + } + + /** + * Get the column type + */ + public String getType() { + return definition.columnType().typeString(); + } + + public org.apache.doris.hplsql.ColumnDefinition definition() { + return definition; + } + + /** + * Get the column value + */ + Var getValue() { + return value; + } +} + + + diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/ColumnDefinition.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/ColumnDefinition.java new file mode 100644 index 00000000000000..3b70197602724a --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/ColumnDefinition.java @@ -0,0 +1,47 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/ColumnDefinition.java +// and modified by Doris + +package org.apache.doris.hplsql; + +public class ColumnDefinition { + private final String name; + private final ColumnType type; + + public static ColumnDefinition unnamed(ColumnType type) { + return new ColumnDefinition("__UNNAMED__", type); + } + + public ColumnDefinition(String name, ColumnType type) { + this.name = name; + this.type = type; + } + + public String columnName() { + return name; + } + + public ColumnType columnType() { + return type; + } + + public String columnTypeString() { + return type.typeString(); + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/ColumnMap.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/ColumnMap.java new file mode 100644 index 00000000000000..cb790bc58c439c --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/ColumnMap.java @@ -0,0 +1,53 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/ColumnMap.java +// and modified by Doris + +package org.apache.doris.hplsql; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class ColumnMap { + private List columns = new ArrayList<>(); + private Map columnMap = new HashMap<>(); + + public void add(Column column) { + columns.add(column); + columnMap.put(column.getName().toUpperCase(), column); + } + + public Column get(String name) { + return columnMap.get(name.toUpperCase()); + } + + public Column at(int index) { + return columns.get(index); + } + + public List columns() { + return Collections.unmodifiableList(columns); + } + + public int size() { + return columns.size(); + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/ColumnType.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/ColumnType.java new file mode 100644 index 00000000000000..3abcf7d3904b17 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/ColumnType.java @@ -0,0 +1,76 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/ColumnType.java +// and modified by Doris + +package org.apache.doris.hplsql; + +public class ColumnType { + private final String type; + private final Precision precision; + + public static ColumnType parse(String type) { + return new ColumnType(parseType(type), Precision.parse(type)); + } + + public ColumnType(String type, Precision precision) { + this.type = type; + this.precision = precision; + } + + private static String parseType(String type) { + int index = type.indexOf('('); + return index == -1 ? 
type : type.substring(0, index); + } + + public String typeString() { + return type; + } + + public Precision precision() { + return precision; + } + + private static class Precision { + public final int len; + public final int scale; + + public static Precision parse(String type) { + int open = type.indexOf('('); + if (open == -1) { + return new Precision(0, 0); + } + int len; + int scale = 0; + int comma = type.indexOf(',', open); + int close = type.indexOf(')', open); + if (comma == -1) { + len = Integer.parseInt(type.substring(open + 1, close)); + } else { + len = Integer.parseInt(type.substring(open + 1, comma)); + scale = Integer.parseInt(type.substring(comma + 1, close)); + } + return new Precision(scale, len); + } + + Precision(int scale, int len) { + this.len = len; + this.scale = scale; + } + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/Conf.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/Conf.java new file mode 100644 index 00000000000000..f79576aab61b95 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/Conf.java @@ -0,0 +1,173 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/Conf.java +// and modified by Doris + +package org.apache.doris.hplsql; + +import org.apache.doris.hplsql.Exec.OnError; + +import org.apache.hadoop.conf.Configuration; + +import java.net.URL; +import java.util.HashMap; + +/** + * HPL/SQL run-time configuration + */ +public class Conf extends Configuration { + + public static final String SITE_XML = "hplsql-site.xml"; + public static final String DOT_HPLSQLRC = ".hplsqlrc"; + public static final String HPLSQLRC = "hplsqlrc"; + public static final String HPLSQL_LOCALS_SQL = "hplsql_locals.sql"; + + public static final String CONN_CONVERT = "hplsql.conn.convert."; + public static final String CONN_DEFAULT = "hplsql.conn.default"; + public static final String DUAL_TABLE = "hplsql.dual.table"; + public static final String INSERT_VALUES = "hplsql.insert.values"; + public static final String ONERROR = "hplsql.onerror"; + public static final String TEMP_TABLES = "hplsql.temp.tables"; + public static final String TEMP_TABLES_SCHEMA = "hplsql.temp.tables.schema"; + public static final String TEMP_TABLES_LOCATION = "hplsql.temp.tables.location"; + + public static final String TRUE = "true"; + public static final String FALSE = "false"; + public static final String YES = "yes"; + public static final String NO = "no"; + + public enum InsertValues { + NATIVE, SELECT + } + + public enum TempTables { + NATIVE, MANAGED + } + + public String defaultConnection; + + OnError onError = OnError.EXCEPTION; + InsertValues insertValues = InsertValues.NATIVE; + TempTables tempTables = TempTables.NATIVE; + + String dualTable = null; + + String tempTablesSchema = ""; + String tempTablesLocation = "/tmp/hplsql"; + + HashMap connConvert = new HashMap(); + + /** + * Set an option + */ + public void setOption(String key, String value) { + if (key.startsWith(CONN_CONVERT)) { + setConnectionConvert(key.substring(19), value); + } 
else if (key.compareToIgnoreCase(CONN_DEFAULT) == 0) { + defaultConnection = value; + } else if (key.compareToIgnoreCase(DUAL_TABLE) == 0) { + dualTable = value; + } else if (key.compareToIgnoreCase(INSERT_VALUES) == 0) { + setInsertValues(value); + } else if (key.compareToIgnoreCase(ONERROR) == 0) { + setOnError(value); + } else if (key.compareToIgnoreCase(TEMP_TABLES) == 0) { + setTempTables(value); + } else if (key.compareToIgnoreCase(TEMP_TABLES_SCHEMA) == 0) { + tempTablesSchema = value; + } else if (key.compareToIgnoreCase(TEMP_TABLES_LOCATION) == 0) { + tempTablesLocation = value; + } + } + + /** + * Set hplsql.insert.values option + */ + private void setInsertValues(String value) { + if (value.compareToIgnoreCase("NATIVE") == 0) { + insertValues = InsertValues.NATIVE; + } else if (value.compareToIgnoreCase("SELECT") == 0) { + insertValues = InsertValues.SELECT; + } + } + + /** + * Set hplsql.temp.tables option + */ + private void setTempTables(String value) { + if (value.compareToIgnoreCase("NATIVE") == 0) { + tempTables = TempTables.NATIVE; + } else if (value.compareToIgnoreCase("MANAGED") == 0) { + tempTables = TempTables.MANAGED; + } + } + + /** + * Set error handling approach + */ + private void setOnError(String value) { + if (value.compareToIgnoreCase("EXCEPTION") == 0) { + onError = OnError.EXCEPTION; + } else if (value.compareToIgnoreCase("SETERROR") == 0) { + onError = OnError.SETERROR; + } + if (value.compareToIgnoreCase("STOP") == 0) { + onError = OnError.STOP; + } + } + + /** + * Set whether convert or not SQL for the specified connection profile + */ + void setConnectionConvert(String name, String value) { + boolean convert = false; + if (value.compareToIgnoreCase(TRUE) == 0 || value.compareToIgnoreCase(YES) == 0) { + convert = true; + } + connConvert.put(name, convert); + } + + /** + * Get whether convert or not SQL for the specified connection profile + */ + boolean getConnectionConvert(String name) { + Boolean convert = 
connConvert.get(name); + if (convert != null) { + return convert.booleanValue(); + } + return false; + } + + /** + * Load parameters + */ + public void init() { + addResource(SITE_XML); + } + + /** + * Get the location of the configuration file + */ + public String getLocation() { + URL url = getResource(SITE_XML); + if (url != null) { + return url.toString(); + } + return ""; + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/Conn.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/Conn.java new file mode 100644 index 00000000000000..ab884e662f0cf5 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/Conn.java @@ -0,0 +1,253 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/Conn.java +// and modified by Doris + +package org.apache.doris.hplsql; + +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Stack; + +public class Conn { + + public enum Type { + DB2, HIVE, MYSQL, TERADATA + } + + HashMap> connections = new HashMap>(); + HashMap connStrings = new HashMap(); + HashMap connTypes = new HashMap(); + + HashMap> connInits = new HashMap>(); + HashMap> preSql = new HashMap>(); + + Exec exec; + Timer timer = new Timer(); + boolean trace = false; + boolean info = false; + + Conn(Exec e) { + exec = e; + trace = exec.getTrace(); + info = exec.getInfo(); + } + + /** + * Execute a SQL query + */ + public Query executeQuery(Query query, String connName) { + try { + Connection conn = getConnection(connName); + runPreSql(connName, conn); + Statement stmt = conn.createStatement(); + exec.info(null, "Starting query"); + timer.start(); + ResultSet rs = stmt.executeQuery(query.sql); + timer.stop(); + query.set(conn, stmt, rs); + if (info) { + exec.info(null, "Query executed successfully (" + timer.format() + ")"); + } + } catch (Exception e) { + query.setError(e); + } + return query; + } + + /** + * Prepare a SQL query + */ + public Query prepareQuery(Query query, String connName) { + try { + Connection conn = getConnection(connName); + timer.start(); + PreparedStatement stmt = conn.prepareStatement(query.sql); + timer.stop(); + query.set(conn, stmt); + if (info) { + exec.info(null, "Prepared statement executed successfully (" + timer.format() + ")"); + } + } catch (Exception e) { + query.setError(e); + } + return query; + } + + /** + * Close the query object + */ + public void closeQuery(Query query, String connName) { + 
query.closeStatement(); + returnConnection(connName, query.getConnection()); + } + + /** + * Run pre-SQL statements + * + * @throws SQLException + */ + void runPreSql(String connName, Connection conn) throws SQLException { + ArrayList sqls = preSql.get(connName); + if (sqls != null) { + Statement s = conn.createStatement(); + for (String sql : sqls) { + exec.info(null, "Starting pre-SQL statement"); + s.execute(sql); + } + s.close(); + preSql.remove(connName); + } + } + + /** + * Get a connection + * + * @throws Exception + */ + synchronized Connection getConnection(String connName) throws Exception { + Stack connStack = connections.get(connName); + String connStr = connStrings.get(connName); + if (connStr == null) { + throw new Exception("Unknown connection profile: " + connName); + } + if (connStack != null && !connStack.empty()) { // Reuse an existing connection + return connStack.pop(); + } + Connection c = openConnection(connStr); + ArrayList sqls = connInits.get(connName); // Run initialization statements on the connection + if (sqls != null) { + Statement s = c.createStatement(); + for (String sql : sqls) { + s.execute(sql); + } + s.close(); + } + return c; + } + + /** + * Open a new connection + * + * @throws Exception + */ + Connection openConnection(String connStr) throws Exception { + String driver = "com.mysql.jdbc.Driver"; + StringBuilder url = new StringBuilder(); + String usr = ""; + String pwd = ""; + if (connStr != null) { + String[] c = connStr.split(";"); + if (c.length >= 1) { + driver = c[0]; + } + if (c.length >= 2) { + url.append(c[1]); + } else { + url.append("jdbc:mysql://"); + } + for (int i = 2; i < c.length; i++) { + if (c[i].contains("=")) { + url.append(";"); + url.append(c[i]); + } else if (usr.isEmpty()) { + usr = c[i]; + } else if (pwd.isEmpty()) { + pwd = c[i]; + } + } + } + Class.forName(driver); + timer.start(); + Connection conn = DriverManager.getConnection(url.toString().trim(), usr, pwd); + timer.stop(); + if (info) { + 
exec.info(null, "Open connection: " + url + " (" + timer.format() + ")"); + } + return conn; + } + + /** + * Get the database type by profile name + */ + Conn.Type getTypeByProfile(String name) { + return connTypes.get(name); + } + + /** + * Get the database type by connection string + */ + Conn.Type getType(String connStr) { + if (connStr.contains("hive.")) { + return Type.HIVE; + } else if (connStr.contains("db2.")) { + return Type.DB2; + } else if (connStr.contains("mysql.")) { + return Type.MYSQL; + } else if (connStr.contains("teradata.")) { + return Type.TERADATA; + } + return Type.HIVE; + } + + /** + * Return the connection to the pool + */ + void returnConnection(String name, Connection conn) { + if (conn != null) { + connections.get(name).push(conn); + } + } + + /** + * Add a new connection string + */ + public void addConnection(String name, String connStr) { + connections.put(name, new Stack()); + connStrings.put(name, connStr); + connTypes.put(name, getType(connStr)); + } + + /** + * Add initialization statements for the specified connection + */ + public void addConnectionInit(String name, String connInit) { + ArrayList a = new ArrayList(); + String[] sa = connInit.split(";"); + for (String s : sa) { + s = s.trim(); + if (!s.isEmpty()) { + a.add(s); + } + } + connInits.put(name, a); + } + + /** + * Add SQL statements to be executed before executing the next SQL statement (pre-SQL) + */ + public void addPreSql(String name, ArrayList sql) { + preSql.put(name, sql); + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/Console.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/Console.java new file mode 100644 index 00000000000000..57a0201ac6d226 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/Console.java @@ -0,0 +1,51 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. 
See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/Console.java +// and modified by Doris + +package org.apache.doris.hplsql; + +public interface Console { + void print(String msg); + + void printLine(String msg); + + void printError(String msg); + + void flushConsole(); + + Console STANDARD = new Console() { + @Override + public void print(String msg) { + System.out.print(msg); + } + + @Override + public void printLine(String msg) { + System.out.println(msg); + } + + @Override + public void printError(String msg) { + System.err.println(msg); + } + + @Override + public void flushConsole() {} + }; +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/Converter.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/Converter.java new file mode 100644 index 00000000000000..56ac5cee9ce791 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/Converter.java @@ -0,0 +1,80 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. 
The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/Converter.java +// and modified by Doris + +package org.apache.doris.hplsql; + +/** + * On-the-fly SQL Converter + */ +public class Converter { + + Exec exec; + boolean trace = false; + + Converter(Exec e) { + exec = e; + trace = exec.getTrace(); + } + + /** + * Convert a data type + */ + String dataType(org.apache.doris.hplsql.HplsqlParser.DtypeContext type, + org.apache.doris.hplsql.HplsqlParser.Dtype_lenContext len) { + String t = exec.getText(type); + boolean enclosed = false; + if (t.charAt(0) == '[') { + t = t.substring(1, t.length() - 1); + enclosed = true; + } + if (t.equalsIgnoreCase("BIT")) { + t = "TINYINT"; + } else if (t.equalsIgnoreCase("INT") || t.equalsIgnoreCase("INTEGER")) { + // MySQL can use INT(n) + } else if (t.equalsIgnoreCase("INT2")) { + t = "SMALLINT"; + } else if (t.equalsIgnoreCase("INT4")) { + t = "INT"; + } else if (t.equalsIgnoreCase("INT8")) { + t = "BIGINT"; + } else if (t.equalsIgnoreCase("DATETIME") || t.equalsIgnoreCase("SMALLDATETIME")) { + t = "TIMESTAMP"; + } else if ((t.equalsIgnoreCase("VARCHAR") || t.equalsIgnoreCase("NVARCHAR")) && len.T_MAX() != null) { + t = "STRING"; + } else if (t.equalsIgnoreCase("VARCHAR2") || t.equalsIgnoreCase("NCHAR") || t.equalsIgnoreCase("NVARCHAR") + || t.equalsIgnoreCase("TEXT")) { + t = 
"STRING"; + } else if (t.equalsIgnoreCase("NUMBER") || t.equalsIgnoreCase("NUMERIC")) { + t = "DECIMAL"; + if (len != null) { + t += exec.getText(len); + } + } else if (len != null) { + if (!enclosed) { + return exec.getText(type, type.getStart(), len.getStop()); + } else { + return t + exec.getText(len, len.getStart(), len.getStop()); + } + } else if (!enclosed) { + return exec.getText(type, type.getStart(), type.getStop()); + } + return t; + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/Copy.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/Copy.java new file mode 100644 index 00000000000000..c5be5cd44a2d90 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/Copy.java @@ -0,0 +1,446 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/Copy.java +// and modified by Doris + +package org.apache.doris.hplsql; + +import org.apache.doris.hplsql.executor.QueryExecutor; +import org.apache.doris.hplsql.executor.QueryResult; + +import org.antlr.v4.runtime.ParserRuleContext; +import org.apache.commons.lang3.StringEscapeUtils; +import org.apache.commons.lang3.tuple.Pair; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; + +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.OutputStream; +import java.math.RoundingMode; +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.text.DecimalFormat; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class Copy { + + Exec exec; + Timer timer = new Timer(); + boolean trace = false; + boolean info = false; + + long srcSizeInBytes = 0; + + String delimiter = "\t"; + boolean sqlInsert = false; + String sqlInsertName; + String targetConn; + int batchSize = 1000; + + boolean overwrite = false; + boolean delete = false; + boolean ignore = false; + private QueryExecutor queryExecutor; + + Copy(Exec e, QueryExecutor queryExecutor) { + exec = e; + trace = exec.getTrace(); + info = exec.getInfo(); + this.queryExecutor = queryExecutor; + } + + /** + * Run COPY command + */ + Integer run(org.apache.doris.hplsql.HplsqlParser.Copy_stmtContext ctx) { + trace(ctx, "COPY"); + initOptions(ctx); + StringBuilder sql = new StringBuilder(); + if (ctx.table_name() != null) { + String table = evalPop(ctx.table_name()).toString(); + sql.append("SELECT * FROM "); + sql.append(table); + } else { + sql.append(evalPop(ctx.select_stmt()).toString()); + if (trace) { + trace(ctx, "Statement:\n" + sql); + } + } + QueryResult query = queryExecutor.executeQuery(sql.toString(), ctx); + if (query.error()) { + exec.signal(query); + return 1; + } + exec.setSqlSuccess(); + try { + 
if (targetConn != null) { + copyToTable(ctx, query); + } else { + copyToFile(ctx, query); + } + } catch (Exception e) { + exec.signal(e); + return 1; + } finally { + query.close(); + } + return 0; + } + + /** + * Copy the query results to another table + * + * @throws Exception + */ + void copyToTable(org.apache.doris.hplsql.HplsqlParser.Copy_stmtContext ctx, QueryResult query) throws Exception { + int cols = query.columnCount(); + int rows = 0; + if (trace) { + trace(ctx, "SELECT executed: " + cols + " columns"); + } + Connection conn = exec.getConnection(targetConn); + StringBuilder sql = new StringBuilder(); + sql.append("INSERT INTO " + sqlInsertName + " VALUES ("); + for (int i = 0; i < cols; i++) { + sql.append("?"); + if (i + 1 < cols) { + sql.append(","); + } + } + sql.append(")"); + PreparedStatement ps = conn.prepareStatement(sql.toString()); + long start = timer.start(); + long prev = start; + boolean batchOpen = false; + while (query.next()) { + for (int i = 0; i < cols; i++) { + ps.setObject(i, query.column(i, Object.class)); + } + rows++; + if (batchSize > 1) { + ps.addBatch(); + batchOpen = true; + if (rows % batchSize == 0) { + ps.executeBatch(); + batchOpen = false; + } + } else { + ps.executeUpdate(); + } + if (trace && rows % 100 == 0) { + long cur = timer.current(); + if (cur - prev > 10000) { + trace(ctx, "Copying rows: " + rows + " (" + rows / ((cur - start) / 1000) + " rows/sec)"); + prev = cur; + } + } + } + if (batchOpen) { + ps.executeBatch(); + } + ps.close(); + exec.returnConnection(targetConn, conn); + exec.setRowCount(rows); + long elapsed = timer.stop(); + if (info) { + DecimalFormat df = new DecimalFormat("#,##0.00"); + df.setRoundingMode(RoundingMode.HALF_UP); + info(ctx, "COPY completed: " + rows + " row(s), " + timer.format() + ", " + df.format( + rows / (elapsed / 1000.0)) + " rows/sec"); + } + } + + /** + * Copy the query results to a file + * + * @throws Exception + */ + void 
copyToFile(org.apache.doris.hplsql.HplsqlParser.Copy_stmtContext ctx, QueryResult query) throws Exception { + String filename = null; + if (ctx.copy_target().expr() != null) { + filename = evalPop(ctx.copy_target().expr()).toString(); + } else { + filename = ctx.copy_target().getText(); + } + byte[] del = delimiter.getBytes(); + byte[] rowdel = "\n".getBytes(); + byte[] nullstr = "NULL".getBytes(); + int cols = query.columnCount(); + int rows = 0; + long bytes = 0; + if (trace || info) { + String mes = "Query executed: " + cols + " columns, output file: " + filename; + if (trace) { + trace(ctx, mes); + } else { + info(ctx, mes); + } + } + java.io.File file = null; + File hdfsFile = null; + if (ctx.T_HDFS() == null) { + file = new java.io.File(filename); + } else { + hdfsFile = new File(); + } + OutputStream out = null; + timer.start(); + try { + if (file != null) { + if (!file.exists()) { + file.createNewFile(); + } + out = new FileOutputStream(file, false /*append*/); + } else { + out = hdfsFile.create(filename, true /*overwrite*/); + } + String col; + String sql = ""; + if (sqlInsert) { + sql = "INSERT INTO " + sqlInsertName + " VALUES ("; + rowdel = ");\n".getBytes(); + } + while (query.next()) { + if (sqlInsert) { + out.write(sql.getBytes()); + } + for (int i = 0; i < cols; i++) { + if (i > 0) { + out.write(del); + bytes += del.length; + } + col = query.column(i, String.class); + if (col != null) { + if (sqlInsert) { + col = org.apache.doris.hplsql.Utils.quoteString(col); + } + byte[] b = col.getBytes(); + out.write(b); + bytes += b.length; + } else if (sqlInsert) { + out.write(nullstr); + } + } + out.write(rowdel); + bytes += rowdel.length; + rows++; + } + exec.setRowCount(rows); + } finally { + if (out != null) { + out.close(); + } + } + long elapsed = timer.stop(); + if (info) { + DecimalFormat df = new DecimalFormat("#,##0.00"); + df.setRoundingMode(RoundingMode.HALF_UP); + info(ctx, "COPY completed: " + rows + " row(s), " + + 
org.apache.doris.hplsql.Utils.formatSizeInBytes(bytes) + ", " + + timer.format() + ", " + df.format(rows / (elapsed / 1000.0)) + " rows/sec"); + } + } + + /** + * Run COPY FROM LOCAL statement + */ + public Integer runFromLocal(org.apache.doris.hplsql.HplsqlParser.Copy_from_local_stmtContext ctx) { + trace(ctx, "COPY FROM LOCAL"); + initFileOptions(ctx.copy_file_option()); + HashMap> srcFiles = new HashMap<>(); + String src = evalPop(ctx.copy_source(0)).toString(); + String dest = evalPop(ctx.copy_target()).toString(); + int srcItems = ctx.copy_source().size(); + for (int i = 0; i < srcItems; i++) { + createLocalFileList(srcFiles, evalPop(ctx.copy_source(i)).toString(), null); + } + if (info) { + info(ctx, "Files to copy: " + srcFiles.size() + " (" + + org.apache.doris.hplsql.Utils.formatSizeInBytes(srcSizeInBytes) + ")"); + } + if (srcFiles.size() == 0) { + exec.setHostCode(2); + return 2; + } + timer.start(); + File file = new File(); + FileSystem fs; + int succeed = 0; + int failed = 0; + long copiedSize = 0; + try { + fs = file.createFs(); + boolean multi = false; + if (srcFiles.size() > 1) { + multi = true; + } + for (Map.Entry> i : srcFiles.entrySet()) { + try { + Path s = new Path(i.getKey()); + Path d; + if (multi) { + String relativePath = i.getValue().getLeft(); + if (relativePath == null) { + d = new Path(dest, s.getName()); + } else { + d = new Path(dest, relativePath + Path.SEPARATOR + s.getName()); + } + } else { + // Path to file is specified (can be relative), + // so treat target as a file name (hadoop fs -put behavior) + if (srcItems == 1 && i.getKey().endsWith(src)) { + d = new Path(dest); + } else { // Source directory is specified, so treat the target as a directory + d = new Path(dest + Path.SEPARATOR + s.getName()); + } + } + fs.copyFromLocalFile(delete, overwrite, s, d); + succeed++; + long size = i.getValue().getRight(); + copiedSize += size; + if (info) { + info(ctx, "Copied: " + file.resolvePath(d) + " (" + + 
org.apache.doris.hplsql.Utils.formatSizeInBytes(size) + ")"); + } + } catch (IOException e) { + failed++; + if (!ignore) { + throw e; + } + } + } + } catch (IOException e) { + exec.signal(e); + exec.setHostCode(1); + return 1; + } finally { + long elapsed = timer.stop(); + if (info) { + info(ctx, "COPY completed: " + succeed + " succeed, " + failed + " failed, " + + timer.format() + ", " + org.apache.doris.hplsql.Utils.formatSizeInBytes(copiedSize) + ", " + + org.apache.doris.hplsql.Utils.formatBytesPerSec(copiedSize, elapsed)); + } + if (failed == 0) { + exec.setHostCode(0); + } else { + exec.setHostCode(1); + } + file.close(); + } + return 0; + } + + /** + * Create the list of local files for the specified path (including subdirectories) + */ + void createLocalFileList(HashMap> list, String path, String relativePath) { + java.io.File file = new java.io.File(path); + if (file.exists()) { + if (file.isDirectory()) { + for (java.io.File i : file.listFiles()) { + if (i.isDirectory()) { + String rel; + if (relativePath == null) { + rel = i.getName(); + } else { + rel = relativePath + java.io.File.separator + i.getName(); + } + createLocalFileList(list, i.getAbsolutePath(), rel); + } else { + long size = i.length(); + list.put(i.getAbsolutePath(), Pair.of(relativePath, size)); + srcSizeInBytes += size; + } + } + } else { + long size = file.length(); + list.put(file.getAbsolutePath(), Pair.of(relativePath, size)); + srcSizeInBytes += size; + } + } + } + + /** + * Initialize COPY command options + */ + void initOptions(org.apache.doris.hplsql.HplsqlParser.Copy_stmtContext ctx) { + int cnt = ctx.copy_option().size(); + for (int i = 0; i < cnt; i++) { + org.apache.doris.hplsql.HplsqlParser.Copy_optionContext option = ctx.copy_option(i); + if (option.T_DELIMITER() != null) { + delimiter = StringEscapeUtils.unescapeJava(evalPop(option.expr()).toString()); + } else if (option.T_SQLINSERT() != null) { + sqlInsert = true; + delimiter = ", "; + if (option.qident() != null) { + 
sqlInsertName = option.qident().getText(); + } + } else if (option.T_AT() != null) { + targetConn = option.qident().getText(); + if (ctx.copy_target().expr() != null) { + sqlInsertName = evalPop(ctx.copy_target().expr()).toString(); + } else { + sqlInsertName = ctx.copy_target().getText(); + } + } else if (option.T_BATCHSIZE() != null) { + batchSize = evalPop(option.expr()).intValue(); + } + } + } + + /** + * Initialize COPY FILE options + */ + void initFileOptions(List options) { + srcSizeInBytes = 0; + for (org.apache.doris.hplsql.HplsqlParser.Copy_file_optionContext i : options) { + if (i.T_OVERWRITE() != null) { + overwrite = true; + } else if (i.T_DELETE() != null) { + delete = true; + } else if (i.T_IGNORE() != null) { + ignore = true; + } + } + } + + /** + * Evaluate the expression and pop value from the stack + */ + Var evalPop(ParserRuleContext ctx) { + exec.visit(ctx); + if (!exec.stack.isEmpty()) { + return exec.stackPop(); + } + return Var.Empty; + } + + /** + * Trace and information + */ + public void trace(ParserRuleContext ctx, String message) { + exec.trace(ctx, message); + } + + public void info(ParserRuleContext ctx, String message) { + exec.info(ctx, message); + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/Cursor.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/Cursor.java new file mode 100644 index 00000000000000..257300c47cdedf --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/Cursor.java @@ -0,0 +1,127 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/Cursor.java +// and modified by Doris + +package org.apache.doris.hplsql; + +import org.apache.doris.hplsql.executor.QueryExecutor; +import org.apache.doris.hplsql.executor.QueryResult; + +import org.antlr.v4.runtime.ParserRuleContext; + +public class Cursor { + private String sql; + private ParserRuleContext sqlExpr; + private ParserRuleContext sqlSelect; + private boolean withReturn = false; + private QueryResult queryResult; + + public enum State { + OPEN, FETCHED_OK, FETCHED_NODATA, CLOSE + } + + State state = State.CLOSE; + + public Cursor(String sql) { + this.sql = sql; + } + + public void setExprCtx(ParserRuleContext sqlExpr) { + this.sqlExpr = sqlExpr; + } + + public void setSelectCtx(ParserRuleContext sqlSelect) { + this.sqlSelect = sqlSelect; + } + + public void setWithReturn(boolean withReturn) { + this.withReturn = withReturn; + } + + public ParserRuleContext getSqlExpr() { + return sqlExpr; + } + + public ParserRuleContext getSqlSelect() { + return sqlSelect; + } + + public boolean isWithReturn() { + return withReturn; + } + + public void setSql(String sql) { + this.sql = sql; + } + + public String getSql() { + return sql; + } + + public void open(QueryExecutor queryExecutor, org.apache.doris.hplsql.HplsqlParser.Open_stmtContext ctx) { + this.queryResult = queryExecutor.executeQuery(sql, ctx); + this.state = State.OPEN; + } + + public QueryResult getQueryResult() { + return queryResult; + } + + /** + * Set the fetch 
status + */ + public void setFetch(boolean ok) { + if (ok) { + state = State.FETCHED_OK; + } else { + state = State.FETCHED_NODATA; + } + } + + public Boolean isFound() { + if (state == State.OPEN || state == State.CLOSE) { + return null; + } + if (state == State.FETCHED_OK) { + return Boolean.TRUE; + } + return Boolean.FALSE; + } + + public Boolean isNotFound() { + if (state == State.OPEN || state == State.CLOSE) { + return null; + } + if (state == State.FETCHED_NODATA) { + return Boolean.TRUE; + } + return Boolean.FALSE; + } + + public void close() { + if (queryResult != null) { + queryResult.close(); + state = State.CLOSE; + } + } + + public boolean isOpen() { + return state != State.CLOSE; + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/Exec.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/Exec.java new file mode 100644 index 00000000000000..99818598281d31 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/Exec.java @@ -0,0 +1,2914 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/Exec.java +// and modified by Doris + +package org.apache.doris.hplsql; + +import org.apache.doris.hplsql.HplsqlParser.StmtContext; +import org.apache.doris.hplsql.Var.Type; +import org.apache.doris.hplsql.exception.HplValidationException; +import org.apache.doris.hplsql.exception.QueryException; +import org.apache.doris.hplsql.exception.TypeException; +import org.apache.doris.hplsql.exception.UndefinedIdentException; +import org.apache.doris.hplsql.executor.JdbcQueryExecutor; +import org.apache.doris.hplsql.executor.Metadata; +import org.apache.doris.hplsql.executor.QueryExecutor; +import org.apache.doris.hplsql.executor.QueryResult; +import org.apache.doris.hplsql.executor.ResultListener; +import org.apache.doris.hplsql.functions.BuiltinFunctions; +import org.apache.doris.hplsql.functions.DorisFunctionRegistry; +import org.apache.doris.hplsql.functions.FunctionDatetime; +import org.apache.doris.hplsql.functions.FunctionMisc; +import org.apache.doris.hplsql.functions.FunctionRegistry; +import org.apache.doris.hplsql.functions.FunctionString; +import org.apache.doris.hplsql.functions.InMemoryFunctionRegistry; +import org.apache.doris.hplsql.objects.DbmOutput; +import org.apache.doris.hplsql.objects.DbmOutputClass; +import org.apache.doris.hplsql.objects.HplObject; +import org.apache.doris.hplsql.objects.Method; +import org.apache.doris.hplsql.objects.MethodDictionary; +import org.apache.doris.hplsql.objects.MethodParams; +import org.apache.doris.hplsql.objects.Table; +import org.apache.doris.hplsql.objects.TableClass; +import org.apache.doris.hplsql.objects.UtlFile; +import org.apache.doris.hplsql.objects.UtlFileClass; +import org.apache.doris.hplsql.packages.DorisPackageRegistry; +import org.apache.doris.hplsql.packages.InMemoryPackageRegistry; +import org.apache.doris.hplsql.packages.PackageRegistry; +import 
org.apache.doris.hplsql.store.MetaClient; + +import org.antlr.v4.runtime.ANTLRInputStream; +import org.antlr.v4.runtime.CommonTokenStream; +import org.antlr.v4.runtime.ParserRuleContext; +import org.antlr.v4.runtime.Token; +import org.antlr.v4.runtime.misc.NotNull; +import org.antlr.v4.runtime.tree.ParseTree; +import org.antlr.v4.runtime.tree.TerminalNode; +import org.apache.commons.io.FileUtils; +import org.apache.commons.lang3.exception.ExceptionUtils; + +import java.io.ByteArrayInputStream; +import java.io.Closeable; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.InputStream; +import java.io.PrintWriter; +import java.io.UncheckedIOException; +import java.math.BigDecimal; +import java.nio.charset.StandardCharsets; +import java.sql.Connection; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Optional; +import java.util.Stack; +import java.util.stream.Collectors; + +/** + * HPL/SQL script executor + */ +public class Exec extends org.apache.doris.hplsql.HplsqlBaseVisitor implements Closeable { + + public static final String VERSION = "HPL/SQL 0.3.31"; + public static final String ERRORCODE = "ERRORCODE"; + public static final String SQLCODE = "SQLCODE"; + public static final String SQLSTATE = "SQLSTATE"; + public static final String HOSTCODE = "HOSTCODE"; + + Exec exec; + FunctionRegistry functions; + private BuiltinFunctions builtinFunctions; + private MetaClient client; + QueryExecutor queryExecutor; + private PackageRegistry packageRegistry = new InMemoryPackageRegistry(); + private boolean packageLoading = false; + private Map types = new HashMap<>(); + + public enum OnError { + EXCEPTION, SETERROR, STOP + } + + // Scopes of execution (code blocks) with own local variables, parameters and exception handlers + Stack scopes = new Stack<>(); 
+ Scope globalScope; + Scope currentScope; + + Stack stack = new Stack<>(); + Stack labels = new Stack<>(); + Stack callStack = new Stack<>(); + + Stack signals = new Stack<>(); + Signal currentSignal; + Scope currentHandlerScope; + boolean resignal = false; + + HashMap managedTables = new HashMap<>(); + HashMap objectMap = new HashMap<>(); + HashMap objectConnMap = new HashMap<>(); + HashMap> returnCursors = new HashMap<>(); + HashMap packages = new HashMap<>(); + + Package currentPackageDecl = null; + + public ArrayList stmtConnList = new ArrayList<>(); + + Arguments arguments = new Arguments(); + public Conf conf; + Expression expr; + Converter converter; + Meta meta; + Select select; + Stmt stmt; + Conn conn; + Console console = Console.STANDARD; + ResultListener resultListener = ResultListener.NONE; + + int rowCount = 0; + + StringBuilder localUdf = new StringBuilder(); + boolean initRoutines = false; + public boolean buildSql = false; + public boolean inCallStmt = false; + boolean udfRegistered = false; + boolean udfRun = false; + + boolean dotHplsqlrcExists = false; + boolean hplsqlrcExists = false; + + boolean trace = false; + boolean info = true; + boolean offline = false; + + StmtContext lastStmt = null; + + public Exec() { + exec = this; + queryExecutor = new JdbcQueryExecutor(this); + } + + public Exec(Conf conf, Console console, QueryExecutor queryExecutor, ResultListener resultListener) { + this.conf = conf; + this.exec = this; + this.console = console; + this.queryExecutor = queryExecutor; + this.resultListener = resultListener; + this.client = new MetaClient(); + } + + Exec(Exec exec) { + this.exec = exec; + this.console = exec.console; + this.queryExecutor = exec.queryExecutor; + this.client = exec.client; + } + + /** + * Set a variable using a value from the parameter or the stack + */ + public Var setVariable(String name, Var value) { + if (value == null || value == Var.Empty) { + if (exec.stack.empty()) { + return Var.Empty; + } + value = 
exec.stack.pop(); + } + if (name.startsWith("hplsql.")) { + exec.conf.setOption(name, value.toString()); + return Var.Empty; + } + Var var = findVariable(name); + if (var != null) { + var.cast(value); + } else { + var = new Var(value); + var.setName(name); + if (exec.currentScope != null) { + exec.currentScope.addVariable(var); + } + } + return var; + } + + public Var setVariable(String name) { + return setVariable(name, Var.Empty); + } + + public Var setVariable(String name, String value) { + return setVariable(name, new Var(value)); + } + + public Var setVariable(String name, int value) { + return setVariable(name, new Var(Long.valueOf(value))); + } + + /** + * Set variable to NULL + */ + public Var setVariableToNull(String name) { + Var var = findVariable(name); + if (var != null) { + var.removeValue(); + } else { + var = new Var(); + var.setName(name); + if (exec.currentScope != null) { + exec.currentScope.addVariable(var); + } + } + return var; + } + + /** + * Add a local variable to the current scope + */ + public void addVariable(Var var) { + if (currentPackageDecl != null) { + currentPackageDecl.addVariable(var); + } else if (exec.currentScope != null) { + exec.currentScope.addVariable(var); + } + } + + /** + * Add a condition handler to the current scope + */ + public void addHandler(Handler handler) { + if (exec.currentScope != null) { + exec.currentScope.addHandler(handler); + } + } + + /** + * Add a return cursor visible to procedure callers and clients + */ + public void addReturnCursor(Var var) { + String routine = callStackPeek(); + ArrayList cursors = returnCursors.computeIfAbsent(routine, k -> new ArrayList<>()); + cursors.add(var); + } + + /** + * Get the return cursor defined in the specified procedure + */ + public Var consumeReturnCursor(String routine) { + ArrayList cursors = returnCursors.get(routine.toUpperCase()); + if (cursors == null) { + return null; + } + Var var = cursors.get(0); + cursors.remove(0); + return var; + } + + /** + * Push 
a value to the stack + */ + public void stackPush(Var var) { + exec.stack.push(var); + } + + /** + * Push a string value to the stack + */ + public void stackPush(String val) { + exec.stack.push(new Var(val)); + } + + public void stackPush(StringBuilder val) { + stackPush(val.toString()); + } + + /** + * Push a boolean value to the stack + */ + public void stackPush(Boolean val) { + exec.stack.push(new Var(val)); + } + + /** + * Select a value from the stack, but not remove + */ + public Var stackPeek() { + return exec.stack.peek(); + } + + /** + * Pop a value from the stack + */ + public Var stackPop() { + if (!exec.stack.isEmpty()) { + return exec.stack.pop(); + } + return Var.Empty; + } + + /** + * Push a value to the call stack + */ + public void callStackPush(String val) { + exec.callStack.push(val.toUpperCase()); + } + + /** + * Select a value from the call stack, but not remove + */ + public String callStackPeek() { + if (!exec.callStack.isEmpty()) { + return exec.callStack.peek(); + } + return null; + } + + /** + * Pop a value from the call stack + */ + public String callStackPop() { + if (!exec.callStack.isEmpty()) { + return exec.callStack.pop(); + } + return null; + } + + /** + * Find an existing variable by name + */ + public Var findVariable(String name) { + Var var; + String name1 = name.toUpperCase(); + String name1a = null; + String name2 = null; + Scope cur = exec.currentScope; + Package pack; + Package packCallContext = exec.getPackageCallContext(); + ArrayList qualified = exec.meta.splitIdentifier(name); + if (qualified != null) { + name1 = qualified.get(0).toUpperCase(); + name2 = qualified.get(1).toUpperCase(); + pack = findPackage(name1); + if (pack != null) { + var = pack.findVariable(name2); + if (var != null) { + return var; + } + } + } + if (name1.startsWith(":")) { + name1a = name1.substring(1); + } + while (cur != null) { + var = findVariable(cur.vars, name1); + if (var == null && name1a != null) { + var = findVariable(cur.vars, name1a); 
+ } + if (var == null && packCallContext != null) { + var = packCallContext.findVariable(name1); + } + if (var != null) { + return var; + } + if (cur.type == Scope.Type.ROUTINE) { + cur = exec.globalScope; + } else { + cur = cur.parent; + } + } + return null; + } + + public Var findVariable(Var name) { + return findVariable(name.getName()); + } + + Var findVariable(Map vars, String name) { + return vars.get(name.toUpperCase()); + } + + /** + * Find a cursor variable by name + */ + public Var findCursor(String name) { + Var cursor = exec.findVariable(name); + if (cursor != null && cursor.type == Type.CURSOR) { + return cursor; + } + return null; + } + + /** + * Find the package by name + */ + Package findPackage(String name) { + Package pkg = packages.get(name.toUpperCase()); + if (pkg != null) { + return pkg; + } + Optional source = exec.packageRegistry.getPackage(name); + if (source.isPresent()) { + org.apache.doris.hplsql.HplsqlLexer lexer = new org.apache.doris.hplsql.HplsqlLexer( + new ANTLRInputStream(source.get())); + CommonTokenStream tokens = new CommonTokenStream(lexer); + org.apache.doris.hplsql.HplsqlParser parser = newParser(tokens); + exec.packageLoading = true; + try { + visit(parser.program()); + } finally { + exec.packageLoading = false; + } + } else { + return null; + } + return packages.get(name.toUpperCase()); + } + + /** + * Enter a new scope + */ + public void enterScope(Scope scope) { + exec.scopes.push(scope); + } + + public void enterScope(Scope.Type type) { + enterScope(type, null); + } + + public void enterScope(Scope.Type type, Package pack) { + exec.currentScope = new Scope(exec.currentScope, type, pack); + enterScope(exec.currentScope); + } + + public void enterGlobalScope() { + globalScope = new Scope(Scope.Type.GLOBAL); + currentScope = globalScope; + enterScope(globalScope); + } + + /** + * Leave the current scope + */ + public void leaveScope() { + if (!exec.signals.empty()) { + Scope scope = exec.scopes.peek(); + Signal signal = 
exec.signals.peek(); + if (exec.conf.onError != OnError.SETERROR) { + runExitHandler(); + } + if (signal.type == Signal.Type.LEAVE_ROUTINE && scope.type == Scope.Type.ROUTINE) { + exec.signals.pop(); + } + } + exec.currentScope = exec.scopes.pop().getParent(); + } + + /** + * Send a signal + */ + public void signal(Signal signal) { + exec.signals.push(signal); + } + + public void signal(Signal.Type type, String value, Exception exception) { + signal(new Signal(type, value, exception)); + } + + public void signal(Signal.Type type, String value) { + setSqlCode(SqlCodes.ERROR); + signal(type, value, null); + } + + public void signal(Signal.Type type) { + setSqlCode(SqlCodes.ERROR); + signal(type, null, null); + } + + public void signal(Query query) { + setSqlCode(query.getException()); + signal(Signal.Type.SQLEXCEPTION, query.errorText(), query.getException()); + } + + public void signal(QueryResult query) { + setSqlCode(query.exception()); + signal(Signal.Type.SQLEXCEPTION, query.errorText(), query.exception()); + } + + public void signal(Exception exception) { + setSqlCode(exception); + signal(Signal.Type.SQLEXCEPTION, exception.getMessage(), exception); + } + + /** + * Resignal the condition + */ + public void resignal() { + resignal(exec.currentSignal); + } + + public void resignal(Signal signal) { + if (signal != null) { + exec.resignal = true; + signal(signal); + } + } + + /** + * Run CONTINUE handlers + */ + boolean runContinueHandler() { + Scope cur = exec.currentScope; + exec.currentSignal = exec.signals.pop(); + while (cur != null) { + for (Handler h : cur.handlers) { + if (h.execType != Handler.ExecType.CONTINUE) { + continue; + } + if ((h.type != Signal.Type.USERDEFINED && h.type == exec.currentSignal.type) + || (h.type == Signal.Type.USERDEFINED && h.type == exec.currentSignal.type + && h.value.equalsIgnoreCase(exec.currentSignal.value))) { + trace(h.ctx, "CONTINUE HANDLER"); + enterScope(Scope.Type.HANDLER); + exec.currentHandlerScope = h.scope; + 
visit(h.ctx.single_block_stmt()); + leaveScope(); + exec.currentSignal = null; + return true; + } + } + cur = cur.parent; + } + exec.signals.push(exec.currentSignal); + exec.currentSignal = null; + return false; + } + + /** + * Run EXIT handler defined for the current scope + */ + boolean runExitHandler() { + exec.currentSignal = exec.signals.pop(); + for (Handler h : currentScope.handlers) { + if (h.execType != Handler.ExecType.EXIT) { + continue; + } + if ((h.type != Signal.Type.USERDEFINED && h.type == exec.currentSignal.type) + || (h.type == Signal.Type.USERDEFINED && h.type == exec.currentSignal.type + && h.value.equalsIgnoreCase(currentSignal.value))) { + trace(h.ctx, "EXIT HANDLER"); + enterScope(Scope.Type.HANDLER); + exec.currentHandlerScope = h.scope; + visit(h.ctx.single_block_stmt()); + leaveScope(); + exec.currentSignal = null; + return true; + } + } + exec.signals.push(exec.currentSignal); + exec.currentSignal = null; + return false; + } + + /** + * Pop the last signal + */ + public Signal signalPop() { + if (!exec.signals.empty()) { + return exec.signals.pop(); + } + return null; + } + + /** + * Peek the last signal + */ + public Signal signalPeek() { + if (!exec.signals.empty()) { + return exec.signals.peek(); + } + return null; + } + + /** + * Pop the current label + */ + public String labelPop() { + if (!exec.labels.empty()) { + return exec.labels.pop(); + } + return ""; + } + + /** + * Execute a SQL query (SELECT) + */ + public Query executeQuery(ParserRuleContext ctx, Query query, String connProfile) { + if (!exec.offline) { + exec.rowCount = 0; + exec.conn.executeQuery(query, connProfile); + return query; + } + setSqlNoData(); + info(ctx, "Not executed - offline mode set"); + return query; + } + + /** + * Register JARs, FILEs and CREATE TEMPORARY FUNCTION for UDF call + */ + public void registerUdf() { + if (udfRegistered) { + return; + } + ArrayList sql = new ArrayList<>(); + String dir = Utils.getExecDir(); + String hplsqlJarName = 
"hplsql.jar"; + for (String jarName : new java.io.File(dir).list()) { + if (jarName.startsWith("hive-hplsql") && jarName.endsWith(".jar")) { + hplsqlJarName = jarName; + break; + } + } + sql.add("ADD JAR " + dir + hplsqlJarName); + sql.add("ADD JAR " + dir + "antlr4-runtime-4.5.jar"); + if (!conf.getLocation().equals("")) { + sql.add("ADD FILE " + conf.getLocation()); + } else { + sql.add("ADD FILE " + dir + Conf.SITE_XML); + } + if (dotHplsqlrcExists) { + sql.add("ADD FILE " + dir + Conf.DOT_HPLSQLRC); + } + if (hplsqlrcExists) { + sql.add("ADD FILE " + dir + Conf.HPLSQLRC); + } + String lu = createLocalUdf(); + if (lu != null) { + sql.add("ADD FILE " + lu); + } + sql.add("CREATE TEMPORARY FUNCTION hplsql AS 'org.apache.doris.udf.hplsql.Udf'"); + exec.conn.addPreSql(exec.conf.defaultConnection, sql); + udfRegistered = true; + } + + /** + * Initialize options + */ + void initOptions() { + for (Entry item : exec.conf) { + String key = item.getKey(); + String value = item.getValue(); + if (key == null || value == null || !key.startsWith("hplsql.")) { + continue; + } else if (key.compareToIgnoreCase(Conf.CONN_DEFAULT) == 0) { + exec.conf.defaultConnection = value; + } else if (key.startsWith("hplsql.conn.init.")) { + exec.conn.addConnectionInit(key.substring(17), value); + } else if (key.startsWith(Conf.CONN_CONVERT)) { + exec.conf.setConnectionConvert(key.substring(20), value); + } else if (key.startsWith("hplsql.conn.")) { + String name = key.substring(12); + exec.conn.addConnection(name, value); + } else if (key.startsWith("hplsql.")) { + exec.conf.setOption(key, value); + } + } + } + + /** + * Set SQLCODE + */ + public void setSqlCode(int sqlcode) { + Long code = (long) sqlcode; + Var var = findVariable(SQLCODE); + if (var != null) { + var.setValue(code); + } + var = findVariable(ERRORCODE); + if (var != null) { + var.setValue(code); + } + } + + public void setSqlCode(Exception exception) { + if (exception instanceof QueryException) { + 
// Query exceptions carry their own code/state; anything else maps to the
// generic error code.
setSqlCode(((QueryException) exception).getErrorCode());
setSqlState(((QueryException) exception).getSQLState());
} else {
    setSqlCode(SqlCodes.ERROR);
    // NOTE(review): "02000" is the standard SQLSTATE class for "no data";
    // using it for generic errors looks intentional here but verify.
    setSqlState("02000");
}
}

/**
 * Set SQLSTATE
 */
public void setSqlState(String sqlstate) {
    Var var = findVariable(SQLSTATE);
    if (var != null) {
        var.setValue(sqlstate);
    }
}

public void setResultListener(ResultListener resultListener) {
    select.setResultListener(resultListener);
}

/**
 * Set HOSTCODE
 */
public void setHostCode(int code) {
    Var var = findVariable(HOSTCODE);
    if (var != null) {
        var.setValue(Long.valueOf(code));
    }
}

/**
 * Set successful execution for SQL
 */
public void setSqlSuccess() {
    setSqlCode(SqlCodes.SUCCESS);
    setSqlState("00000");
}

/**
 * Set SQL_NO_DATA as the result of SQL execution
 */
public void setSqlNoData() {
    setSqlCode(SqlCodes.NO_DATA_FOUND);
    setSqlState("01000");
}

/**
 * Entry point: parse arguments, run the script, and always release
 * resources via close().
 *
 * @return -1 on bad arguments, otherwise the program return code
 */
public Integer run(String[] args) throws Exception {
    if (!parseArguments(args)) {
        return -1;
    }
    init();
    try {
        parseAndEval(arguments);
    } finally {
        close();
    }
    return getProgramReturnCode();
}

/**
 * Parse the script source (from -e string or -f file) and evaluate it.
 * Validation failures are converted into a VALIDATION signal instead of
 * propagating; the final stack value (if any) is printed and returned.
 */
public Var parseAndEval(Arguments arguments) {
    ParseTree tree;
    try (InputStream input = sourceStream(arguments)) {
        tree = parse(input);
    } catch (IOException e) {
        throw new UncheckedIOException(e);
    }
    Var result = null;
    try {
        result = evaluate(tree, arguments.main);
    } catch (HplValidationException e) {
        signal(Signal.Type.VALIDATION, e.getMessage(), e);
    }
    if (result != null) {
        console.printLine(result.toString());
    }
    return result;
}

/**
 * Evaluate the parse tree. When execMain is given, the tree is first
 * visited with initRoutines set (registering procedures/functions only),
 * then the named entry routine is executed.
 *
 * @return the value left on the execution stack, or null
 */
private Var evaluate(ParseTree tree, String execMain) {
    if (tree == null) {
        return null;
    }
    if (execMain != null) {
        initRoutines = true;
        visit(tree);
        initRoutines = false;
        exec.functions.exec(execMain.toUpperCase(), null);
    } else {
        visit(tree);
    }
    if (!exec.stack.isEmpty()) {
        return exec.stackPop();
    }
    return null;
}

@Override
public void close() { + leaveScope(); + cleanup(); + printExceptions(); + } + + private InputStream sourceStream(Arguments arguments) throws FileNotFoundException { + return arguments.execString != null + ? new ByteArrayInputStream(arguments.execString.getBytes(StandardCharsets.UTF_8)) + : new FileInputStream(arguments.fileName); + } + + /** + * Initialize PL/HQL + */ + public void init() { + enterGlobalScope(); + // specify the default log4j2 properties file. + System.setProperty("log4j.configurationFile", "hive-log4j2.properties"); + if (conf == null) { + conf = new Conf(); + } + conf.init(); + conn = new Conn(this); + meta = new Meta(this, queryExecutor); + initOptions(); + + expr = new Expression(this); + select = new Select(this, queryExecutor); + select.setResultListener(resultListener); + stmt = new Stmt(this, queryExecutor); + converter = new Converter(this); + + builtinFunctions = new BuiltinFunctions(this, queryExecutor); + new FunctionDatetime(this, queryExecutor).register(builtinFunctions); + new FunctionMisc(this, queryExecutor).register(builtinFunctions); + new FunctionString(this, queryExecutor).register(builtinFunctions); + if (client != null) { + functions = new DorisFunctionRegistry(this, client, builtinFunctions); + packageRegistry = new DorisPackageRegistry(client); + } else { + functions = new InMemoryFunctionRegistry(this, builtinFunctions); + } + addVariable(new Var(ERRORCODE, Var.Type.BIGINT, 0L)); + addVariable(new Var(SQLCODE, Var.Type.BIGINT, 0L)); + addVariable(new Var(SQLSTATE, Var.Type.STRING, "00000")); + addVariable(new Var(HOSTCODE, Var.Type.BIGINT, 0L)); + for (Map.Entry v : arguments.getVars().entrySet()) { + addVariable(new Var(v.getKey(), Var.Type.STRING, v.getValue())); + } + includeRcFile(); + registerBuiltins(); + } + + private ParseTree parse(InputStream input) throws IOException { + org.apache.doris.hplsql.HplsqlLexer lexer = new org.apache.doris.hplsql.HplsqlLexer( + new ANTLRInputStream(input)); + CommonTokenStream tokens 
= new CommonTokenStream(lexer); + org.apache.doris.hplsql.HplsqlParser parser = newParser(tokens); + ParseTree tree = parser.program(); + if (trace) { + console.printError("Configuration file: " + conf.getLocation()); + console.printError("Parser tree: " + tree.toStringTree(parser)); + } + return tree; + } + + protected void registerBuiltins() { + Var dbmVar = new Var(Type.HPL_OBJECT, "DBMS_OUTPUT"); + DbmOutput dbms = DbmOutputClass.INSTANCE.newInstance(); + dbms.initialize(console); + dbmVar.setValue(dbms); + dbmVar.setConstant(true); + addVariable(dbmVar); + + Var utlFileVar = new Var(Type.HPL_OBJECT, "UTL_FILE"); + UtlFile utlFile = UtlFileClass.INSTANCE.newInstance(); + utlFileVar.setValue(utlFile); + utlFileVar.setConstant(true); + addVariable(utlFileVar); + } + + private org.apache.doris.hplsql.HplsqlParser newParser(CommonTokenStream tokens) { + org.apache.doris.hplsql.HplsqlParser parser = new org.apache.doris.hplsql.HplsqlParser(tokens); + // the default listener logs into stdout, overwrite it with a custom listener that uses beeline console + parser.removeErrorListeners(); + parser.addErrorListener(new SyntaxErrorReporter(console)); + return parser; + } + + /** + * Parse command line arguments + */ + boolean parseArguments(String[] args) { + boolean parsed = arguments.parse(args); + if (parsed && arguments.hasVersionOption()) { + console.printError(VERSION); + return false; + } + if (!parsed || arguments.hasHelpOption() + || (arguments.getExecString() == null && arguments.getFileName() == null)) { + arguments.printHelp(); + return false; + } + String execString = arguments.getExecString(); + String execFile = arguments.getFileName(); + if (arguments.hasTraceOption()) { + trace = true; + } + if (arguments.hasOfflineOption()) { + offline = true; + } + if (execString != null && execFile != null) { + console.printError("The '-e' and '-f' options cannot be specified simultaneously."); + return false; + } + return true; + } + + /** + * Include statements from 
.hplsqlrc and hplsql rc files + */ + void includeRcFile() { + if (includeFile(Conf.DOT_HPLSQLRC, false)) { + dotHplsqlrcExists = true; + } else { + if (includeFile(Conf.HPLSQLRC, false)) { + hplsqlrcExists = true; + } + } + if (udfRun) { + includeFile(Conf.HPLSQL_LOCALS_SQL, true); + } + } + + /** + * Include statements from a file + */ + boolean includeFile(String file, boolean showError) { + try { + String content = FileUtils.readFileToString(new java.io.File(file), "UTF-8"); + if (content != null && !content.isEmpty()) { + if (trace) { + trace(null, "INCLUDE CONTENT " + file + " (non-empty)"); + } + new Exec(this).include(content); + return true; + } + } catch (Exception e) { + if (showError) { + error(null, "INCLUDE file error: " + e.getMessage()); + } + } + return false; + } + + /** + * Execute statements from an include file + */ + void include(String content) throws Exception { + InputStream input = new ByteArrayInputStream(content.getBytes(StandardCharsets.UTF_8)); + org.apache.doris.hplsql.HplsqlLexer lexer = new org.apache.doris.hplsql.HplsqlLexer( + new ANTLRInputStream(input)); + CommonTokenStream tokens = new CommonTokenStream(lexer); + org.apache.doris.hplsql.HplsqlParser parser = newParser(tokens); + ParseTree tree = parser.program(); + visit(tree); + } + + /** + * Start executing HPL/SQL script + */ + @Override + public Integer visitProgram(org.apache.doris.hplsql.HplsqlParser.ProgramContext ctx) { + if (ctx.block() != null) { + // Record the last stmt. When mysql protocol returns multiple result sets, + // SERVER_MORE_RESULTS_EXISTS should be specified when sending results other than the last stmt. 
// NOTE(review): raw List — the generic parameter (StmtContext) was likely
// lost in extraction; confirm upstream.
List stmtContexts = ctx.block().stmt();
// Scan backwards for the last statement that is not a bare semicolon;
// everything before it is sent with SERVER_MORE_RESULTS_EXISTS.
for (int i = stmtContexts.size() - 1; i >= 0; --i) {
    if (stmtContexts.get(i).semicolon_stmt() == null) {
        lastStmt = stmtContexts.get(i);
        break;
    }
}
}
return visitChildren(ctx);
}

/**
 * Enter BEGIN-END block: open a scope around the children and close it
 * afterwards.
 */
@Override
public Integer visitBegin_end_block(org.apache.doris.hplsql.HplsqlParser.Begin_end_blockContext ctx) {
    enterScope(Scope.Type.BEGIN_END);
    Integer rc = visitChildren(ctx);
    leaveScope();
    return rc;
}

/**
 * Free resources before exit: drop every table registered in
 * managedTables.
 */
void cleanup() {
    for (Map.Entry i : managedTables.entrySet()) {
        String sql = "DROP TABLE IF EXISTS " + i.getValue();
        QueryResult query = queryExecutor.executeQuery(sql, null);
        query.close();
        if (trace) {
            trace(null, sql);
        }
    }
}

/**
 * Output information about unhandled exceptions. Drains the signal stack,
 * formatting each signal according to its type.
 */
public void printExceptions() {
    while (!signals.empty()) {
        Signal sig = signals.pop();
        if (sig.type == Signal.Type.VALIDATION) {
            error(((HplValidationException) sig.exception).getCtx(), sig.exception.getMessage());
        } else if (sig.type == Signal.Type.SQLEXCEPTION) {
            console.printError("Unhandled exception in HPL/SQL. " + ExceptionUtils.getStackTrace(sig.exception));
        } else if (sig.type == Signal.Type.UNSUPPORTED_OPERATION) {
            console.printError(sig.value == null ?
"Unsupported operation" : sig.value); + } else if (sig.exception != null) { + console.printError("HPL/SQL error: " + ExceptionUtils.getStackTrace(sig.exception)); + } else if (sig.value != null) { + console.printError(sig.value); + } else { + trace(null, "Signal: " + sig.type); + } + } + } + + /** + * Get the program return code + */ + Integer getProgramReturnCode() { + int rc = 0; + if (!signals.empty()) { + Signal sig = signals.pop(); + if ((sig.type == Signal.Type.LEAVE_PROGRAM || sig.type == Signal.Type.LEAVE_ROUTINE) + && sig.value != null) { + try { + rc = Integer.parseInt(sig.value); + } catch (NumberFormatException e) { + rc = 1; + } + } + } + return rc; + } + + /** + * Executing a statement + */ + @Override + public Integer visitStmt(org.apache.doris.hplsql.HplsqlParser.StmtContext ctx) { + if (ctx.semicolon_stmt() != null) { + return 0; + } + if (initRoutines && ctx.create_procedure_stmt() == null && ctx.create_function_stmt() == null) { + return 0; + } + if (exec.resignal) { + if (exec.currentScope != exec.currentHandlerScope.parent) { + return 0; + } + exec.resignal = false; + } + if (!exec.signals.empty() && exec.conf.onError != OnError.SETERROR) { + if (!runContinueHandler()) { + return 0; + } + } + Var prev = stackPop(); + if (prev != null && prev.value != null) { + console.printLine(prev.toString()); + } + Integer rc = visitChildren(ctx); + if (ctx != lastStmt) { + // printExceptions(); + resultListener.onFinalize(); + console.flushConsole(); + } + return rc; + } + + /** + * Executing or building SELECT statement + */ + @Override + public Integer visitSelect_stmt(org.apache.doris.hplsql.HplsqlParser.Select_stmtContext ctx) { + return exec.select.select(ctx); + } + + @Override + public Integer visitCte_select_stmt(org.apache.doris.hplsql.HplsqlParser.Cte_select_stmtContext ctx) { + return exec.select.cte(ctx); + } + + @Override + public Integer visitFullselect_stmt(org.apache.doris.hplsql.HplsqlParser.Fullselect_stmtContext ctx) { + return 
exec.select.fullselect(ctx);
}

// The visitors below are thin delegations to the Select component.

@Override
public Integer visitSubselect_stmt(org.apache.doris.hplsql.HplsqlParser.Subselect_stmtContext ctx) {
    return exec.select.subselect(ctx);
}

@Override
public Integer visitSelect_list(org.apache.doris.hplsql.HplsqlParser.Select_listContext ctx) {
    return exec.select.selectList(ctx);
}

@Override
public Integer visitFrom_clause(org.apache.doris.hplsql.HplsqlParser.From_clauseContext ctx) {
    return exec.select.from(ctx);
}

@Override
public Integer visitFrom_table_name_clause(org.apache.doris.hplsql.HplsqlParser.From_table_name_clauseContext ctx) {
    return exec.select.fromTable(ctx);
}

@Override
public Integer visitFrom_subselect_clause(org.apache.doris.hplsql.HplsqlParser.From_subselect_clauseContext ctx) {
    return exec.select.fromSubselect(ctx);
}

@Override
public Integer visitFrom_join_clause(org.apache.doris.hplsql.HplsqlParser.From_join_clauseContext ctx) {
    return exec.select.fromJoin(ctx);
}

@Override
public Integer visitFrom_table_values_clause(
        org.apache.doris.hplsql.HplsqlParser.From_table_values_clauseContext ctx) {
    return exec.select.fromTableValues(ctx);
}

@Override
public Integer visitWhere_clause(org.apache.doris.hplsql.HplsqlParser.Where_clauseContext ctx) {
    return exec.select.where(ctx);
}

@Override
public Integer visitSelect_options_item(org.apache.doris.hplsql.HplsqlParser.Select_options_itemContext ctx) {
    return exec.select.option(ctx);
}

/**
 * Column name: push the normalized identifier part onto the stack.
 */
@Override
public Integer visitColumn_name(org.apache.doris.hplsql.HplsqlParser.Column_nameContext ctx) {
    stackPush(meta.normalizeIdentifierPart(ctx.getText()));
    return 0;
}

/**
 * Table name: resolve through managed-table and object mappings (keyed by
 * the upper-cased name) before falling back to the normalized identifier.
 */
@Override
public Integer visitTable_name(org.apache.doris.hplsql.HplsqlParser.Table_nameContext ctx) {
    String name = ctx.getText();
    String nameUp = name.toUpperCase();
    String nameNorm = meta.normalizeObjectIdentifier(name);
    String actualName =
exec.managedTables.get(nameUp); + String conn = exec.objectConnMap.get(nameUp); + if (conn == null) { + conn = conf.defaultConnection; + } + stmtConnList.add(conn); + if (actualName != null) { + stackPush(actualName); + return 0; + } + actualName = exec.objectMap.get(nameUp); + if (actualName != null) { + stackPush(actualName); + return 0; + } + stackPush(nameNorm); + return 0; + } + + /** + * SQL INSERT statement + */ + @Override + public Integer visitInsert_stmt(org.apache.doris.hplsql.HplsqlParser.Insert_stmtContext ctx) { + return exec.stmt.insert(ctx); + } + + /** + * INSERT DIRECTORY statement + */ + @Override + public Integer visitInsert_directory_stmt(org.apache.doris.hplsql.HplsqlParser.Insert_directory_stmtContext ctx) { + return exec.stmt.insertDirectory(ctx); + } + + /** + * EXCEPTION block + */ + @Override + public Integer visitException_block_item(org.apache.doris.hplsql.HplsqlParser.Exception_block_itemContext ctx) { + if (exec.signals.empty()) { + return 0; + } + if (exec.conf.onError == OnError.SETERROR || exec.conf.onError == OnError.STOP) { + exec.signals.pop(); + return 0; + } + if (ctx.L_ID().toString().equalsIgnoreCase("OTHERS")) { + trace(ctx, "EXCEPTION HANDLER"); + exec.signals.pop(); + enterScope(Scope.Type.HANDLER); + visit(ctx.block()); + leaveScope(); + } + return 0; + } + + /** + * DECLARE variable statement + */ + @Override + public Integer visitDeclare_var_item(org.apache.doris.hplsql.HplsqlParser.Declare_var_itemContext ctx) { + String type = null; + TableClass userDefinedType = null; + Row row = null; + String len = null; + String scale = null; + Var defaultVar = null; + if (ctx.dtype().T_ROWTYPE() != null) { + row = meta.getRowDataType(ctx, exec.conf.defaultConnection, ctx.dtype().qident().getText()); + if (row == null) { + type = Var.DERIVED_ROWTYPE; + } + } else { + type = getDataType(ctx); + if (ctx.dtype_len() != null) { + len = ctx.dtype_len().L_INT(0).getText(); + if (ctx.dtype_len().L_INT(1) != null) { + scale = 
ctx.dtype_len().L_INT(1).getText(); + } + } + if (ctx.dtype_default() != null) { + defaultVar = evalPop(ctx.dtype_default()); + } + userDefinedType = types.get(type); + if (userDefinedType != null) { + type = Type.HPL_OBJECT.name(); + } + + } + int cnt = ctx.ident().size(); // Number of variables declared with the same data type and default + for (int i = 0; i < cnt; i++) { + String name = ctx.ident(i).getText(); + if (row == null) { + Var var = new Var(name, type, len, scale, defaultVar); + if (userDefinedType != null && defaultVar == null) { + var.setValue(userDefinedType.newInstance()); + } + exec.addVariable(var); + if (ctx.T_CONSTANT() != null) { + var.setConstant(true); + } + if (trace) { + if (defaultVar != null) { + trace(ctx, "DECLARE " + name + " " + type + " = " + var.toSqlString()); + } else { + trace(ctx, "DECLARE " + name + " " + type); + } + } + } else { + exec.addVariable(new Var(name, row)); + if (trace) { + trace(ctx, "DECLARE " + name + " " + ctx.dtype().getText()); + } + } + } + return 0; + } + + /** + * Get the variable data type + */ + String getDataType(org.apache.doris.hplsql.HplsqlParser.Declare_var_itemContext ctx) { + String type; + if (ctx.dtype().T_TYPE() != null) { + type = meta.getDataType(ctx, exec.conf.defaultConnection, ctx.dtype().qident().getText()); + if (type == null) { + type = Var.DERIVED_TYPE; + } + } else { + type = getFormattedText(ctx.dtype()); + } + return type; + } + + /** + * ALLOCATE CURSOR statement + */ + @Override + public Integer visitAllocate_cursor_stmt(org.apache.doris.hplsql.HplsqlParser.Allocate_cursor_stmtContext ctx) { + return exec.stmt.allocateCursor(ctx); + } + + /** + * ASSOCIATE LOCATOR statement + */ + @Override + public Integer visitAssociate_locator_stmt(org.apache.doris.hplsql.HplsqlParser.Associate_locator_stmtContext ctx) { + return exec.stmt.associateLocator(ctx); + } + + /** + * DECLARE cursor statement + */ + @Override + public Integer 
visitDeclare_cursor_item(org.apache.doris.hplsql.HplsqlParser.Declare_cursor_itemContext ctx) { + return exec.stmt.declareCursor(ctx); + } + + /** + * DESCRIBE statement + */ + @Override + public Integer visitDescribe_stmt(org.apache.doris.hplsql.HplsqlParser.Describe_stmtContext ctx) { + return exec.stmt.describe(ctx); + } + + /** + * DROP statement + */ + @Override + public Integer visitDrop_stmt(org.apache.doris.hplsql.HplsqlParser.Drop_stmtContext ctx) { + return exec.stmt.drop(ctx); + } + + /** + * OPEN cursor statement + */ + @Override + public Integer visitOpen_stmt(org.apache.doris.hplsql.HplsqlParser.Open_stmtContext ctx) { + return exec.stmt.open(ctx); + } + + /** + * FETCH cursor statement + */ + @Override + public Integer visitFetch_stmt(org.apache.doris.hplsql.HplsqlParser.Fetch_stmtContext ctx) { + return exec.stmt.fetch(ctx); + } + + /** + * CLOSE cursor statement + */ + @Override + public Integer visitClose_stmt(org.apache.doris.hplsql.HplsqlParser.Close_stmtContext ctx) { + return exec.stmt.close(ctx); + } + + /** + * CMP statement + */ + @Override + public Integer visitCmp_stmt(org.apache.doris.hplsql.HplsqlParser.Cmp_stmtContext ctx) { + return new Cmp(exec, queryExecutor).run(ctx); + } + + /** + * COPY statement + */ + @Override + public Integer visitCopy_stmt(org.apache.doris.hplsql.HplsqlParser.Copy_stmtContext ctx) { + return new Copy(exec, queryExecutor).run(ctx); + } + + /** + * COPY FROM LOCAL statement + */ + @Override + public Integer visitCopy_from_local_stmt(org.apache.doris.hplsql.HplsqlParser.Copy_from_local_stmtContext ctx) { + return new Copy(exec, queryExecutor).runFromLocal(ctx); + } + + /** + * DECLARE HANDLER statement + */ + @Override + public Integer visitDeclare_handler_item(org.apache.doris.hplsql.HplsqlParser.Declare_handler_itemContext ctx) { + trace(ctx, "DECLARE HANDLER"); + Handler.ExecType execType = Handler.ExecType.EXIT; + Signal.Type type = Signal.Type.SQLEXCEPTION; + String value = null; + if (ctx.T_CONTINUE() != 
null) { + execType = Handler.ExecType.CONTINUE; + } + if (ctx.ident() != null) { + type = Signal.Type.USERDEFINED; + value = ctx.ident().getText(); + } else if (ctx.T_NOT() != null && ctx.T_FOUND() != null) { + type = Signal.Type.NOTFOUND; + } + addHandler(new Handler(execType, type, value, exec.currentScope, ctx)); + return 0; + } + + /** + * DECLARE CONDITION + */ + @Override + public Integer visitDeclare_condition_item(org.apache.doris.hplsql.HplsqlParser.Declare_condition_itemContext ctx) { + return 0; + } + + /** + * DECLARE TEMPORARY TABLE statement + */ + @Override + public Integer visitDeclare_temporary_table_item( + org.apache.doris.hplsql.HplsqlParser.Declare_temporary_table_itemContext ctx) { + return exec.stmt.declareTemporaryTable(ctx); + } + + /** + * CREATE TABLE statement + */ + @Override + public Integer visitCreate_table_stmt(org.apache.doris.hplsql.HplsqlParser.Create_table_stmtContext ctx) { + return exec.stmt.createTable(ctx); + } + + @Override + public Integer visitCreate_table_options_hive_item( + org.apache.doris.hplsql.HplsqlParser.Create_table_options_hive_itemContext ctx) { + return exec.stmt.createTableHiveOptions(ctx); + } + + @Override + public Integer visitCreate_table_options_ora_item( + org.apache.doris.hplsql.HplsqlParser.Create_table_options_ora_itemContext ctx) { + return 0; + } + + @Override + public Integer visitCreate_table_options_td_item( + org.apache.doris.hplsql.HplsqlParser.Create_table_options_td_itemContext ctx) { + return 0; + } + + @Override + public Integer visitCreate_table_options_mssql_item( + org.apache.doris.hplsql.HplsqlParser.Create_table_options_mssql_itemContext ctx) { + return 0; + } + + @Override + public Integer visitCreate_table_options_db2_item( + org.apache.doris.hplsql.HplsqlParser.Create_table_options_db2_itemContext ctx) { + return 0; + } + + @Override + public Integer visitCreate_table_options_mysql_item( + org.apache.doris.hplsql.HplsqlParser.Create_table_options_mysql_itemContext ctx) { + return 
exec.stmt.createTableMysqlOptions(ctx); + } + + /** + * CREATE LOCAL TEMPORARY | VOLATILE TABLE statement + */ + @Override + public Integer visitCreate_local_temp_table_stmt( + org.apache.doris.hplsql.HplsqlParser.Create_local_temp_table_stmtContext ctx) { + return exec.stmt.createLocalTemporaryTable(ctx); + } + + /** + * ALTER TABLE statement + */ + @Override + public Integer visitAlter_table_stmt(org.apache.doris.hplsql.HplsqlParser.Alter_table_stmtContext ctx) { + return 0; + } + + /** + * CREATE DATABASE | SCHEMA statement + */ + @Override + public Integer visitCreate_database_stmt(org.apache.doris.hplsql.HplsqlParser.Create_database_stmtContext ctx) { + return exec.stmt.createDatabase(ctx); + } + + /** + * CREATE FUNCTION statement + */ + @Override + public Integer visitCreate_function_stmt(org.apache.doris.hplsql.HplsqlParser.Create_function_stmtContext ctx) { + exec.functions.addUserFunction(ctx); + addLocalUdf(ctx); + return 0; + } + + /** + * CREATE PACKAGE specification statement + */ + @Override + public Integer visitCreate_package_stmt(org.apache.doris.hplsql.HplsqlParser.Create_package_stmtContext ctx) { + String name = ctx.ident(0).getText().toUpperCase(); + if (exec.packageLoading) { + exec.currentPackageDecl = new Package(name, exec, builtinFunctions); + exec.packages.put(name, exec.currentPackageDecl); + exec.currentPackageDecl.createSpecification(ctx); + exec.currentPackageDecl = null; + } else { + trace(ctx, "CREATE PACKAGE"); + exec.packages.remove(name); + exec.packageRegistry.createPackageHeader(name, getFormattedText(ctx), ctx.T_REPLACE() != null); + } + return 0; + } + + /** + * CREATE PACKAGE body statement + */ + @Override + public Integer visitCreate_package_body_stmt( + org.apache.doris.hplsql.HplsqlParser.Create_package_body_stmtContext ctx) { + String name = ctx.ident(0).getText().toUpperCase(); + if (exec.packageLoading) { + exec.currentPackageDecl = exec.packages.get(name); + if (exec.currentPackageDecl == null) { + 
exec.currentPackageDecl = new Package(name, exec, builtinFunctions); + exec.currentPackageDecl.setAllMembersPublic(true); + exec.packages.put(name, exec.currentPackageDecl); + } + exec.currentPackageDecl.createBody(ctx); + exec.currentPackageDecl = null; + } else { + trace(ctx, "CREATE PACKAGE BODY"); + exec.packages.remove(name); + exec.packageRegistry.createPackageBody(name, getFormattedText(ctx), ctx.T_REPLACE() != null); + } + return 0; + } + + /** + * CREATE PROCEDURE statement + */ + @Override + public Integer visitCreate_procedure_stmt(org.apache.doris.hplsql.HplsqlParser.Create_procedure_stmtContext ctx) { + exec.functions.addUserProcedure(ctx); + addLocalUdf(ctx); // Add procedures as they can be invoked by functions + return 0; + } + + public void dropProcedure(org.apache.doris.hplsql.HplsqlParser.Drop_stmtContext ctx, String name, + boolean checkIfExists) { + if (checkIfExists && !functions.exists(name)) { + trace(ctx, name + " DOES NOT EXIST"); + return; + } + functions.remove(name); + trace(ctx, name + " DROPPED"); + } + + public void dropPackage(org.apache.doris.hplsql.HplsqlParser.Drop_stmtContext ctx, String name, + boolean checkIfExists) { + if (checkIfExists && !packageRegistry.getPackage(name).isPresent()) { + trace(ctx, name + " DOES NOT EXIST"); + return; + } + packages.remove(name); + packageRegistry.dropPackage(name); + trace(ctx, name + " DROPPED"); + } + + /** + * CREATE INDEX statement + */ + @Override + public Integer visitCreate_index_stmt(org.apache.doris.hplsql.HplsqlParser.Create_index_stmtContext ctx) { + return 0; + } + + /** + * Add functions and procedures defined in the current script + */ + void addLocalUdf(ParserRuleContext ctx) { + if (exec == this) { + localUdf.append(Exec.getFormattedText(ctx)); + localUdf.append("\n"); + } + } + + /** + * Save local functions and procedures to a file (will be added to the distributed cache) + */ + String createLocalUdf() { + if (localUdf.length() == 0) { + return null; + } + try { + String 
file = System.getProperty("user.dir") + "/" + Conf.HPLSQL_LOCALS_SQL; + PrintWriter writer = new PrintWriter(file, "UTF-8"); + writer.print(localUdf); + writer.close(); + return file; + } catch (Exception e) { + e.printStackTrace(); + } + return null; + } + + @Override + public Integer visitSet_doris_session_option( + org.apache.doris.hplsql.HplsqlParser.Set_doris_session_optionContext ctx) { + StringBuilder sql = new StringBuilder("set "); + for (int i = 0; i < ctx.getChildCount(); i++) { + sql.append(ctx.getChild(i).getText()).append(" "); + } + QueryResult query = queryExecutor.executeQuery(sql.toString(), ctx); + if (query.error()) { + exec.signal(query); + return 1; + } + exec.setSqlSuccess(); + if (trace) { + trace(ctx, sql.toString()); + } + return 0; + } + + /** + * Assignment statement for single value + */ + @Override + public Integer visitAssignment_stmt_single_item( + org.apache.doris.hplsql.HplsqlParser.Assignment_stmt_single_itemContext ctx) { + String name = ctx.ident().getText(); + visit(ctx.expr()); + Var var = setVariable(name); + if (trace) { + trace(ctx, "SET " + name + " = " + var.toSqlString()); + } + return 0; + } + + /** + * Assignment statement for multiple values + */ + @Override + public Integer visitAssignment_stmt_multiple_item( + org.apache.doris.hplsql.HplsqlParser.Assignment_stmt_multiple_itemContext ctx) { + int cnt = ctx.ident().size(); + int ecnt = ctx.expr().size(); + for (int i = 0; i < cnt; i++) { + String name = ctx.ident(i).getText(); + if (i < ecnt) { + visit(ctx.expr(i)); + Var var = setVariable(name); + if (trace) { + trace(ctx, "SET " + name + " = " + var.toString()); + } + } + } + return 0; + } + + /** + * Assignment from SELECT statement + */ + @Override + public Integer visitAssignment_stmt_select_item( + org.apache.doris.hplsql.HplsqlParser.Assignment_stmt_select_itemContext ctx) { + return stmt.assignFromSelect(ctx); + } + + @Override + public Integer visitAssignment_stmt_collection_item( + 
org.apache.doris.hplsql.HplsqlParser.Assignment_stmt_collection_itemContext ctx) { + org.apache.doris.hplsql.HplsqlParser.Expr_funcContext lhs = ctx.expr_func(); + Var var = findVariable(lhs.ident().getText()); + if (var == null || var.type != Type.HPL_OBJECT) { + stackPush(Var.Null); + return 0; + } + MethodParams.Arity.UNARY.check(lhs.ident().getText(), lhs.expr_func_params().func_param()); + Var index = evalPop(lhs.expr_func_params().func_param(0)); + Var value = evalPop(ctx.expr()); + dispatch(ctx, (HplObject) var.value, MethodDictionary.__SETITEM__, Arrays.asList(index, value)); + return 0; + } + + /** + * Evaluate an expression + */ + @Override + public Integer visitExpr(org.apache.doris.hplsql.HplsqlParser.ExprContext ctx) { + if (exec.buildSql) { + exec.expr.execSql(ctx); + } else { + exec.expr.exec(ctx); + } + return 0; + } + + /** + * Evaluate a boolean expression + */ + @Override + public Integer visitBool_expr(org.apache.doris.hplsql.HplsqlParser.Bool_exprContext ctx) { + if (exec.buildSql) { + exec.expr.execBoolSql(ctx); + } else { + exec.expr.execBool(ctx); + } + return 0; + } + + @Override + public Integer visitBool_expr_binary(org.apache.doris.hplsql.HplsqlParser.Bool_expr_binaryContext ctx) { + if (exec.buildSql) { + exec.expr.execBoolBinarySql(ctx); + } else { + exec.expr.execBoolBinary(ctx); + } + return 0; + } + + @Override + public Integer visitBool_expr_unary(org.apache.doris.hplsql.HplsqlParser.Bool_expr_unaryContext ctx) { + if (exec.buildSql) { + exec.expr.execBoolUnarySql(ctx); + } else { + exec.expr.execBoolUnary(ctx); + } + return 0; + } + + /** + * Static SELECT statement (i.e. 
unquoted) or expression + */ + @Override + public Integer visitExpr_select(org.apache.doris.hplsql.HplsqlParser.Expr_selectContext ctx) { + if (ctx.select_stmt() != null) { + stackPush(new Var(evalPop(ctx.select_stmt()))); + } else { + visit(ctx.expr()); + } + return 0; + } + + /** + * File path (unquoted) or expression + */ + @Override + public Integer visitExpr_file(org.apache.doris.hplsql.HplsqlParser.Expr_fileContext ctx) { + if (ctx.file_name() != null) { + stackPush(new Var(ctx.file_name().getText())); + } else { + visit(ctx.expr()); + } + return 0; + } + + /** + * Cursor attribute %ISOPEN, %FOUND and %NOTFOUND + */ + @Override + public Integer visitExpr_cursor_attribute(org.apache.doris.hplsql.HplsqlParser.Expr_cursor_attributeContext ctx) { + exec.expr.execCursorAttribute(ctx); + return 0; + } + + /** + * Function call + */ + @Override + public Integer visitExpr_func(org.apache.doris.hplsql.HplsqlParser.Expr_funcContext ctx) { + return functionCall(ctx, ctx.ident(), ctx.expr_func_params()); + } + + private int functionCall(ParserRuleContext ctx, org.apache.doris.hplsql.HplsqlParser.IdentContext ident, + org.apache.doris.hplsql.HplsqlParser.Expr_func_paramsContext params) { + String name = ident.getText(); + if (exec.buildSql) { + exec.execSql(name, params); + } else { + name = name.toUpperCase(); + Package packCallContext = exec.getPackageCallContext(); + ArrayList qualified = exec.meta.splitIdentifier(name); + boolean executed = false; + if (qualified != null) { + Package pack = findPackage(qualified.get(0)); + if (pack != null) { + executed = pack.execFunc(qualified.get(1), params); + } + } + if (!executed && packCallContext != null) { + executed = packCallContext.execFunc(name, params); + } + if (!executed) { + if (!exec.functions.exec(name, params)) { + Var var = findVariable(name); + if (var != null && var.type == Type.HPL_OBJECT) { + stackPush(dispatch(ctx, (HplObject) var.value, MethodDictionary.__GETITEM__, params)); + } else { + throw new 
UndefinedIdentException(ctx, name); + } + } + } + } + return 0; + } + + private Var dispatch(ParserRuleContext ctx, HplObject obj, String methodName, + org.apache.doris.hplsql.HplsqlParser.Expr_func_paramsContext paramCtx) { + List params = paramCtx == null + ? Collections.emptyList() + : paramCtx.func_param().stream().map(this::evalPop).collect(Collectors.toList()); + return dispatch(ctx, obj, methodName, params); + } + + private Var dispatch(ParserRuleContext ctx, HplObject obj, String methodName, List params) { + Method method = obj.hplClass().methodDictionary().get(ctx, methodName); + return method.call(obj, params); + } + + /** + * @return either 1 rowtype OR 1 single column table OR n single column tables + */ + public List intoTables(ParserRuleContext ctx, List names) { + List
tables = new ArrayList<>(); + for (String name : names) { + Var var = findVariable(name); + if (var == null) { + trace(ctx, "Variable not found: " + name); + } else if (var.type == Type.HPL_OBJECT && var.value instanceof Table) { + tables.add((Table) var.value); + } else { + throw new TypeException(ctx, Table.class, var.type, var.value); + } + } + if (tables.size() > 1 && tables.stream().anyMatch(tbl -> tbl.hplClass().rowType())) { + throw new TypeException(ctx, "rowtype table should not be used when selecting into multiple tables"); + } + return tables; + } + + /** + * User-defined function in a SQL query + */ + public void execSql(String name, org.apache.doris.hplsql.HplsqlParser.Expr_func_paramsContext ctx) { + if (execUserSql(ctx, name)) { + return; + } + StringBuilder sql = new StringBuilder(); + sql.append(name); + sql.append("("); + if (ctx != null) { + int cnt = ctx.func_param().size(); + for (int i = 0; i < cnt; i++) { + sql.append(evalPop(ctx.func_param(i).expr())); + if (i + 1 < cnt) { + sql.append(", "); + } + } + } + sql.append(")"); + exec.stackPush(sql); + } + + /** + * Execute a HPL/SQL user-defined function in a query. 
+ * For example converts: select fn(col) from table to select hplsql('fn(:1)', col) from table + */ + private boolean execUserSql(org.apache.doris.hplsql.HplsqlParser.Expr_func_paramsContext ctx, String name) { + if (!functions.exists(name)) { + return false; + } + StringBuilder sql = new StringBuilder(); + sql.append("hplsql('"); + sql.append(name); + sql.append("("); + int cnt = ctx.func_param().size(); + for (int i = 0; i < cnt; i++) { + sql.append(":").append(i + 1); + if (i + 1 < cnt) { + sql.append(", "); + } + } + sql.append(")'"); + if (cnt > 0) { + sql.append(", "); + } + for (int i = 0; i < cnt; i++) { + sql.append(evalPop(ctx.func_param(i).expr())); + if (i + 1 < cnt) { + sql.append(", "); + } + } + sql.append(")"); + exec.stackPush(sql); + exec.registerUdf(); + return true; + } + + /** + * Aggregate or window function call + */ + @Override + public Integer visitExpr_agg_window_func(org.apache.doris.hplsql.HplsqlParser.Expr_agg_window_funcContext ctx) { + exec.stackPush(Exec.getFormattedText(ctx)); + return 0; + } + + /** + * Function with specific syntax + */ + @Override + public Integer visitExpr_spec_func(org.apache.doris.hplsql.HplsqlParser.Expr_spec_funcContext ctx) { + if (exec.buildSql) { + exec.builtinFunctions.specExecSql(ctx); + } else { + exec.builtinFunctions.specExec(ctx); + } + return 0; + } + + /** + * INCLUDE statement + */ + @Override + public Integer visitInclude_stmt(@NotNull org.apache.doris.hplsql.HplsqlParser.Include_stmtContext ctx) { + return exec.stmt.include(ctx); + } + + /** + * IF statement (PL/SQL syntax) + */ + @Override + public Integer visitIf_plsql_stmt(org.apache.doris.hplsql.HplsqlParser.If_plsql_stmtContext ctx) { + return exec.stmt.ifPlsql(ctx); + } + + /** + * IF statement (Transact-SQL syntax) + */ + @Override + public Integer visitIf_tsql_stmt(org.apache.doris.hplsql.HplsqlParser.If_tsql_stmtContext ctx) { + return exec.stmt.ifTsql(ctx); + } + + /** + * IF statement (BTEQ syntax) + */ + @Override + public Integer 
visitIf_bteq_stmt(org.apache.doris.hplsql.HplsqlParser.If_bteq_stmtContext ctx) { + return exec.stmt.ifBteq(ctx); + } + + /** + * USE statement + */ + @Override + public Integer visitUse_stmt(org.apache.doris.hplsql.HplsqlParser.Use_stmtContext ctx) { + return exec.stmt.use(ctx); + } + + /** + * VALUES statement + */ + @Override + public Integer visitValues_into_stmt(org.apache.doris.hplsql.HplsqlParser.Values_into_stmtContext ctx) { + return exec.stmt.values(ctx); + } + + /** + * WHILE statement + */ + @Override + public Integer visitWhile_stmt(org.apache.doris.hplsql.HplsqlParser.While_stmtContext ctx) { + return exec.stmt.while_(ctx); + } + + @Override + public Integer visitUnconditional_loop_stmt( + org.apache.doris.hplsql.HplsqlParser.Unconditional_loop_stmtContext ctx) { + return exec.stmt.unconditionalLoop(ctx); + } + + /** + * FOR cursor statement + */ + @Override + public Integer visitFor_cursor_stmt(org.apache.doris.hplsql.HplsqlParser.For_cursor_stmtContext ctx) { + return exec.stmt.forCursor(ctx); + } + + /** + * FOR (integer range) statement + */ + @Override + public Integer visitFor_range_stmt(org.apache.doris.hplsql.HplsqlParser.For_range_stmtContext ctx) { + return exec.stmt.forRange(ctx); + } + + /** + * EXEC, EXECUTE and EXECUTE IMMEDIATE statement to execute dynamic SQL + */ + @Override + public Integer visitExec_stmt(org.apache.doris.hplsql.HplsqlParser.Exec_stmtContext ctx) { + exec.inCallStmt = true; + Integer rc = exec.stmt.exec(ctx); + exec.inCallStmt = false; + return rc; + } + + /** + * CALL statement + */ + @Override + public Integer visitCall_stmt(org.apache.doris.hplsql.HplsqlParser.Call_stmtContext ctx) { + exec.inCallStmt = true; + try { + if (ctx.expr_func() != null) { + functionCall(ctx, ctx.expr_func().ident(), ctx.expr_func().expr_func_params()); + } else if (ctx.expr_dot() != null) { + visitExpr_dot(ctx.expr_dot()); + } else if (ctx.ident() != null) { + functionCall(ctx, ctx.ident(), null); + } + } finally { + exec.inCallStmt = 
false; + } + return 0; + } + + /** + * EXIT statement (leave the specified loop with a condition) + */ + @Override + public Integer visitExit_stmt(org.apache.doris.hplsql.HplsqlParser.Exit_stmtContext ctx) { + return exec.stmt.exit(ctx); + } + + /** + * BREAK statement (leave the innermost loop unconditionally) + */ + @Override + public Integer visitBreak_stmt(org.apache.doris.hplsql.HplsqlParser.Break_stmtContext ctx) { + return exec.stmt.break_(ctx); + } + + /** + * LEAVE statement (leave the specified loop unconditionally) + */ + @Override + public Integer visitLeave_stmt(org.apache.doris.hplsql.HplsqlParser.Leave_stmtContext ctx) { + return exec.stmt.leave(ctx); + } + + /** + * PRINT statement + */ + @Override + public Integer visitPrint_stmt(org.apache.doris.hplsql.HplsqlParser.Print_stmtContext ctx) { + return exec.stmt.print(ctx); + } + + /** + * QUIT statement + */ + @Override + public Integer visitQuit_stmt(org.apache.doris.hplsql.HplsqlParser.Quit_stmtContext ctx) { + return exec.stmt.quit(ctx); + } + + /** + * SIGNAL statement + */ + @Override + public Integer visitSignal_stmt(org.apache.doris.hplsql.HplsqlParser.Signal_stmtContext ctx) { + return exec.stmt.signal(ctx); + } + + /** + * SUMMARY statement + */ + @Override + public Integer visitSummary_stmt(org.apache.doris.hplsql.HplsqlParser.Summary_stmtContext ctx) { + return exec.stmt.summary(ctx); + } + + /** + * RESIGNAL statement + */ + @Override + public Integer visitResignal_stmt(org.apache.doris.hplsql.HplsqlParser.Resignal_stmtContext ctx) { + return exec.stmt.resignal(ctx); + } + + /** + * RETURN statement + */ + @Override + public Integer visitReturn_stmt(org.apache.doris.hplsql.HplsqlParser.Return_stmtContext ctx) { + return exec.stmt.return_(ctx); + } + + /** + * SET session options + */ + @Override + public Integer visitSet_current_schema_option( + org.apache.doris.hplsql.HplsqlParser.Set_current_schema_optionContext ctx) { + return exec.stmt.setCurrentSchema(ctx); + } + + /** + * TRUNCATE 
statement + */ + @Override + public Integer visitTruncate_stmt(org.apache.doris.hplsql.HplsqlParser.Truncate_stmtContext ctx) { + return exec.stmt.truncate(ctx); + } + + @Override + public Integer visitCreate_table_type_stmt(org.apache.doris.hplsql.HplsqlParser.Create_table_type_stmtContext ctx) { + String name = ctx.ident().getText(); + String index = ctx.dtype().getText(); + if (!"BINARY_INTEGER".equalsIgnoreCase(index)) { + throw new TypeException(ctx, "Unsupported table index: " + index + " Use: BINARY_INTEGER"); + } + org.apache.doris.hplsql.HplsqlParser.Tbl_typeContext tblType = ctx.tbl_type(); + if (tblType.sql_type() != null) { + String dbTable = tblType.sql_type().qident().getText(); + if (tblType.sql_type().T_ROWTYPE() != null) { + Row rowType = meta.getRowDataType(ctx, exec.conf.defaultConnection, dbTable); + exec.addType(new TableClass(name, rowType.columnDefinitions(), true)); + } else if (dbTable.contains(".")) { // column type + String column = dbTable.substring(dbTable.indexOf(".") + 1); + String colType = meta.getDataType(ctx, exec.conf.defaultConnection, dbTable); + exec.addType(new TableClass(name, + Collections.singletonList(new ColumnDefinition(column, ColumnType.parse(colType))), false)); + } else { + throw new TypeException(ctx, "Invalid table type attribute. 
Expected %TYPE or %ROWTYPE"); + } + if (trace) { + trace(ctx, "CREATE TABLE TYPE: " + name + " TYPE: " + dbTable + " INDEX: " + index); + } + } else { + String colType = tblType.dtype().getText(); + exec.addType( + new TableClass(name, Collections.singletonList(ColumnDefinition.unnamed(ColumnType.parse(colType))), + false)); + if (trace) { + trace(ctx, "CREATE TABLE TYPE: " + name + " TYPE: " + colType + " INDEX: " + index); + } + } + return 1; + } + + private void addType(TableClass tableClass) { + types.put(tableClass.typeName(), tableClass); + } + + public TableClass getType(String name) { + return types.get(name); + } + + /** + * MAP OBJECT statement + */ + @Override + public Integer visitMap_object_stmt(org.apache.doris.hplsql.HplsqlParser.Map_object_stmtContext ctx) { + String source = ctx.ident(0).getText(); + String target = null; + String conn = null; + if (ctx.T_TO() != null) { + target = ctx.ident(1).getText(); + exec.objectMap.put(source.toUpperCase(), target); + } + if (ctx.T_AT() != null) { + if (ctx.T_TO() == null) { + conn = ctx.ident(1).getText(); + } else { + conn = ctx.ident(2).getText(); + } + exec.objectConnMap.put(source.toUpperCase(), conn); + } + if (trace) { + String log = "MAP OBJECT " + source; + if (target != null) { + log += " AS " + target; + } + if (conn != null) { + log += " AT " + conn; + } + trace(ctx, log); + } + return 0; + } + + /** + * UPDATE statement + */ + @Override + public Integer visitUpdate_stmt(org.apache.doris.hplsql.HplsqlParser.Update_stmtContext ctx) { + return stmt.update(ctx); + } + + /** + * DELETE statement + */ + @Override + public Integer visitDelete_stmt(org.apache.doris.hplsql.HplsqlParser.Delete_stmtContext ctx) { + return stmt.delete(ctx); + } + + /** + * MERGE statement + */ + @Override + public Integer visitMerge_stmt(org.apache.doris.hplsql.HplsqlParser.Merge_stmtContext ctx) { + return stmt.merge(ctx); + } + + /** + * Run a Hive command line + */ + @Override + public Integer visitHive(@NotNull 
org.apache.doris.hplsql.HplsqlParser.HiveContext ctx) { + trace(ctx, "HIVE"); + ArrayList cmd = new ArrayList<>(); + cmd.add("hive"); + Var params = new Var(Var.Type.STRINGLIST, cmd); + stackPush(params); + visitChildren(ctx); + stackPop(); + try { + String[] cmdarr = new String[cmd.size()]; + cmd.toArray(cmdarr); + if (trace) { + trace(ctx, "HIVE Parameters: " + Utils.toString(cmdarr, ' ')); + } + if (!offline) { + Process p = Runtime.getRuntime().exec(cmdarr); + new StreamGobbler(p.getInputStream(), console).start(); + new StreamGobbler(p.getErrorStream(), console).start(); + int rc = p.waitFor(); + if (trace) { + trace(ctx, "HIVE Process exit code: " + rc); + } + } + } catch (Exception e) { + setSqlCode(SqlCodes.ERROR); + signal(Signal.Type.SQLEXCEPTION, e.getMessage(), e); + return -1; + } + return 0; + } + + @Override + @SuppressWarnings("unchecked") + public Integer visitHive_item(org.apache.doris.hplsql.HplsqlParser.Hive_itemContext ctx) { + Var params = stackPeek(); + ArrayList a = (ArrayList) params.value; + String param = ctx.getChild(1).getText(); + switch (param) { + case "e": + a.add("-e"); + a.add(evalPop(ctx.expr()).toString()); + break; + case "f": + a.add("-f"); + a.add(evalPop(ctx.expr()).toString()); + break; + case "hiveconf": + a.add("-hiveconf"); + a.add(ctx.L_ID().toString() + "=" + evalPop(ctx.expr()).toString()); + break; + default: + } + return 0; + } + + /** + * Executing OS command + */ + @Override + public Integer visitHost_cmd(org.apache.doris.hplsql.HplsqlParser.Host_cmdContext ctx) { + trace(ctx, "HOST"); + execHost(ctx, ctx.start.getInputStream().getText( + new org.antlr.v4.runtime.misc.Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex()))); + return 0; + } + + @Override + public Integer visitHost_stmt(org.apache.doris.hplsql.HplsqlParser.Host_stmtContext ctx) { + trace(ctx, "HOST"); + execHost(ctx, evalPop(ctx.expr()).toString()); + return 0; + } + + public void execHost(ParserRuleContext ctx, String cmd) { + try { + if 
(trace) { + trace(ctx, "HOST Command: " + cmd); + } + Process p = Runtime.getRuntime().exec(cmd); + new StreamGobbler(p.getInputStream(), console).start(); + new StreamGobbler(p.getErrorStream(), console).start(); + int rc = p.waitFor(); + if (trace) { + trace(ctx, "HOST Process exit code: " + rc); + } + setHostCode(rc); + } catch (Exception e) { + setHostCode(1); + signal(Signal.Type.SQLEXCEPTION); + } + } + + /** + * Standalone expression (as a statement) + */ + @Override + public Integer visitExpr_stmt(org.apache.doris.hplsql.HplsqlParser.Expr_stmtContext ctx) { + visitChildren(ctx); + return 0; + } + + /** + * String concatenation operator + */ + @Override + public Integer visitExpr_concat(org.apache.doris.hplsql.HplsqlParser.Expr_concatContext ctx) { + if (exec.buildSql) { + exec.expr.operatorConcatSql(ctx); + } else { + exec.expr.operatorConcat(ctx); + } + return 0; + } + + @Override + public Integer visitExpr_dot_method_call(org.apache.doris.hplsql.HplsqlParser.Expr_dot_method_callContext ctx) { + if (exec.buildSql) { + exec.stackPush(new Var(Var.Type.IDENT, ctx.getText())); + return 0; + } + Var var = ctx.ident() != null + ? 
findVariable(ctx.ident().getText()) + : evalPop(ctx.expr_func(0)); + + if (var == null && ctx.ident() != null) { + Package pkg = findPackage(ctx.ident().getText()); + String pkgFuncName = ctx.expr_func(0).ident().getText().toUpperCase(); + boolean executed = pkg.execFunc(pkgFuncName, ctx.expr_func(0).expr_func_params()); + Package packCallContext = exec.getPackageCallContext(); + if (!executed && packCallContext != null) { + packCallContext.execFunc(pkgFuncName, ctx.expr_func(0).expr_func_params()); + } + return 0; + } + + org.apache.doris.hplsql.HplsqlParser.Expr_funcContext method = ctx.expr_func(ctx.expr_func().size() - 1); + switch (var.type) { + case HPL_OBJECT: + Var result = dispatch(ctx, (HplObject) var.value, method.ident().getText(), method.expr_func_params()); + stackPush(result); + return 0; + default: + throw new TypeException(ctx, var.type + " is not an object"); + } + } + + @Override + public Integer visitExpr_dot_property_access( + org.apache.doris.hplsql.HplsqlParser.Expr_dot_property_accessContext ctx) { + if (exec.buildSql) { + exec.stackPush(new Var(Var.Type.IDENT, ctx.getText())); + return 0; + } + Var var = ctx.expr_func() != null + ? 
evalPop(ctx.expr_func()) + : findVariable(ctx.ident(0).getText()); + String property = ctx.ident(ctx.ident().size() - 1).getText(); + + if (var == null && ctx.expr_func() == null) { + Package pkg = findPackage(ctx.ident(0).getText()); + Var variable = pkg.findVariable(property); + if (variable != null) { + stackPush(variable); + } else { + Package packCallContext = exec.getPackageCallContext(); + stackPush(packCallContext.findVariable(property)); + } + return 0; + } + + switch (var.type) { + case HPL_OBJECT: + Var result = dispatch(ctx, (HplObject) var.value, property, Collections.emptyList()); + stackPush(result); + return 0; + case ROW: + stackPush(((Row) var.value).getValue(property)); + return 0; + default: + throw new TypeException(ctx, var.type + " is not an object/row"); + } + } + + /** + * Simple CASE expression + */ + @Override + public Integer visitExpr_case_simple(org.apache.doris.hplsql.HplsqlParser.Expr_case_simpleContext ctx) { + if (exec.buildSql) { + exec.expr.execSimpleCaseSql(ctx); + } else { + exec.expr.execSimpleCase(ctx); + } + return 0; + } + + /** + * Searched CASE expression + */ + @Override + public Integer visitExpr_case_searched(org.apache.doris.hplsql.HplsqlParser.Expr_case_searchedContext ctx) { + if (exec.buildSql) { + exec.expr.execSearchedCaseSql(ctx); + } else { + exec.expr.execSearchedCase(ctx); + } + return 0; + } + + /** + * GET DIAGNOSTICS EXCEPTION statement + */ + @Override + public Integer visitGet_diag_stmt_exception_item( + org.apache.doris.hplsql.HplsqlParser.Get_diag_stmt_exception_itemContext ctx) { + return exec.stmt.getDiagnosticsException(ctx); + } + + /** + * GET DIAGNOSTICS ROW_COUNT statement + */ + @Override + public Integer visitGet_diag_stmt_rowcount_item( + org.apache.doris.hplsql.HplsqlParser.Get_diag_stmt_rowcount_itemContext ctx) { + return exec.stmt.getDiagnosticsRowCount(ctx); + } + + /** + * GRANT statement + */ + @Override + public Integer 
visitGrant_stmt(org.apache.doris.hplsql.HplsqlParser.Grant_stmtContext ctx) { + trace(ctx, "GRANT"); + return 0; + } + + /** + * Label + */ + @Override + public Integer visitLabel(org.apache.doris.hplsql.HplsqlParser.LabelContext ctx) { + if (ctx.L_ID() != null) { + exec.labels.push(ctx.L_ID().toString()); + } else { + String label = ctx.L_LABEL().getText(); + if (label.endsWith(":")) { + label = label.substring(0, label.length() - 1); + } + exec.labels.push(label); + } + return 0; + } + + /** + * Identifier + */ + @Override + public Integer visitIdent(org.apache.doris.hplsql.HplsqlParser.IdentContext ctx) { + boolean hasSub = false; + String ident = ctx.getText(); + String actualIdent = ident; + if (ident.startsWith("-")) { + hasSub = true; + actualIdent = ident.substring(1); + } + + Var var = findVariable(actualIdent); + if (var != null) { + if (!exec.buildSql) { + if (hasSub) { + Var var1 = new Var(var); + var1.negate(); + exec.stackPush(var1); + } else { + exec.stackPush(var); + } + } else { + exec.stackPush(new Var(ident, Var.Type.STRING, var.toSqlString())); + } + } else { + if (exec.buildSql || exec.inCallStmt) { + exec.stackPush(new Var(Var.Type.IDENT, ident)); + } else { + ident = ident.toUpperCase(); + if (!exec.functions.exec(ident, null)) { + throw new UndefinedIdentException(ctx, ident); + } + } + } + return 0; + } + + /** + * Single quoted string literal + */ + @Override + public Integer visitSingle_quotedString(org.apache.doris.hplsql.HplsqlParser.Single_quotedStringContext ctx) { + if (exec.buildSql) { + exec.stackPush(ctx.getText()); + } else { + exec.stackPush(Utils.unquoteString(ctx.getText())); + } + return 0; + } + + /** + * Integer literal, signed or unsigned + */ + @Override + public Integer visitInt_number(org.apache.doris.hplsql.HplsqlParser.Int_numberContext ctx) { + exec.stack.push(new Var(Long.valueOf(ctx.getText()))); + return 0; + } + + /** + * Interval expression (INTERVAL '1' DAY i.e) + */ + @Override + public Integer 
visitExpr_interval(org.apache.doris.hplsql.HplsqlParser.Expr_intervalContext ctx) { + int num = evalPop(ctx.expr()).intValue(); + Interval interval = new Interval().set(num, ctx.interval_item().getText()); + stackPush(new Var(interval)); + return 0; + } + + /** + * Decimal literal, signed or unsigned + */ + @Override + public Integer visitDec_number(org.apache.doris.hplsql.HplsqlParser.Dec_numberContext ctx) { + stackPush(new Var(new BigDecimal(ctx.getText()))); + return 0; + } + + /** + * Boolean literal + */ + @Override + public Integer visitBool_literal(org.apache.doris.hplsql.HplsqlParser.Bool_literalContext ctx) { + boolean val = true; + if (ctx.T_FALSE() != null) { + val = false; + } + stackPush(new Var(val)); + return 0; + } + + /** + * NULL constant + */ + @Override + public Integer visitNull_const(org.apache.doris.hplsql.HplsqlParser.Null_constContext ctx) { + stackPush(new Var()); + return 0; + } + + /** + * DATE 'YYYY-MM-DD' literal + */ + @Override + public Integer visitDate_literal(org.apache.doris.hplsql.HplsqlParser.Date_literalContext ctx) { + if (!exec.buildSql) { + String str = evalPop(ctx.string()).toString(); + stackPush(new Var(Var.Type.DATE, Utils.toDate(str))); + } else { + stackPush(getFormattedText(ctx)); + } + return 0; + } + + /** + * TIMESTAMP 'YYYY-MM-DD HH:MI:SS.FFF' literal + */ + @Override + public Integer visitTimestamp_literal(org.apache.doris.hplsql.HplsqlParser.Timestamp_literalContext ctx) { + if (!exec.buildSql) { + String str = evalPop(ctx.string()).toString(); + int len = str.length(); + int precision = 0; + if (len > 19 && len <= 29) { + precision = len - 20; + if (precision > 3) { + precision = 3; + } + } + stackPush(new Var(Utils.toTimestamp(str), precision)); + } else { + stackPush(getFormattedText(ctx)); + } + return 0; + } + + /** + * Get the package context within which the current routine is executed + */ + Package getPackageCallContext() { + Scope cur = exec.currentScope; + while (cur != null) { + if (cur.type == 
Scope.Type.ROUTINE) { + return cur.pack; + } + cur = cur.parent; + } + return null; + } + + /** + * Define the connection profile to execute the current statement + */ + public String getStatementConnection() { + if (exec.stmtConnList.contains(exec.conf.defaultConnection)) { + return exec.conf.defaultConnection; + } else if (!exec.stmtConnList.isEmpty()) { + return exec.stmtConnList.get(0); + } + return exec.conf.defaultConnection; + } + + /** + * Define the connection profile for the specified object + * + * @return + */ + String getObjectConnection(String name) { + String conn = exec.objectConnMap.get(name.toUpperCase()); + if (conn != null) { + return conn; + } + return exec.conf.defaultConnection; + } + + /** + * Get the connection (open the new connection if not available) + * + * @throws Exception + */ + Connection getConnection(String conn) throws Exception { + if (conn == null || conn.equalsIgnoreCase("default")) { + conn = exec.conf.defaultConnection; + } + return exec.conn.getConnection(conn); + } + + /** + * Return the connection to the pool + */ + void returnConnection(String name, Connection conn) { + exec.conn.returnConnection(name, conn); + } + + /** + * Define the database type by profile name + */ + Conn.Type getConnectionType(String conn) { + return exec.conn.getTypeByProfile(conn); + } + + /** + * Get the current database type + */ + public Conn.Type getConnectionType() { + return getConnectionType(exec.conf.defaultConnection); + } + + /** + * Add managed temporary table + */ + public void addManagedTable(String name, String managedName) { + exec.managedTables.put(name, managedName); + } + + /** + * Get node text including spaces + */ + String getText(ParserRuleContext ctx) { + return ctx.start.getInputStream() + .getText(new org.antlr.v4.runtime.misc.Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex())); + } + + String getText(ParserRuleContext ctx, Token start, Token stop) { + return ctx.start.getInputStream() + .getText(new 
org.antlr.v4.runtime.misc.Interval(start.getStartIndex(), stop.getStopIndex())); + } + + /** + * Append the text preserving the formatting (space symbols) between tokens + */ + void append(StringBuilder str, String appendStr, Token start, Token stop) { + String spaces = start.getInputStream() + .getText(new org.antlr.v4.runtime.misc.Interval(start.getStartIndex(), stop.getStopIndex())); + spaces = spaces.substring(start.getText().length(), spaces.length() - stop.getText().length()); + str.append(spaces); + str.append(appendStr); + } + + void append(StringBuilder str, TerminalNode start, TerminalNode stop) { + String text = start.getSymbol().getInputStream().getText( + new org.antlr.v4.runtime.misc.Interval(start.getSymbol().getStartIndex(), + stop.getSymbol().getStopIndex())); + str.append(text); + } + + /** + * Get the first non-null node + */ + TerminalNode nvl(TerminalNode t1, TerminalNode t2) { + if (t1 != null) { + return t1; + } + return t2; + } + + /** + * Evaluate the expression and pop value from the stack + */ + public Var evalPop(ParserRuleContext ctx) { + visit(ctx); + if (!exec.stack.isEmpty()) { + return exec.stackPop(); + } + return Var.Empty; + } + + /** + * Evaluate the data type and length + */ + String evalPop(org.apache.doris.hplsql.HplsqlParser.DtypeContext type, + org.apache.doris.hplsql.HplsqlParser.Dtype_lenContext len) { + if (isConvert(exec.conf.defaultConnection)) { + return exec.converter.dataType(type, len); + } + return getText(type, type.getStart(), len == null ? 
type.getStop() : len.getStop()); + } + + /** + * Get formatted text between 2 tokens + */ + public static String getFormattedText(ParserRuleContext ctx) { + return ctx.start.getInputStream().getText( + new org.antlr.v4.runtime.misc.Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex())); + } + + /** + * Flag whether executed from UDF or not + */ + public void setUdfRun(boolean udfRun) { + this.udfRun = udfRun; + } + + /** + * Whether on-the-fly SQL conversion is required for the connection + */ + boolean isConvert(String connName) { + return exec.conf.getConnectionConvert(connName); + } + + /** + * Increment the row count + */ + public int incRowCount() { + return exec.rowCount++; + } + + /** + * Set the row count + */ + public void setRowCount(int rowCount) { + exec.rowCount = rowCount; + } + + /** + * Trace information + */ + public void trace(ParserRuleContext ctx, String message) { + if (!trace) { + return; + } + if (ctx != null) { + console.printLine("Ln:" + ctx.getStart().getLine() + " " + message); + } else { + console.printLine(message); + } + } + + /** + * Trace values retrived from the database + */ + public void trace(ParserRuleContext ctx, Var var, Metadata meta, int idx) { + if (var.type != Var.Type.ROW) { + trace(ctx, "COLUMN: " + meta.columnName(idx) + ", " + meta.columnTypeName(idx)); + trace(ctx, "SET " + var.getName() + " = " + var.toString()); + } else { + Row row = (Row) var.value; + int cnt = row.size(); + for (int j = 1; j <= cnt; j++) { + Var v = row.getValue(j - 1); + trace(ctx, "COLUMN: " + meta.columnName(j) + ", " + meta.columnTypeName(j)); + trace(ctx, "SET " + v.getName() + " = " + v.toString()); + } + } + } + + /** + * Informational messages + */ + public void info(ParserRuleContext ctx, String message) { + if (!info) { + return; + } + if (ctx != null) { + console.printError("Ln:" + ctx.getStart().getLine() + " " + message); + } else { + console.printError(message); + } + } + + /** + * Error message + */ + public void 
error(ParserRuleContext ctx, String message) { + if (ctx != null) { + console.printError("Ln:" + ctx.getStart().getLine() + " " + message); + } else { + console.printError(message); + } + } + + public Stack getStack() { + return exec.stack; + } + + public int getRowCount() { + return exec.rowCount; + } + + public Conf getConf() { + return exec.conf; + } + + public Meta getMeta() { + return exec.meta; + } + + public boolean getTrace() { + return exec.trace; + } + + public boolean getInfo() { + return exec.info; + } + + public boolean getOffline() { + return exec.offline; + } + + public Console getConsole() { + return console; + } + + public void setQueryExecutor(QueryExecutor queryExecutor) { + this.queryExecutor = queryExecutor; + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/Expression.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/Expression.java new file mode 100644 index 00000000000000..d6930cdd33f9f4 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/Expression.java @@ -0,0 +1,718 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/Expression.java +// and modified by Doris + +package org.apache.doris.hplsql; + +import org.apache.doris.hplsql.Var.Type; +import org.apache.doris.hplsql.exception.HplValidationException; + +import org.antlr.v4.runtime.ParserRuleContext; + +import java.math.BigDecimal; +import java.sql.Date; +import java.sql.Timestamp; +import java.util.Calendar; + +/** + * Expressions + */ +public class Expression { + + Exec exec; + boolean trace = false; + + Expression(Exec e) { + exec = e; + trace = exec.getTrace(); + } + + /** + * Evaluate an expression + */ + public void exec(org.apache.doris.hplsql.HplsqlParser.ExprContext ctx) { + try { + if (ctx.T_ADD() != null) { + operatorAdd(ctx); + } else if (ctx.T_SUB() != null) { + operatorSub(ctx); + } else if (ctx.T_MUL() != null) { + operatorMultiply(ctx); + } else if (ctx.T_DIV() != null) { + operatorDiv(ctx); + } else if (ctx.interval_item() != null) { + createInterval(ctx); + } else { + visitChildren(ctx); + } + } catch (HplValidationException e) { + throw e; + } catch (Exception e) { + exec.signal(e); + } + } + + /** + * Evaluate an expression in executable SQL statement + */ + public void execSql(org.apache.doris.hplsql.HplsqlParser.ExprContext ctx) { + StringBuilder sql = new StringBuilder(); + if (ctx.T_OPEN_P() != null) { + sql.append("("); + if (ctx.select_stmt() != null) { + exec.append(sql, evalPop(ctx.select_stmt()).toString(), ctx.T_OPEN_P().getSymbol(), + ctx.select_stmt().getStart()); + exec.append(sql, ctx.T_CLOSE_P().getText(), ctx.select_stmt().stop, ctx.T_CLOSE_P().getSymbol()); + } else { + sql.append(evalPop(ctx.expr(0)).toString()); + sql.append(")"); + } + } else if (ctx.T_MUL() != null) { + sql.append(evalPop(ctx.expr(0)).toString()); + sql.append(" * "); + sql.append(evalPop(ctx.expr(1)).toString()); + } else if (ctx.T_DIV() != null) { + sql.append(evalPop(ctx.expr(0)).toString()); + 
sql.append(" / "); + sql.append(evalPop(ctx.expr(1)).toString()); + } else if (ctx.T_ADD() != null) { + sql.append(evalPop(ctx.expr(0)).toString()); + sql.append(" + "); + sql.append(evalPop(ctx.expr(1)).toString()); + } else if (ctx.T_SUB() != null) { + sql.append(evalPop(ctx.expr(0)).toString()); + sql.append(" - "); + sql.append(evalPop(ctx.expr(1)).toString()); + } else if (ctx.interval_item() != null) { + sql.append(exec.getFormattedText(ctx)); + } else { + visitChildren(ctx); + sql.append(exec.stackPop().toString()); + } + exec.stackPush(sql); + } + + /** + * Evaluate a boolean expression + */ + public void execBool(org.apache.doris.hplsql.HplsqlParser.Bool_exprContext ctx) { + if (ctx.bool_expr_atom() != null) { + eval(ctx.bool_expr_atom()); + return; + } + Var result = evalPop(ctx.bool_expr(0)); + if (ctx.T_OPEN_P() != null) { + if (ctx.T_NOT() != null) { + result.negate(); + } + } else if (ctx.bool_expr_logical_operator() != null) { + if (ctx.bool_expr_logical_operator().T_AND() != null) { + if (result.isTrue()) { + result = evalPop(ctx.bool_expr(1)); + } + } else if (ctx.bool_expr_logical_operator().T_OR() != null) { + if (!result.isTrue()) { + result = evalPop(ctx.bool_expr(1)); + } + } + } + exec.stackPush(result); + } + + /** + * Evaluate a boolean expression in executable SQL statement + */ + public void execBoolSql(org.apache.doris.hplsql.HplsqlParser.Bool_exprContext ctx) { + StringBuilder sql = new StringBuilder(); + if (ctx.T_OPEN_P() != null) { + if (ctx.T_NOT() != null) { + sql.append(ctx.T_NOT().getText() + " "); + } + sql.append("("); + sql.append(evalPop(ctx.bool_expr(0)).toString()); + sql.append(")"); + } else if (ctx.bool_expr_atom() != null) { + sql.append(evalPop(ctx.bool_expr_atom()).toString()); + } else if (ctx.bool_expr_logical_operator() != null) { + sql.append(evalPop(ctx.bool_expr(0)).toString()); + sql.append(" " + ctx.bool_expr_logical_operator().getText() + " "); + sql.append(evalPop(ctx.bool_expr(1)).toString()); + } + 
exec.stackPush(sql); + } + + /** + * Binary boolean expression + */ + public Integer execBoolBinary(org.apache.doris.hplsql.HplsqlParser.Bool_expr_binaryContext ctx) { + org.apache.doris.hplsql.HplsqlParser.Bool_expr_binary_operatorContext op = ctx.bool_expr_binary_operator(); + if (op.T_EQUAL() != null || op.T_EQUAL2() != null) { + operatorEqual(ctx, true); + } else if (op.T_NOTEQUAL() != null || op.T_NOTEQUAL2() != null) { + operatorEqual(ctx, false); + } else if (op.T_GREATER() != null || op.T_LESS() != null || op.T_GREATEREQUAL() != null + || op.T_LESSEQUAL() != null) { + operatorCompare(ctx, op); + } else { + exec.stackPush(false); + } + return 0; + } + + /** + * Binary boolean expression in executable SQL statement + */ + public Integer execBoolBinarySql(org.apache.doris.hplsql.HplsqlParser.Bool_expr_binaryContext ctx) { + StringBuilder sql = new StringBuilder(); + sql.append(evalPop(ctx.expr(0)).toString()); + sql.append(" " + exec.getFormattedText(ctx.bool_expr_binary_operator()) + " "); + sql.append(evalPop(ctx.expr(1)).toString()); + exec.stackPush(sql); + return 0; + } + + /** + * Unary boolean expression + */ + public Integer execBoolUnary(org.apache.doris.hplsql.HplsqlParser.Bool_expr_unaryContext ctx) { + boolean val = false; + if (ctx.T_IS() != null) { + val = evalPop(ctx.expr(0)).isNull(); + if (ctx.T_NOT() != null) { + val = !val; + } + } else if (ctx.T_BETWEEN() != null) { + Var v = evalPop(ctx.expr(0)); + Var v1 = evalPop(ctx.expr(1)); + int cmp = v.compareTo(v1); + if (cmp >= 0) { + Var v2 = evalPop(ctx.expr(2)); + cmp = v.compareTo(v2); + if (cmp <= 0) { + val = true; + } + } + } + exec.stackPush(val); + return 0; + } + + /** + * Unary boolean expression in executable SQL statement + */ + public Integer execBoolUnarySql(org.apache.doris.hplsql.HplsqlParser.Bool_expr_unaryContext ctx) { + StringBuilder sql = new StringBuilder(); + if (ctx.T_IS() != null) { + sql.append(evalPop(ctx.expr(0)).toString()); + sql.append(" " + exec.getText(ctx, 
ctx.T_IS().getSymbol(), ctx.T_NULL().getSymbol())); + } else if (ctx.T_BETWEEN() != null) { + sql.append(evalPop(ctx.expr(0)).toString()); + sql.append(" " + ctx.T_BETWEEN().getText() + " "); + sql.append(evalPop(ctx.expr(1)).toString()); + sql.append(" " + ctx.T_AND().getText() + " "); + sql.append(evalPop(ctx.expr(2)).toString()); + } else if (ctx.T_EXISTS() != null) { + exec.append(sql, exec.nvl(ctx.T_NOT(), ctx.T_EXISTS()), ctx.T_OPEN_P()); + exec.append(sql, evalPop(ctx.select_stmt()).toString(), ctx.T_OPEN_P().getSymbol(), + ctx.select_stmt().getStart()); + exec.append(sql, ctx.T_CLOSE_P().getText(), ctx.select_stmt().stop, ctx.T_CLOSE_P().getSymbol()); + } else if (ctx.bool_expr_single_in() != null) { + singleInClauseSql(ctx.bool_expr_single_in(), sql); + } else if (ctx.bool_expr_multi_in() != null) { + multiInClauseSql(ctx.bool_expr_multi_in(), sql); + } + exec.stackPush(sql); + return 0; + } + + /** + * Single value IN clause in executable SQL statement + */ + public void singleInClauseSql(org.apache.doris.hplsql.HplsqlParser.Bool_expr_single_inContext ctx, + StringBuilder sql) { + sql.append(evalPop(ctx.expr(0)).toString() + " "); + exec.append(sql, exec.nvl(ctx.T_NOT(), ctx.T_IN()), ctx.T_OPEN_P()); + if (ctx.select_stmt() != null) { + exec.append(sql, evalPop(ctx.select_stmt()).toString(), ctx.T_OPEN_P().getSymbol(), + ctx.select_stmt().getStart()); + exec.append(sql, ctx.T_CLOSE_P().getText(), ctx.select_stmt().stop, ctx.T_CLOSE_P().getSymbol()); + } else { + int cnt = ctx.expr().size(); + for (int i = 1; i < cnt; i++) { + sql.append(evalPop(ctx.expr(i)).toString()); + if (i + 1 < cnt) { + sql.append(", "); + } + } + sql.append(")"); + } + } + + /** + * Multi-value IN clause in executable SQL statement + */ + public void multiInClauseSql(org.apache.doris.hplsql.HplsqlParser.Bool_expr_multi_inContext ctx, + StringBuilder sql) { + int cnt = ctx.expr().size(); + sql.append("("); + for (int i = 0; i < cnt; i++) { + 
sql.append(evalPop(ctx.expr(i)).toString()); + if (i + 1 < cnt) { + sql.append(", "); + } + } + sql.append(")"); + if (ctx.T_NOT() != null) { + sql.append(" " + ctx.T_NOT().getText()); + } + sql.append(" " + ctx.T_IN().getText() + " ("); + if (ctx.select_stmt() != null) { + sql.append(evalPop(ctx.select_stmt())); + } + sql.append(")"); + } + + /** + * Cursor attribute %ISOPEN, %FOUND and %NOTFOUND + */ + public void execCursorAttribute(org.apache.doris.hplsql.HplsqlParser.Expr_cursor_attributeContext ctx) { + String name = ctx.ident().getText(); + Var val = new Var(Var.Type.BOOL); + Var cursorVar = exec.findCursor(name); + if (cursorVar != null) { + Cursor cursor = (Cursor) cursorVar.value; + if (cursor != null) { + if (ctx.T_ISOPEN() != null) { + val.setValue(cursor.isOpen()); + } else if (ctx.T_FOUND() != null) { + val.setValue(cursor.isFound()); + } else if (ctx.T_NOTFOUND() != null) { + val.setValue(cursor.isNotFound()); + } + } + exec.stackPush(val); + } else { + trace(ctx, "Cursor not found: " + name); + exec.signal(org.apache.doris.hplsql.Signal.Type.SQLEXCEPTION); + } + } + + /** + * Addition operator + */ + public void operatorAdd(org.apache.doris.hplsql.HplsqlParser.ExprContext ctx) { + Var v1 = evalPop(ctx.expr(0)); + Var v2 = evalPop(ctx.expr(1)); + if (v1.value == null || v2.value == null) { + evalNull(); + } else if (v1.type == Type.BIGINT && v2.type == Type.BIGINT) { + exec.stackPush(new Var((long) v1.value + (long) v2.value)); + } else if (v1.type == Type.BIGINT && v2.type == Type.DECIMAL) { + exec.stackPush(new Var((new BigDecimal((long) v1.value)).add((BigDecimal) v2.value))); + } else if (v1.type == Type.BIGINT && v2.type == Type.DOUBLE) { + exec.stackPush(new Var((long) v1.value + (double) v2.value)); + } else if (v1.type == Type.DECIMAL && v2.type == Type.DECIMAL) { + exec.stackPush(new Var(((BigDecimal) v1.value).add((BigDecimal) v2.value))); + } else if (v1.type == Type.DECIMAL && v2.type == Type.BIGINT) { + exec.stackPush(new 
Var(((BigDecimal) v1.value).add(new BigDecimal((long) v2.value)))); + } else if (v1.type == Type.DECIMAL && v2.type == Type.DOUBLE) { + exec.stackPush(new Var(((BigDecimal) v1.value).add(new BigDecimal((double) v2.value)))); + } else if (v1.type == Type.DOUBLE && v2.type == Type.DOUBLE) { + exec.stackPush(new Var((double) v1.value + (double) v2.value)); + } else if (v1.type == Type.DOUBLE && v2.type == Type.DECIMAL) { + exec.stackPush(new Var((new BigDecimal((double) v1.value)).add((BigDecimal) v2.value))); + } else if (v1.type == Type.DOUBLE && v2.type == Type.BIGINT) { + exec.stackPush(new Var(((double) v1.value) + (long) v2.value)); + } else if (v1.type == Type.BIGINT && v2.type == Type.DATE) { + exec.stackPush(changeDateByInt((Date) v2.value, (long) v1.value, true /*add*/)); + } else if (v1.type == Type.DATE && v2.type == Type.BIGINT) { + exec.stackPush(changeDateByInt((Date) v1.value, (long) v2.value, true /*add*/)); + } else if (v1.type == Type.STRING && v2.type == Type.STRING) { + exec.stackPush(((String) v1.value) + ((String) v2.value)); + } else if (v1.type == Type.DATE && v2.type == Type.INTERVAL) { + exec.stackPush(new Var(((Interval) v2.value).dateChange((Date) v1.value, true /*add*/))); + } else if (v1.type == Type.TIMESTAMP && v2.type == Type.INTERVAL) { + exec.stackPush( + new Var(((Interval) v2.value).timestampChange((Timestamp) v1.value, true /*add*/), v1.scale)); + } else { + unsupported(ctx, v1, v2, "+"); + } + } + + /** + * Subtraction operator + */ + public void operatorSub(org.apache.doris.hplsql.HplsqlParser.ExprContext ctx) { + Var v1 = evalPop(ctx.expr(0)); + Var v2 = evalPop(ctx.expr(1)); + if (v1.value == null || v2.value == null) { + evalNull(); + } else if (v1.type == Type.BIGINT && v2.type == Type.BIGINT) { + exec.stackPush(new Var((long) v1.value - (long) v2.value)); + } else if (v1.type == Type.BIGINT && v2.type == Type.DECIMAL) { + exec.stackPush(new Var((new BigDecimal((long) v1.value)).subtract((BigDecimal) v2.value))); + } else 
if (v1.type == Type.BIGINT && v2.type == Type.DOUBLE) { + exec.stackPush(new Var((long) v1.value - (double) v2.value)); + } else if (v1.type == Type.DECIMAL && v2.type == Type.DECIMAL) { + exec.stackPush(new Var(((BigDecimal) v1.value).subtract((BigDecimal) v2.value))); + } else if (v1.type == Type.DECIMAL && v2.type == Type.BIGINT) { + exec.stackPush(new Var(((BigDecimal) v1.value).subtract(new BigDecimal((long) v2.value)))); + } else if (v1.type == Type.DECIMAL && v2.type == Type.DOUBLE) { + exec.stackPush(new Var(((BigDecimal) v1.value).subtract(new BigDecimal((double) v2.value)))); + } else if (v1.type == Type.DOUBLE && v2.type == Type.DOUBLE) { + exec.stackPush(new Var((double) v1.value - (double) v2.value)); + } else if (v1.type == Type.DOUBLE && v2.type == Type.DECIMAL) { + exec.stackPush(new Var((new BigDecimal((double) v1.value)).subtract((BigDecimal) v2.value))); + } else if (v1.type == Type.DOUBLE && v2.type == Type.BIGINT) { + exec.stackPush(new Var(((double) v1.value) - (long) v2.value)); + } else if (v1.type == Type.DATE && v2.type == Type.BIGINT) { + exec.stackPush(changeDateByInt((Date) v1.value, (long) v2.value, false /*subtract*/)); + } else if (v1.type == Type.DATE && v2.type == Type.INTERVAL) { + exec.stackPush(new Var(((Interval) v2.value).dateChange((Date) v1.value, false /*subtract*/))); + } else if (v1.type == Type.TIMESTAMP && v2.type == Type.INTERVAL) { + exec.stackPush( + new Var(((Interval) v2.value).timestampChange((Timestamp) v1.value, false /*subtract*/), v1.scale)); + } else { + unsupported(ctx, v1, v2, "-"); + } + } + + /** + * Multiplication operator + */ + public void operatorMultiply(org.apache.doris.hplsql.HplsqlParser.ExprContext ctx) { + Var v1 = evalPop(ctx.expr(0)); + Var v2 = evalPop(ctx.expr(1)); + if (v1.value == null || v2.value == null) { + evalNull(); + } else if (v1.type == Type.BIGINT && v2.type == Type.BIGINT) { + exec.stackPush(new Var((long) v1.value * (long) v2.value)); + } else if (v1.type == Type.BIGINT && 
v2.type == Type.DECIMAL) { + exec.stackPush(new Var((new BigDecimal((long) v1.value)).multiply((BigDecimal) v2.value))); + } else if (v1.type == Type.BIGINT && v2.type == Type.DOUBLE) { + exec.stackPush(new Var((long) v1.value * (double) v2.value)); + } else if (v1.type == Type.DECIMAL && v2.type == Type.DECIMAL) { + exec.stackPush(new Var(((BigDecimal) v1.value).multiply((BigDecimal) v2.value))); + } else if (v1.type == Type.DECIMAL && v2.type == Type.BIGINT) { + exec.stackPush(new Var(((BigDecimal) v1.value).multiply(new BigDecimal((long) v2.value)))); + } else if (v1.type == Type.DECIMAL && v2.type == Type.DOUBLE) { + exec.stackPush(new Var(((BigDecimal) v1.value).multiply(new BigDecimal((double) v2.value)))); + } else if (v1.type == Type.DOUBLE && v2.type == Type.DOUBLE) { + exec.stackPush(new Var((double) v1.value * (double) v2.value)); + } else if (v1.type == Type.DOUBLE && v2.type == Type.DECIMAL) { + exec.stackPush(new Var((new BigDecimal((double) v1.value)).multiply((BigDecimal) v2.value))); + } else if (v1.type == Type.DOUBLE && v2.type == Type.BIGINT) { + exec.stackPush(new Var(((double) v1.value) * (long) v2.value)); + } else { + unsupported(ctx, v1, v2, "*"); + } + } + + /** + * Division operator + */ + public void operatorDiv(org.apache.doris.hplsql.HplsqlParser.ExprContext ctx) { + Var v1 = evalPop(ctx.expr(0)); + Var v2 = evalPop(ctx.expr(1)); + if (v1.value == null || v2.value == null) { + evalNull(); + } else if (v1.type == Type.BIGINT && v2.type == Type.BIGINT) { + exec.stackPush(new Var((long) v1.value / (long) v2.value)); + } else if (v1.type == Type.BIGINT && v2.type == Type.DECIMAL) { + exec.stackPush(new Var((new BigDecimal((long) v1.value)).divide((BigDecimal) v2.value))); + } else if (v1.type == Type.BIGINT && v2.type == Type.DOUBLE) { + exec.stackPush(new Var((long) v1.value / (double) v2.value)); + } else if (v1.type == Type.DECIMAL && v2.type == Type.DECIMAL) { + exec.stackPush(new Var(((BigDecimal) v1.value).divide((BigDecimal) 
v2.value))); + } else if (v1.type == Type.DECIMAL && v2.type == Type.BIGINT) { + exec.stackPush(new Var(((BigDecimal) v1.value).divide(new BigDecimal((long) v2.value)))); + } else if (v1.type == Type.DECIMAL && v2.type == Type.DOUBLE) { + exec.stackPush(new Var(((BigDecimal) v1.value).divide(new BigDecimal((double) v2.value)))); + } else if (v1.type == Type.DOUBLE && v2.type == Type.DOUBLE) { + exec.stackPush(new Var((double) v1.value / (double) v2.value)); + } else if (v1.type == Type.DOUBLE && v2.type == Type.DECIMAL) { + exec.stackPush(new Var((new BigDecimal((double) v1.value)).divide((BigDecimal) v2.value))); + } else if (v1.type == Type.DOUBLE && v2.type == Type.BIGINT) { + exec.stackPush(new Var(((double) v1.value) / (long) v2.value)); + } else { + unsupported(ctx, v1, v2, "/"); + } + } + + private void unsupported(org.apache.doris.hplsql.HplsqlParser.ExprContext ctx, Var op1, Var op2, String operator) { + String msg = String.format("Unsupported data types in '%s' operator (%s%s%s)", operator, op1.type, operator, + op2.type); + if (ctx != null) { + msg = "Ln:" + ctx.getStart().getLine() + " " + msg; + } + exec.signal(org.apache.doris.hplsql.Signal.Type.UNSUPPORTED_OPERATION, msg); + } + + /** + * Add or subtract the specified number of days from DATE + */ + public Var changeDateByInt(Date d, Long i, boolean add) { + Calendar c = Calendar.getInstance(); + c.setTimeInMillis(d.getTime()); + int days = i.intValue(); + if (!add) { + days *= -1; + } + c.add(Calendar.DAY_OF_MONTH, days); + return new Var(new Date(c.getTimeInMillis())); + } + + /** + * Equality operator + */ + public void operatorEqual(org.apache.doris.hplsql.HplsqlParser.Bool_expr_binaryContext ctx, boolean equal) { + Var v1 = evalPop(ctx.expr(0)); + Var v2 = evalPop(ctx.expr(1)); + boolean eq = v1.equals(v2); + if (!equal) { + eq = !eq; + } + exec.stackPush(eq); + } + + /** + * Comparison operator + */ + public void operatorCompare(org.apache.doris.hplsql.HplsqlParser.Bool_expr_binaryContext ctx, 
+ org.apache.doris.hplsql.HplsqlParser.Bool_expr_binary_operatorContext op) { + Var v1 = evalPop(ctx.expr(0)); + Var v2 = evalPop(ctx.expr(1)); + int cmp = v1.compareTo(v2); + boolean bool = false; + if (op.T_GREATER() != null) { + if (cmp > 0) { + bool = true; + } + } else if (op.T_GREATEREQUAL() != null) { + if (cmp >= 0) { + bool = true; + } + } + if (op.T_LESS() != null) { + if (cmp < 0) { + bool = true; + } + } else if (op.T_LESSEQUAL() != null) { + if (cmp <= 0) { + bool = true; + } + } + exec.stackPush(bool); + } + + /** + * String concatenation operator + */ + public void operatorConcat(org.apache.doris.hplsql.HplsqlParser.Expr_concatContext ctx) { + StringBuilder val = new StringBuilder(); + int cnt = ctx.expr_concat_item().size(); + boolean nulls = true; + for (int i = 0; i < cnt; i++) { + Var c = evalPop(ctx.expr_concat_item(i)); + if (!c.isNull()) { + val.append(c.toString()); + nulls = false; + } + } + if (nulls) { + evalNull(); + } else { + evalString(val); + } + } + + /** + * String concatenation operator in executable SQL statement + */ + public void operatorConcatSql(org.apache.doris.hplsql.HplsqlParser.Expr_concatContext ctx) { + StringBuilder sql = new StringBuilder(); + sql.append("CONCAT("); + int cnt = ctx.expr_concat_item().size(); + for (int i = 0; i < cnt; i++) { + sql.append(evalPop(ctx.expr_concat_item(i)).toString()); + if (i + 1 < cnt) { + sql.append(", "); + } + } + sql.append(")"); + exec.stackPush(sql); + } + + /** + * Simple CASE expression + */ + public void execSimpleCase(org.apache.doris.hplsql.HplsqlParser.Expr_case_simpleContext ctx) { + int i = 1; + int cnt = ctx.expr().size(); + boolean found = false; + Var val = evalPop(ctx.expr(0)); + while (i < cnt) { + Var when = evalPop(ctx.expr(i)); + if (val.compareTo(when) == 0) { + visit(ctx.expr(i + 1)); + found = true; + break; + } + i += 2; + } + if (!found) { + if (ctx.T_ELSE() != null) { + visit(ctx.expr(cnt - 1)); + } else { + evalNull(); + } + } + } + + /** + * Simple CASE 
expression in executable SQL statement + */ + public void execSimpleCaseSql(org.apache.doris.hplsql.HplsqlParser.Expr_case_simpleContext ctx) { + StringBuilder sql = new StringBuilder(); + sql.append("CASE "); + sql.append(evalPop(ctx.expr(0)).toString()); + int cnt = ctx.T_WHEN().size(); + for (int i = 0; i < cnt; i++) { + sql.append(" WHEN "); + sql.append(evalPop(ctx.expr(i * 2 + 1)).toString()); + sql.append(" THEN "); + sql.append(evalPop(ctx.expr(i * 2 + 2)).toString()); + } + if (ctx.T_ELSE() != null) { + sql.append(" ELSE "); + sql.append(evalPop(ctx.expr(cnt * 2 + 1)).toString()); + } + sql.append(" END"); + exec.stackPush(sql); + } + + /** + * Searched CASE expression + */ + public void execSearchedCase(org.apache.doris.hplsql.HplsqlParser.Expr_case_searchedContext ctx) { + int cnt = ctx.bool_expr().size(); + boolean found = false; + for (int i = 0; i < cnt; i++) { + if (evalPop(ctx.bool_expr(i)).isTrue()) { + visit(ctx.expr(i)); + found = true; + break; + } + } + if (!found) { + if (ctx.T_ELSE() != null) { + visit(ctx.expr(cnt)); + } else { + evalNull(); + } + } + } + + /** + * Searched CASE expression in executable SQL statement + */ + public void execSearchedCaseSql(org.apache.doris.hplsql.HplsqlParser.Expr_case_searchedContext ctx) { + StringBuilder sql = new StringBuilder(); + sql.append("CASE"); + int cnt = ctx.T_WHEN().size(); + for (int i = 0; i < cnt; i++) { + sql.append(" WHEN "); + sql.append(evalPop(ctx.bool_expr(i)).toString()); + sql.append(" THEN "); + sql.append(evalPop(ctx.expr(i)).toString()); + } + if (ctx.T_ELSE() != null) { + sql.append(" ELSE "); + sql.append(evalPop(ctx.expr(cnt)).toString()); + } + sql.append(" END"); + exec.stackPush(sql); + } + + /** + * Create an interval variable + */ + public void createInterval(org.apache.doris.hplsql.HplsqlParser.ExprContext ctx) { + int num = evalPop(ctx.expr(0)).intValue(); + Interval interval = new Interval().set(num, ctx.interval_item().getText()); + exec.stackPush(new Var(interval)); + 
} + + /** + * Evaluate the expression and push the value to the stack + */ + void eval(ParserRuleContext ctx) { + visit(ctx); + } + + /** + * Evaluate the expression and pop value from the stack + */ + Var evalPop(ParserRuleContext ctx) { + visit(ctx); + if (!exec.stack.isEmpty()) { + return exec.stackPop(); + } + return Var.Empty; + } + + /** + * Evaluate the expression to specified String value + */ + void evalString(String string) { + exec.stackPush(new Var(string)); + } + + void evalString(StringBuilder string) { + evalString(string.toString()); + } + + /** + * Evaluate the expression to NULL + */ + void evalNull() { + exec.stackPush(Var.Null); + } + + /** + * Execute rules + */ + Integer visit(ParserRuleContext ctx) { + return exec.visit(ctx); + } + + /** + * Execute children rules + */ + Integer visitChildren(ParserRuleContext ctx) { + return exec.visitChildren(ctx); + } + + /** + * Trace information + */ + public void trace(ParserRuleContext ctx, String message) { + exec.trace(ctx, message); + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/File.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/File.java new file mode 100644 index 00000000000000..cb5c7e75b90095 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/File.java @@ -0,0 +1,157 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/File.java +// and modified by Doris + +package org.apache.doris.hplsql; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FSDataInputStream; +import org.apache.hadoop.fs.FSDataOutputStream; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; + +import java.io.IOException; + +/** + * HDFS file operations + */ +public class File { + Path path; + FileSystem fs; + FSDataInputStream in; + FSDataOutputStream out; + + /** + * Create FileSystem object + */ + public FileSystem createFs() throws IOException { + fs = FileSystem.get(new Configuration()); + return fs; + } + + /** + * Create a file + */ + public FSDataOutputStream create(boolean overwrite) { + try { + if (fs == null) { + fs = createFs(); + } + out = fs.create(path, overwrite); + } catch (IOException e) { + e.printStackTrace(); + } + return out; + } + + public FSDataOutputStream create(String dir, String file, boolean overwrite) { + path = new Path(dir, file); + return create(overwrite); + } + + public FSDataOutputStream create(String file, boolean overwrite) { + path = new Path(file); + return create(overwrite); + } + + /** + * Open an existing file + */ + public void open(String dir, String file) { + path = new Path(dir, file); + try { + if (fs == null) { + fs = createFs(); + } + in = fs.open(path); + } catch (IOException e) { + e.printStackTrace(); + } + } + + /** + * Check if the directory or file exists + * + * @throws 
IOException + */ + boolean exists(String name) throws IOException { + if (fs == null) { + fs = createFs(); + } + return fs.exists(new Path(name)); + } + + /** + * Read a character from input + * + * @throws IOException + */ + public char readChar() throws IOException { + return in.readChar(); + } + + /** + * Write string to file + */ + public void writeString(String str) { + try { + out.writeChars(str); + } catch (IOException e) { + e.printStackTrace(); + } + } + + /** + * Close a file + */ + public void close() { + try { + if (in != null) { + in.close(); + } + if (out != null) { + out.close(); + } + in = null; + out = null; + path = null; + fs = null; + } catch (IOException e) { + e.printStackTrace(); + } + } + + /** + * Get the fully-qualified path + * NOTE: FileSystem.resolvePath() is not available in Hadoop 1.2.1 + * + * @throws IOException + */ + public Path resolvePath(Path path) throws IOException { + return fs.getFileStatus(path).getPath(); + } + + @Override + public String toString() { + if (path != null) { + return "FILE <" + path.toString() + ">"; + } + return "FILE "; + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/Handler.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/Handler.java new file mode 100644 index 00000000000000..755fe0d35c1f7b --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/Handler.java @@ -0,0 +1,47 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/Handler.java +// and modified by Doris + +package org.apache.doris.hplsql; + +import org.apache.doris.hplsql.Signal.Type; + +/** + * HPL/SQL condition and exception handler + */ +public class Handler { + public enum ExecType { + CONTINUE, EXIT + } + + ExecType execType; + Type type; + String value; + Scope scope; + org.apache.doris.hplsql.HplsqlParser.Declare_handler_itemContext ctx; + + Handler(ExecType execType, Type type, String value, Scope scope, + org.apache.doris.hplsql.HplsqlParser.Declare_handler_itemContext ctx) { + this.execType = execType; + this.type = type; + this.value = value; + this.scope = scope; + this.ctx = ctx; + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/Hplsql.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/Hplsql.java new file mode 100644 index 00000000000000..9deb28a6b9757f --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/Hplsql.java @@ -0,0 +1,27 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/Hplsql.java +// and modified by Doris + +package org.apache.doris.hplsql; + +public class Hplsql { + public static void main(String[] args) throws Exception { + System.exit(new Exec().run(args)); + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/Interval.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/Interval.java new file mode 100644 index 00000000000000..7a8389bd4df731 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/Interval.java @@ -0,0 +1,111 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/Interval.java +// and modified by Doris + +package org.apache.doris.hplsql; + +import java.sql.Date; +import java.sql.Timestamp; +import java.util.Calendar; + +/** + * Date and time interval + */ +public class Interval { + int days = 0; + int milliseconds = 0; + + /** + * Add or subtract interval value to the specified date + */ + public Date dateChange(Date in, boolean add) { + Calendar c = Calendar.getInstance(); + c.setTimeInMillis(in.getTime()); + calendarChange(c, add); + return new Date(c.getTimeInMillis()); + } + + /** + * Add or subtract interval value to the specified timestamp + */ + public Timestamp timestampChange(Timestamp in, boolean add) { + Calendar c = Calendar.getInstance(); + c.setTimeInMillis(in.getTime()); + calendarChange(c, add); + return new Timestamp(c.getTimeInMillis()); + } + + /** + * Add interval value to the specified Calendar value + */ + public Calendar calendarChange(Calendar c, boolean add) { + int a = 1; + if (!add) { + a = -1; + } + if (days != 0) { + c.add(Calendar.DAY_OF_MONTH, days * a); + } + if (milliseconds != 0) { + c.setTimeInMillis(c.getTimeInMillis() + milliseconds * a); + } + return c; + } + + /** + * Set interval value + */ + public Interval set(int value, String item) { + if (item.compareToIgnoreCase("DAYS") == 0 || item.compareToIgnoreCase("DAY") == 0) { + setDays(value); + } + if (item.compareToIgnoreCase("MICROSECONDS") == 0 || item.compareToIgnoreCase("MICROSECOND") == 0) { + setMilliseconds(value); + } + return this; + } + + /** + * Set interval items + */ + public void setDays(int days) { + this.days = days; + } + + public void setMilliseconds(int milliseconds) { + this.milliseconds = milliseconds; + } + + /** + * Convert interval to string + */ + @Override + public String toString() { + StringBuilder s = new StringBuilder(); + if (days != 0) { + s.append(days); + s.append(" days"); + } + if 
(milliseconds != 0) { + s.append(milliseconds); + s.append(" milliseconds"); + } + return s.toString(); + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/Meta.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/Meta.java new file mode 100644 index 00000000000000..546885eea431e5 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/Meta.java @@ -0,0 +1,314 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/Meta.java +// and modified by Doris + +package org.apache.doris.hplsql; + +import org.apache.doris.hplsql.executor.Metadata; +import org.apache.doris.hplsql.executor.QueryExecutor; +import org.apache.doris.hplsql.executor.QueryResult; + +import org.antlr.v4.runtime.ParserRuleContext; + +import java.util.ArrayList; +import java.util.HashMap; + +/** + * Metadata + */ +public class Meta { + + HashMap> dataTypes = new HashMap>(); + + Exec exec; + boolean trace = false; + boolean info = false; + private QueryExecutor queryExecutor; + + Meta(Exec e, QueryExecutor queryExecutor) { + exec = e; + trace = exec.getTrace(); + info = exec.getInfo(); + this.queryExecutor = queryExecutor; + } + + /** + * Get the data type of column (column name is qualified i.e. schema.table.column) + */ + String getDataType(ParserRuleContext ctx, String conn, String column) { + String type = null; + HashMap map = dataTypes.get(conn); + if (map == null) { + map = new HashMap(); + dataTypes.put(conn, map); + } + ArrayList twoparts = splitIdentifierToTwoParts(column); + if (twoparts != null) { + String tab = twoparts.get(0); + String col = twoparts.get(1).toUpperCase(); + Row row = map.get(tab); + if (row != null) { + type = row.getType(col); + } else { + row = readColumns(ctx, conn, tab, map); + if (row != null) { + type = row.getType(col); + } + } + } + return type; + } + + /** + * Get data types for all columns of the table + */ + Row getRowDataType(ParserRuleContext ctx, String conn, String table) { + HashMap map = dataTypes.get(conn); + if (map == null) { + map = new HashMap(); + dataTypes.put(conn, map); + } + Row row = map.get(table); + if (row == null) { + row = readColumns(ctx, conn, table, map); + } + return row; + } + + /** + * Get data types for all columns of the SELECT statement + */ + Row getRowDataTypeForSelect(ParserRuleContext ctx, String conn, String select) { + 
Row row = null; + Conn.Type connType = exec.getConnectionType(conn); + // Hive does not support ResultSetMetaData on PreparedStatement, and Hive DESCRIBE + // does not support queries, so we have to execute the query with LIMIT 1 + if (connType == Conn.Type.HIVE) { + String sql = "SELECT * FROM (" + select + ") t LIMIT 1"; + QueryResult query = queryExecutor.executeQuery(sql, ctx); + if (!query.error()) { + try { + int cols = query.columnCount(); + row = new Row(); + for (int i = 0; i < cols; i++) { + String name = query.metadata().columnName(i); + if (name.startsWith("t.")) { + name = name.substring(2); + } + row.addColumnDefinition(name, query.metadata().columnTypeName(i)); + } + } catch (Exception e) { + exec.signal(e); + } + } else { + exec.signal(query.exception()); + } + query.close(); + } else { + QueryResult query = queryExecutor.executeQuery(select, ctx); + if (!query.error()) { + try { + Metadata rm = query.metadata(); + int cols = rm.columnCount(); + for (int i = 1; i <= cols; i++) { + String col = rm.columnName(i); + String typ = rm.columnTypeName(i); + if (row == null) { + row = new Row(); + } + row.addColumnDefinition(col.toUpperCase(), typ); + } + } catch (Exception e) { + exec.signal(e); + } + } + query.close(); + } + return row; + } + + /** + * Read the column data from the database and cache it + */ + Row readColumns(ParserRuleContext ctx, String conn, String table, HashMap map) { + Row row = null; + Conn.Type connType = exec.getConnectionType(conn); + if (connType == Conn.Type.HIVE) { + String sql = "DESCRIBE " + table; + QueryResult query = queryExecutor.executeQuery(sql, ctx); + if (!query.error()) { + try { + while (query.next()) { + String col = query.column(0, String.class); + String typ = query.column(1, String.class); + if (row == null) { + row = new Row(); + } + // Hive DESCRIBE outputs "empty_string NULL" row before partition information + if (typ == null) { + break; + } + row.addColumnDefinition(col.toUpperCase(), typ); + } + 
map.put(table, row); + } catch (Exception e) { + exec.signal(e); + } + } else { + exec.signal(query.exception()); + } + query.close(); + } else { + QueryResult query = queryExecutor.executeQuery("SELECT * FROM " + table, ctx); + if (!query.error()) { + try { + Metadata rm = query.metadata(); + int cols = query.columnCount(); + for (int i = 1; i <= cols; i++) { + String col = rm.columnName(i); + String typ = rm.columnTypeName(i); + if (row == null) { + row = new Row(); + } + row.addColumnDefinition(col.toUpperCase(), typ); + } + map.put(table, row); + } catch (Exception ignored) { + // ignored + } + } + query.close(); + } + return row; + } + + /** + * Normalize identifier for a database object (convert "" [] to `` i.e.) + */ + public String normalizeObjectIdentifier(String name) { + ArrayList parts = splitIdentifier(name); + if (parts != null) { // more then one part exist + StringBuilder norm = new StringBuilder(); + int size = parts.size(); + boolean appended = false; + for (int i = 0; i < size; i++) { + if (i == size - 2) { // schema name + String schema = getTargetSchemaName(parts.get(i)); + if (schema != null) { + norm.append(schema); + appended = true; + } + } else { + norm.append(normalizeIdentifierPart(parts.get(i))); + appended = true; + } + if (i + 1 < parts.size() && appended) { + norm.append("."); + } + } + return norm.toString(); + } + return normalizeIdentifierPart(name); + } + + /** + * Get the schema name to be used in the final executed SQL + */ + String getTargetSchemaName(String name) { + if (name.equalsIgnoreCase("dbo") || name.equalsIgnoreCase("[dbo]")) { + return null; + } + return normalizeIdentifierPart(name); + } + + /** + * Normalize identifier (single part) - convert "" [] to `` i.e. 
+ */ + public String normalizeIdentifierPart(String name) { + char start = name.charAt(0); + char end = name.charAt(name.length() - 1); + if ((start == '[' && end == ']') || (start == '"' && end == '"')) { + return '`' + name.substring(1, name.length() - 1) + '`'; + } + return name; + } + + /** + * Split qualified object to 2 parts: schema.tab.col -> schema.tab|col; tab.col -> tab|col + */ + public ArrayList splitIdentifierToTwoParts(String name) { + ArrayList parts = splitIdentifier(name); + ArrayList twoparts = null; + if (parts != null) { + StringBuilder id = new StringBuilder(); + int i = 0; + for (; i < parts.size() - 1; i++) { + id.append(parts.get(i)); + if (i + 1 < parts.size() - 1) { + id.append("."); + } + } + twoparts = new ArrayList(); + twoparts.add(id.toString()); + id.setLength(0); + id.append(parts.get(i)); + twoparts.add(id.toString()); + } + return twoparts; + } + + /** + * Split identifier to parts (schema, table, colum name etc.) + * + * @return null if identifier contains single part + */ + public ArrayList splitIdentifier(String name) { + ArrayList parts = null; + int start = 0; + for (int i = 0; i < name.length(); i++) { + char c = name.charAt(i); + char del = '\0'; + if (c == '`' || c == '"') { + del = c; + } else if (c == '[') { + del = ']'; + } + if (del != '\0') { + for (int j = i + 1; i < name.length(); j++) { + i++; + if (name.charAt(j) == del) { + break; + } + } + continue; + } + if (c == '.') { + if (parts == null) { + parts = new ArrayList(); + } + parts.add(name.substring(start, i)); + start = i + 1; + } + } + if (parts != null) { + parts.add(name.substring(start)); + } + return parts; + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/Package.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/Package.java new file mode 100644 index 00000000000000..4a4d3099d66f89 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/Package.java @@ -0,0 +1,191 @@ +// Licensed to the Apache Software Foundation 
(ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/Package.java +// and modified by Doris + +package org.apache.doris.hplsql; + +import org.apache.doris.hplsql.HplsqlParser.Create_function_stmtContext; +import org.apache.doris.hplsql.HplsqlParser.Create_procedure_stmtContext; +import org.apache.doris.hplsql.HplsqlParser.Package_body_itemContext; +import org.apache.doris.hplsql.HplsqlParser.Package_spec_itemContext; +import org.apache.doris.hplsql.functions.BuiltinFunctions; +import org.apache.doris.hplsql.functions.InMemoryFunctionRegistry; + +import org.antlr.v4.runtime.ParserRuleContext; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * Program package + */ +public class Package { + + private String name; + private List vars = new ArrayList<>(); + private List publicFuncs = new ArrayList<>(); + private List publicProcs = new ArrayList<>(); + + HashMap func = new HashMap<>(); + HashMap proc = new HashMap<>(); + + boolean allMembersPublic = false; + + Exec exec; + InMemoryFunctionRegistry function; + boolean trace = false; + + Package(String name, Exec exec, BuiltinFunctions 
builtinFunctions) { + this.name = name; + this.exec = exec; + this.function = new InMemoryFunctionRegistry(exec, builtinFunctions); + this.trace = exec.getTrace(); + } + + /** + * Add a local variable + */ + public void addVariable(Var var) { + vars.add(var); + } + + /** + * Find the variable by name + */ + public Var findVariable(String name) { + for (Var var : vars) { + if (name.equalsIgnoreCase(var.getName())) { + return var; + } + } + return null; + } + + /** + * Create the package specification + */ + public void createSpecification(org.apache.doris.hplsql.HplsqlParser.Create_package_stmtContext ctx) { + int cnt = ctx.package_spec().package_spec_item().size(); + for (int i = 0; i < cnt; i++) { + Package_spec_itemContext c = ctx.package_spec().package_spec_item(i); + if (c.declare_stmt_item() != null) { + visit(c); + } else if (c.T_FUNCTION() != null) { + publicFuncs.add(c.ident().getText().toUpperCase()); + } else if (c.T_PROC() != null || c.T_PROCEDURE() != null) { + publicProcs.add(c.ident().getText().toUpperCase()); + } + } + } + + /** + * Create the package body + */ + public void createBody(org.apache.doris.hplsql.HplsqlParser.Create_package_body_stmtContext ctx) { + int cnt = ctx.package_body().package_body_item().size(); + for (int i = 0; i < cnt; i++) { + Package_body_itemContext c = ctx.package_body().package_body_item(i); + if (c.declare_stmt_item() != null) { + visit(c); + } else if (c.create_function_stmt() != null) { + func.put(c.create_function_stmt().ident().getText().toUpperCase(), c.create_function_stmt()); + } else if (c.create_procedure_stmt() != null) { + proc.put(c.create_procedure_stmt().ident(0).getText().toUpperCase(), c.create_procedure_stmt()); + } + } + } + + /** + * Execute function + */ + public boolean execFunc(String name, org.apache.doris.hplsql.HplsqlParser.Expr_func_paramsContext ctx) { + Create_function_stmtContext f = func.get(name.toUpperCase()); + if (f == null) { + return execProc(name, ctx, false /*trace error if not 
exists*/); + } + if (trace) { + trace(ctx, "EXEC PACKAGE FUNCTION " + this.name + "." + name); + } + ArrayList actualParams = function.getActualCallParameters(ctx); + exec.enterScope(Scope.Type.ROUTINE, this); + InMemoryFunctionRegistry.setCallParameters(name, ctx, actualParams, f.create_routine_params(), null, exec); + visit(f.single_block_stmt()); + exec.leaveScope(); + return true; + } + + /** + * Execute procedure + */ + public boolean execProc(String name, org.apache.doris.hplsql.HplsqlParser.Expr_func_paramsContext ctx, + boolean traceNotExists) { + Create_procedure_stmtContext p = proc.get(name.toUpperCase()); + if (p == null) { + if (trace && traceNotExists) { + trace(ctx, "Package procedure not found: " + this.name + "." + name); + } + return false; + } + if (trace) { + trace(ctx, "EXEC PACKAGE PROCEDURE " + this.name + "." + name); + } + ArrayList actualParams = function.getActualCallParameters(ctx); + HashMap out = new HashMap(); + exec.enterScope(Scope.Type.ROUTINE, this); + exec.callStackPush(name); + if (p.declare_block_inplace() != null) { + visit(p.declare_block_inplace()); + } + if (p.create_routine_params() != null) { + InMemoryFunctionRegistry.setCallParameters(name, ctx, actualParams, p.create_routine_params(), out, exec); + } + visit(p.proc_block()); + exec.callStackPop(); + exec.leaveScope(); + for (Map.Entry i : out.entrySet()) { // Set OUT parameters + exec.setVariable(i.getKey(), i.getValue()); + } + return true; + } + + /** + * Set whether all members are public (when package specification is missed) or not + */ + void setAllMembersPublic(boolean value) { + allMembersPublic = value; + } + + /** + * Execute rules + */ + Integer visit(ParserRuleContext ctx) { + return exec.visit(ctx); + } + + /** + * Trace information + */ + public void trace(ParserRuleContext ctx, String message) { + if (trace) { + exec.trace(ctx, message); + } + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/Query.java 
b/fe/fe-core/src/main/java/org/apache/doris/hplsql/Query.java new file mode 100644 index 00000000000000..3d3a6e6b51be5e --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/Query.java @@ -0,0 +1,144 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/Query.java +// and modified by Doris + +package org.apache.doris.hplsql; + +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; + +public class Query { + String sql; + Connection conn; + Statement stmt; + PreparedStatement pstmt; + ResultSet rs; + Exception exception; + + Query() { + } + + public Query(String sql) { + this.sql = sql; + } + + /** + * Set query objects + */ + public void set(Connection conn, Statement stmt, ResultSet rs) { + this.conn = conn; + this.stmt = stmt; + this.rs = rs; + } + + public void set(Connection conn, PreparedStatement pstmt) { + this.conn = conn; + this.pstmt = pstmt; + } + + /** + * Close statement results + */ + public void closeStatement() { + try { + if (rs != null) { + rs.close(); + rs = null; + } + if (stmt != null) { + stmt.close(); + stmt = null; + } + if (pstmt != null) { + pstmt.close(); + pstmt = null; + } + } catch (SQLException e) { + e.printStackTrace(); + } + } + + /** + * Set SQL statement + */ + public void setSql(String sql) { + this.sql = sql; + } + + /** + * Set an execution error + */ + public void setError(Exception e) { + exception = e; + } + + /** + * Print error stack trace + */ + public void printStackTrace() { + if (exception != null) { + exception.printStackTrace(); + } + } + + /** + * Get the result set object + */ + public ResultSet getResultSet() { + return rs; + } + + /** + * Get the prepared statement object + */ + public PreparedStatement getPreparedStatement() { + return pstmt; + } + + /** + * Get the connection object + */ + public Connection getConnection() { + return conn; + } + + /** + * Return error information + */ + public boolean error() { + return exception != null; + } + + public String errorText() { + if (exception != null) { + if (exception instanceof 
ClassNotFoundException) { + return "ClassNotFoundException: " + exception.getMessage(); + } + return exception.getMessage(); + } + return ""; + } + + public Exception getException() { + return exception; + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/Row.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/Row.java new file mode 100644 index 00000000000000..1c8f54ed34c122 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/Row.java @@ -0,0 +1,104 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/Row.java +// and modified by Doris + +package org.apache.doris.hplsql; + +import java.util.List; +import java.util.stream.Collectors; + +/** + * Table row (all columns) + */ +public class Row { + private final org.apache.doris.hplsql.ColumnMap colMap + = new org.apache.doris.hplsql.ColumnMap(); + + public Row() { + } + + Row(Row row) { + for (Column c : row.colMap.columns()) { + addColumnDefinition(c.getName(), c.getType()); + } + } + + /** + * Add a column with specified data type + */ + public void addColumnDefinition(String name, String type) { + colMap.add(new Column(name, type, null)); + } + + public void addColumn(String name, String type, Var value) { + Column column = new Column(name, type, value); + colMap.add(column); + } + + /** + * Get the data type by column name + */ + public String getType(String name) { + Column column = colMap.get(name); + return column != null ? column.getType() : null; + } + + /** + * Get value by index + */ + public Var getValue(int i) { + return colMap.at(i).getValue(); + } + + /** + * Get value by column name + */ + Var getValue(String name) { + Column column = colMap.get(name); + return column != null ? 
column.getValue() : null; + } + + /** + * Get columns + */ + List getColumns() { + return colMap.columns(); + } + + /** + * Get column by index + */ + public Column getColumn(int i) { + return colMap.at(i); + } + + /** + * Get the number of columns + */ + int size() { + return colMap.size(); + } + + public List columnDefinitions() { + return getColumns().stream().map(Column::definition).collect(Collectors.toList()); + } +} + + + diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/Scope.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/Scope.java new file mode 100644 index 00000000000000..51c75f8bb14dab --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/Scope.java @@ -0,0 +1,80 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/Scope.java +// and modified by Doris + +package org.apache.doris.hplsql; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Map; + +/** + * HPL/SQL block scope + */ +public class Scope { + + public enum Type { + GLOBAL, BEGIN_END, LOOP, HANDLER, PACKAGE, ROUTINE + } + + Map vars = new HashMap<>(); + ArrayList handlers = new ArrayList(); + Scope parent; + Type type; + Package pack; + + Scope(Type type) { + this.parent = null; + this.type = type; + this.pack = null; + } + + Scope(Scope parent, Type type) { + this.parent = parent; + this.type = type; + this.pack = null; + } + + Scope(Scope parent, Type type, Package pack) { + this.parent = parent; + this.type = type; + this.pack = pack; + } + + /** + * Add a local variable + */ + void addVariable(Var var) { + vars.put(var.name.toUpperCase(), var); + } + + /** + * Add a condition handler + */ + void addHandler(Handler handler) { + handlers.add(handler); + } + + /** + * Get the parent scope + */ + Scope getParent() { + return parent; + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/Select.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/Select.java new file mode 100644 index 00000000000000..11afee0dbbfa7c --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/Select.java @@ -0,0 +1,531 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/Select.java +// and modified by Doris + +package org.apache.doris.hplsql; + +import org.apache.doris.hplsql.exception.QueryException; +import org.apache.doris.hplsql.exception.TypeException; +import org.apache.doris.hplsql.exception.UndefinedIdentException; +import org.apache.doris.hplsql.executor.HplsqlResult; +import org.apache.doris.hplsql.executor.QueryExecutor; +import org.apache.doris.hplsql.executor.QueryResult; +import org.apache.doris.hplsql.executor.ResultListener; +import org.apache.doris.hplsql.objects.Table; + +import org.antlr.v4.runtime.ParserRuleContext; +import org.antlr.v4.runtime.Token; +import org.antlr.v4.runtime.misc.Interval; + +import java.util.List; +import java.util.Stack; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +public class Select { + Exec exec = null; + Stack stack = null; + Conf conf; + Console console; + ResultListener resultListener = ResultListener.NONE; + QueryExecutor queryExecutor; + + boolean trace = false; + + Select(Exec e, QueryExecutor queryExecutor) { + this.exec = e; + this.stack = exec.getStack(); + this.conf = exec.getConf(); + this.trace = exec.getTrace(); + this.console = exec.console; + this.queryExecutor = queryExecutor; + } + + public void setResultListener(ResultListener resultListener) { + this.resultListener = resultListener; + } + + /** + * Executing or building SELECT statement + */ + public Integer 
select(org.apache.doris.hplsql.HplsqlParser.Select_stmtContext ctx) { + if (ctx.parent instanceof org.apache.doris.hplsql.HplsqlParser.StmtContext) { + exec.stmtConnList.clear(); + trace(ctx, "SELECT"); + } + boolean oldBuildSql = exec.buildSql; + exec.buildSql = true; + StringBuilder sql = new StringBuilder(); + if (ctx.cte_select_stmt() != null) { + sql.append(evalPop(ctx.cte_select_stmt()).toString()); + sql.append("\n"); + } + sql.append(evalPop(ctx.fullselect_stmt()).toString()); + exec.buildSql = oldBuildSql; + // No need to execute at this stage + if (!(ctx.parent instanceof org.apache.doris.hplsql.HplsqlParser.StmtContext)) { + exec.stackPush(sql); + return 0; + } + if (trace) { + trace(ctx, sql.toString()); + } + if (exec.getOffline()) { + trace(ctx, "Not executed - offline mode set"); + return 0; + } + + QueryResult query = queryExecutor.executeQuery(sql.toString(), ctx); + + if (query.error()) { + exec.signal(query); + return 1; + } + trace(ctx, "SELECT completed successfully"); + exec.setSqlSuccess(); + try { + int intoCount = getIntoCount(ctx); + if (intoCount > 0) { + if (isBulkCollect(ctx)) { + trace(ctx, "SELECT BULK COLLECT INTO statement executed"); + long rowIndex = 1; + List
tables = exec.intoTables(ctx, intoVariableNames(ctx, intoCount)); + tables.forEach(Table::removeAll); + while (query.next()) { + for (int i = 0; i < intoCount; i++) { + Table table = tables.get(i); + table.populate(query, rowIndex, i); + } + rowIndex++; + } + } else { + trace(ctx, "SELECT INTO statement executed"); + if (query.next()) { + for (int i = 0; i < intoCount; i++) { + populateVariable(ctx, query, i); + } + exec.incRowCount(); + exec.setSqlSuccess(); + if (query.next()) { + exec.setSqlCode(SqlCodes.TOO_MANY_ROWS); + exec.signal(Signal.Type.TOO_MANY_ROWS); + } + } else { + exec.setSqlCode(SqlCodes.NO_DATA_FOUND); + exec.signal(Signal.Type.NOTFOUND); + } + } + } else if (ctx.parent instanceof org.apache.doris.hplsql.HplsqlParser.StmtContext) { + // Print all results for standalone SELECT statement + resultListener.onMetadata(query.metadata()); + int cols = query.columnCount(); + if (trace) { + trace(ctx, "Standalone SELECT executed: " + cols + " columns in the result set"); + } + while (query.next()) { + if (resultListener instanceof HplsqlResult) { + resultListener.onMysqlRow(query.mysqlRow()); + } else { + Object[] row = new Object[cols]; + for (int i = 0; i < cols; i++) { + row[i] = query.column(i, Object.class); + if (i > 0) { + console.print("\t"); + } + console.print(String.valueOf(row[i])); + } + console.printLine(""); + exec.incRowCount(); + + resultListener.onRow(row); + } + } + resultListener.onEof(); + } else { // Scalar subquery + trace(ctx, "Scalar subquery executed, first row and first column fetched only"); + if (query.next()) { + exec.stackPush(new Var().setValue(query, 1)); + exec.setSqlSuccess(); + } else { + evalNull(); + exec.setSqlCode(SqlCodes.NO_DATA_FOUND); + } + } + } catch (QueryException e) { + exec.signal(query); + query.close(); + return 1; + } + query.close(); + return 0; + } + + private void populateVariable(org.apache.doris.hplsql.HplsqlParser.Select_stmtContext ctx, QueryResult query, + int columnIndex) { + String intoName = 
getIntoVariable(ctx, columnIndex); + Var var = exec.findVariable(intoName); + if (var != null) { + if (var.type == Var.Type.HPL_OBJECT && var.value instanceof Table) { + Table table = (Table) var.value; + table.populate(query, getIntoTableIndex(ctx, columnIndex), columnIndex); + } else if (var.type == Var.Type.ROW) { + var.setRowValues(query); + } else { + var.setValue(query, columnIndex); + } + exec.trace(ctx, var, query.metadata(), columnIndex); + } else { + throw new UndefinedIdentException(ctx, intoName); + } + } + + /** + * Common table expression (WITH clause) + */ + public Integer cte(org.apache.doris.hplsql.HplsqlParser.Cte_select_stmtContext ctx) { + int cnt = ctx.cte_select_stmt_item().size(); + StringBuilder sql = new StringBuilder(); + sql.append("WITH "); + for (int i = 0; i < cnt; i++) { + org.apache.doris.hplsql.HplsqlParser.Cte_select_stmt_itemContext c = ctx.cte_select_stmt_item(i); + sql.append(c.qident().getText()); + if (c.cte_select_cols() != null) { + sql.append(" ").append(exec.getFormattedText(c.cte_select_cols())); + } + sql.append(" AS ("); + sql.append(evalPop(ctx.cte_select_stmt_item(i).fullselect_stmt()).toString()); + sql.append(")"); + if (i + 1 != cnt) { + sql.append(",\n"); + } + } + exec.stackPush(sql); + return 0; + } + + /** + * Part of SELECT + */ + public Integer fullselect(org.apache.doris.hplsql.HplsqlParser.Fullselect_stmtContext ctx) { + int cnt = ctx.fullselect_stmt_item().size(); + StringBuilder sql = new StringBuilder(); + for (int i = 0; i < cnt; i++) { + String part = evalPop(ctx.fullselect_stmt_item(i)).toString(); + sql.append(part); + if (i + 1 != cnt) { + sql.append("\n").append(getText(ctx.fullselect_set_clause(i))).append("\n"); + } + } + exec.stackPush(sql); + return 0; + } + + public Integer subselect(org.apache.doris.hplsql.HplsqlParser.Subselect_stmtContext ctx) { + StringBuilder sql = new StringBuilder(); + sql.append(ctx.start.getText()); + exec.append(sql, evalPop(ctx.select_list()).toString(), ctx.start, 
ctx.select_list().getStart()); + Token last = ctx.select_list().stop; + if (ctx.into_clause() != null) { + last = ctx.into_clause().stop; + } + if (ctx.from_clause() != null) { + exec.append(sql, evalPop(ctx.from_clause()).toString(), last, ctx.from_clause().getStart()); + last = ctx.from_clause().stop; + } else if (conf.dualTable != null) { + sql.append(" FROM ").append(conf.dualTable); + } + if (ctx.where_clause() != null) { + exec.append(sql, evalPop(ctx.where_clause()).toString(), last, ctx.where_clause().getStart()); + last = ctx.where_clause().stop; + } + if (ctx.group_by_clause() != null) { + exec.append(sql, getText(ctx.group_by_clause()), last, ctx.group_by_clause().getStart()); + last = ctx.group_by_clause().stop; + } + if (ctx.having_clause() != null) { + exec.append(sql, getText(ctx.having_clause()), last, ctx.having_clause().getStart()); + last = ctx.having_clause().stop; + } + if (ctx.qualify_clause() != null) { + exec.append(sql, getText(ctx.qualify_clause()), last, ctx.qualify_clause().getStart()); + last = ctx.qualify_clause().stop; + } + if (ctx.order_by_clause() != null) { + exec.append(sql, getText(ctx.order_by_clause()), last, ctx.order_by_clause().getStart()); + last = ctx.order_by_clause().stop; + } + if (ctx.select_options() != null) { + Var opt = evalPop(ctx.select_options()); + if (!opt.isNull()) { + sql.append(" " + opt.toString()); + } + } + if (ctx.select_list().select_list_limit() != null) { + sql.append(" LIMIT " + evalPop(ctx.select_list().select_list_limit().expr())); + } + exec.stackPush(sql); + return 0; + } + + /** + * SELECT list + */ + public Integer selectList(org.apache.doris.hplsql.HplsqlParser.Select_listContext ctx) { + StringBuilder sql = new StringBuilder(); + if (ctx.select_list_set() != null) { + sql.append(exec.getText(ctx.select_list_set())).append(" "); + } + int cnt = ctx.select_list_item().size(); + for (int i = 0; i < cnt; i++) { + if (ctx.select_list_item(i).select_list_asterisk() == null) { + 
sql.append(evalPop(ctx.select_list_item(i).expr())); + if (ctx.select_list_item(i).select_list_alias() != null) { + sql.append(" " + exec.getText(ctx.select_list_item(i).select_list_alias())); + } + } else { + sql.append(exec.getText(ctx.select_list_item(i).select_list_asterisk())); + } + if (i + 1 < cnt) { + sql.append(", "); + } + } + exec.stackPush(sql); + return 0; + } + + /** + * FROM clause + */ + public Integer from(org.apache.doris.hplsql.HplsqlParser.From_clauseContext ctx) { + StringBuilder sql = new StringBuilder(); + sql.append(ctx.T_FROM().getText()).append(" "); + sql.append(evalPop(ctx.from_table_clause())); + int cnt = ctx.from_join_clause().size(); + for (int i = 0; i < cnt; i++) { + sql.append(evalPop(ctx.from_join_clause(i))); + } + exec.stackPush(sql); + return 0; + } + + /** + * Single table name in FROM + */ + public Integer fromTable(org.apache.doris.hplsql.HplsqlParser.From_table_name_clauseContext ctx) { + StringBuilder sql = new StringBuilder(); + sql.append(evalPop(ctx.table_name())); + if (ctx.from_alias_clause() != null) { + sql.append(" ").append(exec.getText(ctx.from_alias_clause())); + } + exec.stackPush(sql); + return 0; + } + + /** + * Subselect in FROM + */ + public Integer fromSubselect(org.apache.doris.hplsql.HplsqlParser.From_subselect_clauseContext ctx) { + StringBuilder sql = new StringBuilder(); + sql.append("("); + sql.append(evalPop(ctx.select_stmt()).toString()); + sql.append(")"); + if (ctx.from_alias_clause() != null) { + sql.append(" ").append(exec.getText(ctx.from_alias_clause())); + } + exec.stackPush(sql); + return 0; + } + + /** + * JOIN clause in FROM + */ + public Integer fromJoin(org.apache.doris.hplsql.HplsqlParser.From_join_clauseContext ctx) { + StringBuilder sql = new StringBuilder(); + if (ctx.T_COMMA() != null) { + sql.append(", "); + sql.append(evalPop(ctx.from_table_clause())); + } else if (ctx.from_join_type_clause() != null) { + sql.append(" "); + sql.append(exec.getText(ctx.from_join_type_clause())); 
+ sql.append(" "); + sql.append(evalPop(ctx.from_table_clause())); + sql.append(" "); + sql.append(exec.getText(ctx, ctx.T_ON().getSymbol(), ctx.bool_expr().getStop())); + } + exec.stackPush(sql); + return 0; + } + + /** + * FROM TABLE (VALUES ...) clause + */ + public Integer fromTableValues(org.apache.doris.hplsql.HplsqlParser.From_table_values_clauseContext ctx) { + StringBuilder sql = new StringBuilder(); + int rows = ctx.from_table_values_row().size(); + sql.append("("); + for (int i = 0; i < rows; i++) { + int cols = ctx.from_table_values_row(i).expr().size(); + int colsAs = ctx.from_alias_clause().L_ID().size(); + sql.append("SELECT "); + for (int j = 0; j < cols; j++) { + sql.append(evalPop(ctx.from_table_values_row(i).expr(j))); + if (j < colsAs) { + sql.append(" AS "); + sql.append(ctx.from_alias_clause().L_ID(j)); + } + if (j + 1 < cols) { + sql.append(", "); + } + } + if (conf.dualTable != null) { + sql.append(" FROM ").append(conf.dualTable); + } + if (i + 1 < rows) { + sql.append("\nUNION ALL\n"); + } + } + sql.append(") "); + if (ctx.from_alias_clause() != null) { + sql.append(ctx.from_alias_clause().qident().getText()); + } + exec.stackPush(sql); + return 0; + } + + /** + * WHERE clause + */ + public Integer where(org.apache.doris.hplsql.HplsqlParser.Where_clauseContext ctx) { + boolean oldBuildSql = exec.buildSql; + exec.buildSql = true; + StringBuilder sql = new StringBuilder(); + sql.append(ctx.T_WHERE().getText()); + sql.append(" ").append(evalPop(ctx.bool_expr())); + exec.stackPush(sql); + exec.buildSql = oldBuildSql; + return 0; + } + + /** + * Get INTO clause + */ + org.apache.doris.hplsql.HplsqlParser.Into_clauseContext getIntoClause( + org.apache.doris.hplsql.HplsqlParser.Select_stmtContext ctx) { + if (ctx.fullselect_stmt().fullselect_stmt_item(0).subselect_stmt() != null) { + return ctx.fullselect_stmt().fullselect_stmt_item(0).subselect_stmt().into_clause(); + } + return null; + } + + /** + * Get number of elements in INTO or var=col 
assignment clause + */ + int getIntoCount(org.apache.doris.hplsql.HplsqlParser.Select_stmtContext ctx) { + org.apache.doris.hplsql.HplsqlParser.Into_clauseContext into = getIntoClause(ctx); + if (into != null) { + return into.ident().size() + into.table_row().size(); + } + List sl = ctx.fullselect_stmt() + .fullselect_stmt_item(0).subselect_stmt() + .select_list().select_list_item(); + if (sl.get(0).T_EQUAL() != null) { + return sl.size(); + } + return 0; + } + + private boolean isBulkCollect(org.apache.doris.hplsql.HplsqlParser.Select_stmtContext ctx) { + org.apache.doris.hplsql.HplsqlParser.Into_clauseContext into = getIntoClause(ctx); + return into != null && into.bulk_collect_clause() != null; + } + + /** + * Get variable name assigned in INTO or var=col clause by index + */ + String getIntoVariable(org.apache.doris.hplsql.HplsqlParser.Select_stmtContext ctx, int idx) { + org.apache.doris.hplsql.HplsqlParser.Into_clauseContext into = getIntoClause(ctx); + if (into != null) { + return into.table_row(idx) != null ? 
into.table_row(idx).ident().getText() : into.ident(idx).getText(); + } + org.apache.doris.hplsql.HplsqlParser.Select_list_itemContext sl = ctx.fullselect_stmt().fullselect_stmt_item(0) + .subselect_stmt() + .select_list().select_list_item(idx); + if (sl != null) { + return sl.qident().getText(); + } + return null; + } + + private List intoVariableNames(org.apache.doris.hplsql.HplsqlParser.Select_stmtContext ctx, int count) { + return IntStream.range(0, count) + .mapToObj(i -> getIntoVariable(ctx, i)) + .collect(Collectors.toList()); + } + + private int getIntoTableIndex(org.apache.doris.hplsql.HplsqlParser.Select_stmtContext ctx, int idx) { + org.apache.doris.hplsql.HplsqlParser.Into_clauseContext into = getIntoClause(ctx); + org.apache.doris.hplsql.HplsqlParser.Table_rowContext row = into.table_row(idx); + if (row == null) { + throw new TypeException(ctx, "Missing into table index"); + } + return Integer.parseInt(row.L_INT().getText()); + } + + /** + * SELECT statement options - LIMIT n, WITH UR i.e + */ + public Integer option(org.apache.doris.hplsql.HplsqlParser.Select_options_itemContext ctx) { + if (ctx.T_LIMIT() != null) { + exec.stackPush("LIMIT " + evalPop(ctx.expr())); + } + return 0; + } + + /** + * Evaluate the expression to NULL + */ + void evalNull() { + exec.stackPush(Var.Null); + } + + /** + * Evaluate the expression and pop value from the stack + */ + Var evalPop(ParserRuleContext ctx) { + exec.visit(ctx); + if (!exec.stack.isEmpty()) { + return exec.stackPop(); + } + return Var.Empty; + } + + /** + * Get node text including spaces + */ + String getText(ParserRuleContext ctx) { + return ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex())); + } + + /** + * Trace information + */ + void trace(ParserRuleContext ctx, String message) { + exec.trace(ctx, message); + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/Signal.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/Signal.java 
new file mode 100644 index 00000000000000..4c13c3e4c3ebde --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/Signal.java @@ -0,0 +1,54 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/Signal.java +// and modified by Doris + +package org.apache.doris.hplsql; + +/** + * Signals and exceptions + */ +public class Signal { + public enum Type { + LEAVE_LOOP, LEAVE_ROUTINE, LEAVE_PROGRAM, SQLEXCEPTION, NOTFOUND, TOO_MANY_ROWS, UNSUPPORTED_OPERATION, + USERDEFINED, VALIDATION + } + + Type type; + String value = ""; + Exception exception = null; + + Signal(Type type, String value) { + this.type = type; + this.value = value; + this.exception = null; + } + + Signal(Type type, String value, Exception exception) { + this.type = type; + this.value = value; + this.exception = exception; + } + + /** + * Get the signal value (message text) + */ + public String getValue() { + return value; + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/SqlCodes.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/SqlCodes.java new file mode 100644 index 00000000000000..f6cdf8744b3917 --- 
/**
 * Common SQLCODE values used by the HPL/SQL interpreter.
 */
public class SqlCodes {
    // Declared final: these are protocol constants and must never be
    // reassigned at runtime (the originals were mutable public statics).
    public static final int NO_DATA_FOUND = 100;   // query/fetch returned no rows
    public static final int TOO_MANY_ROWS = -1422; // paired with Signal.Type.TOO_MANY_ROWS
    public static final int SUCCESS = 0;
    public static final int ERROR = -1;            // generic failure
}
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/Stmt.java +// and modified by Doris + +package org.apache.doris.hplsql; + +import org.apache.doris.hplsql.Var.Type; +import org.apache.doris.hplsql.exception.QueryException; +import org.apache.doris.hplsql.executor.Metadata; +import org.apache.doris.hplsql.executor.QueryExecutor; +import org.apache.doris.hplsql.executor.QueryResult; +import org.apache.doris.hplsql.objects.Table; + +import org.antlr.v4.runtime.ParserRuleContext; +import org.antlr.v4.runtime.Token; + +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; +import java.util.Stack; +import java.util.UUID; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +/** + * HPL/SQL statements execution + */ +public class Stmt { + Exec exec = null; + Stack stack = null; + Conf conf; + Meta meta; + Console console; + + boolean trace = false; + private QueryExecutor queryExecutor; + + Stmt(Exec e, QueryExecutor queryExecutor) { + exec = e; + stack = exec.getStack(); + conf = exec.getConf(); + meta = exec.getMeta(); + trace = exec.getTrace(); + console = exec.console; + this.queryExecutor = queryExecutor; + } + + /** + * ALLOCATE CURSOR statement + */ + public Integer allocateCursor(org.apache.doris.hplsql.HplsqlParser.Allocate_cursor_stmtContext ctx) { + trace(ctx, "ALLOCATE CURSOR"); + String name = ctx.ident(0).getText(); + Var cur = null; + if (ctx.T_PROCEDURE() != null) { + cur = 
exec.consumeReturnCursor(ctx.ident(1).getText()); + } else if (ctx.T_RESULT() != null) { + cur = exec.findVariable(ctx.ident(1).getText()); + if (cur != null && cur.type != Type.RS_LOCATOR) { + cur = null; + } + } + if (cur == null) { + trace(ctx, "Cursor for procedure not found: " + name); + exec.signal(Signal.Type.SQLEXCEPTION); + return -1; + } + exec.addVariable(new Var(name, Type.CURSOR, cur.value)); + return 0; + } + + /** + * ASSOCIATE LOCATOR statement + */ + public Integer associateLocator(org.apache.doris.hplsql.HplsqlParser.Associate_locator_stmtContext ctx) { + trace(ctx, "ASSOCIATE LOCATOR"); + int cnt = ctx.ident().size(); + if (cnt < 2) { + return -1; + } + String procedure = ctx.ident(cnt - 1).getText(); + for (int i = 0; i < cnt - 1; i++) { + Var cur = exec.consumeReturnCursor(procedure); + if (cur != null) { + String name = ctx.ident(i).getText(); + Var loc = exec.findVariable(name); + if (loc == null) { + loc = new Var(name, Type.RS_LOCATOR, cur.value); + exec.addVariable(loc); + } else { + loc.setValue(cur.value); + } + } + } + return 0; + } + + /** + * DECLARE cursor statement + */ + public Integer declareCursor(org.apache.doris.hplsql.HplsqlParser.Declare_cursor_itemContext ctx) { + String name = ctx.ident().getText(); + if (trace) { + trace(ctx, "DECLARE CURSOR " + name); + } + Cursor cursor = new Cursor(null); + if (ctx.expr() != null) { + cursor.setExprCtx(ctx.expr()); + } else if (ctx.select_stmt() != null) { + cursor.setSelectCtx(ctx.select_stmt()); + } + if (ctx.cursor_with_return() != null) { + cursor.setWithReturn(true); + } + Var var = new Var(name, Type.CURSOR, cursor); + exec.addVariable(var); + return 0; + } + + /** + * CREATE TABLE statement + */ + public Integer createTable(org.apache.doris.hplsql.HplsqlParser.Create_table_stmtContext ctx) { + trace(ctx, "CREATE TABLE"); + StringBuilder sql = new StringBuilder(); + exec.append(sql, ctx.T_CREATE(), ctx.T_TABLE()); + exec.append(sql, evalPop(ctx.table_name()).toString(), 
ctx.T_TABLE().getSymbol(), ctx.table_name().getStart()); + Token last = ctx.table_name().getStop(); + if (ctx.create_table_preoptions() != null) { + String preopt = evalPop(ctx.create_table_preoptions()).toString(); + if (preopt != null) { + sql.append(" " + preopt); + } + last = ctx.create_table_preoptions().stop; + } + sql.append(createTableDefinition(ctx.create_table_definition(), last)); + trace(ctx, sql.toString()); + QueryResult query = queryExecutor.executeQuery(sql.toString(), ctx); + if (query.error()) { + exec.signal(query); + return 1; + } + exec.setSqlSuccess(); + query.close(); + return 0; + } + + /** + * Get CREATE TABLE definition (columns or query) + */ + String createTableDefinition(org.apache.doris.hplsql.HplsqlParser.Create_table_definitionContext ctx, Token last) { + StringBuilder sql = new StringBuilder(); + org.apache.doris.hplsql.HplsqlParser.Create_table_columnsContext colCtx = ctx.create_table_columns(); + if (colCtx != null) { + int cnt = colCtx.create_table_columns_item().size(); + for (int i = 0; i < cnt; i++) { + org.apache.doris.hplsql.HplsqlParser.Create_table_columns_itemContext col + = colCtx.create_table_columns_item(i); + if (col.create_table_column_cons() != null) { + last = col.getStop(); + continue; + } + exec.append(sql, evalPop(col.column_name()).toString(), last, col.column_name().getStop()); + exec.append(sql, exec.evalPop(col.dtype(), col.dtype_len()), col.column_name().getStop(), + col.dtype().getStart()); + last = col.getStop(); + } + exec.append(sql, ctx.T_CLOSE_P().getText(), last, ctx.T_CLOSE_P().getSymbol()); + } else if (ctx.T_LIKE() != null) { + sql.append(" ").append(ctx.T_LIKE().getText()).append(" ").append(evalPop(ctx.table_name())); + } else { // CREATE TABLE AS SELECT statement + exec.append(sql, evalPop(ctx.select_stmt()).toString(), last, ctx.select_stmt().getStart()); + if (ctx.T_CLOSE_P() != null) { + exec.append(sql, ctx.T_CLOSE_P().getText(), ctx.select_stmt().stop, ctx.T_CLOSE_P().getSymbol()); + } + } 
+ org.apache.doris.hplsql.HplsqlParser.Create_table_optionsContext options = ctx.create_table_options(); + if (options != null) { + String opt = evalPop(options).toString(); + if (opt != null) { + sql.append(" " + opt); + } + } + return sql.toString(); + } + + /** + * CREATE TABLE options for Hive + */ + public Integer createTableHiveOptions( + org.apache.doris.hplsql.HplsqlParser.Create_table_options_hive_itemContext ctx) { + if (ctx.create_table_hive_row_format() != null) { + createTableHiveRowFormat(ctx.create_table_hive_row_format()); + } else if (ctx.T_STORED() != null) { + evalString(exec.getText(ctx)); + } + return 0; + } + + public Integer createTableHiveRowFormat( + org.apache.doris.hplsql.HplsqlParser.Create_table_hive_row_formatContext ctx) { + StringBuilder sql = new StringBuilder(); + sql.append("ROW FORMAT DELIMITED"); + int cnt = ctx.create_table_hive_row_format_fields().size(); + for (int i = 0; i < cnt; i++) { + org.apache.doris.hplsql.HplsqlParser.Create_table_hive_row_format_fieldsContext c + = ctx.create_table_hive_row_format_fields(i); + if (c.T_FIELDS() != null) { + sql.append(" FIELDS TERMINATED BY " + evalPop(c.expr(0)).toSqlString()); + } else if (c.T_LINES() != null) { + sql.append(" LINES TERMINATED BY " + evalPop(c.expr(0)).toSqlString()); + } + } + evalString(sql); + return 0; + } + + /** + * CREATE TABLE options for MySQL + */ + public Integer createTableMysqlOptions( + org.apache.doris.hplsql.HplsqlParser.Create_table_options_mysql_itemContext ctx) { + if (ctx.T_COMMENT() != null) { + evalString(ctx.T_COMMENT().getText() + " " + evalPop(ctx.expr()).toSqlString()); + } + return 0; + } + + /** + * DECLARE TEMPORARY TABLE statement + */ + public Integer declareTemporaryTable(org.apache.doris.hplsql.HplsqlParser.Declare_temporary_table_itemContext ctx) { + trace(ctx, "DECLARE TEMPORARY TABLE"); + return createTemporaryTable(ctx.qident(), ctx.create_table_definition(), ctx.create_table_preoptions()); + } + + /** + * CREATE DATABASE | 
SCHEMA statement + */ + public Integer createDatabase(org.apache.doris.hplsql.HplsqlParser.Create_database_stmtContext ctx) { + trace(ctx, "CREATE DATABASE"); + StringBuilder sql = new StringBuilder(); + sql.append(ctx.T_CREATE().getText() + " "); + if (ctx.T_DATABASE() != null) { + sql.append(ctx.T_DATABASE().getText() + " "); + } else { + sql.append(ctx.T_SCHEMA().getText() + " "); + } + if (ctx.T_IF() != null) { + sql.append(exec.getText(ctx, ctx.T_IF().getSymbol(), ctx.T_EXISTS().getSymbol()) + " "); + } + boolean oldBuildSql = exec.buildSql; + exec.buildSql = true; + sql.append(evalPop(ctx.expr()).toString()); + exec.buildSql = oldBuildSql; + int cnt = ctx.create_database_option().size(); + for (int i = 0; i < cnt; i++) { + org.apache.doris.hplsql.HplsqlParser.Create_database_optionContext option = ctx.create_database_option(i); + if (option.T_COMMENT() != null) { + sql.append(" " + option.T_COMMENT().getText() + " " + evalPop(option.expr()).toSqlString()); + } else if (option.T_LOCATION() != null) { + sql.append(" " + option.T_LOCATION().getText() + " " + evalPop(option.expr()).toSqlString()); + } + } + trace(ctx, sql.toString()); + QueryResult query = queryExecutor.executeQuery(sql.toString(), ctx); + if (query.error()) { + exec.signal(query); + return 1; + } + exec.setSqlSuccess(); + query.close(); + return 0; + } + + /** + * CREATE LOCAL TEMPORARY | VOLATILE TABLE statement + */ + public Integer createLocalTemporaryTable( + org.apache.doris.hplsql.HplsqlParser.Create_local_temp_table_stmtContext ctx) { + trace(ctx, "CREATE LOCAL TEMPORARY TABLE"); + return createTemporaryTable(ctx.qident(), ctx.create_table_definition(), ctx.create_table_preoptions()); + } + + /** + * Create a temporary table statement + */ + public Integer createTemporaryTable(org.apache.doris.hplsql.HplsqlParser.QidentContext identCtx, + org.apache.doris.hplsql.HplsqlParser.Create_table_definitionContext defCtx, + org.apache.doris.hplsql.HplsqlParser.Create_table_preoptionsContext 
optCtx) { + StringBuilder sql = new StringBuilder(); + String name = identCtx.getText(); + String managedName = null; + Token last = identCtx.getStop(); + if (optCtx != null) { + last = optCtx.stop; + } + if (conf.tempTables == Conf.TempTables.NATIVE) { + sql.append("CREATE TEMPORARY TABLE " + name); + sql.append(createTableDefinition(defCtx, last)); + } else if (conf.tempTables == Conf.TempTables.MANAGED) { + managedName = name + "_" + UUID.randomUUID().toString().replace("-", ""); + if (!conf.tempTablesSchema.isEmpty()) { + managedName = conf.tempTablesSchema + "." + managedName; + } + sql.append("CREATE TABLE " + managedName); + sql.append(createTableDefinition(defCtx, last)); + if (!conf.tempTablesLocation.isEmpty()) { + sql.append("\nLOCATION '" + conf.tempTablesLocation + "/" + managedName + "'"); + } + if (trace) { + trace(null, "Managed table name: " + managedName); + } + } + if (trace) { + trace(null, sql.toString()); + } + if (sql != null) { + QueryResult query = queryExecutor.executeQuery(sql.toString(), null); + if (query.error()) { + exec.signal(query); + return 1; + } + if (managedName != null) { + exec.addManagedTable(name, managedName); + } + exec.setSqlSuccess(); + query.close(); + } + return 0; + } + + /** + * DESCRIBE statement + */ + public Integer describe(org.apache.doris.hplsql.HplsqlParser.Describe_stmtContext ctx) { + trace(ctx, "DESCRIBE"); + String sql = "DESCRIBE " + evalPop(ctx.table_name()).toString(); + trace(ctx, sql); + QueryResult query = queryExecutor.executeQuery(sql, ctx); + if (query.error()) { + exec.signal(query); + return 1; + } + try { + while (query.next()) { + for (int i = 0; i < query.columnCount(); i++) { + if (i > 0) { + console.print("\t"); + } + console.print(query.column(i, String.class)); + } + console.printLine(""); + } + } catch (QueryException e) { + exec.signal(query); + query.close(); + return 1; + } + exec.setSqlSuccess(); + query.close(); + return 0; + } + + /** + * DROP statement + */ + public Integer 
drop(org.apache.doris.hplsql.HplsqlParser.Drop_stmtContext ctx) { + trace(ctx, "DROP"); + String sql = null; + if (ctx.T_TABLE() != null) { + sql = "DROP TABLE "; + if (ctx.T_EXISTS() != null) { + sql += "IF EXISTS "; + } + sql += evalPop(ctx.table_name()).toString(); + } else if (ctx.T_PACKAGE() != null) { + exec.dropPackage(ctx, ctx.ident().getText().toUpperCase(), ctx.T_EXISTS() != null); + } else if (ctx.T_PROCEDURE() != null || ctx.T_FUNCTION() != null) { + exec.dropProcedure(ctx, ctx.ident().getText().toUpperCase(), ctx.T_EXISTS() != null); + } else if (ctx.T_DATABASE() != null || ctx.T_SCHEMA() != null) { + sql = "DROP DATABASE "; + if (ctx.T_EXISTS() != null) { + sql += "IF EXISTS "; + } + sql += evalPop(ctx.expr()).toString(); + } + if (sql != null) { + trace(ctx, sql); + QueryResult query = queryExecutor.executeQuery(sql, ctx); + if (query.error()) { + exec.signal(query); + return 1; + } + exec.setSqlSuccess(); + query.close(); + } + return 0; + } + + /** + * TRUNCATE statement + */ + public Integer truncate(org.apache.doris.hplsql.HplsqlParser.Truncate_stmtContext ctx) { + trace(ctx, "TRUNCATE"); + String sql = "TRUNCATE TABLE " + evalPop(ctx.table_name()).toString(); + trace(ctx, sql); + QueryResult query = queryExecutor.executeQuery(sql, ctx); + if (query.error()) { + exec.signal(query); + return 1; + } + exec.setSqlSuccess(); + query.close(); + return 0; + } + + /** + * OPEN cursor statement + */ + public Integer open(org.apache.doris.hplsql.HplsqlParser.Open_stmtContext ctx) { + trace(ctx, "OPEN"); + Cursor cursor = null; + Var var = null; + String cursorName = ctx.ident().getText(); + String sql = null; + if (ctx.T_FOR() != null) { // SELECT statement or dynamic SQL + sql = ctx.expr() != null ? 
evalPop(ctx.expr()).toString() : evalPop(ctx.select_stmt()).toString(); + cursor = new Cursor(sql); + var = exec.findCursor(cursorName); // Can be a ref cursor variable + if (var == null) { + var = new Var(cursorName, Type.CURSOR, cursor); + exec.addVariable(var); + } else { + var.setValue(cursor); + } + } else { // Declared cursor + var = exec.findVariable(cursorName); + if (var != null && var.type == Type.CURSOR) { + cursor = (Cursor) var.value; + if (cursor.getSqlExpr() != null) { + sql = evalPop(cursor.getSqlExpr()).toString(); + cursor.setSql(sql); + } else if (cursor.getSqlSelect() != null) { + sql = evalPop(cursor.getSqlSelect()).toString(); + cursor.setSql(sql); + } + } + } + if (cursor != null) { + if (trace) { + trace(ctx, cursorName + ": " + sql); + } + cursor.open(queryExecutor, ctx); + QueryResult queryResult = cursor.getQueryResult(); + if (queryResult.error()) { + exec.signal(queryResult); + return 1; + } else if (!exec.getOffline()) { + exec.setSqlCode(SqlCodes.SUCCESS); + } + if (cursor.isWithReturn()) { + exec.addReturnCursor(var); + } + } else { + trace(ctx, "Cursor not found: " + cursorName); + exec.setSqlCode(SqlCodes.ERROR); + exec.signal(Signal.Type.SQLEXCEPTION); + return 1; + } + return 0; + } + + /** + * FETCH cursor statement + */ + public Integer fetch(org.apache.doris.hplsql.HplsqlParser.Fetch_stmtContext ctx) { + trace(ctx, "FETCH"); + String name = ctx.ident(0).getText(); + Var varCursor = exec.findCursor(name); + if (varCursor == null) { + trace(ctx, "Cursor not found: " + name); + exec.setSqlCode(SqlCodes.ERROR); + exec.signal(Signal.Type.SQLEXCEPTION); + return 1; + } else if (varCursor.value == null) { + trace(ctx, "Cursor not open: " + name); + exec.setSqlCode(SqlCodes.ERROR); + exec.signal(Signal.Type.SQLEXCEPTION); + return 1; + } else if (exec.getOffline()) { + exec.setSqlCode(SqlCodes.NO_DATA_FOUND); + exec.signal(Signal.Type.NOTFOUND); + return 0; + } + // Assign values from the row to local variables + try { + Cursor cursor 
= (Cursor) varCursor.value; + int cols = ctx.ident().size() - 1; + QueryResult queryResult = cursor.getQueryResult(); + + if (ctx.bulk_collect_clause() != null) { + long limit = ctx.fetch_limit() != null ? evalPop(ctx.fetch_limit().expr()).longValue() : -1; + long rowIndex = 1; + List
tables = exec.intoTables(ctx, intoVariableNames(ctx, cols)); + tables.forEach(Table::removeAll); + while (queryResult.next()) { + cursor.setFetch(true); + for (int i = 0; i < cols; i++) { + Table table = tables.get(i); + table.populate(queryResult, rowIndex, i); + } + rowIndex++; + if (limit != -1 && rowIndex - 1 >= limit) { + break; + } + } + } else { + if (queryResult.next()) { + cursor.setFetch(true); + for (int i = 0; i < cols; i++) { + Var var = exec.findVariable(ctx.ident(i + 1).getText()); + if (var != null) { + if (var.type != Var.Type.ROW) { + var.setValue(queryResult, i); + } else { + var.setRowValues(queryResult); + } + if (trace) { + trace(ctx, var, queryResult.metadata(), i); + } + } else if (trace) { + trace(ctx, "Variable not found: " + ctx.ident(i + 1).getText()); + } + } + exec.incRowCount(); + exec.setSqlSuccess(); + } else { + cursor.setFetch(false); + exec.setSqlCode(SqlCodes.NO_DATA_FOUND); + } + } + } catch (QueryException e) { + exec.setSqlCode(e); + exec.signal(Signal.Type.SQLEXCEPTION, e.getMessage(), e); + } + return 0; + } + + private List intoVariableNames(org.apache.doris.hplsql.HplsqlParser.Fetch_stmtContext ctx, int count) { + return IntStream.range(0, count).mapToObj(i -> ctx.ident(i + 1).getText()).collect(Collectors.toList()); + } + + + /** + * CLOSE cursor statement + */ + public Integer close(org.apache.doris.hplsql.HplsqlParser.Close_stmtContext ctx) { + trace(ctx, "CLOSE"); + String name = ctx.L_ID().toString(); + Var var = exec.findVariable(name); + if (var != null && var.type == Type.CURSOR) { + ((Cursor) var.value).close(); + exec.setSqlCode(SqlCodes.SUCCESS); + } else if (trace) { + trace(ctx, "Cursor not found: " + name); + } + return 0; + } + + /** + * INCLUDE statement + */ + public Integer include(org.apache.doris.hplsql.HplsqlParser.Include_stmtContext ctx) { + String file; + if (ctx.file_name() != null) { + file = ctx.file_name().getText(); + } else { + file = evalPop(ctx.expr()).toString(); + } + trace(ctx, "INCLUDE 
" + file); + exec.includeFile(file, true); + return 0; + } + + /** + * IF statement (PL/SQL syntax) + */ + public Integer ifPlsql(org.apache.doris.hplsql.HplsqlParser.If_plsql_stmtContext ctx) { + boolean trueExecuted = false; + trace(ctx, "IF"); + if (evalPop(ctx.bool_expr()).isTrue()) { + trace(ctx, "IF TRUE executed"); + visit(ctx.block()); + trueExecuted = true; + } else if (ctx.elseif_block() != null) { + int cnt = ctx.elseif_block().size(); + for (int i = 0; i < cnt; i++) { + if (evalPop(ctx.elseif_block(i).bool_expr()).isTrue()) { + trace(ctx, "ELSE IF executed"); + visit(ctx.elseif_block(i).block()); + trueExecuted = true; + break; + } + } + } + if (!trueExecuted && ctx.else_block() != null) { + trace(ctx, "ELSE executed"); + visit(ctx.else_block()); + } + return 0; + } + + /** + * IF statement (Transact-SQL syntax) + */ + public Integer ifTsql(org.apache.doris.hplsql.HplsqlParser.If_tsql_stmtContext ctx) { + trace(ctx, "IF"); + visit(ctx.bool_expr()); + if (exec.stackPop().isTrue()) { + trace(ctx, "IF TRUE executed"); + visit(ctx.single_block_stmt(0)); + } else if (ctx.T_ELSE() != null) { + trace(ctx, "ELSE executed"); + visit(ctx.single_block_stmt(1)); + } + return 0; + } + + /** + * IF statement (BTEQ syntax) + */ + public Integer ifBteq(org.apache.doris.hplsql.HplsqlParser.If_bteq_stmtContext ctx) { + trace(ctx, "IF"); + visit(ctx.bool_expr()); + if (exec.stackPop().isTrue()) { + trace(ctx, "IF TRUE executed"); + visit(ctx.single_block_stmt()); + } + return 0; + } + + /** + * Assignment from SELECT statement + */ + public Integer assignFromSelect(org.apache.doris.hplsql.HplsqlParser.Assignment_stmt_select_itemContext ctx) { + String sql = evalPop(ctx.select_stmt()).toString(); + if (trace) { + trace(ctx, sql); + } + QueryResult query = queryExecutor.executeQuery(sql, ctx); + if (query.error()) { + exec.signal(query); + return 1; + } + exec.setSqlSuccess(); + try { + int cnt = ctx.ident().size(); + if (query.next()) { + for (int i = 0; i < cnt; i++) { + 
Var var = exec.findVariable(ctx.ident(i).getText()); + if (var != null) { + var.setValue(query, i); + if (trace) { + trace(ctx, "COLUMN: " + query.metadata().columnName(i) + ", " + query.metadata() + .columnTypeName(i)); + trace(ctx, "SET " + var.getName() + " = " + var.toString()); + } + } else if (trace) { + trace(ctx, "Variable not found: " + ctx.ident(i).getText()); + } + } + exec.incRowCount(); + exec.setSqlSuccess(); + } else { + exec.setSqlCode(SqlCodes.NO_DATA_FOUND); + exec.signal(Signal.Type.NOTFOUND); + } + } catch (QueryException e) { + exec.signal(query); + return 1; + } finally { + query.close(); + } + return 0; + } + + /** + * SQL INSERT statement + */ + public Integer insert(org.apache.doris.hplsql.HplsqlParser.Insert_stmtContext ctx) { + exec.stmtConnList.clear(); + if (ctx.select_stmt() != null) { + return insertSelect(ctx); + } + return insertValues(ctx); + } + + /** + * SQL INSERT SELECT statement + */ + public Integer insertSelect(org.apache.doris.hplsql.HplsqlParser.Insert_stmtContext ctx) { + trace(ctx, "INSERT SELECT"); + StringBuilder sql = new StringBuilder(); + sql.append(ctx.T_INSERT().getText() + " "); + if (ctx.T_OVERWRITE() != null) { + sql.append(ctx.T_OVERWRITE().getText() + " " + ctx.T_TABLE().getText() + " "); + } else { + sql.append(ctx.T_INTO().getText() + " "); + if (ctx.T_TABLE() != null) { + sql.append(ctx.T_TABLE().getText() + " "); + } + } + sql.append(evalPop(ctx.table_name()).toString() + " "); + sql.append(evalPop(ctx.select_stmt()).toString()); + trace(ctx, sql.toString()); + QueryResult query = queryExecutor.executeQuery(sql.toString(), ctx); + if (query.error()) { + exec.signal(query); + return 1; + } + exec.setSqlSuccess(); + query.close(); + return 0; + } + + /** + * SQL INSERT VALUES statement + */ + public Integer insertValues(org.apache.doris.hplsql.HplsqlParser.Insert_stmtContext ctx) { + trace(ctx, "INSERT VALUES"); + String table = evalPop(ctx.table_name()).toString(); + String conn = 
exec.getObjectConnection(ctx.table_name().getText()); + Conn.Type type = exec.getConnectionType(conn); + StringBuilder sql = new StringBuilder(); + if (type == Conn.Type.HIVE) { + sql.append("INSERT INTO TABLE " + table + " "); + if (conf.insertValues == Conf.InsertValues.NATIVE) { + sql.append("VALUES\n("); + } + } else { + sql.append("INSERT INTO " + table); + if (ctx.insert_stmt_cols() != null) { + sql.append(" " + exec.getFormattedText(ctx.insert_stmt_cols())); + } + sql.append(" VALUES\n("); + } + int rows = ctx.insert_stmt_rows().insert_stmt_row().size(); + for (int i = 0; i < rows; i++) { + org.apache.doris.hplsql.HplsqlParser.Insert_stmt_rowContext row = ctx.insert_stmt_rows().insert_stmt_row(i); + int cols = row.expr().size(); + for (int j = 0; j < cols; j++) { + String value = evalPop(row.expr(j)).toSqlString(); + if (j == 0 && type == Conn.Type.HIVE && conf.insertValues == Conf.InsertValues.SELECT) { + sql.append("SELECT "); + } + sql.append(value); + if (j + 1 != cols) { + sql.append(", "); + } + } + if (type != Conn.Type.HIVE || conf.insertValues == Conf.InsertValues.NATIVE) { + if (i + 1 == rows) { + sql.append(")"); + } else { + sql.append("),\n("); + } + } else if (type == Conn.Type.HIVE && conf.insertValues == Conf.InsertValues.SELECT) { + if (conf.dualTable != null) { + sql.append(" FROM " + conf.dualTable); + } + if (i + 1 < rows) { + sql.append("\nUNION ALL\n"); + } + } + } + if (trace) { + trace(ctx, sql.toString()); + } + + QueryResult query = queryExecutor.executeQuery(sql.toString(), ctx); + if (query.error()) { + exec.signal(query); + return 1; + } + exec.setSqlSuccess(); + query.close(); + return 0; + } + + /** + * INSERT DIRECTORY statement + */ + public Integer insertDirectory(org.apache.doris.hplsql.HplsqlParser.Insert_directory_stmtContext ctx) { + trace(ctx, "INSERT DIRECTORY"); + StringBuilder sql = new StringBuilder(); + sql.append(ctx.T_INSERT().getText() + " " + ctx.T_OVERWRITE().getText() + " "); + if (ctx.T_LOCAL() != null) { + 
sql.append(ctx.T_LOCAL().getText() + " "); + } + sql.append(ctx.T_DIRECTORY().getText() + " " + evalPop(ctx.expr_file()).toSqlString() + " "); + sql.append(evalPop(ctx.expr_select()).toString()); + trace(ctx, sql.toString()); + QueryResult query = queryExecutor.executeQuery(sql.toString(), ctx); + if (query.error()) { + exec.signal(query); + return 1; + } + exec.setSqlSuccess(); + query.close(); + return 0; + } + + /** + * GET DIAGNOSTICS EXCEPTION statement + */ + public Integer getDiagnosticsException( + org.apache.doris.hplsql.HplsqlParser.Get_diag_stmt_exception_itemContext ctx) { + trace(ctx, "GET DIAGNOSTICS EXCEPTION"); + Signal signal = exec.signalPeek(); + if (signal == null || (signal != null && signal.type != Signal.Type.SQLEXCEPTION)) { + signal = exec.currentSignal; + } + if (signal != null) { + exec.setVariable(ctx.qident().getText(), signal.getValue()); + } + return 0; + } + + /** + * GET DIAGNOSTICS ROW_COUNT statement + */ + public Integer getDiagnosticsRowCount(org.apache.doris.hplsql.HplsqlParser.Get_diag_stmt_rowcount_itemContext ctx) { + trace(ctx, "GET DIAGNOSTICS ROW_COUNT"); + exec.setVariable(ctx.qident().getText(), exec.getRowCount()); + return 0; + } + + /** + * USE statement + */ + public Integer use(org.apache.doris.hplsql.HplsqlParser.Use_stmtContext ctx) { + trace(ctx, "USE"); + return use(ctx, ctx.T_USE().toString() + " " + meta.normalizeIdentifierPart(ctx.expr().getText())); + } + + public Integer use(ParserRuleContext ctx, String sql) { + if (trace) { + trace(ctx, "SQL statement: " + sql); + } + QueryResult query = queryExecutor.executeQuery(sql, ctx); + if (query.error()) { + exec.signal(query); + return 1; + } + exec.setSqlCode(SqlCodes.SUCCESS); + query.close(); + return 0; + } + + /** + * VALUES statement + */ + public Integer values(org.apache.doris.hplsql.HplsqlParser.Values_into_stmtContext ctx) { + trace(ctx, "VALUES statement"); + int cnt = ctx.ident().size(); // Number of variables and assignment expressions + int ecnt = 
ctx.expr().size(); + for (int i = 0; i < cnt; i++) { + String name = ctx.ident(i).getText(); + if (i < ecnt) { + visit(ctx.expr(i)); + Var var = exec.setVariable(name); + if (trace) { + trace(ctx, "SET " + name + " = " + var.toString()); + } + } + } + return 0; + } + + /** + * WHILE statement + */ + public Integer while_(org.apache.doris.hplsql.HplsqlParser.While_stmtContext ctx) { + trace(ctx, "WHILE - ENTERED"); + String label = exec.labelPop(); + while (true) { + if (evalPop(ctx.bool_expr()).isTrue()) { + exec.enterScope(Scope.Type.LOOP); + visit(ctx.block()); + exec.leaveScope(); + if (canContinue(label)) { + continue; + } + } + break; + } + trace(ctx, "WHILE - LEFT"); + return 0; + } + + /** + * FOR cursor statement + */ + public Integer forCursor(org.apache.doris.hplsql.HplsqlParser.For_cursor_stmtContext ctx) { + trace(ctx, "FOR CURSOR - ENTERED"); + exec.enterScope(Scope.Type.LOOP); + String cursor = ctx.L_ID().getText(); + String sql = evalPop(ctx.select_stmt()).toString(); + trace(ctx, sql); + QueryResult query = exec.queryExecutor.executeQuery(sql, ctx); + if (query.error()) { + exec.signal(query); + return 1; + } + trace(ctx, "SELECT completed successfully"); + exec.setSqlSuccess(); + try { + int cols = query.columnCount(); + Row row = new Row(); + for (int i = 0; i < cols; i++) { + row.addColumnDefinition(query.metadata().columnName(i), query.metadata().columnTypeName(i)); + } + Var var = new Var(cursor, row); + exec.addVariable(var); + while (query.next()) { + var.setRowValues(query); + if (trace) { + trace(ctx, var, query.metadata(), 0); + } + visit(ctx.block()); + exec.incRowCount(); + } + } catch (QueryException e) { + exec.signal(e); + query.close(); + return 1; + } + exec.setSqlSuccess(); + query.close(); + exec.leaveScope(); + trace(ctx, "FOR CURSOR - LEFT"); + return 0; + } + + /** + * FOR (integer range) statement + */ + public Integer forRange(org.apache.doris.hplsql.HplsqlParser.For_range_stmtContext ctx) { + trace(ctx, "FOR RANGE - 
ENTERED"); + int start = evalPop(ctx.expr(0)).intValue(); + int end = evalPop(ctx.expr(1)).intValue(); + int step = evalPop(ctx.expr(2), 1L).intValue(); + exec.enterScope(Scope.Type.LOOP); + Var index = setIndex(start, end, ctx); + exec.addVariable(index); + for (int i = start; i <= end; i += step) { + visit(ctx.block()); + updateIndex(step, index, ctx); + } + exec.leaveScope(); + trace(ctx, "FOR RANGE - LEFT"); + return 0; + } + + public Integer unconditionalLoop(org.apache.doris.hplsql.HplsqlParser.Unconditional_loop_stmtContext ctx) { + trace(ctx, "UNCONDITIONAL LOOP - ENTERED"); + String label = exec.labelPop(); + while (true) { + exec.enterScope(Scope.Type.LOOP); + visit(ctx.block()); + exec.leaveScope(); + if (!canContinue(label)) { + break; + } + } + trace(ctx, "UNCONDITIONAL LOOP - LEFT"); + return 0; + } + + /** + * To set the Value index for FOR Statement + */ + private Var setIndex(int start, int end, org.apache.doris.hplsql.HplsqlParser.For_range_stmtContext ctx) { + + if (ctx.T_REVERSE() == null) { + return new Var(ctx.L_ID().getText(), Long.valueOf(start)); + } else { + return new Var(ctx.L_ID().getText(), Long.valueOf(end)); + } + } + + /** + * To update the value of index for FOR Statement + */ + private void updateIndex(int step, Var index, org.apache.doris.hplsql.HplsqlParser.For_range_stmtContext ctx) { + + if (ctx.T_REVERSE() == null) { + index.increment(step); + } else { + index.decrement(step); + } + } + + /** + * EXEC, EXECUTE and EXECUTE IMMEDIATE statement to execute dynamic SQL or stored procedure + */ + public Integer exec(org.apache.doris.hplsql.HplsqlParser.Exec_stmtContext ctx) { + if (execProc(ctx)) { + return 0; + } + trace(ctx, "EXECUTE"); + Var vsql = evalPop(ctx.expr()); + String sql = vsql.toString(); + if (trace) { + trace(ctx, "SQL statement: " + sql); + } + QueryResult query = queryExecutor.executeQuery(sql, ctx); + if (query.error()) { + exec.signal(query); + return 1; + } + try { + if (ctx.T_INTO() != null) { + int cols = 
ctx.L_ID().size(); + if (query.next()) { + for (int i = 0; i < cols; i++) { + Var var = exec.findVariable(ctx.L_ID(i).getText()); + if (var != null) { + if (var.type != Type.ROW) { + var.setValue(query, i); + } else { + var.setRowValues(query); + } + if (trace) { + trace(ctx, var, query.metadata(), i); + } + } else if (trace) { + trace(ctx, "Variable not found: " + ctx.L_ID(i).getText()); + } + } + exec.setSqlCode(SqlCodes.SUCCESS); + } + } else { // Print the results + int cols = query.columnCount(); + while (query.next()) { + for (int i = 0; i < cols; i++) { + if (i > 1) { + console.print("\t"); + } + console.print(query.column(i, String.class)); + } + console.printLine(""); + } + } + } catch (QueryException e) { + exec.signal(query); + query.close(); + return 1; + } + query.close(); + return 0; + } + + /** + * EXEC to execute a stored procedure + */ + public Boolean execProc(org.apache.doris.hplsql.HplsqlParser.Exec_stmtContext ctx) { + String name = evalPop(ctx.expr()).toString().toUpperCase(); + if (exec.functions.exec(name, ctx.expr_func_params())) { + return true; + } + return false; + } + + /** + * EXIT statement (leave the specified loop with a condition) + */ + public Integer exit(org.apache.doris.hplsql.HplsqlParser.Exit_stmtContext ctx) { + trace(ctx, "EXIT"); + String label = ""; + if (ctx.L_ID() != null) { + label = ctx.L_ID().toString(); + } + if (ctx.T_WHEN() != null) { + if (evalPop(ctx.bool_expr()).isTrue()) { + leaveLoop(label); + } + } else { + leaveLoop(label); + } + return 0; + } + + /** + * BREAK statement (leave the innermost loop unconditionally) + */ + public Integer break_(org.apache.doris.hplsql.HplsqlParser.Break_stmtContext ctx) { + trace(ctx, "BREAK"); + leaveLoop(""); + return 0; + } + + /** + * LEAVE statement (leave the specified loop unconditionally) + */ + public Integer leave(org.apache.doris.hplsql.HplsqlParser.Leave_stmtContext ctx) { + trace(ctx, "LEAVE"); + String label = ""; + if (ctx.L_ID() != null) { + label = 
ctx.L_ID().toString(); + } + leaveLoop(label); + return 0; + } + + /** + * Leave the specified or innermost loop unconditionally + */ + public void leaveLoop(String value) { + exec.signal(Signal.Type.LEAVE_LOOP, value); + } + + /** + * UPDATE statement + */ + public Integer update(org.apache.doris.hplsql.HplsqlParser.Update_stmtContext ctx) { + trace(ctx, "UPDATE"); + String sql = exec.getFormattedText(ctx); + trace(ctx, sql); + QueryResult query = queryExecutor.executeQuery(sql, ctx); + if (query.error()) { + exec.signal(query); + return 1; + } + exec.setSqlSuccess(); + query.close(); + return 0; + } + + /** + * DELETE statement + */ + public Integer delete(org.apache.doris.hplsql.HplsqlParser.Delete_stmtContext ctx) { + trace(ctx, "DELETE"); + String table = evalPop(ctx.table_name()).toString(); + StringBuilder sql = new StringBuilder(); + if (ctx.T_ALL() == null) { + sql.append("DELETE FROM " + table); + if (ctx.where_clause() != null) { + boolean oldBuildSql = exec.buildSql; + exec.buildSql = true; + sql.append(" " + evalPop(ctx.where_clause()).toString()); + exec.buildSql = oldBuildSql; + } + } else { + sql.append("TRUNCATE TABLE " + table); + } + trace(ctx, sql.toString()); + QueryResult query = queryExecutor.executeQuery(sql.toString(), ctx); + if (query.error()) { + exec.signal(query); + return 1; + } + exec.setSqlSuccess(); + query.close(); + return 0; + } + + /** + * MERGE statement + */ + public Integer merge(org.apache.doris.hplsql.HplsqlParser.Merge_stmtContext ctx) { + trace(ctx, "MERGE"); + String sql = exec.getFormattedText(ctx); + trace(ctx, sql); + QueryResult query = queryExecutor.executeQuery(sql, ctx); + if (query.error()) { + exec.signal(query); + return 1; + } + exec.setSqlSuccess(); + query.close(); + return 0; + } + + /** + * PRINT Statement + */ + public Integer print(org.apache.doris.hplsql.HplsqlParser.Print_stmtContext ctx) { + trace(ctx, "PRINT"); + if (ctx.expr() != null) { + console.printLine(evalPop(ctx.expr()).toString()); + } + 
return 0; + } + + /** + * QUIT Statement + */ + public Integer quit(org.apache.doris.hplsql.HplsqlParser.Quit_stmtContext ctx) { + trace(ctx, "QUIT"); + String rc = null; + if (ctx.expr() != null) { + rc = evalPop(ctx.expr()).toString(); + } + exec.signal(Signal.Type.LEAVE_PROGRAM, rc); + return 0; + } + + /** + * SET current schema + */ + public Integer setCurrentSchema(org.apache.doris.hplsql.HplsqlParser.Set_current_schema_optionContext ctx) { + trace(ctx, "SET CURRENT SCHEMA"); + return use(ctx, "USE " + meta.normalizeIdentifierPart(evalPop(ctx.expr()).toString())); + } + + /** + * SIGNAL statement + */ + public Integer signal(org.apache.doris.hplsql.HplsqlParser.Signal_stmtContext ctx) { + trace(ctx, "SIGNAL"); + Signal signal = new Signal(Signal.Type.USERDEFINED, ctx.ident().getText()); + exec.signal(signal); + return 0; + } + + /** + * SUMMARY statement + */ + public Integer summary(org.apache.doris.hplsql.HplsqlParser.Summary_stmtContext ctx) { + trace(ctx, "SUMMARY"); + String table = null; + String select = null; + String conn = null; + Row row = null; + if (ctx.table_name() != null) { + table = evalPop(ctx.table_name()).toString(); + conn = exec.getObjectConnection(table); + row = meta.getRowDataType(ctx, conn, table); + } else { + select = evalPop(ctx.select_stmt()).toString(); + conn = exec.getStatementConnection(); + row = meta.getRowDataTypeForSelect(ctx, conn, select); + } + if (row == null) { + return 1; + } + Conn.Type connType = exec.getConnectionType(conn); + if (ctx.T_TOP() == null) { + return summaryStat(ctx, table, select, row, conn, connType); + } else { + return summaryTop(ctx, table, select, row, conn, connType); + } + } + + // Summary for column statistics + public Integer summaryStat(org.apache.doris.hplsql.HplsqlParser.Summary_stmtContext ctx, String table, + String select, Row row, String conn, Conn.Type connType) { + StringBuilder sql = new StringBuilder("SELECT COUNT(*)"); + int maxColName = 11; + // Define summary metrics for each 
column + for (Column c : row.getColumns()) { + String col = c.getName(); + if (connType == Conn.Type.HIVE) { + col = '`' + col + '`'; + } + sql.append(",COUNT(" + col + "),"); + sql.append("COUNT(DISTINCT " + col + "),"); + sql.append("AVG(" + col + "),"); + sql.append("MIN(" + col + "),"); + sql.append("MAX(" + col + "),"); + sql.append("STDDEV_SAMP(" + col + "),"); + sql.append("PERCENTILE_APPROX(CAST(" + col + " AS DOUBLE),0.05),"); + sql.append("PERCENTILE_APPROX(CAST(" + col + " AS DOUBLE),0.25),"); + sql.append("PERCENTILE_APPROX(CAST(" + col + " AS DOUBLE),0.5),"); + sql.append("PERCENTILE_APPROX(CAST(" + col + " AS DOUBLE),0.75),"); + sql.append("PERCENTILE_APPROX(CAST(" + col + " AS DOUBLE),0.95)"); + if (col.length() > maxColName) { + maxColName = col.length(); + } + } + if (table != null) { + sql.append(" FROM (SELECT * FROM " + table); + if (ctx.where_clause() != null) { + sql.append(" " + evalPop(ctx.where_clause()).toString()); + } + if (ctx.T_LIMIT() != null) { + sql.append(" LIMIT "); + int limExp = 0; + if (ctx.T_TOP() != null) { + limExp = 1; + } + sql.append(evalPop(ctx.expr(limExp)).toString()); + } + sql.append(") t"); + } else { + sql.append(" FROM (" + select + ") t"); + } + QueryResult query = queryExecutor.executeQuery(sql.toString(), ctx); + if (query.error()) { + exec.signal(query); + return 1; + } + exec.setSqlSuccess(); + try { + System.out.print("\n"); + // The summary query returns only one row + if (query.next()) { + int i = 0; + int cc = 11; + String cntRows = query.column(0, String.class); + // Pad output + String fmt = String.format("%%-%ds\t%%-11s\t%%-11s\t%%-11s\t%%-11s\t%%-11s\t%%-11s\t%%-11s\t%%-11s" + + "\t%%-11s\t%%-11s\t%%-11s\t%%-11s\t%%-11s\n", maxColName + 1); + System.out.print( + String.format(fmt, "Column", "Type", "Rows", "NonNull", "Unique", "Avg", "Min", "Max", "StdDev", + "p05", "p25", "p50", "p75", "p95")); + for (Column c : row.getColumns()) { + String avg = String.format("%.2f", query.column(3 + i * cc, 
Double.class)); + String stddev = String.format("%.2f", query.column(6 + i * cc, Double.class)); + String p05 = String.format("%.2f", query.column(7 + i * cc, Double.class)); + String p25 = String.format("%.2f", query.column(8 + i * cc, Double.class)); + String p50 = String.format("%.2f", query.column(9 + i * cc, Double.class)); + String p75 = String.format("%.2f", query.column(10 + i * cc, Double.class)); + String p95 = String.format("%.2f", query.column(11 + i * cc, Double.class)); + System.out.print(String.format(fmt, c.getName(), c.getType(), cntRows, + query.column(1 + i * cc, String.class), query.column(2 + i * cc, String.class), avg, + query.column(4 + i * cc, String.class), query.column(5 + i * cc, String.class), stddev, p05, + p25, p50, p75, p95)); + i++; + } + } + } catch (QueryException e) { + exec.signal(e); + query.close(); + return 1; + } + query.close(); + return 0; + } + + // Summary for top column values + public Integer summaryTop(org.apache.doris.hplsql.HplsqlParser.Summary_stmtContext ctx, String table, String select, + Row row, String conn, Conn.Type connType) { + // CAST AS INT does not work as expected (ID is still considered as STRING in ORDER BY for some reason) + StringBuilder sql = new StringBuilder("SELECT id, col, cnt FROM (" + + "SELECT id, col, cnt, ROW_NUMBER() OVER (PARTITION BY id ORDER BY cnt DESC) rn " + + "FROM (SELECT CAST(GROUPING__ID AS DECIMAL) id, COALESCE("); + int topNum = evalPop(ctx.expr(0)).intValue(); + StringBuilder colsList = new StringBuilder(); + StringBuilder colsGrList = new StringBuilder(); + int i = 0; + for (Column c : row.getColumns()) { + String col = c.getName(); + if (connType == Conn.Type.HIVE) { + col = '`' + col + '`'; + } + if (i != 0) { + colsList.append(","); + colsGrList.append(","); + } + colsList.append(col); + colsGrList.append("(" + col + ")"); + i++; + } + sql.append(colsList); + sql.append(") col, COUNT(*) cnt"); + if (table != null) { + sql.append(" FROM (SELECT * FROM " + table); + if 
(ctx.where_clause() != null) { + sql.append(" " + evalPop(ctx.where_clause()).toString()); + } + if (ctx.T_LIMIT() != null) { + sql.append(" LIMIT " + evalPop(ctx.expr(1)).toString()); + } + sql.append(") t"); + } else { + sql.append(" FROM (" + select + ") t"); + } + sql.append(" GROUP BY "); + sql.append(colsList); + sql.append(" GROUPING SETS ("); + sql.append(colsGrList); + sql.append(")) t) t WHERE rn <= " + topNum + " ORDER BY id, cnt DESC"); + // Add LIMIT as Order by-s without limit can disabled for safety reasons + sql.append(" LIMIT " + topNum * row.size()); + QueryResult query = queryExecutor.executeQuery(sql.toString(), ctx); + if (query.error()) { + exec.signal(query); + return 1; + } + exec.setSqlSuccess(); + try { + int prevId = -1; + int grRow = 0; + int colNum = 0; + int maxLen = row.getColumn(colNum).getName().length(); + ArrayList outCols = new ArrayList<>(); + ArrayList outCnts = new ArrayList<>(); + ArrayList outLens = new ArrayList<>(); + while (query.next()) { + int id = query.column(0, Integer.class); + String value = query.column(1, String.class); + int cnt = query.column(2, Integer.class); + if (prevId == -1) { + prevId = id; + } + // Still the same column + if (id == prevId) { + outCols.add(value); + outCnts.add(cnt); + if (value != null && value.length() > maxLen) { + maxLen = Math.min(value.length(), 300); + } + grRow++; + } else { // First value for next column + // Pad with empty rows if the number of values in group is less than TOP num + for (int j = grRow; j < topNum; j++) { + outCols.add(""); + outCnts.add(0); + grRow++; + } + outCols.add(value); + outCnts.add(cnt); + outLens.add(maxLen); + colNum++; + maxLen = row.getColumn(colNum).getName().length(); + if (value != null && value.length() > maxLen) { + maxLen = Math.min(value.length(), 300); + } + grRow = 1; + prevId = id; + } + } + for (int j = grRow; j < topNum; j++) { + outCols.add(""); + outCnts.add(0); + grRow++; + } + if (maxLen != 0) { + outLens.add(maxLen); + } + 
System.out.print("\n"); + // Output header + i = 0; + for (Column c : row.getColumns()) { + if (i != 0) { + System.out.print("\t"); + } + String fmt = String.format("%%-%ds", outLens.get(i) + 11 + 3); + System.out.print(String.format(fmt, c.getName())); + i++; + } + System.out.print("\n"); + // Output top values + for (int j = 0; j < topNum; j++) { + for (int k = 0; k < row.size(); k++) { + if (k != 0) { + System.out.print("\t"); + } + int cnt = outCnts.get(j + k * topNum); + if (cnt != 0) { // skip padded values + String fmt = String.format("%%-%ds", outLens.get(k)); + System.out.print(String.format(fmt, outCols.get(j + k * topNum))); + System.out.print(String.format(" %-11d", cnt)); + } else { + String fmt = String.format("%%-%ds", outLens.get(k) + 11 + 3); + System.out.print(String.format(fmt, "")); + } + } + System.out.print("\n"); + } + } catch (QueryException e) { + exec.signal(e); + query.close(); + return 1; + } + query.close(); + return 0; + } + + /** + * RESIGNAL statement + */ + public Integer resignal(org.apache.doris.hplsql.HplsqlParser.Resignal_stmtContext ctx) { + trace(ctx, "RESIGNAL"); + if (ctx.T_SQLSTATE() != null) { + String sqlstate = evalPop(ctx.expr(0)).toString(); + String text = ""; + if (ctx.T_MESSAGE_TEXT() != null) { + text = evalPop(ctx.expr(1)).toString(); + } + SQLException exception = new SQLException(text, sqlstate, -1); + Signal signal = new Signal(Signal.Type.SQLEXCEPTION, text, exception); + exec.setSqlCode(exception); + exec.resignal(signal); + } else { + exec.resignal(); + } + return 0; + } + + /** + * RETURN statement + */ + public Integer return_(org.apache.doris.hplsql.HplsqlParser.Return_stmtContext ctx) { + trace(ctx, "RETURN"); + if (ctx.expr() != null) { + eval(ctx.expr()); + } + exec.signal(Signal.Type.LEAVE_ROUTINE); + return 0; + } + + /** + * Check if an exception is raised or EXIT executed, and we should leave the block + */ + boolean canContinue(String label) { + Signal signal = exec.signalPeek(); + if (signal != 
null && signal.type == Signal.Type.SQLEXCEPTION) { + return false; + } + signal = exec.signalPeek(); + if (signal != null && signal.type == Signal.Type.LEAVE_LOOP) { + if (signal.value == null || signal.value.isEmpty() || (label != null && label.equalsIgnoreCase( + signal.value))) { + exec.signalPop(); + } + return false; + } + return true; + } + + /** + * Evaluate the expression and push the value to the stack + */ + void eval(ParserRuleContext ctx) { + exec.visit(ctx); + } + + /** + * Evaluate the expression to specified String value + */ + void evalString(String string) { + exec.stackPush(new Var(string)); + } + + void evalString(StringBuilder string) { + evalString(string.toString()); + } + + /** + * Evaluate the expression and pop value from the stack + */ + Var evalPop(ParserRuleContext ctx) { + visit(ctx); + if (!exec.stack.isEmpty()) { + return exec.stackPop(); + } + return Var.Empty; + } + + Var evalPop(ParserRuleContext ctx, long def) { + if (ctx != null) { + exec.visit(ctx); + return exec.stackPop(); + } + return new Var(def); + } + + /** + * Execute rules + */ + Integer visit(ParserRuleContext ctx) { + return exec.visit(ctx); + } + + /** + * Execute children rules + */ + Integer visitChildren(ParserRuleContext ctx) { + return exec.visitChildren(ctx); + } + + /** + * Trace information + */ + void trace(ParserRuleContext ctx, String message) { + exec.trace(ctx, message); + } + + void trace(ParserRuleContext ctx, Var var, Metadata metadata, int idx) { + exec.trace(ctx, var, metadata, idx); + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/StreamGobbler.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/StreamGobbler.java new file mode 100644 index 00000000000000..5ed63edfb44a90 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/StreamGobbler.java @@ -0,0 +1,55 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. 
See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/StreamGobbler.java +// and modified by Doris + +package org.apache.doris.hplsql; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; + +/** + * Read a stream from an external process + */ +public class StreamGobbler extends Thread { + private final Console console; + private final InputStream is; + + StreamGobbler(InputStream is, Console console) { + this.is = is; + this.console = console; + } + + public void run() { + try { + InputStreamReader isr = new InputStreamReader(is); + BufferedReader br = new BufferedReader(isr); + while (true) { + String line = br.readLine(); + if (line == null) { + break; + } + console.printLine(line); + } + } catch (IOException ioe) { + ioe.printStackTrace(); + } + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/SyntaxErrorReporter.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/SyntaxErrorReporter.java new file mode 100644 index 00000000000000..cdee10be74f6bd --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/SyntaxErrorReporter.java @@ -0,0 +1,39 @@ +// Licensed to the Apache Software Foundation (ASF) under 
one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
// This file is copied from
// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/SyntaxErrorReporter.java
// and modified by Doris

package org.apache.doris.hplsql;

import org.antlr.v4.runtime.BaseErrorListener;
import org.antlr.v4.runtime.RecognitionException;
import org.antlr.v4.runtime.Recognizer;

/**
 * ANTLR error listener that forwards parser/lexer syntax errors to the
 * HPL/SQL console instead of ANTLR's default stderr output.
 */
public class SyntaxErrorReporter extends BaseErrorListener {
    // Destination for the formatted error messages.
    private final Console console;

    public SyntaxErrorReporter(Console console) {
        this.console = console;
    }

    // Called by ANTLR for each syntax error; prints a one-line
    // "Syntax error at line L:C msg" diagnostic. The offending symbol and
    // the RecognitionException are intentionally ignored.
    @Override
    public void syntaxError(Recognizer recognizer, Object offendingSymbol, int line, int charPositionInLine,
            String msg, RecognitionException e) {
        console.printError("Syntax error at line " + line + ":" + charPositionInLine + " " + msg);
    }
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/Timer.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/Timer.java
new file mode 100644
index 00000000000000..ddc41b088b5bb8
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/Timer.java
@@ -0,0 +1,61 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements.
See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
// This file is copied from
// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/Timer.java
// and modified by Doris

package org.apache.doris.hplsql;

/**
 * Simple wall-clock stopwatch based on System.currentTimeMillis().
 * Not thread-safe; intended for single-threaded elapsed-time tracing.
 */
public class Timer {
    long start = 0;   // start() timestamp, ms since epoch
    long stop = 0;    // stop() timestamp, ms since epoch
    long elapsed = 0; // stop - start, set by stop()

    /**
     * Start the timer and return the start timestamp in milliseconds
     */
    public long start() {
        start = System.currentTimeMillis();
        return start;
    }

    /**
     * Get intermediate timer value (current wall-clock time, not elapsed time)
     */
    public long current() {
        return System.currentTimeMillis();
    }

    /**
     * Stop the timer and return elapsed time in milliseconds
     */
    public long stop() {
        stop = System.currentTimeMillis();
        elapsed = stop - start;
        return elapsed;
    }

    /**
     * Format the elapsed time ("123 ms" below one second, "1.23 sec" otherwise).
     * Only meaningful after stop() has been called.
     */
    public String format() {
        if (elapsed < 1000) {
            return String.valueOf(elapsed) + " ms";
        }
        return String.format("%.2f", ((float) elapsed) / 1000) + " sec";
    }
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/Utils.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/Utils.java
new file mode 100644
index 00000000000000..7e9be52787980a
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/Utils.java
@@ -0,0 +1,330 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements.
See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/Utils.java +// and modified by Doris + +package org.apache.doris.hplsql; + +import java.sql.Date; +import java.sql.Timestamp; + +public class Utils { + + /** + * Unquote string and remove escape characters inside the script + */ + public static String unquoteString(String s) { + if (s == null) { + return null; + } + + int len = s.length(); + StringBuilder s2 = new StringBuilder(len); + + for (int i = 0; i < len; i++) { + char ch = s.charAt(i); + char ch2 = (i < len - 1) ? 
s.charAt(i + 1) : 0; + + if ((i == 0 || i == len - 1) && (ch == '\'' || ch == '"')) { + continue; + } else + // \' and '' escape sequences + if ((ch == '\\' && ch2 == '\'') || (ch == '\'' && ch2 == '\'')) { + continue; + } + + s2.append(ch); + } + + return s2.toString(); + } + + /** + * Quote string and escape characters - ab'c -> 'ab''c' + */ + public static String quoteString(String s) { + if (s == null) { + return null; + } + int len = s.length(); + StringBuilder s2 = new StringBuilder(len + 2).append('\''); + + for (int i = 0; i < len; i++) { + char ch = s.charAt(i); + s2.append(ch); + if (ch == '\'') { + s2.append(ch); + } + } + s2.append('\''); + return s2.toString(); + } + + /** + * Merge quoted strings: 'a' 'b' -> 'ab'; 'a''b' 'c' -> 'a''bc' + */ + public static String mergeQuotedStrings(String s1, String s2) { + if (s1 == null || s2 == null) { + return null; + } + + int len1 = s1.length(); + int len2 = s2.length(); + + if (len1 == 0 || len2 == 0) { + return s1; + } + + return s1.substring(0, len1 - 1) + s2.substring(1); + } + + /** + * Convert String to Date + */ + public static Date toDate(String s) { + int len = s.length(); + if (len >= 10) { + int c4 = s.charAt(4); + int c7 = s.charAt(7); + // YYYY-MM-DD + if (c4 == '-' && c7 == '-') { + return Date.valueOf(s.substring(0, 10)); + } + } + return null; + } + + /** + * Convert String to Timestamp + */ + public static Timestamp toTimestamp(String s) { + int len = s.length(); + if (len >= 10) { + int c4 = s.charAt(4); + int c7 = s.charAt(7); + // YYYY-MM-DD + if (c4 == '-' && c7 == '-') { + // Convert DB2 syntax: YYYY-MM-DD-HH.MI.SS.FFF + if (len > 19) { + if (s.charAt(10) == '-') { + String s2 = s.substring(0, 10) + ' ' + s.substring(11, 13) + ':' + s.substring(14, 16) + ':' + + s.substring(17); + return Timestamp.valueOf(s2); + } + } else if (len == 10) { + s += " 00:00:00.000"; + } + return Timestamp.valueOf(s); + } + } + return null; + } + + /** + * Compare two String values and return min or max + */ + 
public static String minMaxString(String s1, String s2, boolean max) { + if (s1 == null) { + return s2; + } else if (s2 == null) { + return s1; + } + int cmp = s1.compareTo(s2); + if ((max && cmp < 0) || (!max && cmp > 0)) { + return s2; + } + return s1; + } + + /** + * Compare two Int values and return min or max + */ + public static Long minMaxInt(Long i1, String s, boolean max) { + Long i2 = null; + try { + i2 = Long.parseLong(s); + } catch (NumberFormatException ignored) { + // ignored + } + if (i1 == null) { + return i2; + } else if (i2 == null) { + return i1; + } + if ((max && i1.longValue() < i2.longValue()) || (!max && i1.longValue() > i2.longValue())) { + return i2; + } + return i1; + } + + /** + * Compare two Date values and return min or max + */ + public static Date minMaxDate(Date d1, String s, boolean max) { + Date d2 = Utils.toDate(s); + if (d1 == null) { + return d2; + } else if (d2 == null) { + return d1; + } + if ((max && d1.before(d2)) || (!max && d1.after(d2))) { + return d2; + } + return d1; + } + + /** + * Convert String array to a string with the specified delimiter + */ + public static String toString(String[] a, char del) { + StringBuilder s = new StringBuilder(); + for (int i = 0; i < a.length; i++) { + if (i > 0) { + s.append(del); + } + s.append(a[i]); + } + return s.toString(); + } + + /** + * Convert SQL datetime format string to Java SimpleDateFormat + */ + public static String convertSqlDatetimeFormat(String in) { + StringBuilder out = new StringBuilder(); + int len = in.length(); + int i = 0; + while (i < len) { + if (i + 4 <= len && in.substring(i, i + 4).compareTo("YYYY") == 0) { + out.append("yyyy"); + i += 4; + } else if (i + 2 <= len && in.substring(i, i + 2).compareTo("mm") == 0) { + out.append("MM"); + i += 2; + } else if (i + 2 <= len && in.substring(i, i + 2).compareTo("DD") == 0) { + out.append("dd"); + i += 2; + } else if (i + 4 <= len && in.substring(i, i + 4).compareToIgnoreCase("HH24") == 0) { + out.append("HH"); + i 
+= 4; + } else if (i + 2 <= len && in.substring(i, i + 2).compareToIgnoreCase("MI") == 0) { + out.append("mm"); + i += 2; + } else if (i + 2 <= len && in.substring(i, i + 2).compareTo("SS") == 0) { + out.append("ss"); + i += 2; + } else { + out.append(in.charAt(i)); + i++; + } + } + return out.toString(); + } + + /** + * Get the executable directory + */ + public static String getExecDir() { + String dir = Hplsql.class.getProtectionDomain().getCodeSource().getLocation().getPath(); + if (dir.endsWith(".jar")) { + dir = dir.substring(0, dir.lastIndexOf("/") + 1); + } + return dir; + } + + /** + * Format size value specified in bytes + */ + public static String formatSizeInBytes(long bytes, String postfix) { + String out; + if (bytes == 1) { + out = bytes + " byte"; + } else if (bytes < 1024) { + out = bytes + " bytes"; + } else if (bytes < 1024 * 1024) { + out = String.format("%.1f", ((float) bytes) / 1024) + " KB"; + } else if (bytes < 1024 * 1024 * 1024) { + out = String.format("%.1f", ((float) bytes) / (1024 * 1024)) + " MB"; + } else { + out = String.format("%.1f", ((float) bytes) / (1024 * 1024 * 1024)) + " GB"; + } + if (postfix != null && !postfix.isEmpty()) { + out += postfix; + } + return out; + } + + public static String formatSizeInBytes(long bytes) { + return Utils.formatSizeInBytes(bytes, null); + } + + /** + * Format elasped time + */ + public static String formatTime(long msElapsed) { + if (msElapsed < 60000) { + return msElapsed / 1000 + " sec"; + } else if (msElapsed < 60000 * 60) { + return msElapsed / 60000 + " min " + (msElapsed % 60000) / 1000 + " sec"; + } + return ""; + } + + /** + * Format bytes per second rate + */ + public static String formatBytesPerSec(long bytes, long msElapsed) { + if (msElapsed < 30) { + return "n/a"; + } + float bytesPerSec = ((float) bytes) / msElapsed * 1000; + return Utils.formatSizeInBytes((long) bytesPerSec, "/sec"); + } + + /** + * Format percentage + */ + public static String formatPercent(long current, long 
all) { + return String.format("%.1f", ((float) current) / all * 100) + "%"; + } + + /** + * Format count + */ + public static String formatCnt(long value, String suffix) { + if (value == 1) { + return value + " " + suffix; + } + return value + " " + suffix + "s"; + } + + public static String formatCnt(long value, String suffix, String suffix2) { + if (value == 1) { + return value + " " + suffix; + } + return value + " " + suffix2; + } + + /** + * Note. This stub is to resolve name conflict with ANTLR generated source using + * org.antlr.v4.runtime.misc.Utils.join + */ + static String join(T[] array, String separator) { + return org.antlr.v4.runtime.misc.Utils.join(array, separator); + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/Var.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/Var.java new file mode 100644 index 00000000000000..bea96aa149537c --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/Var.java @@ -0,0 +1,621 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/Var.java +// and modified by Doris + +package org.apache.doris.hplsql; + +import org.apache.doris.hplsql.exception.TypeException; +import org.apache.doris.hplsql.executor.QueryResult; + +import java.math.BigDecimal; +import java.math.RoundingMode; +import java.sql.Date; +import java.sql.Timestamp; +import java.util.ArrayList; + +/** + * Variable or the result of expression + */ +public class Var { + // Data types + public enum Type { + BOOL, CURSOR, DATE, DECIMAL, DERIVED_TYPE, DERIVED_ROWTYPE, DOUBLE, FILE, IDENT, BIGINT, INTERVAL, ROW, + RS_LOCATOR, STRING, STRINGLIST, TIMESTAMP, NULL, HPL_OBJECT + } + + public static final String DERIVED_TYPE = "DERIVED%TYPE"; + public static final String DERIVED_ROWTYPE = "DERIVED%ROWTYPE"; + public static Var Empty = new Var(); + public static Var Null = new Var(Type.NULL); + + public String name; + public Type type; + public Object value; + + int len; + int scale; + + boolean constant = false; + + public Var() { + type = Type.NULL; + } + + public Var(Var var) { + name = var.name; + type = var.type; + value = var.value; + len = var.len; + scale = var.scale; + } + + public Var(Long value) { + this.type = Type.BIGINT; + this.value = value; + } + + public Var(BigDecimal value) { + this.type = Type.DECIMAL; + this.value = value; + } + + public Var(String name, Long value) { + this.type = Type.BIGINT; + this.name = name; + this.value = value; + } + + public Var(String value) { + this.type = Type.STRING; + this.value = value; + } + + public Var(Double value) { + this.type = Type.DOUBLE; + this.value = value; + } + + public Var(Date value) { + this.type = Type.DATE; + this.value = value; + } + + public Var(Timestamp value, int scale) { + this.type = Type.TIMESTAMP; + this.value = value; + this.scale = scale; + } + + public Var(Interval value) { + this.type = Type.INTERVAL; + this.value = value; + } + + public 
Var(ArrayList value) { + this.type = Type.STRINGLIST; + this.value = value; + } + + public Var(Boolean b) { + type = Type.BOOL; + value = b; + } + + public Var(String name, Row row) { + this.name = name; + this.type = Type.ROW; + this.value = new Row(row); + } + + public Var(Type type, String name) { + this.type = type; + this.name = name; + } + + public Var(Type type, Object value) { + this.type = type; + this.value = value; + } + + public Var(String name, Type type, Object value) { + this.name = name; + this.type = type; + this.value = value; + } + + public Var(Type type) { + this.type = type; + } + + public Var(String name, String type, Integer len, Integer scale, Var def) { + this.name = name; + setType(type); + if (len != null) { + this.len = len; + } + if (scale != null) { + this.scale = scale; + } + if (def != null) { + cast(def); + } + } + + public Var(String name, String type, String len, String scale, Var def) { + this(name, type, len != null ? Integer.parseInt(len) : null, scale != null ? 
Integer.parseInt(scale) : null, + def); + } + + /** + * Cast a new value to the variable + */ + public Var cast(Var val) { + try { + if (constant) { + return this; + } else if (val == null || val.value == null) { + value = null; + } else if (type == Type.DERIVED_TYPE) { + type = val.type; + value = val.value; + } else if (type == val.type && type == Type.STRING) { + cast((String) val.value); + } else if (type == val.type) { + value = val.value; + } else if (type == Type.STRING) { + cast(val.toString()); + } else if (type == Type.BIGINT) { + if (val.type == Type.STRING) { + value = Long.parseLong((String) val.value); + } else if (val.type == Type.DECIMAL) { + value = ((BigDecimal) val.value).longValue(); + } + } else if (type == Type.DECIMAL) { + if (val.type == Type.STRING) { + value = new BigDecimal((String) val.value); + } else if (val.type == Type.BIGINT) { + value = BigDecimal.valueOf(val.longValue()); + } else if (val.type == Type.DOUBLE) { + value = BigDecimal.valueOf(val.doubleValue()); + } + } else if (type == Type.DOUBLE) { + if (val.type == Type.STRING) { + value = Double.valueOf((String) val.value); + } else if (val.type == Type.BIGINT || val.type == Type.DECIMAL) { + value = Double.valueOf(val.doubleValue()); + } + } else if (type == Type.DATE) { + value = org.apache.doris.hplsql.Utils.toDate(val.toString()); + } else if (type == Type.TIMESTAMP) { + value = org.apache.doris.hplsql.Utils.toTimestamp(val.toString()); + } + } catch (NumberFormatException e) { + throw new TypeException(null, type, val.type, val.value); + } + return this; + } + + /** + * Cast a new string value to the variable + */ + public Var cast(String val) { + if (!constant && type == Type.STRING) { + if (len != 0) { + int l = val.length(); + if (l > len) { + value = val.substring(0, len); + return this; + } + } + value = val; + } + return this; + } + + /** + * Set the new value + */ + public void setValue(String str) { + if (!constant && type == Type.STRING) { + value = str; + } + } + 
+ public Var setValue(Long val) { + if (!constant && type == Type.BIGINT) { + value = val; + } + return this; + } + + public Var setValue(Boolean val) { + if (!constant && type == Type.BOOL) { + value = val; + } + return this; + } + + public void setValue(Object value) { + if (!constant) { + this.value = value; + } + } + + public Var setValue(QueryResult queryResult, int idx) { + int type = queryResult.jdbcType(idx); + if (type == java.sql.Types.CHAR || type == java.sql.Types.VARCHAR) { + cast(new Var(queryResult.column(idx, String.class))); + } else if (type == java.sql.Types.INTEGER || type == java.sql.Types.BIGINT + || type == java.sql.Types.SMALLINT || type == java.sql.Types.TINYINT) { + cast(new Var(queryResult.column(idx, Long.class))); + } else if (type == java.sql.Types.DECIMAL || type == java.sql.Types.NUMERIC) { + cast(new Var(queryResult.column(idx, BigDecimal.class))); + } else if (type == java.sql.Types.FLOAT || type == java.sql.Types.DOUBLE) { + cast(new Var(queryResult.column(idx, Double.class))); + } + return this; + } + + public Var setRowValues(QueryResult queryResult) { + Row row = (Row) this.value; + int idx = 0; + for (Column column : row.getColumns()) { + Var var = new Var(column.getName(), column.getType(), (Integer) null, null, null); + var.setValue(queryResult, idx); + column.setValue(var); + idx++; + } + return this; + } + + /** + * Set the data type from string representation + */ + public void setType(String type) { + this.type = defineType(type); + } + + /** + * Set the data type from JDBC type code + */ + void setType(int type) { + this.type = defineType(type); + } + + /** + * Set the variable as constant + */ + void setConstant(boolean constant) { + this.constant = constant; + } + + /** + * Define the data type from string representation + */ + public static Type defineType(String type) { + if (type == null) { + return Type.NULL; + } else if (type.equalsIgnoreCase("INT") || type.equalsIgnoreCase("INTEGER") || 
type.equalsIgnoreCase("BIGINT") + || type.equalsIgnoreCase("SMALLINT") || type.equalsIgnoreCase("TINYINT") + || type.equalsIgnoreCase("BINARY_INTEGER") || type.equalsIgnoreCase("PLS_INTEGER") + || type.equalsIgnoreCase("SIMPLE_INTEGER") || type.equalsIgnoreCase("INT2") + || type.equalsIgnoreCase("INT4") || type.equalsIgnoreCase("INT8")) { + return Type.BIGINT; + } else if (type.equalsIgnoreCase("CHAR") || type.equalsIgnoreCase("VARCHAR") || type.equalsIgnoreCase( + "VARCHAR2") + || type.equalsIgnoreCase("STRING") || type.equalsIgnoreCase("XML") + || type.equalsIgnoreCase("CHARACTER")) { + return Type.STRING; + } else if (type.equalsIgnoreCase("DEC") || type.equalsIgnoreCase("DECIMAL") || type.equalsIgnoreCase("NUMERIC") + || + type.equalsIgnoreCase("NUMBER")) { + return Type.DECIMAL; + } else if (type.equalsIgnoreCase("REAL") || type.equalsIgnoreCase("FLOAT") || type.toUpperCase() + .startsWith("DOUBLE") || type.equalsIgnoreCase("BINARY_FLOAT") + || type.toUpperCase().startsWith("BINARY_DOUBLE") || type.equalsIgnoreCase("SIMPLE_FLOAT") + || type.toUpperCase().startsWith("SIMPLE_DOUBLE")) { + return Type.DOUBLE; + } else if (type.equalsIgnoreCase("DATE")) { + return Type.DATE; + } else if (type.equalsIgnoreCase("TIMESTAMP")) { + return Type.TIMESTAMP; + } else if (type.equalsIgnoreCase("BOOL") || type.equalsIgnoreCase("BOOLEAN")) { + return Type.BOOL; + } else if (type.equalsIgnoreCase("SYS_REFCURSOR")) { + return Type.CURSOR; + } else if (type.equalsIgnoreCase("UTL_FILE.FILE_TYPE")) { + return Type.FILE; + } else if (type.toUpperCase().startsWith("RESULT_SET_LOCATOR")) { + return Type.RS_LOCATOR; + } else if (type.equalsIgnoreCase(Var.DERIVED_TYPE)) { + return Type.DERIVED_TYPE; + } else if (type.equalsIgnoreCase(Type.HPL_OBJECT.name())) { + return Type.HPL_OBJECT; + } else if (type.equalsIgnoreCase(Type.ROW.name())) { + return Type.ROW; + } + return Type.NULL; + } + + /** + * Define the data type from JDBC type code + */ + public static Type defineType(int type) { 
+ if (type == java.sql.Types.CHAR || type == java.sql.Types.VARCHAR) { + return Type.STRING; + } else if (type == java.sql.Types.INTEGER || type == java.sql.Types.BIGINT) { + return Type.BIGINT; + } + return Type.NULL; + } + + /** + * Remove value + */ + public void removeValue() { + type = Type.NULL; + name = null; + value = null; + len = 0; + scale = 0; + } + + /** + * Compare values + */ + @Override + public boolean equals(Object obj) { + if (getClass() != obj.getClass()) { + return false; + } + Var var = (Var) obj; + if (this == var) { + return true; + } else if (var == null || var.value == null || this.value == null) { + return false; + } + if (type == Type.BIGINT) { + if (var.type == Type.BIGINT && ((Long) value).longValue() == ((Long) var.value).longValue()) { + return true; + } else if (var.type == Type.DECIMAL) { + return equals((BigDecimal) var.value, (Long) value); + } + } else if (type == Type.STRING && var.type == Type.STRING && value.equals(var.value)) { + return true; + } else if (type == Type.DECIMAL && var.type == Type.DECIMAL + && ((BigDecimal) value).compareTo((BigDecimal) var.value) == 0) { + return true; + } else if (type == Type.DOUBLE) { + if (var.type == Type.DOUBLE && ((Double) value).compareTo((Double) var.value) == 0) { + return true; + } else if (var.type == Type.DECIMAL + && ((Double) value).compareTo(((BigDecimal) var.value).doubleValue()) == 0) { + return true; + } + } + return false; + } + + /** + * Check if variables of different data types are equal + */ + public boolean equals(BigDecimal d, Long i) { + if (d.compareTo(new BigDecimal(i)) == 0) { + return true; + } + return false; + } + + /** + * Compare values + */ + public int compareTo(Var v) { + if (this == v) { + return 0; + } else if (v == null) { + return -1; + } else if (type == Type.BIGINT && v.type == Type.BIGINT) { + return ((Long) value).compareTo((Long) v.value); + } else if (type == Type.DOUBLE && v.type == Type.DECIMAL) { + return (new BigDecimal((double) 
value)).compareTo((BigDecimal) v.value); + } else if (type == Type.STRING && v.type == Type.STRING) { + return ((String) value).compareTo((String) v.value); + } + return -1; + } + + /** + * Calculate difference between values in percent + */ + public BigDecimal percentDiff(Var var) { + BigDecimal d1 = new Var(Var.Type.DECIMAL).cast(this).decimalValue(); + BigDecimal d2 = new Var(Var.Type.DECIMAL).cast(var).decimalValue(); + if (d1 != null && d2 != null) { + if (d1.compareTo(BigDecimal.ZERO) != 0) { + return d1.subtract(d2).abs().multiply(new BigDecimal(100)).divide(d1, 2, RoundingMode.HALF_UP); + } + } + return null; + } + + /** + * Increment an integer value + */ + public Var increment(long i) { + if (type == Type.BIGINT) { + value = Long.valueOf(((Long) value).longValue() + i); + } + return this; + } + + /** + * Decrement an integer value + */ + public Var decrement(long i) { + if (type == Type.BIGINT) { + value = Long.valueOf(((Long) value).longValue() - i); + } + return this; + } + + /** + * Return an integer value + */ + public int intValue() { + if (type == Type.BIGINT) { + return ((Long) value).intValue(); + } else if (type == Type.STRING) { + return Integer.parseInt((String) value); + } + throw new NumberFormatException(); + } + + /** + * Return a long integer value + */ + public long longValue() { + if (type == Type.BIGINT) { + return ((Long) value).longValue(); + } + throw new NumberFormatException(); + } + + /** + * Return a decimal value + */ + public BigDecimal decimalValue() { + if (type == Type.DECIMAL) { + return (BigDecimal) value; + } + throw new NumberFormatException(); + } + + /** + * Return a double value + */ + public double doubleValue() { + if (type == Type.DOUBLE) { + return ((Double) value).doubleValue(); + } else if (type == Type.BIGINT) { + return ((Long) value).doubleValue(); + } else if (type == Type.DECIMAL) { + return ((BigDecimal) value).doubleValue(); + } + throw new NumberFormatException(); + } + + /** + * Return true/false for 
BOOL type + */ + public boolean isTrue() { + if (type == Type.BOOL && value != null) { + return ((Boolean) value).booleanValue(); + } + return false; + } + + /** + * Negate the value + */ + public void negate() { + if (value == null) { + return; + } + if (type == Type.BOOL) { + boolean v = ((Boolean) value).booleanValue(); + value = Boolean.valueOf(!v); + } else if (type == Type.DECIMAL) { + BigDecimal v = (BigDecimal) value; + value = v.negate(); + } else if (type == Type.DOUBLE) { + Double v = (Double) value; + value = -v; + } else if (type == Type.BIGINT) { + Long v = (Long) value; + value = -v; + } else { + throw new NumberFormatException("invalid type " + type); + } + } + + /** + * Check if the variable contains NULL + */ + public boolean isNull() { + if (type == Type.NULL || value == null) { + return true; + } + return false; + } + + /** + * Convert value to String + */ + @Override + public String toString() { + if (type == Type.IDENT) { + return name; + } else if (value == null) { + return null; + } else if (type == Type.BIGINT) { + return ((Long) value).toString(); + } else if (type == Type.STRING) { + return (String) value; + } else if (type == Type.DATE) { + return ((Date) value).toString(); + } else if (type == Type.TIMESTAMP) { + int len = 19; + String t = ((Timestamp) value).toString(); // .0 returned if the fractional part not set + if (scale > 0) { + len += scale + 1; + } + if (t.length() > len) { + t = t.substring(0, len); + } + return t; + } + return value.toString(); + } + + /** + * Convert value to SQL string - string literals are quoted and escaped, ab'c -> 'ab''c' + */ + public String toSqlString() { + if (value == null) { + return "NULL"; + } else if (type == Type.STRING) { + return org.apache.doris.hplsql.Utils.quoteString((String) value); + } + return toString(); + } + + /** + * Set variable name + */ + public void setName(String name) { + this.name = name; + } + + /** + * Get variable name + */ + public String getName() { + return name; + } 
}
// diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/exception/ArityException.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/exception/ArityException.java
// new file mode 100644
// index 00000000000000..f59018a9888219
// --- /dev/null
// +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/exception/ArityException.java
// @@ -0,0 +1,34 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
// This file is copied from
// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/ArityException.java
// and modified by Doris

package org.apache.doris.hplsql.exception;

import org.antlr.v4.runtime.ParserRuleContext;

/**
 * Raised when a procedure/function is invoked with the wrong number of arguments.
 */
public class ArityException extends HplValidationException {

    /** Free-form arity error message. */
    public ArityException(ParserRuleContext ctx, String message) {
        super(ctx, message);
    }

    /** Standard message built from the expected and actual argument counts. */
    public ArityException(ParserRuleContext ctx, String procName, int formalCount, int actualCount) {
        super(ctx, "wrong number of arguments in call to '" + procName
                + "'. Expected " + formalCount + " got " + actualCount + ".");
    }
}
// diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/exception/HplValidationException.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/exception/HplValidationException.java
// new file mode 100644
// index 00000000000000..07aa76cfa5f28c
// --- /dev/null
// +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/exception/HplValidationException.java
// @@ -0,0 +1,36 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
+// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/HplValidationException.java +// and modified by Doris + +package org.apache.doris.hplsql.exception; + +import org.antlr.v4.runtime.ParserRuleContext; + +public class HplValidationException extends RuntimeException { + private final ParserRuleContext ctx; + + public HplValidationException(ParserRuleContext ctx, String message) { + super(message); + this.ctx = ctx; + } + + public ParserRuleContext getCtx() { + return ctx; + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/exception/NoSuchHplMethodException.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/exception/NoSuchHplMethodException.java new file mode 100644 index 00000000000000..d06520454df091 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/exception/NoSuchHplMethodException.java @@ -0,0 +1,29 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/NoSuchHplMethodException.java +// and modified by Doris + +package org.apache.doris.hplsql.exception; + +import org.antlr.v4.runtime.ParserRuleContext; + +public class NoSuchHplMethodException extends HplValidationException { + public NoSuchHplMethodException(ParserRuleContext ctx, String message) { + super(ctx, message); + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/exception/QueryException.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/exception/QueryException.java new file mode 100644 index 00000000000000..b6565ba6f8b238 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/exception/QueryException.java @@ -0,0 +1,41 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
// This file is copied from
// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/executor/QueryException.java
// and modified by Doris

package org.apache.doris.hplsql.exception;

import java.sql.SQLException;

/**
 * Wraps a failure that occurred while executing a query, exposing the
 * SQL error code and SQLSTATE of the underlying SQLException when present.
 */
public class QueryException extends RuntimeException {
    public QueryException(Throwable cause) {
        super(cause);
    }

    /** Vendor error code of the wrapped SQLException, or -1 when the cause is not one. */
    public int getErrorCode() {
        Throwable cause = getCause();
        if (cause instanceof SQLException) {
            return ((SQLException) cause).getErrorCode();
        }
        return -1;
    }

    /** SQLSTATE of the wrapped SQLException; "02000" (no data) when the cause is not one. */
    public String getSQLState() {
        Throwable cause = getCause();
        if (cause instanceof SQLException) {
            return ((SQLException) cause).getSQLState();
        }
        return "02000";
    }
}
// diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/exception/TypeException.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/exception/TypeException.java
// new file mode 100644
// index 00000000000000..95bad143774c6a
// --- /dev/null
// +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/exception/TypeException.java
// @@ -0,0 +1,39 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
+// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/TypeException.java +// and modified by Doris + +package org.apache.doris.hplsql.exception; + +import org.apache.doris.hplsql.Var.Type; + +import org.antlr.v4.runtime.ParserRuleContext; + +public class TypeException extends HplValidationException { + public TypeException(ParserRuleContext ctx, Type expectedType, Type actualType, Object value) { + super(ctx, "cannot convert '" + value + "' with type " + actualType + " to " + expectedType); + } + + public TypeException(ParserRuleContext ctx, Class expectedType, Type actualType, Object value) { + super(ctx, "cannot convert '" + value + "' with type " + actualType + " to " + expectedType); + } + + public TypeException(ParserRuleContext ctx, String message) { + super(ctx, message); + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/exception/UndefinedIdentException.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/exception/UndefinedIdentException.java new file mode 100644 index 00000000000000..2b5ec895d720da --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/exception/UndefinedIdentException.java @@ -0,0 +1,29 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. 
See the License for the +// specific language governing permissions and limitations +// under the License. +// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/UndefinedIdentException.java +// and modified by Doris + +package org.apache.doris.hplsql.exception; + +import org.antlr.v4.runtime.ParserRuleContext; + +public class UndefinedIdentException extends HplValidationException { + public UndefinedIdentException(ParserRuleContext ctx, String ident) { + super(ctx, "identifier '" + ident + "' must be declared."); + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/executor/ColumnMeta.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/executor/ColumnMeta.java new file mode 100644 index 00000000000000..189cfb11842323 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/executor/ColumnMeta.java @@ -0,0 +1,57 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/executor/ColumnMeta.java +// and modified by Doris + +package org.apache.doris.hplsql.executor; + +import org.apache.doris.catalog.Type; + +public class ColumnMeta { + private final String columnName; + private final String typeName; + private final int jdbcType; + private final Type dorisType; + + public ColumnMeta(String columnName, String typeName, int jdbcType) { + this(columnName, typeName, jdbcType, Type.INVALID); + } + + public ColumnMeta(String columnName, String typeName, int jdbcType, Type dorisType) { + this.columnName = columnName; + this.typeName = typeName; + this.jdbcType = jdbcType; + this.dorisType = dorisType; + } + + public String getColumnName() { + return columnName; + } + + public String getTypeName() { + return typeName; + } + + public int getJdbcType() { + return jdbcType; + } + + public Type getDorisType() { + return dorisType; + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/executor/DorisQueryExecutor.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/executor/DorisQueryExecutor.java new file mode 100644 index 00000000000000..f9b0c787934ea9 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/executor/DorisQueryExecutor.java @@ -0,0 +1,70 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.doris.hplsql.executor; + +import org.apache.doris.catalog.MysqlColType; +import org.apache.doris.catalog.PrimitiveType; +import org.apache.doris.catalog.Type; +import org.apache.doris.hplsql.exception.QueryException; +import org.apache.doris.qe.ConnectContext; +import org.apache.doris.qe.ConnectProcessor; +import org.apache.doris.qe.StmtExecutor; + +import org.antlr.v4.runtime.ParserRuleContext; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +public class DorisQueryExecutor implements QueryExecutor { + public DorisQueryExecutor() { + } + + @Override + public QueryResult executeQuery(String sql, ParserRuleContext ctx) { + try { + // A cursor may correspond to a query, and if the user opens multiple cursors, need to save multiple + // query states, so here each query constructs a ConnectProcessor and the ConnectContext shares some data. 
+ ConnectContext context = ConnectContext.get().createContext(); + ConnectProcessor processor = new ConnectProcessor(context); + processor.executeQuery(sql); + StmtExecutor executor = context.getExecutor(); + return new QueryResult(new DorisRowResult(executor.getCoord(), executor.getColumns(), + executor.getReturnTypes()), () -> metadata(executor), null); + } catch (Exception e) { + return new QueryResult(null, () -> new Metadata(Collections.emptyList()), e); + } + } + + private Metadata metadata(StmtExecutor stmtExecutor) { + try { + List columns = stmtExecutor.getColumns(); + List types = stmtExecutor.getReturnTypes(); + List colMeta = new ArrayList<>(); + for (int i = 0; i < columns.size(); i++) { + PrimitiveType primitiveType = types.get(i).getPrimitiveType(); + MysqlColType mysqlColType = primitiveType.toMysqlType(); + colMeta.add(new ColumnMeta(columns.get(i), mysqlColType.getTypeName(), primitiveType.toJavaSqlType(), + types.get(i))); + } + return new Metadata(colMeta); + } catch (Exception e) { + throw new QueryException(e); + } + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/executor/DorisRowResult.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/executor/DorisRowResult.java new file mode 100644 index 00000000000000..285b352c875716 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/executor/DorisRowResult.java @@ -0,0 +1,174 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.doris.hplsql.executor; + +import org.apache.doris.catalog.PrimitiveType; +import org.apache.doris.catalog.Type; +import org.apache.doris.hplsql.exception.QueryException; +import org.apache.doris.nereids.trees.expressions.literal.DateTimeLiteral; +import org.apache.doris.nereids.trees.expressions.literal.DateTimeV2Literal; +import org.apache.doris.qe.Coordinator; +import org.apache.doris.qe.RowBatch; +import org.apache.doris.statistics.util.InternalQueryBuffer; + +import java.math.BigDecimal; +import java.nio.ByteBuffer; +import java.sql.Date; +import java.sql.Time; +import java.util.List; + +public class DorisRowResult implements RowResult { + + private Coordinator coord; + + private List columnNames; + + private List dorisTypes; + + private RowBatch batch; + + private int index; + + private boolean isLazyLoading; + + private boolean eof; + + private Object[] current; + + public DorisRowResult(Coordinator coord, List columnNames, List dorisTypes) { + this.coord = coord; + this.columnNames = columnNames; + this.dorisTypes = dorisTypes; + this.current = new Object[columnNames.size()]; + this.isLazyLoading = false; + this.eof = false; + } + + @Override + public boolean next() { + if (eof) { + return false; + } + try { + if (batch == null || batch.getBatch() == null + || index == batch.getBatch().getRowsSize() - 1) { + batch = coord.getNext(); + index = 0; + if (batch.isEos()) { + eof = true; + return false; + } + } else { + ++index; + } + isLazyLoading = true; + } catch (Exception e) { + throw new QueryException(e); + } + return 
true; + } + + @Override + public void close() { + + } + + @Override + public T get(int columnIndex, Class type) { + if (isLazyLoading) { + convertToJavaType(batch.getBatch().getRows().get(index)); + isLazyLoading = false; + } + if (current[columnIndex] == null) { + return null; + } + if (type.isInstance(current[columnIndex])) { + return (T) current[columnIndex]; + } else { + if (current[columnIndex] instanceof Number) { + if (type.equals(Long.class)) { + return type.cast(((Number) current[columnIndex]).longValue()); + } else if (type.equals(Integer.class)) { + return type.cast(((Number) current[columnIndex]).intValue()); + } else if (type.equals(Short.class)) { + return type.cast(((Number) current[columnIndex]).shortValue()); + } else if (type.equals(Byte.class)) { + return type.cast(((Number) current[columnIndex]).byteValue()); + } + } + throw new ClassCastException(current[columnIndex].getClass() + " cannot be casted to " + type); + } + } + + @Override + public ByteBuffer getMysqlRow() { + return batch.getBatch().getRows().get(index); + } + + private void convertToJavaType(ByteBuffer buffer) { + InternalQueryBuffer queryBuffer = new InternalQueryBuffer(buffer.slice()); + for (int i = 0; i < columnNames.size(); i++) { + String value = queryBuffer.readStringWithLength(); + current[i] = toJavaType(dorisTypes.get(i).getPrimitiveType(), value); + } + } + + // https://dev.mysql.com/doc/connector-j/8.0/en/connector-j-reference-type-conversions.html + private Object toJavaType(PrimitiveType type, String value) { + if (value == null) { + return null; + } + switch (type) { + case BOOLEAN: + return Boolean.valueOf(value); + case TINYINT: + case SMALLINT: + case INT: + return Integer.valueOf(value); + case BIGINT: + return Long.valueOf(value); + case FLOAT: + return Float.valueOf(value); + case DOUBLE: + return Double.valueOf(value); + case TIME: + case TIMEV2: + return Time.valueOf(value); + case DATE: + case DATEV2: + return Date.valueOf(value); + case DATETIME: + if 
(type.isTimeType()) { + return Time.valueOf(value); + } + return new DateTimeLiteral(value).toJavaDateType(); + case DATETIMEV2: + if (type.isTimeType()) { + return Time.valueOf(value); + } + return new DateTimeV2Literal(value).toJavaDateType(); + case DECIMALV2: + case DECIMAL32: + case DECIMAL64: + case DECIMAL128: + return new BigDecimal(value); + default: + return value; + } + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/executor/HplsqlQueryExecutor.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/executor/HplsqlQueryExecutor.java new file mode 100644 index 00000000000000..ddef3c0dfe7fd4 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/executor/HplsqlQueryExecutor.java @@ -0,0 +1,65 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package org.apache.doris.hplsql.executor; + +import org.apache.doris.common.ErrorCode; +import org.apache.doris.hplsql.Arguments; +import org.apache.doris.hplsql.Conf; +import org.apache.doris.hplsql.Exec; +import org.apache.doris.qe.ConnectContext; +import org.apache.doris.qe.ConnectProcessor; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +public class HplsqlQueryExecutor { + private static final Logger LOG = LogManager.getLogger(HplsqlQueryExecutor.class); + + private HplsqlResult result; + + private Exec exec; + + public HplsqlQueryExecutor(ConnectProcessor processor) { + result = new HplsqlResult(processor); + exec = new Exec(new Conf(), result, new DorisQueryExecutor(), result); + exec.init(); + } + + public void execute(String statement) { + ConnectContext context = ConnectContext.get(); + result.reset(); + try { + Arguments args = new Arguments(); + args.parse(new String[] {"-e", statement}); + exec.parseAndEval(args); + + exec.printExceptions(); + String error = result.getError(); + String msg = result.getMsg(); + if (!error.isEmpty()) { + context.getState().setError("hplsql exec error, " + error); + } else if (!msg.isEmpty()) { + context.getState().setOk(0, 0, msg); + } + } catch (Exception e) { + exec.printExceptions(); + context.getState().setError(ErrorCode.ERR_UNKNOWN_ERROR, result.getError() + " " + e.getMessage()); + LOG.warn(e); + } + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/executor/HplsqlResult.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/executor/HplsqlResult.java new file mode 100644 index 00000000000000..e68cad7b0e6e6f --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/executor/HplsqlResult.java @@ -0,0 +1,190 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. 
The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.doris.hplsql.executor; + +import org.apache.doris.hplsql.Console; +import org.apache.doris.hplsql.exception.QueryException; +import org.apache.doris.mysql.MysqlEofPacket; +import org.apache.doris.mysql.MysqlSerializer; +import org.apache.doris.mysql.MysqlServerStatusFlag; +import org.apache.doris.qe.ConnectContext; +import org.apache.doris.qe.ConnectProcessor; +import org.apache.doris.qe.QueryState; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import java.io.IOException; +import java.nio.ByteBuffer; + +public class HplsqlResult implements ResultListener, Console { + + private static final Logger LOG = LogManager.getLogger(HplsqlResult.class); + private ConnectProcessor processor; + private Metadata metadata = null; + private StringBuilder msg; + private StringBuilder error; + private boolean isSendFields; + + HplsqlResult(ConnectProcessor processor) { + this.processor = processor; + this.msg = new StringBuilder(); + this.error = new StringBuilder(); + this.isSendFields = false; + } + + public void reset() { + metadata = null; + isSendFields = false; + error.delete(0, error.length()); + msg.delete(0, msg.length()); + } + + public String getMsg() { + return msg.toString(); + } + + public String getError() { + return error.toString(); + } + + @Override + public void onMysqlRow(ByteBuffer rows) { + sendData(() -> 
ConnectContext.get().getMysqlChannel().sendOnePacket(rows)); + } + + @Override + public void onRow(Object[] rows) { + sendData(() -> ConnectContext.get().getMysqlChannel().sendOnePacket(rows)); + } + + @Override + public void onMetadata(Metadata metadata) { + this.metadata = metadata; + isSendFields = false; + } + + @Override + public void onEof() { + ConnectContext.get().getState().setEof(); + try { + if (metadata != null && !isSendFields) { + sendFields(metadata, ConnectContext.get().getMysqlChannel().getSerializer()); + isSendFields = true; + } + } catch (IOException e) { + throw new QueryException(e); + } + } + + @Override + public void onFinalize() { + if (metadata == null) { + return; + } + finalizeCommand(); + metadata = null; + } + + private void sendData(Send send) { + if (metadata == null) { + throw new RuntimeException("The metadata has not been set."); + } + + MysqlSerializer serializer = ConnectContext.get().getMysqlChannel().getSerializer(); + try { + if (!isSendFields) { + // For some language driver, getting error packet after fields packet + // will be recognized as a success result + // so We need to send fields after first batch arrived + sendFields(metadata, serializer); + isSendFields = true; + } + serializer.reset(); + send.apply(); + } catch (IOException e) { + LOG.warn("send data fail.", e); + throw new RuntimeException(e); + } + } + + private void sendFields(Metadata metadata, MysqlSerializer serializer) throws IOException { + serializer.reset(); + serializer.writeVInt(metadata.columnCount()); + ConnectContext.get().getMysqlChannel().sendOnePacket(serializer.toByteBuffer()); + // send field one by one + for (int i = 0; i < metadata.columnCount(); ++i) { + serializer.reset(); + serializer.writeField(metadata.columnName(i), metadata.columnType(i)); + ConnectContext.get().getMysqlChannel().sendOnePacket(serializer.toByteBuffer()); + } + // send EOF + serializer.reset(); + MysqlEofPacket eofPacket = new 
MysqlEofPacket(ConnectContext.get().getState()); + eofPacket.writeTo(serializer); + ConnectContext.get().getMysqlChannel().sendOnePacket(serializer.toByteBuffer()); + } + + @Override + public void print(String msg) { + this.msg.append(msg); + } + + @Override + public void printLine(String msg) { + this.msg.append(msg).append("\n"); + } + + @Override + public void printError(String msg) { + this.error.append(msg); + } + + @Override + public void flushConsole() { + ConnectContext context = ConnectContext.get(); + boolean needSend = false; + if (error.length() > 0) { + context.getState().setError("hplsql exec error, " + error.toString()); + needSend = true; + } else if (msg.length() > 0) { + context.getState().setOk(0, 0, msg.toString()); + needSend = true; + } + if (needSend) { + finalizeCommand(); + reset(); + } + } + + private void finalizeCommand() { + try { + QueryState state = ConnectContext.get().getState(); + state.serverStatus |= MysqlServerStatusFlag.SERVER_MORE_RESULTS_EXISTS; + processor.finalizeCommand(); + state.reset(); + } catch (IOException e) { + throw new QueryException(e); + } + } + + @FunctionalInterface + public interface Send { + void apply() throws IOException; + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/executor/JdbcQueryExecutor.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/executor/JdbcQueryExecutor.java new file mode 100644 index 00000000000000..7575c52cdf4369 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/executor/JdbcQueryExecutor.java @@ -0,0 +1,109 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/executor/JdbcQueryExecutor.java +// and modified by Doris + +package org.apache.doris.hplsql.executor; + +import org.apache.doris.hplsql.Exec; +import org.apache.doris.hplsql.Query; +import org.apache.doris.hplsql.exception.QueryException; + +import org.antlr.v4.runtime.ParserRuleContext; + +import java.nio.ByteBuffer; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +public class JdbcQueryExecutor implements QueryExecutor { + private final Exec exec; + + public JdbcQueryExecutor(Exec exec) { + this.exec = exec; + } + + @Override + public QueryResult executeQuery(String sql, ParserRuleContext ctx) { + String conn = exec.getStatementConnection(); + Query query = exec.executeQuery(ctx, new Query(sql), conn); + ResultSet resultSet = query.getResultSet(); + if (resultSet == null) { // offline mode + return new QueryResult(null, () -> new Metadata(Collections.emptyList()), query.getException()); + } else { + return new QueryResult(new JdbcRowResult(resultSet), () -> metadata(resultSet), query.getException()); + } + } + + private static Metadata metadata(ResultSet resultSet) { + try { + ResultSetMetaData meta = resultSet.getMetaData(); + List colMetas = new ArrayList<>(); + for (int i = 1; i <= meta.getColumnCount(); i++) { + colMetas.add(new ColumnMeta( + meta.getColumnName(i), meta.getColumnTypeName(i), 
meta.getColumnType(i))); + } + return new Metadata(colMetas); + } catch (SQLException e) { + throw new QueryException(e); + } + } + + private static class JdbcRowResult implements org.apache.doris.hplsql.executor.RowResult { + private final ResultSet resultSet; + + private JdbcRowResult(ResultSet resultSet) { + this.resultSet = resultSet; + } + + @Override + public boolean next() { + try { + return resultSet.next(); + } catch (SQLException e) { + throw new QueryException(e); + } + } + + @Override + public T get(int columnIndex, Class type) { + try { + return (T) resultSet.getObject(columnIndex + 1); + } catch (SQLException e) { + throw new QueryException(e); + } + } + + @Override + public ByteBuffer getMysqlRow() { + throw new RuntimeException("not implement getMysqlRow method."); + } + + @Override + public void close() { + try { + resultSet.close(); + } catch (SQLException e) { + throw new QueryException(e); + } + } + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/executor/Metadata.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/executor/Metadata.java new file mode 100644 index 00000000000000..586eebe691e0de --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/executor/Metadata.java @@ -0,0 +1,57 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. 
See the License for the +// specific language governing permissions and limitations +// under the License. +// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/executor/Metadata.java +// and modified by Doris + +package org.apache.doris.hplsql.executor; + +import org.apache.doris.catalog.Type; + +import java.util.List; + +public class Metadata { + private final List columnMetas; + + public Metadata(List columnMetas) { + this.columnMetas = columnMetas; + } + + public int columnCount() { + return columnMetas.size(); + } + + public int jdbcType(int columnIndex) { + return at(columnIndex).getJdbcType(); + } + + public String columnName(int columnIndex) { + return at(columnIndex).getColumnName(); + } + + public String columnTypeName(int columnIndex) { + return at(columnIndex).getTypeName(); + } + + public Type columnType(int columnIndex) { + return at(columnIndex).getDorisType(); + } + + private ColumnMeta at(int columnIndex) { + return columnMetas.get(columnIndex); + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/executor/QueryExecutor.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/executor/QueryExecutor.java new file mode 100644 index 00000000000000..a9e9fcd0f3e0f4 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/executor/QueryExecutor.java @@ -0,0 +1,33 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/executor/QueryExecutor.java +// and modified by Doris + +package org.apache.doris.hplsql.executor; + +import org.apache.doris.hplsql.exception.HplValidationException; + +import org.antlr.v4.runtime.ParserRuleContext; + +public interface QueryExecutor { + QueryResult executeQuery(String sql, ParserRuleContext ctx); + + QueryExecutor DISABLED = (sql, ctx) -> { + throw new HplValidationException(ctx, "Query execution is disabled in this context. Can not execute: " + sql); + }; +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/executor/QueryResult.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/executor/QueryResult.java new file mode 100644 index 00000000000000..d117b59ff1cd0c --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/executor/QueryResult.java @@ -0,0 +1,98 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/executor/QueryResult.java +// and modified by Doris + +package org.apache.doris.hplsql.executor; + +import java.nio.ByteBuffer; +import java.util.function.Supplier; + +public class QueryResult { + private final RowResult rows; + private final Supplier metadata; + private final Exception exception; + + public QueryResult(RowResult rows, Supplier metadata, Exception exception) { + this.rows = rows; + this.metadata = memoize(metadata); + this.exception = exception; + } + + public boolean next() { + return rows.next(); + } + + public int columnCount() { + return metadata().columnCount(); + } + + /** + * Get the nth column from the row result. + * The index is 0 based unlike in JDBC. 
+ */ + public T column(int columnIndex, Class type) { + return rows.get(columnIndex, type); + } + + public ByteBuffer mysqlRow() { + return rows.getMysqlRow(); + } + + public boolean error() { + return exception != null; + } + + public void printStackTrace() { + if (exception != null) { + exception.printStackTrace(); + } + } + + public Exception exception() { + return exception; + } + + public Metadata metadata() { + return metadata.get(); + } + + public int jdbcType(int columnIndex) { + return metadata().jdbcType(columnIndex); + } + + public void close() { + if (rows != null) { + rows.close(); + } + } + + private static Supplier memoize(Supplier supplier) { + return com.google.common.base.Suppliers.memoize(supplier::get)::get; // cache the supplier result + } + + public String errorText() { + if (exception != null) { + if (exception instanceof ClassNotFoundException) { + return "ClassNotFoundException: " + exception.getMessage(); + } + return exception.getMessage(); + } + return ""; + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/executor/ResultListener.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/executor/ResultListener.java new file mode 100644 index 00000000000000..9a64767df971f6 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/executor/ResultListener.java @@ -0,0 +1,57 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/ResultListener.java +// and modified by Doris + +package org.apache.doris.hplsql.executor; + +import java.nio.ByteBuffer; + +public interface ResultListener { + void onMysqlRow(ByteBuffer rows); + + void onRow(Object[] rows); + + void onMetadata(Metadata metadata); + + void onEof(); + + void onFinalize(); + + ResultListener NONE = new ResultListener() { + @Override + public void onMysqlRow(ByteBuffer rows) { + } + + @Override + public void onRow(Object[] rows) { + } + + @Override + public void onMetadata(Metadata metadata) { + } + + @Override + public void onEof() { + } + + @Override + public void onFinalize() { + } + }; +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/executor/RowResult.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/executor/RowResult.java new file mode 100644 index 00000000000000..9f6de01e289eec --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/executor/RowResult.java @@ -0,0 +1,33 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/executor/RowResult.java +// and modified by Doris + +package org.apache.doris.hplsql.executor; + +import java.nio.ByteBuffer; + +public interface RowResult { + boolean next(); + + void close(); + + T get(int columnIndex, Class type); + + ByteBuffer getMysqlRow(); +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/functions/BuiltinFunctions.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/functions/BuiltinFunctions.java new file mode 100644 index 00000000000000..7f553a0eeba2b6 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/functions/BuiltinFunctions.java @@ -0,0 +1,439 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/functions/BuiltinFunctions.java +// and modified by Doris + +package org.apache.doris.hplsql.functions; + +import org.apache.doris.hplsql.Console; +import org.apache.doris.hplsql.Exec; +import org.apache.doris.hplsql.HplsqlParser; +import org.apache.doris.hplsql.Utils; +import org.apache.doris.hplsql.Var; +import org.apache.doris.hplsql.exception.QueryException; +import org.apache.doris.hplsql.executor.QueryExecutor; +import org.apache.doris.hplsql.executor.QueryResult; + +import org.antlr.v4.runtime.ParserRuleContext; + +import java.sql.Date; +import java.text.SimpleDateFormat; +import java.util.Calendar; +import java.util.HashMap; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +public class BuiltinFunctions { + protected final Exec exec; + protected final Console console; + protected boolean trace; + protected final QueryExecutor queryExecutor; + protected HashMap map = new HashMap<>(); + protected HashMap specMap = new HashMap<>(); + protected HashMap specSqlMap = new HashMap<>(); + + public BuiltinFunctions(Exec exec, QueryExecutor queryExecutor) { + this.exec = exec; + this.trace = exec.getTrace(); + this.console = exec.getConsole(); + this.queryExecutor = queryExecutor; + } + + public void register(BuiltinFunctions f) { + } + + public boolean exec(String name, HplsqlParser.Expr_func_paramsContext ctx) { + if (name.contains(".")) { // Name can be qualified and spaces are allowed between parts + String[] parts = name.split("\\."); + StringBuilder str = new StringBuilder(); + for (int i = 0; i < parts.length; i++) { + if (i > 0) { + str.append("."); + } + str.append(parts[i].trim()); + } + name = str.toString(); + } + if (trace && ctx != null && ctx.parent != null && ctx.parent.parent instanceof HplsqlParser.Expr_stmtContext) { + trace(ctx, "FUNC " + name); + } + org.apache.doris.hplsql.functions.FuncCommand func = 
map.get(name.toUpperCase()); + if (func != null) { + func.run(ctx); + return true; + } else { + return false; + } + } + + public boolean exists(String name) { + if (name == null) { + return false; + } + name = name.toUpperCase(); + return map.containsKey(name) || specMap.containsKey(name) || specSqlMap.containsKey(name); + } + + /** + * Execute a special function + */ + public void specExec(HplsqlParser.Expr_spec_funcContext ctx) { + String name = ctx.start.getText().toUpperCase(); + if (trace && ctx.parent.parent instanceof HplsqlParser.Expr_stmtContext) { + trace(ctx, "FUNC " + name); + } + org.apache.doris.hplsql.functions.FuncSpecCommand func = specMap.get(name); + if (func != null) { + func.run(ctx); + } else if (ctx.T_MAX_PART_STRING() != null) { + execMaxPartString(ctx); + } else if (ctx.T_MIN_PART_STRING() != null) { + execMinPartString(ctx); + } else if (ctx.T_MAX_PART_INT() != null) { + execMaxPartInt(ctx); + } else if (ctx.T_MIN_PART_INT() != null) { + execMinPartInt(ctx); + } else if (ctx.T_MAX_PART_DATE() != null) { + execMaxPartDate(ctx); + } else if (ctx.T_MIN_PART_DATE() != null) { + execMinPartDate(ctx); + } else if (ctx.T_PART_LOC() != null) { + execPartLoc(ctx); + } else { + evalNull(); + } + } + + /** + * Execute a special function in executable SQL statement + */ + public void specExecSql(HplsqlParser.Expr_spec_funcContext ctx) { + String name = ctx.start.getText().toUpperCase(); + if (trace && ctx.parent.parent instanceof HplsqlParser.Expr_stmtContext) { + trace(ctx, "FUNC " + name); + } + org.apache.doris.hplsql.functions.FuncSpecCommand func = specSqlMap.get(name); + if (func != null) { + func.run(ctx); + } else { + exec.stackPush(Exec.getFormattedText(ctx)); + } + } + + /** + * Get the current date + */ + public void execCurrentDate(HplsqlParser.Expr_spec_funcContext ctx) { + if (trace) { + trace(ctx, "CURRENT_DATE"); + } + SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd"); + String s = f.format(Calendar.getInstance().getTime()); + 
exec.stackPush(new Var(Var.Type.DATE, Utils.toDate(s))); + } + + /** + * Execute MAX_PART_STRING function + */ + public void execMaxPartString(HplsqlParser.Expr_spec_funcContext ctx) { + if (trace) { + trace(ctx, "MAX_PART_STRING"); + } + execMinMaxPart(ctx, Var.Type.STRING, true /*max*/); + } + + /** + * Execute MIN_PART_STRING function + */ + public void execMinPartString(HplsqlParser.Expr_spec_funcContext ctx) { + if (trace) { + trace(ctx, "MIN_PART_STRING"); + } + execMinMaxPart(ctx, Var.Type.STRING, false /*max*/); + } + + /** + * Execute MAX_PART_INT function + */ + public void execMaxPartInt(HplsqlParser.Expr_spec_funcContext ctx) { + if (trace) { + trace(ctx, "MAX_PART_INT"); + } + execMinMaxPart(ctx, Var.Type.BIGINT, true /*max*/); + } + + /** + * Execute MIN_PART_INT function + */ + public void execMinPartInt(HplsqlParser.Expr_spec_funcContext ctx) { + if (trace) { + trace(ctx, "MIN_PART_INT"); + } + execMinMaxPart(ctx, Var.Type.BIGINT, false /*max*/); + } + + /** + * Execute MAX_PART_DATE function + */ + public void execMaxPartDate(HplsqlParser.Expr_spec_funcContext ctx) { + if (trace) { + trace(ctx, "MAX_PART_DATE"); + } + execMinMaxPart(ctx, Var.Type.DATE, true /*max*/); + } + + /** + * Execute MIN_PART_DATE function + */ + public void execMinPartDate(HplsqlParser.Expr_spec_funcContext ctx) { + if (trace) { + trace(ctx, "MIN_PART_DATE"); + } + execMinMaxPart(ctx, Var.Type.DATE, false /*max*/); + } + + /** + * Execute MIN or MAX partition function + */ + public void execMinMaxPart(HplsqlParser.Expr_spec_funcContext ctx, Var.Type type, boolean max) { + String tabname = evalPop(ctx.expr(0)).toString(); + StringBuilder sql = new StringBuilder("SHOW PARTITIONS " + tabname); + String colname = null; + int colnum = -1; + int exprnum = ctx.expr().size(); + // Column name + if (ctx.expr(1) != null) { + colname = evalPop(ctx.expr(1)).toString(); + } else { + colnum = 0; + } + // Partition filter + if (exprnum >= 4) { + sql.append(" PARTITION ("); + int i = 2; + 
while (i + 1 < exprnum) { + String fcol = evalPop(ctx.expr(i)).toString(); + String fval = evalPop(ctx.expr(i + 1)).toSqlString(); + if (i > 2) { + sql.append(", "); + } + sql.append(fcol).append("=").append(fval); + i += 2; + } + sql.append(")"); + } + if (trace) { + trace(ctx, "Query: " + sql); + } + if (exec.getOffline()) { + evalNull(); + return; + } + QueryResult query = queryExecutor.executeQuery(sql.toString(), ctx); + if (query.error()) { + evalNullClose(query); + return; + } + try { + String resultString = null; + Long resultInt = null; + Date resultDate = null; + while (query.next()) { + String[] parts = query.column(0, String.class).split("/"); + // Find partition column by name + if (colnum == -1) { + for (int i = 0; i < parts.length; i++) { + String[] name = parts[i].split("="); + if (name[0].equalsIgnoreCase(colname)) { + colnum = i; + break; + } + } + // No partition column with the specified name exists + if (colnum == -1) { + evalNullClose(query); + return; + } + } + String[] pair = parts[colnum].split("="); + if (type == Var.Type.STRING) { + resultString = Utils.minMaxString(resultString, pair[1], max); + } else if (type == Var.Type.BIGINT) { + resultInt = Utils.minMaxInt(resultInt, pair[1], max); + } else if (type == Var.Type.DATE) { + resultDate = Utils.minMaxDate(resultDate, pair[1], max); + } + } + if (resultString != null) { + evalString(resultString); + } else if (resultInt != null) { + evalInt(resultInt); + } else if (resultDate != null) { + evalDate(resultDate); + } else { + evalNull(); + } + } catch (QueryException ignored) { + // ignored + } + query.close(); + } + + /** + * Execute PART_LOC function + */ + public void execPartLoc(HplsqlParser.Expr_spec_funcContext ctx) { + String tabname = evalPop(ctx.expr(0)).toString(); + StringBuilder sql = new StringBuilder("DESCRIBE EXTENDED " + tabname); + int exprnum = ctx.expr().size(); + boolean hostname = false; + // Partition filter + if (exprnum > 1) { + sql.append(" PARTITION ("); + int i = 
1; + while (i + 1 < exprnum) { + String col = evalPop(ctx.expr(i)).toString(); + String val = evalPop(ctx.expr(i + 1)).toSqlString(); + if (i > 2) { + sql.append(", "); + } + sql.append(col).append("=").append(val); + i += 2; + } + sql.append(")"); + } + // With host name + if (exprnum % 2 == 0 && evalPop(ctx.expr(exprnum - 1)).intValue() == 1) { + hostname = true; + } + if (trace) { + trace(ctx, "Query: " + sql); + } + if (exec.getOffline()) { + evalNull(); + return; + } + QueryResult query = queryExecutor.executeQuery(sql.toString(), ctx); + if (query.error()) { + evalNullClose(query); + return; + } + String result = null; + try { + while (query.next()) { + if (query.column(0, String.class).startsWith("Detailed Partition Information")) { + Matcher m = Pattern.compile(".*, location:(.*?),.*").matcher(query.column(1, String.class)); + if (m.find()) { + result = m.group(1); + } + } + } + } catch (QueryException ignored) { + // ignored + } + if (result != null) { + // Remove the host name + if (!hostname) { + Matcher m = Pattern.compile(".*://.*?(/.*)").matcher(result); + if (m.find()) { + result = m.group(1); + } + } + evalString(result); + } else { + evalNull(); + } + query.close(); + } + + public void trace(ParserRuleContext ctx, String message) { + if (trace) { + exec.trace(ctx, message); + } + } + + protected void evalNull() { + exec.stackPush(Var.Null); + } + + protected void evalString(String string) { + exec.stackPush(new Var(string)); + } + + protected Var evalPop(ParserRuleContext ctx) { + exec.visit(ctx); + return exec.stackPop(); + } + + protected void evalInt(Long i) { + exec.stackPush(new Var(i)); + } + + protected void evalDate(Date date) { + exec.stackPush(new Var(Var.Type.DATE, date)); + } + + protected void evalNullClose(QueryResult query) { + exec.stackPush(Var.Null); + query.close(); + if (trace) { + query.printStackTrace(); + } + } + + protected void evalVar(Var var) { + exec.stackPush(var); + } + + protected void evalString(StringBuilder string) 
{ + evalString(string.toString()); + } + + protected void evalInt(int i) { + evalInt(Long.valueOf(i)); + } + + protected Var evalPop(ParserRuleContext ctx, int value) { + if (ctx != null) { + return evalPop(ctx); + } + return new Var(Long.valueOf(value)); + } + + /** + * Get the number of parameters in function call + */ + public static int getParamCount(HplsqlParser.Expr_func_paramsContext ctx) { + if (ctx == null) { + return 0; + } + return ctx.func_param().size(); + } + + protected void eval(ParserRuleContext ctx) { + exec.visit(ctx); + } + + protected Integer visit(ParserRuleContext ctx) { + return exec.visit(ctx); + } + + protected void info(ParserRuleContext ctx, String message) { + exec.info(ctx, message); + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/functions/DorisFunctionRegistry.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/functions/DorisFunctionRegistry.java new file mode 100644 index 00000000000000..45b8da3bbc4680 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/functions/DorisFunctionRegistry.java @@ -0,0 +1,225 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/functions/HmsFunctionRegistry.java +// and modified by Doris + +package org.apache.doris.hplsql.functions; + +import org.apache.doris.hplsql.Exec; +import org.apache.doris.hplsql.HplsqlBaseVisitor; +import org.apache.doris.hplsql.HplsqlLexer; +import org.apache.doris.hplsql.HplsqlParser; +import org.apache.doris.hplsql.Scope; +import org.apache.doris.hplsql.Var; +import org.apache.doris.hplsql.store.MetaClient; +import org.apache.doris.hplsql.store.StoredProcedure; +import org.apache.doris.qe.ConnectContext; + +import org.antlr.v4.runtime.ANTLRInputStream; +import org.antlr.v4.runtime.CommonTokenStream; +import org.antlr.v4.runtime.ParserRuleContext; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Map; +import java.util.Optional; + +public class DorisFunctionRegistry implements FunctionRegistry { + private Exec exec; + private boolean trace; + private MetaClient client; + private BuiltinFunctions builtinFunctions; + private Map cache = new HashMap<>(); + + public DorisFunctionRegistry(Exec e, MetaClient client, BuiltinFunctions builtinFunctions) { + this.exec = e; + this.client = client; + this.builtinFunctions = builtinFunctions; + this.trace = exec.getTrace(); + } + + @Override + public boolean exists(String name) { + return isCached(name) || getProc(name).isPresent(); + } + + @Override + public void remove(String name) { + try { + client.dropStoredProcedure(name, ConnectContext.get().getCurrentCatalog().getName(), + ConnectContext.get().getDatabase()); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + private boolean isCached(String name) { + return cache.containsKey(qualified(name)); + } + + private String qualified(String name) { + return (ConnectContext.get().getDatabase() + "." 
+ name).toUpperCase(); + } + + + @Override + public boolean exec(String name, HplsqlParser.Expr_func_paramsContext ctx) { + if (builtinFunctions.exec(name, ctx)) { + return true; + } + if (isCached(name)) { + trace(ctx, "EXEC CACHED FUNCTION " + name); + execProcOrFunc(ctx, cache.get(qualified(name)), name); + return true; + } + Optional proc = getProc(name); + if (proc.isPresent()) { + trace(ctx, "EXEC HMS FUNCTION " + name); + ParserRuleContext procCtx = parse(proc.get()); + execProcOrFunc(ctx, procCtx, name); + saveInCache(name, procCtx); + return true; + } + return false; + } + + /** + * Execute a stored procedure using CALL or EXEC statement passing parameters + */ + private void execProcOrFunc(HplsqlParser.Expr_func_paramsContext ctx, ParserRuleContext procCtx, String name) { + exec.callStackPush(name); + HashMap out = new HashMap<>(); + ArrayList actualParams = getActualCallParameters(ctx); + exec.enterScope(Scope.Type.ROUTINE); + callWithParameters(ctx, procCtx, out, actualParams); + exec.callStackPop(); + exec.leaveScope(); + for (Map.Entry i : out.entrySet()) { // Set OUT parameters + exec.setVariable(i.getKey(), i.getValue()); + } + } + + private void callWithParameters(HplsqlParser.Expr_func_paramsContext ctx, ParserRuleContext procCtx, + HashMap out, ArrayList actualParams) { + if (procCtx instanceof HplsqlParser.Create_function_stmtContext) { + HplsqlParser.Create_function_stmtContext func = (HplsqlParser.Create_function_stmtContext) procCtx; + InMemoryFunctionRegistry.setCallParameters(func.ident().getText(), ctx, actualParams, + func.create_routine_params(), null, exec); + if (func.declare_block_inplace() != null) { + exec.visit(func.declare_block_inplace()); + } + exec.visit(func.single_block_stmt()); + } else { + HplsqlParser.Create_procedure_stmtContext proc = (HplsqlParser.Create_procedure_stmtContext) procCtx; + InMemoryFunctionRegistry.setCallParameters(proc.ident(0).getText(), ctx, actualParams, + proc.create_routine_params(), out, exec); + 
exec.visit(proc.proc_block()); + } + } + + private ParserRuleContext parse(StoredProcedure proc) { + HplsqlLexer lexer = new HplsqlLexer(new ANTLRInputStream(proc.getSource())); + CommonTokenStream tokens = new CommonTokenStream(lexer); + HplsqlParser parser = new HplsqlParser(tokens); + ProcVisitor visitor = new ProcVisitor(); + parser.program().accept(visitor); + return visitor.func != null ? visitor.func : visitor.proc; + } + + private Optional getProc(String name) { + return Optional.ofNullable(client.getStoredProcedure(name, ConnectContext.get().getCurrentCatalog().getName(), + ConnectContext.get().getDatabase())); + } + + private ArrayList getActualCallParameters(HplsqlParser.Expr_func_paramsContext actual) { + if (actual == null || actual.func_param() == null) { + return null; + } + int cnt = actual.func_param().size(); + ArrayList values = new ArrayList<>(cnt); + for (int i = 0; i < cnt; i++) { + values.add(evalPop(actual.func_param(i).expr())); + } + return values; + } + + @Override + public void addUserFunction(HplsqlParser.Create_function_stmtContext ctx) { + String name = ctx.ident().getText().toUpperCase(); + if (builtinFunctions.exists(name)) { + exec.info(ctx, name + " is a built-in function which cannot be redefined."); + return; + } + trace(ctx, "CREATE FUNCTION " + name); + saveInCache(name, ctx); + saveStoredProc(name, Exec.getFormattedText(ctx), ctx.T_REPLACE() != null); + } + + @Override + public void addUserProcedure(HplsqlParser.Create_procedure_stmtContext ctx) { + String name = ctx.ident(0).getText().toUpperCase(); + if (builtinFunctions.exists(name)) { + exec.info(ctx, name + " is a built-in function which cannot be redefined."); + return; + } + trace(ctx, "CREATE PROCEDURE " + name); + saveInCache(name, ctx); + saveStoredProc(name, Exec.getFormattedText(ctx), ctx.T_REPLACE() != null); + } + + private void saveStoredProc(String name, String source, boolean isForce) { + client.addStoredProcedure(name, 
ConnectContext.get().getCurrentCatalog().getName(), + ConnectContext.get().getDatabase(), + ConnectContext.get().getQualifiedUser(), source, isForce); + } + + private void saveInCache(String name, ParserRuleContext procCtx) { + cache.put(qualified(name), procCtx); + } + + /** + * Evaluate the expression and pop value from the stack + */ + private Var evalPop(ParserRuleContext ctx) { + exec.visit(ctx); + return exec.stackPop(); + } + + private void trace(ParserRuleContext ctx, String message) { + if (trace) { + exec.trace(ctx, message); + } + } + + private static class ProcVisitor extends HplsqlBaseVisitor { + HplsqlParser.Create_function_stmtContext func; + HplsqlParser.Create_procedure_stmtContext proc; + + @Override + public Void visitCreate_procedure_stmt(HplsqlParser.Create_procedure_stmtContext ctx) { + proc = ctx; + return null; + } + + @Override + public Void visitCreate_function_stmt(HplsqlParser.Create_function_stmtContext ctx) { + func = ctx; + return null; + } + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/functions/FuncCommand.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/functions/FuncCommand.java new file mode 100644 index 00000000000000..f6a01972afe9f3 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/functions/FuncCommand.java @@ -0,0 +1,27 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/functions/InMemoryFunctionRegistry.java +// and modified by Doris + +package org.apache.doris.hplsql.functions; + +import org.apache.doris.hplsql.HplsqlParser; + +interface FuncCommand { + void run(HplsqlParser.Expr_func_paramsContext ctx); +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/functions/FuncSpecCommand.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/functions/FuncSpecCommand.java new file mode 100644 index 00000000000000..26a811c6e98687 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/functions/FuncSpecCommand.java @@ -0,0 +1,27 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/functions/InMemoryFunctionRegistry.java +// and modified by Doris + +package org.apache.doris.hplsql.functions; + +import org.apache.doris.hplsql.HplsqlParser; + +interface FuncSpecCommand { + void run(HplsqlParser.Expr_spec_funcContext ctx); +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/functions/FunctionDatetime.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/functions/FunctionDatetime.java new file mode 100644 index 00000000000000..6e253465b9a7db --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/functions/FunctionDatetime.java @@ -0,0 +1,202 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/functions/FunctionDatetime.java +// and modified by Doris + +package org.apache.doris.hplsql.functions; + +import org.apache.doris.hplsql.Exec; +import org.apache.doris.hplsql.HplsqlParser; +import org.apache.doris.hplsql.Var; +import org.apache.doris.hplsql.executor.QueryExecutor; + +import org.apache.commons.lang3.StringUtils; + +import java.sql.Timestamp; +import java.text.SimpleDateFormat; +import java.util.Calendar; +import java.util.Date; +import java.util.TimeZone; + +public class FunctionDatetime extends BuiltinFunctions { + public FunctionDatetime(Exec e, QueryExecutor queryExecutor) { + super(e, queryExecutor); + } + + /** + * Register functions + */ + @Override + public void register(BuiltinFunctions f) { + f.map.put("DATE", this::date); + f.map.put("FROM_UNIXTIME", this::fromUnixtime); + f.map.put("NOW", ctx -> now(ctx)); + f.map.put("TIMESTAMP_ISO", this::timestampIso); + f.map.put("TO_TIMESTAMP", this::toTimestamp); + f.map.put("UNIX_TIMESTAMP", this::unixTimestamp); + f.map.put("CURRENT_TIME_MILLIS", this::currentTimeMillis); + + f.specMap.put("CURRENT_DATE", this::currentDate); + f.specMap.put("CURRENT_TIMESTAMP", this::currentTimestamp); + f.specMap.put("SYSDATE", this::currentTimestamp); + + f.specSqlMap.put("CURRENT_DATE", + (org.apache.doris.hplsql.functions.FuncSpecCommand) this::currentDateSql); + f.specSqlMap.put("CURRENT_TIMESTAMP", + (org.apache.doris.hplsql.functions.FuncSpecCommand) this::currentTimestampSql); + } + + /** + * CURRENT_DATE + */ + public void currentDate(HplsqlParser.Expr_spec_funcContext ctx) { + evalVar(currentDate()); + } + + public static Var currentDate() { + SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd"); + String s = f.format(Calendar.getInstance().getTime()); + return new Var(org.apache.doris.hplsql.Var.Type.DATE, + org.apache.doris.hplsql.Utils.toDate(s)); + } + + /** + * CURRENT_DATE in 
executable SQL statement + */ + public void currentDateSql(HplsqlParser.Expr_spec_funcContext ctx) { + if (exec.getConnectionType() == org.apache.doris.hplsql.Conn.Type.HIVE) { + evalString("TO_DATE(FROM_UNIXTIME(UNIX_TIMESTAMP()))"); + } else { + evalString(exec.getFormattedText(ctx)); + } + } + + /** + * CURRENT_TIMESTAMP + */ + public void currentTimestamp(HplsqlParser.Expr_spec_funcContext ctx) { + int precision = evalPop(ctx.expr(0), 3).intValue(); + evalVar(currentTimestamp(precision)); + } + + public static Var currentTimestamp(int precision) { + String format = "yyyy-MM-dd HH:mm:ss"; + if (precision > 0 && precision <= 3) { + format += "." + StringUtils.repeat("S", precision); + } + SimpleDateFormat f = new SimpleDateFormat(format); + String s = f.format(Calendar.getInstance(TimeZone.getDefault()).getTime()); + return new Var(org.apache.doris.hplsql.Utils.toTimestamp(s), precision); + } + + /** + * CURRENT_TIMESTAMP in executable SQL statement + */ + public void currentTimestampSql(HplsqlParser.Expr_spec_funcContext ctx) { + if (exec.getConnectionType() == org.apache.doris.hplsql.Conn.Type.HIVE) { + evalString("FROM_UNIXTIME(UNIX_TIMESTAMP())"); + } else { + evalString(org.apache.doris.hplsql.Exec.getFormattedText(ctx)); + } + } + + /** + * DATE function + */ + void date(HplsqlParser.Expr_func_paramsContext ctx) { + if (ctx.func_param().size() != 1) { + evalNull(); + return; + } + Var var = new Var(org.apache.doris.hplsql.Var.Type.DATE); + var.cast(evalPop(ctx.func_param(0).expr())); + evalVar(var); + } + + /** + * NOW() function (current date and time) + */ + void now(HplsqlParser.Expr_func_paramsContext ctx) { + if (ctx != null) { + evalNull(); + return; + } + evalVar(currentTimestamp(3)); + } + + /** + * TIMESTAMP_ISO function + */ + void timestampIso(HplsqlParser.Expr_func_paramsContext ctx) { + if (ctx.func_param().size() != 1) { + evalNull(); + return; + } + Var var = new Var(org.apache.doris.hplsql.Var.Type.TIMESTAMP); + 
var.cast(evalPop(ctx.func_param(0).expr())); + evalVar(var); + } + + /** + * TO_TIMESTAMP function + */ + void toTimestamp(HplsqlParser.Expr_func_paramsContext ctx) { + if (ctx.func_param().size() != 2) { + evalNull(); + return; + } + String value = evalPop(ctx.func_param(0).expr()).toString(); + String sqlFormat = evalPop(ctx.func_param(1).expr()).toString(); + String format = org.apache.doris.hplsql.Utils.convertSqlDatetimeFormat(sqlFormat); + try { + long timeInMs = new SimpleDateFormat(format).parse(value).getTime(); + evalVar(new Var(org.apache.doris.hplsql.Var.Type.TIMESTAMP, new Timestamp(timeInMs))); + } catch (Exception e) { + exec.signal(e); + evalNull(); + } + } + + /** + * FROM_UNIXTIME() function (convert seconds since 1970-01-01 00:00:00 to timestamp) + */ + void fromUnixtime(HplsqlParser.Expr_func_paramsContext ctx) { + int cnt = getParamCount(ctx); + if (cnt == 0) { + evalNull(); + return; + } + long epoch = evalPop(ctx.func_param(0).expr()).longValue(); + String format = "yyyy-MM-dd HH:mm:ss"; + if (cnt > 1) { + format = evalPop(ctx.func_param(1).expr()).toString(); + } + evalString(new SimpleDateFormat(format).format(new Date(epoch * 1000))); + } + + /** + * UNIX_TIMESTAMP() function (current date and time in seconds since 1970-01-01 00:00:00) + */ + void unixTimestamp(HplsqlParser.Expr_func_paramsContext ctx) { + evalVar(new Var(System.currentTimeMillis() / 1000)); + } + + public void currentTimeMillis(HplsqlParser.Expr_func_paramsContext ctx) { + evalVar(new Var(System.currentTimeMillis())); + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/functions/FunctionMisc.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/functions/FunctionMisc.java new file mode 100644 index 00000000000000..d6eb7d66c8000c --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/functions/FunctionMisc.java @@ -0,0 +1,313 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. 
See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/functions/FunctionMisc.java +// and modified by Doris + +package org.apache.doris.hplsql.functions; + +import org.apache.doris.hplsql.Conn; +import org.apache.doris.hplsql.Exec; +import org.apache.doris.hplsql.HplsqlParser; +import org.apache.doris.hplsql.Var; +import org.apache.doris.hplsql.exception.QueryException; +import org.apache.doris.hplsql.executor.QueryExecutor; +import org.apache.doris.hplsql.executor.QueryResult; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Map; + +public class FunctionMisc extends BuiltinFunctions { + public FunctionMisc(Exec e, QueryExecutor queryExecutor) { + super(e, queryExecutor); + } + + /** + * Register functions + */ + @Override + public void register(BuiltinFunctions f) { + f.map.put("COALESCE", this::nvl); + f.map.put("DECODE", this::decode); + f.map.put("NVL", this::nvl); + f.map.put("NVL2", this::nvl2); + f.map.put("PART_COUNT_BY", this::partCountBy); + f.map.put("MOD", this::modulo); + + f.specMap.put("ACTIVITY_COUNT", this::activityCount); + f.specMap.put("CAST", this::cast); + f.specMap.put("CURRENT", this::current); + f.specMap.put("CURRENT_USER", this::currentUser); + 
f.specMap.put("PART_COUNT", this::partCount); + f.specMap.put("USER", this::currentUser); + + f.specSqlMap.put("CURRENT", this::currentSql); + } + + /** + * ACTIVITY_COUNT function (built-in variable) + */ + void activityCount(HplsqlParser.Expr_spec_funcContext ctx) { + evalInt(Long.valueOf(exec.getRowCount())); + } + + /** + * CAST function + */ + void cast(HplsqlParser.Expr_spec_funcContext ctx) { + if (ctx.expr().size() != 1) { + evalNull(); + return; + } + String type = ctx.dtype().getText(); + String len = null; + String scale = null; + if (ctx.dtype_len() != null) { + len = ctx.dtype_len().L_INT(0).getText(); + if (ctx.dtype_len().L_INT(1) != null) { + scale = ctx.dtype_len().L_INT(1).getText(); + } + } + Var var = new Var(null, type, len, scale, null); + var.cast(evalPop(ctx.expr(0))); + evalVar(var); + } + + /** + * CURRENT function + */ + void current(HplsqlParser.Expr_spec_funcContext ctx) { + if (ctx.T_DATE() != null) { + evalVar(FunctionDatetime.currentDate()); + } else if (ctx.T_TIMESTAMP() != null) { + int precision = evalPop(ctx.expr(0), 3).intValue(); + evalVar(FunctionDatetime.currentTimestamp(precision)); + } else if (ctx.T_USER() != null) { + evalVar(FunctionMisc.currentUser()); + } else { + evalNull(); + } + } + + /** + * CURRENT function in executable SQL statement + */ + void currentSql(HplsqlParser.Expr_spec_funcContext ctx) { + if (ctx.T_DATE() != null) { + if (exec.getConnectionType() == Conn.Type.HIVE) { + evalString("TO_DATE(FROM_UNIXTIME(UNIX_TIMESTAMP()))"); + } else { + evalString("CURRENT_DATE"); + } + } else if (ctx.T_TIMESTAMP() != null) { + if (exec.getConnectionType() == Conn.Type.HIVE) { + evalString("FROM_UNIXTIME(UNIX_TIMESTAMP())"); + } else { + evalString("CURRENT_TIMESTAMP"); + } + } else { + evalString(exec.getFormattedText(ctx)); + } + } + + /** + * CURRENT_USER function + */ + void currentUser(HplsqlParser.Expr_spec_funcContext ctx) { + evalVar(currentUser()); + } + + public static Var currentUser() { + return new 
Var(System.getProperty("user.name")); + } + + /** + * DECODE function + */ + void decode(HplsqlParser.Expr_func_paramsContext ctx) { + int cnt = ctx.func_param().size(); + if (cnt < 3) { + evalNull(); + return; + } + Var value = evalPop(ctx.func_param(0).expr()); + int i = 1; + while (i + 1 < cnt) { + Var when = evalPop(ctx.func_param(i).expr()); + if ((value.isNull() && when.isNull()) || value.equals(when)) { + eval(ctx.func_param(i + 1).expr()); + return; + } + i += 2; + } + if (i < cnt) { // ELSE expression + eval(ctx.func_param(i).expr()); + } else { + evalNull(); + } + } + + /** + * NVL function - Return first non-NULL expression + */ + void nvl(HplsqlParser.Expr_func_paramsContext ctx) { + for (int i = 0; i < ctx.func_param().size(); i++) { + Var v = evalPop(ctx.func_param(i).expr()); + if (v.type != Var.Type.NULL) { + exec.stackPush(v); + return; + } + } + evalNull(); + } + + /** + * NVL2 function - If expr1 is not NULL return expr2, otherwise expr3 + */ + void nvl2(HplsqlParser.Expr_func_paramsContext ctx) { + if (ctx.func_param().size() == 3) { + if (!evalPop(ctx.func_param(0).expr()).isNull()) { + eval(ctx.func_param(1).expr()); + } else { + eval(ctx.func_param(2).expr()); + } + } else { + evalNull(); + } + } + + /** + * PART_COUNT function + */ + public void partCount(HplsqlParser.Expr_spec_funcContext ctx) { + String tabname = evalPop(ctx.expr(0)).toString(); + StringBuilder sql = new StringBuilder(); + sql.append("SHOW PARTITIONS "); + sql.append(tabname); + int cnt = ctx.expr().size(); + if (cnt > 1) { + sql.append(" PARTITION ("); + int i = 1; + while (i + 1 < cnt) { + String col = evalPop(ctx.expr(i)).toString(); + String val = evalPop(ctx.expr(i + 1)).toSqlString(); + if (i > 2) { + sql.append(", "); + } + sql.append(col); + sql.append("="); + sql.append(val); + i += 2; + } + sql.append(")"); + } + if (trace) { + trace(ctx, "Query: " + sql); + } + if (exec.getOffline()) { + evalNull(); + return; + } + QueryResult query = 
queryExecutor.executeQuery(sql.toString(), ctx); + if (query.error()) { + evalNullClose(query); + return; + } + int result = 0; + try { + while (query.next()) { + result++; + } + } catch (Exception e) { + evalNullClose(query); + return; + } + evalInt(result); + query.close(); + } + + public void modulo(HplsqlParser.Expr_func_paramsContext ctx) { + if (ctx.func_param().size() == 2) { + int a = evalPop(ctx.func_param(0).expr()).intValue(); + int b = evalPop(ctx.func_param(1).expr()).intValue(); + evalInt(a % b); + } else { + evalNull(); + } + } + + /** + * PART_COUNT_BY function + */ + public void partCountBy(HplsqlParser.Expr_func_paramsContext ctx) { + int cnt = ctx.func_param().size(); + if (cnt < 1 || exec.getOffline()) { + return; + } + String tabname = evalPop(ctx.func_param(0).expr()).toString(); + ArrayList keys = null; + if (cnt > 1) { + keys = new ArrayList<>(); + for (int i = 1; i < cnt; i++) { + keys.add(evalPop(ctx.func_param(i).expr()).toString().toUpperCase()); + } + } + String sql = "SHOW PARTITIONS " + tabname; + QueryResult query = queryExecutor.executeQuery(sql, ctx); + if (query.error()) { + query.close(); + return; + } + Map group = new HashMap<>(); + try { + while (query.next()) { + String part = query.column(0, String.class); + String[] parts = part.split("/"); + String key = parts[0]; + if (cnt > 1) { + StringBuilder k = new StringBuilder(); + for (int i = 0; i < parts.length; i++) { + if (keys.contains(parts[i].split("=")[0].toUpperCase())) { + if (k.length() > 0) { + k.append("/"); + } + k.append(parts[i]); + } + } + key = k.toString(); + } + Integer count = group.get(key); + if (count == null) { + count = Integer.valueOf(0); + } + group.put(key, count + 1); + } + } catch (QueryException e) { + query.close(); + return; + } + if (cnt == 1) { + evalInt(group.size()); + } else { + for (Map.Entry i : group.entrySet()) { + console.printLine(i.getKey() + '\t' + i.getValue()); + } + } + query.close(); + } +} diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/hplsql/functions/FunctionRegistry.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/functions/FunctionRegistry.java new file mode 100644 index 00000000000000..9b52c0a98ea5b5 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/functions/FunctionRegistry.java @@ -0,0 +1,35 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/functions/FunctionRegistry.java +// and modified by Doris + +package org.apache.doris.hplsql.functions; + +import org.apache.doris.hplsql.HplsqlParser; + +public interface FunctionRegistry { + boolean exec(String name, HplsqlParser.Expr_func_paramsContext ctx); + + void addUserFunction(HplsqlParser.Create_function_stmtContext ctx); + + void addUserProcedure(HplsqlParser.Create_procedure_stmtContext ctx); + + boolean exists(String name); + + void remove(String name); +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/functions/FunctionString.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/functions/FunctionString.java new file mode 100644 index 00000000000000..413d4aa90201ec --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/functions/FunctionString.java @@ -0,0 +1,289 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/functions/FunctionString.java +// and modified by Doris + +package org.apache.doris.hplsql.functions; + +import org.apache.doris.hplsql.Exec; +import org.apache.doris.hplsql.HplsqlParser; +import org.apache.doris.hplsql.executor.QueryExecutor; + +public class FunctionString extends BuiltinFunctions { + public FunctionString(Exec e, QueryExecutor queryExecutor) { + super(e, queryExecutor); + } + + /** + * Register functions + */ + @Override + public void register(BuiltinFunctions f) { + f.map.put("CONCAT", this::concat); + f.map.put("CHAR", this::char_); + f.map.put("INSTR", this::instr); + f.map.put("LEN", this::len); + f.map.put("LENGTH", this::length); + f.map.put("LOWER", this::lower); + f.map.put("REPLACE", this::replace); + f.map.put("SUBSTR", this::substr); + f.map.put("SUBSTRING", this::substr); + f.map.put("TO_CHAR", this::toChar); + f.map.put("UPPER", this::upper); + + f.specMap.put("SUBSTRING", this::substring); + f.specMap.put("TRIM", this::trim); + } + + /** + * CONCAT function + */ + void concat(HplsqlParser.Expr_func_paramsContext ctx) { + StringBuilder val = new StringBuilder(); + int cnt = getParamCount(ctx); + boolean nulls = true; + for (int i = 0; i < cnt; i++) { + org.apache.doris.hplsql.Var c = evalPop(ctx.func_param(i).expr()); + if (!c.isNull()) { + val.append(c.toString()); + nulls = false; + } + } + if (nulls) { + evalNull(); + } else { + evalString(val); + } + } + + /** + * CHAR function + */ + void char_(HplsqlParser.Expr_func_paramsContext ctx) { + int cnt = getParamCount(ctx); + if (cnt != 1) { + evalNull(); + return; + } + String str = evalPop(ctx.func_param(0).expr()).toString(); + evalString(str); + } + + /** + * INSTR function + */ + void instr(HplsqlParser.Expr_func_paramsContext ctx) { + int cnt = getParamCount(ctx); + if (cnt < 2) { + evalNull(); + return; + } + String str = 
evalPop(ctx.func_param(0).expr()).toString(); + if (str == null) { + evalNull(); + return; + } else if (str.isEmpty()) { + evalInt(0); + return; + } + String substr = evalPop(ctx.func_param(1).expr()).toString(); + int pos = 1; + int occur = 1; + int idx = 0; + if (cnt >= 3) { + pos = evalPop(ctx.func_param(2).expr()).intValue(); + if (pos == 0) { + pos = 1; + } + } + if (cnt >= 4) { + occur = evalPop(ctx.func_param(3).expr()).intValue(); + if (occur < 0) { + occur = 1; + } + } + for (int i = occur; i > 0; i--) { + if (pos > 0) { + idx = str.indexOf(substr, pos - 1); + } else { + str = str.substring(0, str.length() - pos * (-1)); + idx = str.lastIndexOf(substr); + } + if (idx == -1) { + idx = 0; + break; + } else { + idx++; + } + if (i > 1) { + if (pos > 0) { + pos = idx + 1; + } else { + pos = (str.length() - idx + 1) * (-1); + } + } + } + evalInt(idx); + } + + /** + * LEN function (excluding trailing spaces) + */ + void len(HplsqlParser.Expr_func_paramsContext ctx) { + if (ctx.func_param().size() != 1) { + evalNull(); + return; + } + int len = evalPop(ctx.func_param(0).expr()).toString().trim().length(); + evalInt(len); + } + + /** + * LENGTH function + */ + void length(HplsqlParser.Expr_func_paramsContext ctx) { + if (ctx.func_param().size() != 1) { + evalNull(); + return; + } + int len = evalPop(ctx.func_param(0).expr()).toString().length(); + evalInt(len); + } + + /** + * LOWER function + */ + void lower(HplsqlParser.Expr_func_paramsContext ctx) { + if (ctx.func_param().size() != 1) { + evalNull(); + return; + } + String str = evalPop(ctx.func_param(0).expr()).toString().toLowerCase(); + evalString(str); + } + + /** + * REPLACE function + */ + void replace(HplsqlParser.Expr_func_paramsContext ctx) { + int cnt = getParamCount(ctx); + if (cnt < 3) { + evalNull(); + return; + } + String str = evalPop(ctx.func_param(0).expr()).toString(); + String what = evalPop(ctx.func_param(1).expr()).toString(); + String with = evalPop(ctx.func_param(2).expr()).toString(); + 
evalString(str.replaceAll(what, with)); + } + + /** + * SUBSTR and SUBSTRING function + */ + void substr(HplsqlParser.Expr_func_paramsContext ctx) { + int cnt = getParamCount(ctx); + if (cnt < 2) { + evalNull(); + return; + } + String str = evalPop(ctx.func_param(0).expr()).toString(); + int start = evalPop(ctx.func_param(1).expr()).intValue(); + int len = -1; + if (start == 0) { + start = 1; + } + if (cnt > 2) { + len = evalPop(ctx.func_param(2).expr()).intValue(); + } + substr(str, start, len); + } + + void substr(String str, int start, int len) { + if (str == null) { + evalNull(); + return; + } else if (str.isEmpty()) { + evalString(str); + return; + } + if (start == 0) { + start = 1; + } + if (len == -1) { + if (start > 0) { + evalString(str.substring(start - 1)); + } + } else { + evalString(str.substring(start - 1, start - 1 + len)); + } + } + + /** + * SUBSTRING FROM FOR function + */ + void substring(HplsqlParser.Expr_spec_funcContext ctx) { + String str = evalPop(ctx.expr(0)).toString(); + int start = evalPop(ctx.expr(1)).intValue(); + int len = -1; + if (start == 0) { + start = 1; + } + if (ctx.T_FOR() != null) { + len = evalPop(ctx.expr(2)).intValue(); + } + substr(str, start, len); + } + + /** + * TRIM function + */ + void trim(HplsqlParser.Expr_spec_funcContext ctx) { + int cnt = ctx.expr().size(); + if (cnt != 1) { + evalNull(); + return; + } + String str = evalPop(ctx.expr(0)).toString(); + evalString(str.trim()); + } + + /** + * TO_CHAR function + */ + void toChar(HplsqlParser.Expr_func_paramsContext ctx) { + int cnt = getParamCount(ctx); + if (cnt != 1) { + evalNull(); + return; + } + String str = evalPop(ctx.func_param(0).expr()).toString(); + evalString(str); + } + + /** + * UPPER function + */ + void upper(HplsqlParser.Expr_func_paramsContext ctx) { + if (ctx.func_param().size() != 1) { + evalNull(); + return; + } + String str = evalPop(ctx.func_param(0).expr()).toString().toUpperCase(); + evalString(str); + } +} diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/hplsql/functions/InMemoryFunctionRegistry.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/functions/InMemoryFunctionRegistry.java new file mode 100644 index 00000000000000..66ac1e63aade9f --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/functions/InMemoryFunctionRegistry.java @@ -0,0 +1,261 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/functions/InMemoryFunctionRegistry.java +// and modified by Doris + +package org.apache.doris.hplsql.functions; + +import org.apache.doris.hplsql.Exec; +import org.apache.doris.hplsql.HplsqlParser; +import org.apache.doris.hplsql.Scope; +import org.apache.doris.hplsql.Var; +import org.apache.doris.hplsql.exception.ArityException; +import org.apache.doris.hplsql.objects.TableClass; + +import org.antlr.v4.runtime.ParserRuleContext; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Map; + +/** + * HPL/SQL functions + */ +public class InMemoryFunctionRegistry implements FunctionRegistry { + Exec exec; + private BuiltinFunctions builtinFunctions; + HashMap funcMap = new HashMap<>(); + HashMap procMap = new HashMap<>(); + boolean trace = false; + + public InMemoryFunctionRegistry(Exec e, BuiltinFunctions builtinFunctions) { + this.exec = e; + this.trace = exec.getTrace(); + this.builtinFunctions = builtinFunctions; + } + + @Override + public boolean exists(String name) { + return funcMap.containsKey(name) || procMap.containsKey(name); + } + + @Override + public void remove(String name) { + funcMap.remove(name); + procMap.remove(name); + } + + @Override + public boolean exec(String name, HplsqlParser.Expr_func_paramsContext ctx) { + if (builtinFunctions.exec(name, ctx)) { + return true; + } + if (execFunction(name, ctx)) { + return true; + } + return (procMap.get(name) != null && execProc(name, ctx)); + } + + /** + * Execute a user-defined function + */ + private boolean execFunction(String name, HplsqlParser.Expr_func_paramsContext ctx) { + HplsqlParser.Create_function_stmtContext userCtx = funcMap.get(name); + if (userCtx == null) { + return false; + } + if (trace) { + trace(ctx, "EXEC FUNCTION " + name); + } + ArrayList actualParams = getActualCallParameters(ctx); + exec.enterScope(Scope.Type.ROUTINE); + 
setCallParameters(name, ctx, actualParams, userCtx.create_routine_params(), null, exec); + if (userCtx.declare_block_inplace() != null) { + visit(userCtx.declare_block_inplace()); + } + visit(userCtx.single_block_stmt()); + exec.leaveScope(); + return true; + } + + /** + * Execute a stored procedure using CALL or EXEC statement passing parameters + */ + private boolean execProc(String name, HplsqlParser.Expr_func_paramsContext ctx) { + if (trace) { + trace(ctx == null ? null : ctx.getParent(), "EXEC PROCEDURE " + name); + } + HplsqlParser.Create_procedure_stmtContext procCtx = procMap.get(name); + if (procCtx == null) { + trace(ctx.getParent(), "Procedure not found"); + return false; + } + ArrayList actualParams = getActualCallParameters(ctx); + HashMap out = new HashMap<>(); + exec.enterScope(Scope.Type.ROUTINE); + exec.callStackPush(name); + if (procCtx.declare_block_inplace() != null) { + visit(procCtx.declare_block_inplace()); + } + if (procCtx.create_routine_params() != null) { + setCallParameters(name, ctx, actualParams, procCtx.create_routine_params(), out, exec); + } + visit(procCtx.proc_block()); + exec.callStackPop(); + exec.leaveScope(); + for (Map.Entry i : out.entrySet()) { // Set OUT parameters + exec.setVariable(i.getKey(), i.getValue()); + } + return true; + } + + /** + * Set parameters for user-defined function call + */ + public static void setCallParameters(String procName, HplsqlParser.Expr_func_paramsContext actual, + ArrayList actualValues, HplsqlParser.Create_routine_paramsContext formal, HashMap out, + Exec exec) { + if (actual == null || actual.func_param() == null || actualValues == null) { + return; + } + int actualCnt = actualValues.size(); + int formalCnt = formal.create_routine_param_item().size(); + if (formalCnt != actualCnt) { + throw new ArityException(actual.getParent(), procName, formalCnt, actualCnt); + } + for (int i = 0; i < actualCnt; i++) { + HplsqlParser.ExprContext a = actual.func_param(i).expr(); + 
HplsqlParser.Create_routine_param_itemContext p = getCallParameter(actual, formal, i); + String name = p.ident().getText(); + String type = p.dtype().getText(); + String len = null; + String scale = null; + if (p.dtype_len() != null) { + len = p.dtype_len().L_INT(0).getText(); + if (p.dtype_len().L_INT(1) != null) { + scale = p.dtype_len().L_INT(1).getText(); + } + } + Var var = setCallParameter(name, type, len, scale, actualValues.get(i), exec); + exec.trace(actual, "SET PARAM " + name + " = " + var.toString()); + if (out != null && a.expr_atom() != null && a.expr_atom().qident() != null && (p.T_OUT() != null + || p.T_INOUT() != null)) { + String actualName = a.expr_atom().qident().getText(); + if (actualName != null) { + out.put(actualName, var); + } + } + } + } + + /** + * Create a function or procedure parameter and set its value + */ + static Var setCallParameter(String name, String typeName, String len, String scale, Var value, Exec exec) { + TableClass hplClass = exec.getType(typeName); + Var var = new Var(name, hplClass == null ? 
typeName : Var.Type.HPL_OBJECT.name(), len, scale, null); + if (hplClass != null) { + var.setValue(hplClass.newInstance()); + } + var.cast(value); + exec.addVariable(var); + return var; + } + + /** + * Get call parameter definition by name (if specified) or position + */ + static HplsqlParser.Create_routine_param_itemContext getCallParameter(HplsqlParser.Expr_func_paramsContext actual, + HplsqlParser.Create_routine_paramsContext formal, int pos) { + String named; + int outPos = pos; + if (actual.func_param(pos).ident() != null) { + named = actual.func_param(pos).ident().getText(); + int cnt = formal.create_routine_param_item().size(); + for (int i = 0; i < cnt; i++) { + if (named.equalsIgnoreCase(formal.create_routine_param_item(i).ident().getText())) { + outPos = i; + break; + } + } + } + return formal.create_routine_param_item(outPos); + } + + /** + * Evaluate actual call parameters + */ + public ArrayList getActualCallParameters(HplsqlParser.Expr_func_paramsContext actual) { + if (actual == null || actual.func_param() == null) { + return null; + } + int cnt = actual.func_param().size(); + ArrayList values = new ArrayList<>(cnt); + for (int i = 0; i < cnt; i++) { + values.add(evalPop(actual.func_param(i).expr())); + } + return values; + } + + @Override + public void addUserFunction(HplsqlParser.Create_function_stmtContext ctx) { + String name = ctx.ident().getText().toUpperCase(); + if (builtinFunctions.exists(name)) { + exec.info(ctx, name + " is a built-in function which cannot be redefined."); + return; + } + if (trace) { + trace(ctx, "CREATE FUNCTION " + name); + } + funcMap.put(name.toUpperCase(), ctx); + } + + @Override + public void addUserProcedure(HplsqlParser.Create_procedure_stmtContext ctx) { + String name = ctx.ident(0).getText().toUpperCase(); + if (builtinFunctions.exists(name)) { + exec.info(ctx, name + " is a built-in function which cannot be redefined."); + return; + } + if (trace) { + trace(ctx, "CREATE PROCEDURE " + name); + } + 
procMap.put(name.toUpperCase(), ctx); + } + + /** + * Evaluate the expression and pop value from the stack + */ + private Var evalPop(ParserRuleContext ctx) { + exec.visit(ctx); + return exec.stackPop(); + } + + /** + * Execute rules + */ + private Integer visit(ParserRuleContext ctx) { + return exec.visit(ctx); + } + + private void trace(ParserRuleContext ctx, String message) { + if (trace) { + exec.trace(ctx, message); + } + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/objects/DbmOutput.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/objects/DbmOutput.java new file mode 100644 index 00000000000000..da93f3d2ac984b --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/objects/DbmOutput.java @@ -0,0 +1,51 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/objects/DbmOutput.java +// and modified by Doris + +package org.apache.doris.hplsql.objects; + +import org.apache.doris.hplsql.Console; +import org.apache.doris.hplsql.Var; + +import java.util.List; + +public class DbmOutput implements org.apache.doris.hplsql.objects.HplObject { + private final org.apache.doris.hplsql.objects.HplClass hplClass; + private Console console; + + public DbmOutput(org.apache.doris.hplsql.objects.HplClass hplClass) { + this.hplClass = hplClass; + } + + public void initialize(Console console) { + this.console = console; + } + + @Override + public org.apache.doris.hplsql.objects.HplClass hplClass() { + return hplClass; + } + + public Var putLine(List params) { + if (!params.isEmpty()) { + console.printLine(params.get(0).toString()); + } + return null; + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/objects/DbmOutputClass.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/objects/DbmOutputClass.java new file mode 100644 index 00000000000000..f4520cc364265f --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/objects/DbmOutputClass.java @@ -0,0 +1,46 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. 
See the License for the +// specific language governing permissions and limitations +// under the License. +// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/objects/DbmOutputClass.java +// and modified by Doris + +package org.apache.doris.hplsql.objects; + +import org.apache.doris.hplsql.objects.MethodParams.Arity; + +public class DbmOutputClass implements org.apache.doris.hplsql.objects.HplClass { + public static final DbmOutputClass INSTANCE = new DbmOutputClass(); + private final org.apache.doris.hplsql.objects.MethodDictionary methodDictionary + = new org.apache.doris.hplsql.objects.MethodDictionary(); + + private DbmOutputClass() { + methodDictionary.put("put_line", (self, args) -> { + Arity.UNARY.check("put_line", args); + return self.putLine(args); + }); + } + + @Override + public DbmOutput newInstance() { + return new DbmOutput(this); + } + + @Override + public org.apache.doris.hplsql.objects.MethodDictionary methodDictionary() { + return methodDictionary; + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/objects/HplClass.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/objects/HplClass.java new file mode 100644 index 00000000000000..ce7f219bb189d1 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/objects/HplClass.java @@ -0,0 +1,27 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/objects/HplClass.java +// and modified by Doris + +package org.apache.doris.hplsql.objects; + +public interface HplClass { + HplObject newInstance(); + + org.apache.doris.hplsql.objects.MethodDictionary methodDictionary(); +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/objects/HplObject.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/objects/HplObject.java new file mode 100644 index 00000000000000..8d712db9cb52ac --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/objects/HplObject.java @@ -0,0 +1,25 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/objects/HplObject.java +// and modified by Doris + +package org.apache.doris.hplsql.objects; + +public interface HplObject { + HplClass hplClass(); +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/objects/Method.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/objects/Method.java new file mode 100644 index 00000000000000..14223f69cfd057 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/objects/Method.java @@ -0,0 +1,29 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/objects/Method.java +// and modified by Doris + +package org.apache.doris.hplsql.objects; + +import org.apache.doris.hplsql.Var; + +import java.util.List; + +public interface Method { + Var call(T self, List args); +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/objects/MethodDictionary.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/objects/MethodDictionary.java new file mode 100644 index 00000000000000..3e355ce92165a6 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/objects/MethodDictionary.java @@ -0,0 +1,46 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/objects/MethodDictionary.java +// and modified by Doris + +package org.apache.doris.hplsql.objects; + +import org.apache.doris.hplsql.exception.NoSuchHplMethodException; + +import org.antlr.v4.runtime.ParserRuleContext; + +import java.util.HashMap; +import java.util.Map; + +public class MethodDictionary { + public static final String __GETITEM__ = "__GETITEM__"; + public static final String __SETITEM__ = "__SETITEM__"; + private final Map> dict = new HashMap<>(); + + public void put(String methodName, Method method) { + dict.put(methodName.toUpperCase(), method); + } + + public Method get(ParserRuleContext ctx, String methodName) { + Method result = dict.get(methodName.toUpperCase()); + if (result == null) { + throw new NoSuchHplMethodException(ctx, "No such method " + methodName); + } + return result; + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/objects/MethodParams.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/objects/MethodParams.java new file mode 100644 index 00000000000000..29835d6385cf08 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/objects/MethodParams.java @@ -0,0 +1,96 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. 
See the License for the +// specific language governing permissions and limitations +// under the License. +// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/objects/MethodParams.java +// and modified by Doris + +package org.apache.doris.hplsql.objects; + +import org.apache.doris.hplsql.File; +import org.apache.doris.hplsql.Row; +import org.apache.doris.hplsql.Var; +import org.apache.doris.hplsql.exception.ArityException; +import org.apache.doris.hplsql.exception.TypeException; + +import java.util.List; + +public class MethodParams { + private final List actual; + + public MethodParams(String methodName, List actual, Arity arity) { + this.actual = actual; + arity.check(methodName, actual); + } + + public Long longAt(int nth) { + return at(nth, Long.class); + } + + public Row rowAt(int nth) { + return at(nth, Row.class); + } + + public String stringAt(int nth) { + return at(nth, String.class); + } + + public File fileAt(int nth) { + return at(nth, File.class); + } + + public T at(int nth, Class clazz) { + try { + return clazz.cast(actual.get(nth).value); + } catch (ClassCastException e) { + throw new TypeException(null, clazz, actual.get(nth).type, actual.get(nth).value); + } + } + + public interface Arity { + void check(String methodName, List params); + + Arity NULLARY = Arity.of(0); + Arity UNARY = Arity.of(1); + Arity BINARY = Arity.of(2); + + static Arity of(int count) { + return (methodName, params) -> { + if (params.size() != count) { + throw new ArityException(null, methodName, count, params.size()); + } + }; + } + + static Arity min(int count) { + return (methodName, params) -> { + if (params.size() < count) { + throw new ArityException(null, "wrong number of arguments in call to '" + methodName + + "'. 
Expected at least " + count + " got " + params.size() + "."); + } + }; + } + + static Arity max(int count) { + return (methodName, params) -> { + if (params.size() > count) { + throw new ArityException(null, "wrong number of arguments in call to '" + methodName + + "'. Expected at most " + count + " got " + params.size() + "."); + } + }; + } + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/objects/Table.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/objects/Table.java new file mode 100644 index 00000000000000..57aad8e1d1cf5b --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/objects/Table.java @@ -0,0 +1,224 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/objects/Table.java +// and modified by Doris + +package org.apache.doris.hplsql.objects; + +import org.apache.doris.hplsql.ColumnDefinition; +import org.apache.doris.hplsql.Row; +import org.apache.doris.hplsql.Var; +import org.apache.doris.hplsql.executor.QueryResult; + +import java.util.HashMap; +import java.util.Map; + +/** + * Oracle's PL/SQL Table/associative array. + *

+ * Tables can be modelled after a corresponding Hive table or they can be created manually. + *

+ * 1. Model the table after the emp Hive table + * TYPE t_tab IS TABLE OF emp%ROWTYPE INDEX BY BINARY_INTEGER; + *

+ * 2. Model the table after a column of a Hive table (emp.name). This table will hold a single column only. + * TYPE t_tab IS TABLE OF emp.name%TYPE INDEX BY BINARY_INTEGER; + *

+ * 3. Or you can specify the column manually. This table will hold one column only. + * TYPE t_tab IS TABLE OF NUMBER INDEX BY BINARY_INTEGER; + *

+ * In the first case the values will be records where each key in the record matches the columns to the corresponding + * table. + * tab(key).col_name; + *

+ * In the last two cases the values will represent scalars, but they stored in a record with a single key. + * tab(key) + *

+ * Iteration logic uses the first/last next and prior methods. + * First/last returns a key, next/prior gives back the next or previous key based on the key passed in. + */ +public class Table implements HplObject { + private final TableClass hplClass; + private final Map rows = new HashMap<>(); + private Object lastKey = null; + private Object firstKey = null; + + public Table(TableClass hplClass) { + this.hplClass = hplClass; + } + + public void populate(QueryResult query, long rowIndex, int columnIndex) { + if (hplClass().rowType()) { + putRow(rowIndex, query); + } else { + putColumn(rowIndex, query, columnIndex); + } + } + + public void putRow(Object key, QueryResult result) { + put(key, readRow(result)); + } + + public void putColumn(Object key, QueryResult query, int columnIndex) { + put(key, readColumn(query, columnIndex)); + } + + public void put(Object key, Row row) { + Value existing = rows.get(key); + if (existing != null) { + existing.row = row; + } else { + if (lastKey != null) { + rows.get(lastKey).nextKey = key; + } + rows.put(key, new Value(row, lastKey)); + lastKey = key; + if (firstKey == null) { + firstKey = key; + } + } + } + + private Row readRow(QueryResult result) { + Row row = new Row(); + int idx = 0; + for (ColumnDefinition column : hplClass.columns()) { + Var var = new Var(column.columnName(), column.columnType().typeString(), (Integer) null, null, null); + var.setValue(result, idx); + row.addColumn(column.columnName(), column.columnTypeString(), var); + idx++; + } + return row; + } + + private Row readColumn(QueryResult result, int columnIndex) { + Row row = new Row(); + ColumnDefinition column = hplClass.columns().get(0); + Var var = new Var(column.columnName(), column.columnType().typeString(), (Integer) null, null, null); + var.setValue(result, columnIndex); + row.addColumn(column.columnName(), column.columnTypeString(), var); + return row; + } + + public Row at(Object key) { + Value value = rows.get(key); + return value == null ? 
null : value.row; + } + + public boolean removeAt(Object key) { + Value value = rows.remove(key); + if (value != null) { + updateLinks(key, value.nextKey, value.prevKey); + } + return value != null; + } + + private void updateLinks(Object deletedKey, Object nextKey, Object prevKey) { + if (prevKey != null) { + rows.get(prevKey).nextKey = nextKey; + } + if (nextKey != null) { + rows.get(nextKey).prevKey = prevKey; + } + if (deletedKey.equals(firstKey)) { + firstKey = nextKey; + } + if (deletedKey.equals(lastKey)) { + lastKey = prevKey; + } + } + + public void removeFromTo(Object fromKey, Object toKey) { + Object current = fromKey; + while (current != null && !current.equals(toKey)) { + Object next = nextKey(current); + removeAt(current); + current = next; + } + if (current != null) { + removeAt(current); + } + } + + public void removeAll() { + lastKey = null; + firstKey = null; + rows.clear(); + } + + public Object nextKey(Object key) { + Value value = rows.get(key); + return value == null ? null : value.nextKey; + } + + public Object priorKey(Object key) { + Value value = rows.get(key); + return value == null ? 
null : value.prevKey; + } + + public Object firstKey() { + return firstKey; + } + + public Object lastKey() { + return lastKey; + } + + public boolean existsAt(Object key) { + return rows.get(key) != null; + } + + public int count() { + return rows.size(); + } + + @Override + public TableClass hplClass() { + return hplClass; + } + + private static class Value { + private Row row; + private Object prevKey; + private Object nextKey; + + public Value(Row row, Object prevKey) { + this.row = row; + this.prevKey = prevKey; + } + + public void setPrevKey(Object prevKey) { + this.prevKey = prevKey; + } + + public void setNextKey(Object nextKey) { + this.nextKey = nextKey; + } + } + + @Override + public String toString() { + final StringBuilder sb = new StringBuilder("Table{"); + sb.append("hplClass=").append(hplClass.getClass()); + sb.append(", size=").append(count()); + sb.append(", lastKey=").append(lastKey); + sb.append(", firstKey=").append(firstKey); + sb.append('}'); + return sb.toString(); + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/objects/TableClass.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/objects/TableClass.java new file mode 100644 index 00000000000000..ef75f6007516d4 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/objects/TableClass.java @@ -0,0 +1,135 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/objects/TableClass.java +// and modified by Doris + +package org.apache.doris.hplsql.objects; + +import org.apache.doris.hplsql.ColumnDefinition; +import org.apache.doris.hplsql.Row; +import org.apache.doris.hplsql.Var; +import org.apache.doris.hplsql.objects.MethodParams.Arity; + +import java.util.List; + + +public class TableClass implements HplClass { + private static final MethodDictionary

methodDictionary = new MethodDictionary<>(); + private final String typeName; + private final List columns; + private final boolean rowType; + + static { + methodDictionary.put("first", (self, args) -> { + Arity.NULLARY.check("first", args); + return wrap(self.firstKey()); + }); + methodDictionary.put("last", (self, args) -> { + Arity.NULLARY.check("last", args); + return wrap(self.lastKey()); + }); + methodDictionary.put("next", (self, args) -> { + Long key = new MethodParams("next", args, Arity.UNARY).longAt(0); + return wrap(self.nextKey(key)); + }); + methodDictionary.put("prior", (self, args) -> { + Long key = new MethodParams("prior", args, Arity.UNARY).longAt(0); + return wrap(self.priorKey(key)); + }); + methodDictionary.put("count", (self, args) -> { + Arity.NULLARY.check("count", args); + return new Var(Long.valueOf(self.count())); + }); + methodDictionary.put("exists", (self, args) -> { + Long key = new MethodParams("exists", args, Arity.UNARY).longAt(0); + return new Var(self.existsAt(key)); + }); + methodDictionary.put("delete", (self, args) -> { + Arity.max(3).check("delete", args); + if (args.isEmpty()) { + self.removeAll(); + } else if (args.size() == 1) { + self.removeAt(args.get(0).value); + } else { + self.removeFromTo(args.get(0).value, args.get(1).value); + } + return null; + }); + methodDictionary.put(MethodDictionary.__GETITEM__, (self, args) -> { + Long key = new MethodParams(MethodDictionary.__GETITEM__, args, Arity.UNARY).longAt(0); + Row row = self.at(key); + if (row == null) { + return Var.Null; + } + if (self.hplClass().rowType()) { + Var var = new Var(); + var.setType(Var.Type.ROW.name()); + var.setValue(row); + return var; + } + return row.getValue(0); + }); + methodDictionary.put(MethodDictionary.__SETITEM__, (self, args) -> { + MethodParams params = new MethodParams(MethodDictionary.__SETITEM__, args, Arity.BINARY); + long key = params.longAt(0); + if (self.hplClass().rowType()) { + self.put(key, params.rowAt(1)); + } else { // 
single column + Row row = new Row(); + row.addColumn( + self.hplClass().columns().get(0).columnName(), + self.hplClass().columns.get(0).columnTypeString(), + args.get(1)); + self.put(key, row); + } + return Var.Null; + }); + } + + private static Var wrap(Object result) { + return result != null ? new Var((Long) result) : Var.Null; + } + + public TableClass(String typeName, List columns, boolean rowType) { + this.typeName = typeName; + this.columns = columns; + this.rowType = rowType; + } + + public String typeName() { + return typeName; + } + + public List columns() { + return columns; + } + + @Override + public Table newInstance() { + return new Table(this); + } + + @Override + public MethodDictionary methodDictionary() { + return methodDictionary; + } + + public boolean rowType() { + return rowType; + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/objects/UtlFile.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/objects/UtlFile.java new file mode 100644 index 00000000000000..9d5d2f5cb2e02e --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/objects/UtlFile.java @@ -0,0 +1,78 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/objects/UtlFile.java +// and modified by Doris + +package org.apache.doris.hplsql.objects; + +import org.apache.doris.hplsql.File; + +import java.io.EOFException; +import java.io.IOException; + +public class UtlFile implements HplObject { + private final UtlFileClass hplClass; + + public UtlFile(UtlFileClass hplClass) { + this.hplClass = hplClass; + } + + @Override + public HplClass hplClass() { + return hplClass; + } + + public File fileOpen(String dir, String name, boolean write, boolean overwrite) { + File file = new File(); + if (write) { + file.create(dir, name, overwrite); + } else { + file.open(dir, name); + } + return file; + } + + public void fileClose(File file) { + file.close(); + } + + public String getLine(File file) { + StringBuilder out = new StringBuilder(); + try { + while (true) { + char c = file.readChar(); + if (c == '\n') { + break; + } + out.append(c); + } + } catch (IOException e) { + if (!(e instanceof EOFException)) { + out.setLength(0); + } + } + return out.toString(); + } + + public void put(File file, String str, boolean newLine) { + file.writeString(str); + if (newLine) { + file.writeString("\n"); + } + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/objects/UtlFileClass.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/objects/UtlFileClass.java new file mode 100644 index 00000000000000..0e054e64f2555e --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/objects/UtlFileClass.java @@ -0,0 +1,80 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/objects/UtlFileClass.java +// and modified by Doris + +package org.apache.doris.hplsql.objects; + +import org.apache.doris.hplsql.File; +import org.apache.doris.hplsql.Var; +import org.apache.doris.hplsql.objects.MethodParams.Arity; + +public class UtlFileClass implements HplClass { + public static final UtlFileClass INSTANCE = new UtlFileClass(); + private final MethodDictionary methodDictionary = new MethodDictionary(); + + private UtlFileClass() { + methodDictionary.put("fopen", (self, args) -> { + MethodParams params = new MethodParams("fopen", args, Arity.min(2)); + String dir = params.stringAt(0); + String name = params.stringAt(1); + boolean write = true; + boolean overwrite = false; + if (args.size() > 2) { + String mode = params.stringAt(2); + if (mode.equalsIgnoreCase("r")) { + write = false; + } else if (mode.equalsIgnoreCase("w")) { + write = true; + overwrite = true; + } + } + File file = self.fileOpen(dir, name, write, overwrite); + return new Var(Var.Type.FILE, file); + }); + methodDictionary.put("get_line", (self, args) -> { + MethodParams params = new MethodParams("get_line", args, Arity.UNARY); + return new Var(self.getLine(params.fileAt(0))); + }); + methodDictionary.put("put_line", (self, args) -> { + MethodParams params = new MethodParams("put_line", args, Arity.BINARY); + self.put(params.fileAt(0), params.stringAt(1), true); + return null; + }); + methodDictionary.put("put", (self, args) -> { + MethodParams 
params = new MethodParams("put", args, Arity.BINARY); + self.put(params.fileAt(0), params.stringAt(1), false); + return null; + }); + methodDictionary.put("fclose", (self, args) -> { + File file = new MethodParams("fclose", args, Arity.UNARY).fileAt(0); + self.fileClose(file); + return null; + }); + } + + @Override + public UtlFile newInstance() { + return new UtlFile(this); + } + + @Override + public MethodDictionary methodDictionary() { + return methodDictionary; + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/packages/DorisPackageRegistry.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/packages/DorisPackageRegistry.java new file mode 100644 index 00000000000000..a5098db728df25 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/packages/DorisPackageRegistry.java @@ -0,0 +1,95 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/packages/HmsPackageRegistry.java +// and modified by Doris + +package org.apache.doris.hplsql.packages; + +import org.apache.doris.hplsql.store.HplsqlPackage; +import org.apache.doris.hplsql.store.MetaClient; +import org.apache.doris.qe.ConnectContext; + +import org.apache.commons.lang3.StringUtils; +import org.apache.thrift.TException; + +import java.util.Optional; + +public class DorisPackageRegistry implements PackageRegistry { + private final MetaClient client; + + public DorisPackageRegistry(MetaClient client) { + this.client = client; + } + + @Override + public Optional getPackage(String name) { + try { + HplsqlPackage pkg = findPackage(name); + return pkg == null + ? Optional.empty() + : Optional.of(pkg.getHeader() + ";\n" + pkg.getBody()); + } catch (TException e) { + throw new RuntimeException(e); + } + } + + @Override + public void createPackageHeader(String name, String header, boolean replace) { + try { + HplsqlPackage existing = findPackage(name); + if (existing != null && !replace) { + throw new RuntimeException("Package " + name + " already exists"); + } + savePackage(name, header, ""); + } catch (TException e) { + throw new RuntimeException(e); + } + } + + @Override + public void createPackageBody(String name, String body, boolean replace) { + try { + HplsqlPackage existing = findPackage(name); + if (existing == null || StringUtils.isEmpty(existing.getHeader())) { + throw new RuntimeException("Package header does not exists " + name); + } + if (StringUtils.isNotEmpty(existing.getBody()) && !replace) { + throw new RuntimeException("Package body " + name + " already exists"); + } + savePackage(name, existing.getHeader(), body); + } catch (TException e) { + throw new RuntimeException(e); + } + } + + private HplsqlPackage findPackage(String name) throws TException { + return client.getHplsqlPackage(name.toUpperCase(), 
ConnectContext.get().getCurrentCatalog().getName(), + ConnectContext.get().getDatabase()); + } + + @Override + public void dropPackage(String name) { + client.dropHplsqlPackage(name, ConnectContext.get().getCurrentCatalog().getName(), + ConnectContext.get().getDatabase()); + } + + private void savePackage(String name, String header, String body) { + client.addHplsqlPackage(name.toUpperCase(), ConnectContext.get().getCurrentCatalog().getName(), + ConnectContext.get().getDatabase(), ConnectContext.get().getQualifiedUser(), header, body); + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/packages/InMemoryPackageRegistry.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/packages/InMemoryPackageRegistry.java new file mode 100644 index 00000000000000..51128bc57f587c --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/packages/InMemoryPackageRegistry.java @@ -0,0 +1,74 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/packages/InMemoryPackageRegistry.java +// and modified by Doris + +package org.apache.doris.hplsql.packages; + +import org.apache.commons.lang3.StringUtils; + +import java.util.HashMap; +import java.util.Map; +import java.util.Optional; + +public class InMemoryPackageRegistry implements PackageRegistry { + private Map registry = new HashMap<>(); + + @Override + public Optional getPackage(String name) { + Source src = registry.get(name.toUpperCase()); + return src == null + ? Optional.empty() + : Optional.of(src.header + ";\n" + src.body); + } + + @Override + public void createPackageHeader(String name, String header, boolean replace) { + if (registry.containsKey(name) && !replace) { + throw new RuntimeException("Package " + name + " already exits"); + } + registry.put(name, new Source(header, "")); + } + + @Override + public void createPackageBody(String name, String body, boolean replace) { + Source existing = registry.get(name); + if (existing == null || StringUtils.isEmpty(existing.header)) { + throw new RuntimeException("Package header does not exists " + name); + } + if (StringUtils.isNotEmpty(existing.body) && !replace) { + throw new RuntimeException("Package body " + name + " already exits"); + } + registry.getOrDefault(name, new Source("", "")).body = body; + } + + @Override + public void dropPackage(String name) { + registry.remove(name); + } + + private static class Source { + String header; + String body; + + public Source(String header, String body) { + this.header = header; + this.body = body; + } + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/packages/PackageRegistry.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/packages/PackageRegistry.java new file mode 100644 index 00000000000000..484e41fac7363d --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/packages/PackageRegistry.java @@ 
-0,0 +1,33 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// This file is copied from +// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/packages/PackageRegistry.java +// and modified by Doris + +package org.apache.doris.hplsql.packages; + +import java.util.Optional; + +public interface PackageRegistry { + Optional getPackage(String name); + + void createPackageHeader(String name, String header, boolean replace); + + void createPackageBody(String name, String body, boolean replace); + + void dropPackage(String name); +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/store/HplsqlManager.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/store/HplsqlManager.java new file mode 100644 index 00000000000000..1cc0bcf55213a4 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/store/HplsqlManager.java @@ -0,0 +1,122 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. 
The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.doris.hplsql.store; + +import org.apache.doris.catalog.Env; +import org.apache.doris.common.io.Text; +import org.apache.doris.common.io.Writable; +import org.apache.doris.persist.gson.GsonUtils; + +import com.google.common.collect.Maps; +import com.google.gson.annotations.SerializedName; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import java.io.DataInput; +import java.io.DataOutput; +import java.io.IOException; +import java.util.Map; + +public class HplsqlManager implements Writable { + private static final Logger LOG = LogManager.getLogger(HplsqlManager.class); + + @SerializedName(value = "nameToStoredProcedures") + Map nameToStoredProcedures = Maps.newConcurrentMap(); + + @SerializedName(value = "nameToPackages") + Map nameToPackages = Maps.newConcurrentMap(); + + public HplsqlManager() { + } + + public StoredProcedure getStoredProcedure(StoredKey storedKey) { + return nameToStoredProcedures.get(storedKey); + } + + public void addStoredProcedure(StoredProcedure procedure, boolean isForce) { + StoredKey storedKey = new StoredKey(procedure.getName(), procedure.getCatalogName(), procedure.getDbName()); + if (isForce) { + nameToStoredProcedures.put(storedKey, procedure); + } else if (nameToStoredProcedures.putIfAbsent(storedKey, procedure) != null) { + throw new RuntimeException(storedKey + ", stored procedure already 
exist."); + } + Env.getCurrentEnv().getEditLog().logAddStoredProcedure(procedure); + LOG.info("Add stored procedure success: {}", storedKey); + } + + public void replayAddStoredProcedure(StoredProcedure procedure) { + StoredKey storedKey = new StoredKey(procedure.getName(), procedure.getCatalogName(), procedure.getDbName()); + nameToStoredProcedures.put(storedKey, procedure); + LOG.info("Replay add stored procedure success: {}", storedKey); + } + + public void dropStoredProcedure(StoredKey storedKey) { + nameToStoredProcedures.remove(storedKey); + Env.getCurrentEnv().getEditLog().logDropStoredProcedure(storedKey); + LOG.info("Drop stored procedure success: {}", storedKey); + } + + public void replayDropStoredProcedure(StoredKey storedKey) { + nameToStoredProcedures.remove(storedKey); + LOG.info("Replay drop stored procedure success: {}", storedKey); + } + + public HplsqlPackage getPackage(StoredKey storedKey) { + return nameToPackages.get(storedKey); + } + + public void addPackage(HplsqlPackage pkg, boolean isForce) { + StoredKey storedKey = new StoredKey(pkg.getName(), pkg.getCatalogName(), pkg.getDbName()); + nameToPackages.put(storedKey, pkg); + if (isForce) { + nameToPackages.put(storedKey, pkg); + } else if (nameToPackages.putIfAbsent(storedKey, pkg) != null) { + throw new RuntimeException(storedKey + ", package already exist."); + } + Env.getCurrentEnv().getEditLog().logAddHplsqlPackage(pkg); + LOG.info("Add hplsql package success: {}", storedKey); + } + + public void replayAddPackage(HplsqlPackage pkg) { + StoredKey storedKey = new StoredKey(pkg.getName(), pkg.getCatalogName(), pkg.getDbName()); + nameToPackages.put(storedKey, pkg); + LOG.info("Replay add hplsql package success: {}", storedKey); + } + + public void dropPackage(StoredKey storedKey) { + nameToPackages.remove(storedKey); + Env.getCurrentEnv().getEditLog().logDropHplsqlPackage(storedKey); + LOG.info("Drop hplsql package success: {}", storedKey); + } + + public void replayDropPackage(StoredKey 
storedKey) { + nameToPackages.remove(storedKey); + LOG.info("Replay drop hplsql package success: {}", storedKey); + } + + @Override + public void write(DataOutput out) throws IOException { + String json = GsonUtils.GSON.toJson(this); + Text.writeString(out, json); + } + + public static HplsqlManager read(DataInput in) throws IOException { + String json = Text.readString(in); + return GsonUtils.GSON.fromJson(json, HplsqlManager.class); + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/store/HplsqlPackage.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/store/HplsqlPackage.java new file mode 100644 index 00000000000000..1f84410652c7ce --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/store/HplsqlPackage.java @@ -0,0 +1,74 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package org.apache.doris.hplsql.store; + +import org.apache.doris.common.io.Text; +import org.apache.doris.common.io.Writable; +import org.apache.doris.persist.gson.GsonUtils; +import org.apache.doris.thrift.THplsqlPackage; + +import com.google.gson.annotations.SerializedName; +import lombok.AllArgsConstructor; +import lombok.Getter; + +import java.io.DataInput; +import java.io.DataOutput; +import java.io.IOException; + +@AllArgsConstructor +@Getter +public class HplsqlPackage implements Writable { + @SerializedName(value = "name") + private String name; + + @SerializedName(value = "catalogName") + private String catalogName; + + @SerializedName(value = "dbName") + private String dbName; + + @SerializedName(value = "ownerName") + private String ownerName; + + @SerializedName(value = "header") + private String header; + + @SerializedName(value = "body") + private String body; + + public static HplsqlPackage read(DataInput in) throws IOException { + String json = Text.readString(in); + return GsonUtils.GSON.fromJson(json, HplsqlPackage.class); + } + + public THplsqlPackage toThrift() { + return new THplsqlPackage().setName(name).setCatalogName(catalogName).setDbName(dbName).setOwnerName(ownerName) + .setHeader(header).setBody(body); + } + + public static HplsqlPackage fromThrift(THplsqlPackage pkg) { + return new HplsqlPackage(pkg.getName(), pkg.getCatalogName(), pkg.getDbName(), pkg.getOwnerName(), + pkg.getHeader(), pkg.getBody()); + } + + @Override + public void write(DataOutput out) throws IOException { + String json = GsonUtils.GSON.toJson(this); + Text.writeString(out, json); + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/store/MetaClient.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/store/MetaClient.java new file mode 100644 index 00000000000000..a2c8c97bacabe2 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/store/MetaClient.java @@ -0,0 +1,198 @@ +// Licensed to the Apache Software Foundation (ASF) under 
one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.doris.hplsql.store; + +import org.apache.doris.catalog.Env; +import org.apache.doris.common.ClientPool; +import org.apache.doris.mysql.privilege.PrivPredicate; +import org.apache.doris.qe.ConnectContext; +import org.apache.doris.thrift.FrontendService; +import org.apache.doris.thrift.TAddHplsqlPackageRequest; +import org.apache.doris.thrift.TAddStoredProcedureRequest; +import org.apache.doris.thrift.THplsqlPackage; +import org.apache.doris.thrift.THplsqlPackageRequest; +import org.apache.doris.thrift.TNetworkAddress; +import org.apache.doris.thrift.TStatus; +import org.apache.doris.thrift.TStatusCode; +import org.apache.doris.thrift.TStoredKey; +import org.apache.doris.thrift.TStoredProcedure; +import org.apache.doris.thrift.TStoredProcedureRequest; + +import org.apache.thrift.TException; + +import java.util.Objects; + +public class MetaClient { + public MetaClient() { + } + + public void addStoredProcedure(String name, String catalogName, String dbName, String ownerName, String source, + boolean isForce) { + checkPriv(); + if (Env.getCurrentEnv().isMaster()) { + Env.getCurrentEnv().getHplsqlManager() + .addStoredProcedure(new StoredProcedure(name, catalogName, dbName, ownerName, source), isForce); + } 
else { + addStoredProcedureThrift(name, catalogName, dbName, ownerName, source, isForce); + } + } + + public void dropStoredProcedure(String name, String catalogName, String dbName) { + checkPriv(); + if (Env.getCurrentEnv().isMaster()) { + Env.getCurrentEnv().getHplsqlManager().dropStoredProcedure(new StoredKey(name, catalogName, dbName)); + } else { + dropStoredProcedureThrift(name, catalogName, dbName); + } + } + + public StoredProcedure getStoredProcedure(String name, String catalogName, String dbName) { + return Env.getCurrentEnv().getHplsqlManager().getStoredProcedure(new StoredKey(name, catalogName, dbName)); + } + + public void addHplsqlPackage(String name, String catalogName, String dbName, String ownerName, String header, + String body) { + checkPriv(); + if (Env.getCurrentEnv().isMaster()) { + Env.getCurrentEnv().getHplsqlManager() + .addPackage(new HplsqlPackage(name, catalogName, dbName, ownerName, header, body), + false); + } else { + addHplsqlPackageThrift(name, catalogName, dbName, ownerName, header, body); + } + } + + public void dropHplsqlPackage(String name, String catalogName, String dbName) { + checkPriv(); + if (Env.getCurrentEnv().isMaster()) { + Env.getCurrentEnv().getHplsqlManager().dropPackage(new StoredKey(name, catalogName, dbName)); + } else { + dropHplsqlPackageThrift(name, catalogName, dbName); + } + } + + public HplsqlPackage getHplsqlPackage(String name, String catalogName, String dbName) { + return Env.getCurrentEnv().getHplsqlManager().getPackage(new StoredKey(name, catalogName, dbName)); + } + + protected void addStoredProcedureThrift(String name, String catalogName, String dbName, String ownerName, + String source, boolean isForce) { + TStoredProcedure tStoredProcedure = new TStoredProcedure().setName(name).setCatalogName(catalogName) + .setDbName(dbName).setOwnerName(ownerName).setSource(source); + TAddStoredProcedureRequest tAddStoredProcedureRequest = new TAddStoredProcedureRequest() + .setStoredProcedure(tStoredProcedure); + 
tAddStoredProcedureRequest.setIsForce(isForce); + + try { + sendUpdateRequest(tAddStoredProcedureRequest, + (request, client) -> client.addStoredProcedure(request).getStatus()); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + protected void dropStoredProcedureThrift(String name, String catalogName, String dbName) { + TStoredKey tStoredKey = new TStoredKey().setName(name).setCatalogName(catalogName).setDbName(dbName); + TStoredProcedureRequest tStoredProcedureRequest = new TStoredProcedureRequest().setStoredKey(tStoredKey); + + try { + sendUpdateRequest(tStoredProcedureRequest, + (request, client) -> client.dropStoredProcedure(request).getStatus()); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + protected void addHplsqlPackageThrift(String name, String catalogName, String dbName, String ownerName, + String header, String body) { + THplsqlPackage tHplsqlPackage = new THplsqlPackage().setName(name).setCatalogName(catalogName) + .setDbName(dbName).setOwnerName(ownerName).setHeader(header).setBody(body); + TAddHplsqlPackageRequest tAddHplsqlPackageRequest = new TAddHplsqlPackageRequest() + .setHplsqlPackage(tHplsqlPackage); + + try { + sendUpdateRequest(tAddHplsqlPackageRequest, + (request, client) -> client.addHplsqlPackage(request).getStatus()); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + protected void dropHplsqlPackageThrift(String name, String catalogName, String dbName) { + TStoredKey tStoredKey = new TStoredKey().setName(name).setCatalogName(catalogName).setDbName(dbName); + THplsqlPackageRequest tHplsqlPackageRequest = new THplsqlPackageRequest().setStoredKey(tStoredKey); + + try { + sendUpdateRequest(tHplsqlPackageRequest, + (request, client) -> client.dropHplsqlPackage(request).getStatus()); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + private void checkPriv() { + if (!Env.getCurrentEnv().getAccessManager() + 
.checkGlobalPriv(ConnectContext.get().getCurrentUserIdentity(), PrivPredicate.ADMIN)) { + throw new RuntimeException( + "Access denied; you need (at least one of) the ADMIN privilege(s) for this operation"); + } + } + + private void sendUpdateRequest(Request request, + BiFunction sendRequest) throws Exception { + TNetworkAddress masterAddress = new TNetworkAddress(Env.getCurrentEnv().getMasterHost(), + Env.getCurrentEnv().getMasterRpcPort()); + FrontendService.Client client = ClientPool.frontendPool.borrowObject(masterAddress); + TStatus status; + boolean isReturnToPool = true; + try { + status = sendRequest.apply(request, client); + checkResult(status); + } catch (Exception e) { + if (!ClientPool.frontendPool.reopen(client)) { + isReturnToPool = false; + throw e; + } + + status = sendRequest.apply(request, client); + checkResult(status); + } finally { + if (isReturnToPool) { + ClientPool.frontendPool.returnObject(masterAddress, client); + } else { + ClientPool.frontendPool.invalidateObject(masterAddress, client); + } + } + } + + private void checkResult(TStatus status) throws Exception { + if (Objects.isNull(status) || !status.isSetStatusCode()) { + throw new TException("Access master error, no status set."); + } + if (status.getStatusCode().equals(TStatusCode.OK)) { + return; + } + throw new Exception( + "Access fe error, code:" + status.getStatusCode().name() + ", mgs:" + status.getErrorMsgs()); + } + + @FunctionalInterface + public interface BiFunction { + R apply(T t, U u) throws Exception; + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/store/StoredKey.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/store/StoredKey.java new file mode 100644 index 00000000000000..de29a4f849babf --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/store/StoredKey.java @@ -0,0 +1,90 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. 
See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.doris.hplsql.store; + +import org.apache.doris.common.io.Text; +import org.apache.doris.common.io.Writable; +import org.apache.doris.persist.gson.GsonUtils; +import org.apache.doris.thrift.TStoredKey; + +import com.google.gson.annotations.SerializedName; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import java.io.DataInput; +import java.io.DataOutput; +import java.io.IOException; +import java.util.Objects; + +public class StoredKey implements Writable { + private static final Logger LOG = LogManager.getLogger(StoredKey.class); + + @SerializedName(value = "name") + private String name; + + @SerializedName(value = "catalogName") + private String catalogName; + + @SerializedName(value = "dbName") + private String dbName; + + public StoredKey(String name, String catalogName, String dbName) { + this.name = name; + this.catalogName = catalogName; + this.dbName = dbName; + } + + public TStoredKey toThrift() { + return new TStoredKey().setName(name).setCatalogName(catalogName).setDbName(dbName); + } + + public static StoredKey fromThrift(TStoredKey key) { + return new StoredKey(key.getName(), key.getCatalogName(), key.getDbName()); + } + + @Override + public int hashCode() { + return Objects.hash(name, catalogName, 
dbName); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof StoredKey)) { + return false; + } + return Objects.equals(this.name, ((StoredKey) obj).name) && Objects.equals(this.catalogName, + ((StoredKey) obj).catalogName) + && Objects.equals(this.dbName, ((StoredKey) obj).dbName); + } + + @Override + public String toString() { + return "name:" + name + ", catalogName:" + catalogName + ", dbName:" + dbName; + } + + @Override + public void write(DataOutput out) throws IOException { + String json = GsonUtils.GSON.toJson(this); + Text.writeString(out, json); + } + + public static StoredKey read(DataInput in) throws IOException { + String json = Text.readString(in); + return GsonUtils.GSON.fromJson(json, StoredKey.class); + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/hplsql/store/StoredProcedure.java b/fe/fe-core/src/main/java/org/apache/doris/hplsql/store/StoredProcedure.java new file mode 100644 index 00000000000000..a13682a6cf8641 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/hplsql/store/StoredProcedure.java @@ -0,0 +1,71 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package org.apache.doris.hplsql.store; + +import org.apache.doris.common.io.Text; +import org.apache.doris.common.io.Writable; +import org.apache.doris.persist.gson.GsonUtils; +import org.apache.doris.thrift.TStoredProcedure; + +import com.google.gson.annotations.SerializedName; +import lombok.AllArgsConstructor; +import lombok.Getter; + +import java.io.DataInput; +import java.io.DataOutput; +import java.io.IOException; + +@AllArgsConstructor +@Getter +public class StoredProcedure implements Writable { + @SerializedName(value = "name") + private String name; + + @SerializedName(value = "catalogName") + private String catalogName; + + @SerializedName(value = "dbName") + private String dbName; + + @SerializedName(value = "ownerName") + private String ownerName; + + @SerializedName(value = "source") + private String source; + + public TStoredProcedure toThrift() { + return new TStoredProcedure().setName(name).setCatalogName(catalogName).setDbName(dbName) + .setOwnerName(ownerName).setSource(source); + } + + public static StoredProcedure fromThrift(TStoredProcedure procedure) { + return new StoredProcedure(procedure.getName(), procedure.getCatalogName(), procedure.getDbName(), + procedure.getOwnerName(), procedure.source); + } + + @Override + public void write(DataOutput out) throws IOException { + String json = GsonUtils.GSON.toJson(this); + Text.writeString(out, json); + } + + public static StoredProcedure read(DataInput in) throws IOException { + String json = Text.readString(in); + return GsonUtils.GSON.fromJson(json, StoredProcedure.class); + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/journal/JournalEntity.java b/fe/fe-core/src/main/java/org/apache/doris/journal/JournalEntity.java index 5f87e553ef0c83..d0c6178d958541 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/journal/JournalEntity.java +++ b/fe/fe-core/src/main/java/org/apache/doris/journal/JournalEntity.java @@ -44,6 +44,9 @@ import org.apache.doris.datasource.InitDatabaseLog; 
import org.apache.doris.datasource.InitTableLog; import org.apache.doris.ha.MasterInfo; +import org.apache.doris.hplsql.store.HplsqlPackage; +import org.apache.doris.hplsql.store.StoredKey; +import org.apache.doris.hplsql.store.StoredProcedure; import org.apache.doris.journal.bdbje.Timestamp; import org.apache.doris.load.DeleteInfo; import org.apache.doris.load.ExportJob; @@ -825,6 +828,26 @@ public void readFields(DataInput in) throws IOException { isRead = true; break; } + case OperationType.OP_ADD_STORED_PROCEDURE: { + data = StoredProcedure.read(in); + isRead = true; + break; + } + case OperationType.OP_DROP_STORED_PROCEDURE: { + data = StoredKey.read(in); + isRead = true; + break; + } + case OperationType.OP_ADD_HPLSQL_PACKAGE: { + data = HplsqlPackage.read(in); + isRead = true; + break; + } + case OperationType.OP_DROP_HPLSQL_PACKAGE: { + data = StoredKey.read(in); + isRead = true; + break; + } case OperationType.OP_ALTER_DATABASE_PROPERTY: { data = AlterDatabasePropertyInfo.read(in); isRead = true; diff --git a/fe/fe-core/src/main/java/org/apache/doris/ldap/LdapAuthenticate.java b/fe/fe-core/src/main/java/org/apache/doris/ldap/LdapAuthenticate.java index e7644e07169c2a..f11d9813b1cc23 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/ldap/LdapAuthenticate.java +++ b/fe/fe-core/src/main/java/org/apache/doris/ldap/LdapAuthenticate.java @@ -67,7 +67,7 @@ public static boolean authenticate(ConnectContext context, String password, Stri UserIdentity tempUserIdentity = UserIdentity.createAnalyzedUserIdentWithIp(qualifiedUser, remoteIp); // Search the user in doris. 
List userIdentities = Env.getCurrentEnv().getAuth() - .getUserIdentityForLdap(qualifiedUser, remoteIp); + .getUserIdentityUncheckPasswd(qualifiedUser, remoteIp); UserIdentity userIdentity; if (userIdentities.isEmpty()) { userIdentity = tempUserIdentity; diff --git a/fe/fe-core/src/main/java/org/apache/doris/ldap/LdapManager.java b/fe/fe-core/src/main/java/org/apache/doris/ldap/LdapManager.java index 4d1404797a293e..adc79e8485bbc6 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/ldap/LdapManager.java +++ b/fe/fe-core/src/main/java/org/apache/doris/ldap/LdapManager.java @@ -116,7 +116,7 @@ public boolean checkUserPasswd(String fullName, String passwd) { public boolean checkUserPasswd(String fullName, String passwd, String remoteIp, List currentUser) { if (checkUserPasswd(fullName, passwd)) { - currentUser.addAll(Env.getCurrentEnv().getAuth().getUserIdentityForLdap(fullName, remoteIp)); + currentUser.addAll(Env.getCurrentEnv().getAuth().getUserIdentityUncheckPasswd(fullName, remoteIp)); return true; } return false; diff --git a/fe/fe-core/src/main/java/org/apache/doris/mysql/MysqlChannel.java b/fe/fe-core/src/main/java/org/apache/doris/mysql/MysqlChannel.java index 0d24314204b23b..e3b1db7eea2fed 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/mysql/MysqlChannel.java +++ b/fe/fe-core/src/main/java/org/apache/doris/mysql/MysqlChannel.java @@ -461,6 +461,18 @@ public void sendOnePacket(ByteBuffer packet) throws IOException { } } + public void sendOnePacket(Object[] row) throws IOException { + ByteBuffer packet; + serializer.reset(); + for (Object value : row) { + byte[] bytes = String.valueOf(value).getBytes(); + serializer.writeVInt(bytes.length); + serializer.writeBytes(bytes); + } + packet = serializer.toByteBuffer(); + sendOnePacket(packet); + } + public void sendAndFlush(ByteBuffer packet) throws IOException { sendOnePacket(packet); flush(); diff --git a/fe/fe-core/src/main/java/org/apache/doris/mysql/privilege/Auth.java 
b/fe/fe-core/src/main/java/org/apache/doris/mysql/privilege/Auth.java index a2ab5a7b78e1f2..48204a5b75d155 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/mysql/privilege/Auth.java +++ b/fe/fe-core/src/main/java/org/apache/doris/mysql/privilege/Auth.java @@ -229,7 +229,7 @@ public void checkPlainPassword(String remoteUser, String remoteHost, String remo } } - public List getUserIdentityForLdap(String remoteUser, String remoteHost) { + public List getUserIdentityUncheckPasswd(String remoteUser, String remoteHost) { return userManager.getUserIdentityUncheckPasswd(remoteUser, remoteHost); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/persist/EditLog.java b/fe/fe-core/src/main/java/org/apache/doris/persist/EditLog.java index a6c7c9d91b4e8c..3e2aea51ef7230 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/persist/EditLog.java +++ b/fe/fe-core/src/main/java/org/apache/doris/persist/EditLog.java @@ -52,6 +52,9 @@ import org.apache.doris.datasource.InitCatalogLog; import org.apache.doris.datasource.InitDatabaseLog; import org.apache.doris.ha.MasterInfo; +import org.apache.doris.hplsql.store.HplsqlPackage; +import org.apache.doris.hplsql.store.StoredKey; +import org.apache.doris.hplsql.store.StoredProcedure; import org.apache.doris.journal.Journal; import org.apache.doris.journal.JournalCursor; import org.apache.doris.journal.JournalEntity; @@ -1034,6 +1037,22 @@ public static void loadJournal(Env env, Long logId, JournalEntity journal) { env.getAnalysisManager().replayDeleteAnalysisTask((AnalyzeDeletionLog) journal.getData()); break; } + case OperationType.OP_ADD_STORED_PROCEDURE: { + env.getHplsqlManager().replayAddStoredProcedure((StoredProcedure) journal.getData()); + break; + } + case OperationType.OP_DROP_STORED_PROCEDURE: { + env.getHplsqlManager().replayDropStoredProcedure((StoredKey) journal.getData()); + break; + } + case OperationType.OP_ADD_HPLSQL_PACKAGE: { + env.getHplsqlManager().replayAddPackage((HplsqlPackage) journal.getData()); + 
break; + } + case OperationType.OP_DROP_HPLSQL_PACKAGE: { + env.getHplsqlManager().replayDropPackage((StoredKey) journal.getData()); + break; + } case OperationType.OP_ALTER_DATABASE_PROPERTY: { AlterDatabasePropertyInfo alterDatabasePropertyInfo = (AlterDatabasePropertyInfo) journal.getData(); LOG.info("replay alter database property: {}", alterDatabasePropertyInfo); @@ -1590,6 +1609,22 @@ public void logDropWorkloadGroup(DropWorkloadGroupOperationLog operationLog) { logEdit(OperationType.OP_DROP_WORKLOAD_GROUP, operationLog); } + public void logAddStoredProcedure(StoredProcedure storedProcedure) { + logEdit(OperationType.OP_ADD_STORED_PROCEDURE, storedProcedure); + } + + public void logDropStoredProcedure(StoredKey storedKey) { + logEdit(OperationType.OP_DROP_STORED_PROCEDURE, storedKey); + } + + public void logAddHplsqlPackage(HplsqlPackage pkg) { + logEdit(OperationType.OP_ADD_HPLSQL_PACKAGE, pkg); + } + + public void logDropHplsqlPackage(StoredKey storedKey) { + logEdit(OperationType.OP_DROP_HPLSQL_PACKAGE, storedKey); + } + public void logAlterStoragePolicy(StoragePolicy storagePolicy) { logEdit(OperationType.OP_ALTER_STORAGE_POLICY, storagePolicy); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/persist/OperationType.java b/fe/fe-core/src/main/java/org/apache/doris/persist/OperationType.java index 2b935e74c559c2..261b86fd61b6ad 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/persist/OperationType.java +++ b/fe/fe-core/src/main/java/org/apache/doris/persist/OperationType.java @@ -313,6 +313,15 @@ public class OperationType { // change an auto increment id for a column public static final short OP_UPDATE_AUTO_INCREMENT_ID = 437; + // hplsql 440 ~ 450 + public static final short OP_ADD_STORED_PROCEDURE = 440; + + public static final short OP_DROP_STORED_PROCEDURE = 441; + + public static final short OP_ADD_HPLSQL_PACKAGE = 442; + + public static final short OP_DROP_HPLSQL_PACKAGE = 443; + /** * Get opcode name by op code. 
**/ diff --git a/fe/fe-core/src/main/java/org/apache/doris/persist/meta/MetaPersistMethod.java b/fe/fe-core/src/main/java/org/apache/doris/persist/meta/MetaPersistMethod.java index bdc3a5a2246696..dcfdc1790bdb67 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/persist/meta/MetaPersistMethod.java +++ b/fe/fe-core/src/main/java/org/apache/doris/persist/meta/MetaPersistMethod.java @@ -230,6 +230,12 @@ public static MetaPersistMethod create(String name) throws NoSuchMethodException metaPersistMethod.writeMethod = Env.class.getDeclaredMethod("saveAnalysisMgr", CountingDataOutputStream.class, long.class); break; + case "hplsql": + metaPersistMethod.readMethod = Env.class.getDeclaredMethod("loadHplsqlStored", DataInputStream.class, + long.class); + metaPersistMethod.writeMethod = Env.class.getDeclaredMethod("saveHplsqlStored", + CountingDataOutputStream.class, long.class); + break; default: break; } diff --git a/fe/fe-core/src/main/java/org/apache/doris/persist/meta/PersistMetaModules.java b/fe/fe-core/src/main/java/org/apache/doris/persist/meta/PersistMetaModules.java index 6e99a6757fe1be..e22f48aaba1a23 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/persist/meta/PersistMetaModules.java +++ b/fe/fe-core/src/main/java/org/apache/doris/persist/meta/PersistMetaModules.java @@ -39,7 +39,7 @@ public class PersistMetaModules { "globalVariable", "cluster", "broker", "resources", "exportJob", "syncJob", "backupHandler", "paloAuth", "transactionState", "colocateTableIndex", "routineLoadJobs", "loadJobV2", "smallFiles", "plugins", "deleteHandler", "sqlBlockRule", "policy", "mtmvJobManager", "globalFunction", "workloadGroups", - "binlogs", "resourceGroups", "AnalysisMgr"); + "binlogs", "resourceGroups", "AnalysisMgr", "hplsql"); // Modules in this list is deprecated and will not be saved in meta file. 
(also should not be in MODULE_NAMES) public static final ImmutableList DEPRECATED_MODULE_NAMES = ImmutableList.of( diff --git a/fe/fe-core/src/main/java/org/apache/doris/qe/ConnectContext.java b/fe/fe-core/src/main/java/org/apache/doris/qe/ConnectContext.java index dd42960ae47307..29163a891c7061 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/qe/ConnectContext.java +++ b/fe/fe-core/src/main/java/org/apache/doris/qe/ConnectContext.java @@ -29,6 +29,7 @@ import org.apache.doris.datasource.CatalogIf; import org.apache.doris.datasource.InternalCatalog; import org.apache.doris.datasource.SessionContext; +import org.apache.doris.hplsql.executor.HplsqlQueryExecutor; import org.apache.doris.mysql.DummyMysqlChannel; import org.apache.doris.mysql.MysqlCapability; import org.apache.doris.mysql.MysqlChannel; @@ -148,6 +149,8 @@ public class ConnectContext { // If set to false, the system will not restrict query resources. private boolean isResourceTagsSet = false; + private HplsqlQueryExecutor hplsqlQueryExecutor = null; + private String sqlHash; private JSONObject minidump = null; @@ -269,6 +272,17 @@ public ConnectContext(StreamConnection connection) { } } + public ConnectContext createContext() { + ConnectContext context = new ConnectContext(); + context.setSessionVariable(sessionVariable); + context.setEnv(env); + context.setCluster(clusterName); + context.setDatabase(currentDb); + context.setQualifiedUser(qualifiedUser); + context.setCurrentUserIdentity(currentUserIdentity); + return context; + } + public boolean isTxnModel() { return txnEntry != null && txnEntry.isTxnModel(); } @@ -530,6 +544,14 @@ public StmtExecutor getExecutor() { return executor; } + public HplsqlQueryExecutor getHplsqlQueryExecutor() { + return hplsqlQueryExecutor; + } + + public void setHplsqlQueryExecutor(HplsqlQueryExecutor hplsqlQueryExecutor) { + this.hplsqlQueryExecutor = hplsqlQueryExecutor; + } + public void cleanup() { if (mysqlChannel != null) { mysqlChannel.close(); @@ -650,7 
+672,7 @@ public void checkTimeout(long now) { if (executor != null && executor.isInsertStmt()) { timeoutTag = "insert"; } - //to ms + // to ms long timeout = getExecTimeout() * 1000L; if (delta > timeout) { LOG.warn("kill {} timeout, remote: {}, query timeout: {}", diff --git a/fe/fe-core/src/main/java/org/apache/doris/qe/ConnectProcessor.java b/fe/fe-core/src/main/java/org/apache/doris/qe/ConnectProcessor.java index f190f68280d347..9d102dc4b9fb91 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/qe/ConnectProcessor.java +++ b/fe/fe-core/src/main/java/org/apache/doris/qe/ConnectProcessor.java @@ -46,6 +46,7 @@ import org.apache.doris.common.util.SqlUtils; import org.apache.doris.common.util.Util; import org.apache.doris.datasource.CatalogIf; +import org.apache.doris.hplsql.executor.HplsqlQueryExecutor; import org.apache.doris.metric.MetricRepo; import org.apache.doris.mysql.MysqlChannel; import org.apache.doris.mysql.MysqlCommand; @@ -259,7 +260,7 @@ private void handleExecute() { ctx.setExecutor(executor); executor.execute(); stmtStr = executeStmt.toSql(); - } catch (Throwable e) { + } catch (Throwable e) { // Catch all throwable. // If reach here, maybe palo bug. 
LOG.warn("Process one query failed because unknown reason: ", e); @@ -358,7 +359,23 @@ private void handleQuery() { ending--; } String originStmt = new String(bytes, 1, ending, StandardCharsets.UTF_8); + try { + if (ctx.sessionVariable.isEnableHplsql()) { + HplsqlQueryExecutor hplsqlQueryExecutor = ctx.getHplsqlQueryExecutor(); + if (hplsqlQueryExecutor == null) { + hplsqlQueryExecutor = new HplsqlQueryExecutor(this); + ctx.setHplsqlQueryExecutor(hplsqlQueryExecutor); + } + hplsqlQueryExecutor.execute(originStmt); + } else { + executeQuery(originStmt); + } + } catch (Exception ignored) { + // + } + } + public void executeQuery(String originStmt) throws Exception { String sqlHash = DigestUtils.md5Hex(originStmt); ctx.setSqlHash(sqlHash); ctx.getAuditEventBuilder().reset(); @@ -453,8 +470,7 @@ private void handleQuery() { } catch (Throwable throwable) { handleQueryException(throwable, auditStmt, executor.getParsedStmt(), executor.getQueryStatisticsForAuditLog()); - // execute failed, skip remaining stmts - break; + throw throwable; } finally { executor.addProfileToSpan(); } @@ -634,7 +650,7 @@ private ByteBuffer getResultPacket() { // When any request is completed, it will generally need to send a response packet to the client // This method is used to send a response packet to the client - private void finalizeCommand() throws IOException { + public void finalizeCommand() throws IOException { ByteBuffer packet; if (executor != null && executor.isForwardToMaster() && ctx.getState().getStateType() != QueryState.MysqlStateType.ERR) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/qe/SessionVariable.java b/fe/fe-core/src/main/java/org/apache/doris/qe/SessionVariable.java index 3d9bddeaba91b7..80032b66b5ddf0 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/qe/SessionVariable.java +++ b/fe/fe-core/src/main/java/org/apache/doris/qe/SessionVariable.java @@ -334,6 +334,8 @@ public class SessionVariable implements Serializable, Writable { // Split size for 
ExternalFileScanNode. Default value 0 means use the block size of HDFS/S3. public static final String FILE_SPLIT_SIZE = "file_split_size"; + public static final String ENABLE_HPL_SQL = "enable_hplsql"; + /** * use insert stmt as the unified backend for all loads */ @@ -991,6 +993,9 @@ public void setMaxJoinNumBushyTree(int maxJoinNumBushyTree) { @VariableMgr.VarAttr(name = FILE_SPLIT_SIZE, needForward = true) public long fileSplitSize = 0; + @VariableMgr.VarAttr(name = ENABLE_HPL_SQL) + public boolean enableHplSql = false; + /** * determine should we enable unified load (use insert stmt as the backend for all load) */ @@ -1957,6 +1962,10 @@ public void checkExternalAggPartitionBits(String externalAggPartitionBits) { } } + public boolean isEnableHplsql() { + return enableHplSql; + } + public boolean isEnableFileCache() { return enableFileCache; } diff --git a/fe/fe-core/src/main/java/org/apache/doris/qe/StmtExecutor.java b/fe/fe-core/src/main/java/org/apache/doris/qe/StmtExecutor.java index 19afa0ecd11a03..f2fd6cfd543e8b 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/qe/StmtExecutor.java +++ b/fe/fe-core/src/main/java/org/apache/doris/qe/StmtExecutor.java @@ -1399,7 +1399,11 @@ private void sendResult(boolean isOutfileQuery, boolean isSendFields, Queriable } } - + if (context.sessionVariable.isEnableHplsql()) { + // hplsql will get the returned results without sending them to mysql client. 
+ // see org/apache/doris/hplsql/executor/DorisRowResult.java + return; + } Span fetchResultSpan = context.getTracer().spanBuilder("fetch result").setParent(Context.current()).startSpan(); try (Scope scope = fetchResultSpan.makeCurrent()) { while (true) { @@ -2547,6 +2551,18 @@ private List convertResultBatchToResultRows(TResultBatch batch) { return resultRows; } + public Coordinator getCoord() { + return coord; + } + + public List getColumns() { + return parsedStmt.getColLabels(); + } + + public List getReturnTypes() { + return exprToType(parsedStmt.getResultExprs()); + } + public SummaryProfile getSummaryProfile() { return profile.getSummaryProfile(); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/service/FrontendServiceImpl.java b/fe/fe-core/src/main/java/org/apache/doris/service/FrontendServiceImpl.java index faa79efbb88586..e3750d4573b6ba 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/service/FrontendServiceImpl.java +++ b/fe/fe-core/src/main/java/org/apache/doris/service/FrontendServiceImpl.java @@ -63,6 +63,9 @@ import org.apache.doris.datasource.CatalogIf; import org.apache.doris.datasource.ExternalCatalog; import org.apache.doris.datasource.InternalCatalog; +import org.apache.doris.hplsql.store.HplsqlPackage; +import org.apache.doris.hplsql.store.StoredKey; +import org.apache.doris.hplsql.store.StoredProcedure; import org.apache.doris.master.MasterImpl; import org.apache.doris.mysql.privilege.AccessControllerManager; import org.apache.doris.mysql.privilege.PrivPredicate; @@ -87,6 +90,8 @@ import org.apache.doris.thrift.FrontendServiceVersion; import org.apache.doris.thrift.TAddColumnsRequest; import org.apache.doris.thrift.TAddColumnsResult; +import org.apache.doris.thrift.TAddHplsqlPackageRequest; +import org.apache.doris.thrift.TAddStoredProcedureRequest; import org.apache.doris.thrift.TAutoIncrementRangeRequest; import org.apache.doris.thrift.TAutoIncrementRangeResult; import org.apache.doris.thrift.TBeginTxnRequest; @@ -128,6 +133,8 
@@ import org.apache.doris.thrift.TGetTablesResult; import org.apache.doris.thrift.TGetTabletReplicaInfosRequest; import org.apache.doris.thrift.TGetTabletReplicaInfosResult; +import org.apache.doris.thrift.THplsqlPackageRequest; +import org.apache.doris.thrift.THplsqlPackageResult; import org.apache.doris.thrift.TInitExternalCtlMetaRequest; import org.apache.doris.thrift.TInitExternalCtlMetaResult; import org.apache.doris.thrift.TListPrivilegesResult; @@ -165,6 +172,8 @@ import org.apache.doris.thrift.TSnapshotType; import org.apache.doris.thrift.TStatus; import org.apache.doris.thrift.TStatusCode; +import org.apache.doris.thrift.TStoredProcedureRequest; +import org.apache.doris.thrift.TStoredProcedureResult; import org.apache.doris.thrift.TStreamLoadMultiTablePutResult; import org.apache.doris.thrift.TStreamLoadPutRequest; import org.apache.doris.thrift.TStreamLoadPutResult; @@ -2637,6 +2646,101 @@ private TRestoreSnapshotResult restoreSnapshotImpl(TRestoreSnapshotRequest reque return result; } + @Override + public TStoredProcedureResult addStoredProcedure(TAddStoredProcedureRequest request) throws TException { + TStoredProcedureResult result = new TStoredProcedureResult(); + TStatus status = new TStatus(TStatusCode.OK); + result.setStatus(status); + if (!Env.getCurrentEnv().checkFeHost(getClientAddrAsString())) { + status.setStatusCode(TStatusCode.NOT_AUTHORIZED); + status.addToErrorMsgs("addStoredProcedure only accepts requests from fe."); + return result; + } + + if (!request.isSetStoredProcedure()) { + status.setStatusCode(TStatusCode.INVALID_ARGUMENT); + status.addToErrorMsgs("missing stored procedure."); + return result; + } + try { + Env.getCurrentEnv().getHplsqlManager() + .addStoredProcedure(StoredProcedure.fromThrift(request.getStoredProcedure()), + request.isSetIsForce() && request.isIsForce()); + } catch (RuntimeException e) { + status.setStatusCode(TStatusCode.ALREADY_EXIST); + status.addToErrorMsgs(e.getMessage()); + return result; + } + return 
result; + } + + @Override + public TStoredProcedureResult dropStoredProcedure(TStoredProcedureRequest request) throws TException { + TStoredProcedureResult result = new TStoredProcedureResult(); + TStatus status = new TStatus(TStatusCode.OK); + result.setStatus(status); + if (!Env.getCurrentEnv().checkFeHost(getClientAddrAsString())) { + status.setStatusCode(TStatusCode.NOT_AUTHORIZED); + status.addToErrorMsgs("dropStoredProcedure only accepts requests from fe."); + return result; + } + if (!request.isSetStoredKey()) { + status.setStatusCode(TStatusCode.INVALID_ARGUMENT); + status.addToErrorMsgs("missing stored key."); + return result; + } + + Env.getCurrentEnv().getHplsqlManager().dropStoredProcedure(StoredKey.fromThrift(request.getStoredKey())); + return result; + } + + @Override + public THplsqlPackageResult addHplsqlPackage(TAddHplsqlPackageRequest request) throws TException { + THplsqlPackageResult result = new THplsqlPackageResult(); + TStatus status = new TStatus(TStatusCode.OK); + result.setStatus(status); + if (!Env.getCurrentEnv().checkFeHost(getClientAddrAsString())) { + status.setStatusCode(TStatusCode.NOT_AUTHORIZED); + status.addToErrorMsgs("addHplsqlPackage only accepts requests from fe."); + return result; + } + if (!request.isSetHplsqlPackage()) { + status.setStatusCode(TStatusCode.INVALID_ARGUMENT); + status.addToErrorMsgs("missing hplsql package."); + return result; + } + + try { + Env.getCurrentEnv().getHplsqlManager().addPackage(HplsqlPackage.fromThrift(request.getHplsqlPackage()), + request.isSetIsForce() && request.isIsForce()); + } catch (RuntimeException e) { + status.setStatusCode(TStatusCode.ALREADY_EXIST); + status.addToErrorMsgs(e.getMessage()); + return result; + } + return result; + } + + @Override + public THplsqlPackageResult dropHplsqlPackage(THplsqlPackageRequest request) throws TException { + THplsqlPackageResult result = new THplsqlPackageResult(); + TStatus status = new TStatus(TStatusCode.OK); + result.setStatus(status); + if 
(!Env.getCurrentEnv().checkFeHost(getClientAddrAsString())) { + status.setStatusCode(TStatusCode.NOT_AUTHORIZED); + status.addToErrorMsgs("dropHplsqlPackage only accepts requests from fe."); + return result; + } + if (!request.isSetStoredKey()) { + status.setStatusCode(TStatusCode.INVALID_ARGUMENT); + status.addToErrorMsgs("missing stored key."); + return result; + } + + Env.getCurrentEnv().getHplsqlManager().dropPackage(StoredKey.fromThrift(request.getStoredKey())); + return result; + } + public TGetMasterTokenResult getMasterToken(TGetMasterTokenRequest request) throws TException { String clientAddr = getClientAddrAsString(); LOG.debug("receive get master token request: {}", request); diff --git a/fe/fe-core/src/test/java/org/apache/doris/ldap/LdapPrivsCheckerTest.java b/fe/fe-core/src/test/java/org/apache/doris/ldap/LdapPrivsCheckerTest.java index 54a294c8bdf1d5..e0d7463425a6f0 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/ldap/LdapPrivsCheckerTest.java +++ b/fe/fe-core/src/test/java/org/apache/doris/ldap/LdapPrivsCheckerTest.java @@ -142,7 +142,7 @@ public void setUp() { minTimes = 0; result = sessionVariable; - auth.getUserIdentityForLdap(USER, IP); + auth.getUserIdentityUncheckPasswd(USER, IP); minTimes = 0; result = Lists.newArrayList(userIdentity); } diff --git a/fe/pom.xml b/fe/pom.xml index 627d95e339d8de..f9aa1df195b277 100644 --- a/fe/pom.xml +++ b/fe/pom.xml @@ -308,6 +308,7 @@ under the License. 6.5.1 2.0.3 1.5.4 + 8.0.33 3.0.0 0.4.0-incubating @@ -1402,6 +1403,17 @@ under the License. 
${clickhouse.version} all + + com.oracle.database.jdbc + ojdbc6 + ${ojdbc6.version} + + + + com.mysql + mysql-connector-j + ${mysql-connector-j.version} + joda-time diff --git a/gensrc/thrift/FrontendService.thrift b/gensrc/thrift/FrontendService.thrift index 06c3d96c722ee3..1ef07ad0b18516 100644 --- a/gensrc/thrift/FrontendService.thrift +++ b/gensrc/thrift/FrontendService.thrift @@ -1042,6 +1042,55 @@ struct TRestoreSnapshotResult { 1: optional Status.TStatus status } +struct TStoredProcedure { + 1: optional string name + 2: optional string catalogName + 3: optional string dbName + 4: optional string ownerName + 5: optional string source +} + +struct THplsqlPackage { + 1: optional string name + 2: optional string catalogName + 3: optional string dbName + 4: optional string ownerName + 5: optional string header + 6: optional string body +} + +struct TStoredKey { + 1: optional string name + 2: optional string catalogName + 3: optional string dbName +} + +struct TAddStoredProcedureRequest { + 1: optional TStoredProcedure storedProcedure + 2: optional bool isForce +} + +struct TStoredProcedureRequest { + 1: optional TStoredKey storedKey +} + +struct TStoredProcedureResult { + 1: optional Status.TStatus status +} + +struct TAddHplsqlPackageRequest { + 1: optional THplsqlPackage hplsqlPackage + 2: optional bool isForce +} + +struct THplsqlPackageRequest { + 1: optional TStoredKey storedKey +} + +struct THplsqlPackageResult { + 1: optional Status.TStatus status +} + struct TGetMasterTokenRequest { 1: optional string cluster 2: optional string user @@ -1140,6 +1189,11 @@ service FrontendService { TGetTabletReplicaInfosResult getTabletReplicaInfos(1: TGetTabletReplicaInfosRequest request) + TStoredProcedureResult addStoredProcedure(1: TAddStoredProcedureRequest request) + TStoredProcedureResult dropStoredProcedure(1: TStoredProcedureRequest request) + THplsqlPackageResult addHplsqlPackage(1: TAddHplsqlPackageRequest request) + THplsqlPackageResult dropHplsqlPackage(1: 
THplsqlPackageRequest request) + TGetMasterTokenResult getMasterToken(1: TGetMasterTokenRequest request) TGetBinlogLagResult getBinlogLag(1: TGetBinlogLagRequest request) diff --git a/regression-test/data/hplsql_p0/test_hplsql.out b/regression-test/data/hplsql_p0/test_hplsql.out new file mode 100644 index 00000000000000..8da53be5ba5b56 --- /dev/null +++ b/regression-test/data/hplsql_p0/test_hplsql.out @@ -0,0 +1,9 @@ +-- This file is automatically generated. You should know what you did if you want to edit this +-- !select -- +false 2 1986 1001 11011903 1243.500 false 1901-12-31 1989-03-21T13:00 wangynnsf 20.268 789.25 string12345 -170141183460469231731687303715884105727 + +-- !select -- +1001 wangjuoo4 +1001 wangynnsf +1002 yunlj8@nk + diff --git a/regression-test/suites/hplsql_p0/test_hplsql.groovy b/regression-test/suites/hplsql_p0/test_hplsql.groovy new file mode 100644 index 00000000000000..53aeebb3a33b8d --- /dev/null +++ b/regression-test/suites/hplsql_p0/test_hplsql.groovy @@ -0,0 +1,73 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +suite("test_hplsql") { + def tbl = "hplsql_tbl" + sql "DROP TABLE IF EXISTS ${tbl}" + sql """ + create table ${tbl} (id int, name varchar(20)) DUPLICATE key(`id`) distributed by hash (`id`) buckets 4 + properties ("replication_num"="1"); + """ + + sql "set enable_hplsql = true" + sql "declare id INT default = 0;" + sql """ + CREATE OR REPLACE PROCEDURE procedure_demo(IN name STRING, OUT result int) + BEGIN + select k1 into result from test_query_db.test where k7 = name; + END; + """ + sql "call procedure_demo('wangynnsf', id)" + qt_select "select * from test_query_db.test where k1 = id" + + sql """ + CREATE OR REPLACE PROCEDURE cursor_demo() + BEGIN + DECLARE a CHAR(32); + DECLARE b, c INT; + DECLARE cur1 CURSOR FOR SELECT k7, k3 FROM test_query_db.test where k3 > 0 order by k3, k7; + DECLARE cur2 CURSOR FOR SELECT k4 FROM test_query_db.baseall where k4 between 0 and 21011903 order by k4; + + OPEN cur1; + OPEN cur2; + + read_loop: LOOP + FETCH cur1 INTO a, b; + IF(SQLCODE != 0) THEN + LEAVE read_loop; + END IF; + FETCH cur2 INTO c; + IF(SQLCODE != 0) THEN + LEAVE read_loop; + END IF; + IF b < c THEN + INSERT INTO ${tbl} (`name`,`id`) VALUES (a,b); + ELSE + INSERT INTO ${tbl} (`name`, `id`) VALUES (a,c); + END IF; + END LOOP; + + CLOSE cur1; + CLOSE cur2; + END; + """ + + sql "call cursor_demo()" + qt_select """select * from ${tbl} order by 1, 2"""; + + sql "set enable_hplsql = false" +} \ No newline at end of file