diff --git a/.github/workflows/sql-odbc-main.yml b/.github/workflows/sql-odbc-main.yml index 41ca029465..4152aeea2a 100644 --- a/.github/workflows/sql-odbc-main.yml +++ b/.github/workflows/sql-odbc-main.yml @@ -2,6 +2,13 @@ name: Open Distro for Elasticsearch ODBC Driver on: [push, pull_request] +env: + CI_OUTPUT_PATH: "sql-odbc/ci-output" + ODBC_LIB_PATH: "./build/odbc/lib" + ODBC_BIN_PATH: "./build/odbc/bin" + ODBC_BUILD_PATH: "./build/odbc/build" + AWS_SDK_INSTALL_PATH: "./build/aws-sdk/install" + jobs: build-mac: runs-on: macos-latest @@ -43,13 +50,13 @@ jobs: - name: create-output if: success() run: | - mkdir build + mkdir build-output mkdir test-output mkdir installer - cp ./lib64/*.dylib build - cp ./lib64/*.a build - cp $(ls -d bin64/* | grep -v "\.") build - cp ./cmake-build64/*.pkg installer + cp ./build/odbc/lib/*.dylib build-output/ + cp ./build/odbc/lib/*.a build-output/ + cp ./cmake-build64/*.pkg installer/ + # cp $(ls -d ./build/odbc/bin/* | grep -v "\.") build-output # cp ./bin64/*.html test-output # cp ./bin64/*.log test-output - name: upload-build @@ -57,7 +64,7 @@ jobs: uses: actions/upload-artifact@v1 with: name: mac64-build - path: sql-odbc/build + path: sql-odbc/build-output - name: upload-installer if: success() uses: actions/upload-artifact@v1 @@ -85,46 +92,34 @@ jobs: - name: build-installer if: success() run: | - $prefix_path = (pwd).path - cd cmake-build32 - cmake ..\\src -D CMAKE_INSTALL_PREFIX=$prefix_path\AWSSDK\bin -D BUILD_WITH_TESTS=OFF - msbuild .\PACKAGE.vcxproj -p:Configuration=Release - cd .. 
+ .\scripts\build_installer.ps1 Release Win32 .\src $Env:ODBC_BUILD_PATH $Env:AWS_SDK_INSTALL_PATH #- name: test # run: | # cp .\\libraries\\VisualLeakDetector\\bin32\\*.* .\\bin32\\Release # cp .\\libraries\\VisualLeakDetector\\lib32\\*.lib .\\lib32\\Release # .\run_test_runner.bat - - name: create-output + - name: prepare-output if: always() run: | - mkdir build - mkdir test-output - mkdir installer - cp .\\bin32\\Release\\*.dll build - cp .\\bin32\\Release\\*.exe build - cp .\\lib32\\Release\\*.lib build - cp .\\cmake-build32\\*.msi installer - # cp .\\bin32\\Release\\*.log test-output - # cp .\\bin32\\Release\\*.html test-output + .\scripts\prepare_ci_output.ps1 $Env:ODBC_BIN_PATH $Env:ODBC_LIB_PATH $Env:ODBC_BUILD_PATH - name: upload-build if: always() uses: actions/upload-artifact@v1 with: name: windows32-build - path: sql-odbc/build + path: sql-odbc/ci-output/build - name: upload-installer if: always() uses: actions/upload-artifact@v1 with: name: windows32-installer - path: sql-odbc/installer + path: sql-odbc/ci-output/installer #- name: upload-test-results # if: always() # uses: actions/upload-artifact@v1 # with: # name: windows-test-results - # path: test-output + # path: $CI_OUTPUT_PATH/test build-windows64: runs-on: windows-latest defaults: @@ -140,43 +135,31 @@ jobs: - name: build-installer if: success() run: | - $prefix_path = (pwd).path - cd cmake-build64 - cmake ..\\src -D CMAKE_INSTALL_PREFIX=$prefix_path\AWSSDK\bin -D BUILD_WITH_TESTS=OFF - msbuild .\PACKAGE.vcxproj -p:Configuration=Release - cd .. 
+ .\scripts\build_installer.ps1 Release x64 .\src $Env:ODBC_BUILD_PATH $Env:AWS_SDK_INSTALL_PATH #- name: test # run: | # cp .\\libraries\\VisualLeakDetector\\bin64\\*.* .\\bin64\\Release # cp .\\libraries\\VisualLeakDetector\\lib64\\*.lib .\\lib64\\Release # .\run_test_runner.bat - - name: create-output + - name: prepare-output if: always() run: | - mkdir build - mkdir test-output - mkdir installer - cp .\\bin64\\Release\\*.dll build - cp .\\bin64\\Release\\*.exe build - cp .\\lib64\\Release\\*.lib build - cp .\\cmake-build64\\*.msi installer - # cp .\\bin64\\Release\\*.log test-output - # cp .\\bin64\\Release\\*.html test-output + .\scripts\prepare_ci_output.ps1 $Env:ODBC_BIN_PATH $Env:ODBC_LIB_PATH $Env:ODBC_WIN_BUILD_PATH - name: upload-build if: always() uses: actions/upload-artifact@v1 with: name: windows64-build - path: sql-odbc/build + path: sql-odbc/ci-output/build - name: upload-installer if: always() uses: actions/upload-artifact@v1 with: name: windows64-installer - path: sql-odbc/installer + path: sql-odbc/ci-output/installer #- name: upload-test-results # if: always() # uses: actions/upload-artifact@v1 # with: # name: windows-test-results - # path: test-output + # path: sql-odbc/ci-output/test-output diff --git a/.github/workflows/sql-odbc-release-workflow.yml b/.github/workflows/sql-odbc-release-workflow.yml index 02e3cf25a5..62c1f48e50 100644 --- a/.github/workflows/sql-odbc-release-workflow.yml +++ b/.github/workflows/sql-odbc-release-workflow.yml @@ -5,6 +5,13 @@ on: tags: - 'v*' +env: + CI_OUTPUT_PATH: "sql-odbc/ci-output" + ODBC_LIB_PATH: "./build/odbc/lib" + ODBC_BIN_PATH: "./build/odbc/bin" + ODBC_BUILD_PATH: "./build/odbc/build" + AWS_SDK_INSTALL_PATH: "./build/aws-sdk/install" + jobs: build-mac: runs-on: macos-latest @@ -35,23 +42,13 @@ jobs: brew install curl brew install cmake brew install libiodbc - - name: aws-sdk-cpp-setup - run: | - sh aws_sdk_cpp_setup.sh - - name: configure - run: | - prefix_path=$(pwd) - mkdir cmake-build - cd 
cmake-build - cmake ../src -DCMAKE_INSTALL_PREFIX=$prefix_path/AWSSDK/ -DCMAKE_BUILD_TYPE=Release -DBUILD_ONLY="core" -DCUSTOM_MEMORY_MANAGEMENT="OFF" -DENABLE_RTTI="OFF" -DENABLE_TESTING="OFF" - cd .. - - name: build-driver + - name: configure-and-build-driver run: | - cmake --build cmake-build + ./build_mac_release64.sh - name: build-installer if: success() run: | - cd cmake-build + cd cmake-build64 cmake ../src make cpack . @@ -59,24 +56,24 @@ jobs: - name: create-output if: success() run: | - mkdir build + mkdir build-output mkdir test-output mkdir installer - cp ./lib64/*.dylib build - cp ./lib64/*.a build - cp $(ls -d bin64/* | grep -v "\.") build - cp ./cmake-build/*.pkg installer + cp ./build/odbc/lib/*.dylib build-output/ + cp ./build/odbc/lib/*.a build-output/ + cp ./cmake-build64/*.pkg installer/ + # cp $(ls -d bin64/* | grep -v "\.") build - name: upload-build if: success() uses: actions/upload-artifact@v1 with: - name: mac-build - path: sql-odbc/build + name: mac64-build + path: sql-odbc/build-output - name: upload-installer if: success() uses: actions/upload-artifact@v1 with: - name: mac-installer + name: mac64-installer path: sql-odbc/installer - name: upload-artifacts-s3 if: success() @@ -106,37 +103,28 @@ jobs: - name: build-installer if: success() run: | - cd cmake-build32 - cmake ..\\src -D CMAKE_INSTALL_PREFIX=$prefix_path\AWSSDK\ -D BUILD_WITH_TESTS=OFF - msbuild .\PACKAGE.vcxproj -p:Configuration=Release - cd .. 
- - name: create-output + .\scripts\build_installer.ps1 Release Win32 .\src $Env:ODBC_BUILD_PATH $Env:AWS_SDK_INSTALL_PATH + - name: prepare-output if: always() run: | - mkdir build - mkdir test-output - mkdir installer - cp .\\bin32\\Release\\*.dll build - cp .\\bin32\\Release\\*.exe build - cp .\\lib32\\Release\\*.lib build - cp .\\cmake-build32\\*.msi installer + .\scripts\prepare_ci_output.ps1 $Env:ODBC_BIN_PATH $Env:ODBC_LIB_PATH $Env:ODBC_BUILD_PATH - name: upload-build if: always() uses: actions/upload-artifact@v1 with: name: windows32-build - path: sql-odbc/build + path: sql-odbc/ci-output/build - name: upload-installer if: always() uses: actions/upload-artifact@v1 with: name: windows32-installer - path: sql-odbc/installer + path: sql-odbc/ci-output/installer - name: upload-artifacts-s3 if: success() shell: bash run: | - cd installer + cd ci-output/installer windows_installer=`ls -1t *.msi | grep "Open Distro for Elasticsearch SQL ODBC Driver" | head -1` echo $windows_installer aws s3 cp "$windows_installer" s3://artifacts.opendistroforelasticsearch.amazon.com/downloads/elasticsearch-clients/opendistro-sql-odbc/windows/ @@ -161,37 +149,28 @@ jobs: - name: build-installer if: success() run: | - cd cmake-build64 - cmake ..\\src -D CMAKE_INSTALL_PREFIX=$prefix_path\AWSSDK\ -D BUILD_WITH_TESTS=OFF - msbuild .\PACKAGE.vcxproj -p:Configuration=Release - cd .. 
- - name: create-output + .\scripts\build_installer.ps1 Release x64 .\src $Env:ODBC_BUILD_PATH $Env:AWS_SDK_INSTALL_PATH + - name: prepare-output if: always() run: | - mkdir build - mkdir test-output - mkdir installer - cp .\\bin64\\Release\\*.dll build - cp .\\bin64\\Release\\*.exe build - cp .\\lib64\\Release\\*.lib build - cp .\\cmake-build64\\*.msi installer + .\scripts\prepare_ci_output.ps1 $Env:ODBC_BIN_PATH $Env:ODBC_LIB_PATH $Env:ODBC_WIN_BUILD_PATH - name: upload-build if: always() uses: actions/upload-artifact@v1 with: name: windows64-build - path: sql-odbc/build + path: sql-odbc/ci-output/build - name: upload-installer if: always() uses: actions/upload-artifact@v1 with: name: windows64-installer - path: sql-odbc/installer + path: sql-odbc/ci-output/installer - name: upload-artifacts-s3 if: success() shell: bash run: | - cd installer + cd ci-output/installer windows_installer=`ls -1t *.msi | grep "Open Distro for Elasticsearch SQL ODBC Driver" | head -1` echo $windows_installer aws s3 cp "$windows_installer" s3://artifacts.opendistroforelasticsearch.amazon.com/downloads/elasticsearch-clients/opendistro-sql-odbc/windows/ diff --git a/.github/workflows/sql-release-workflow.yml b/.github/workflows/sql-release-workflow.yml index ff35711e5b..d69c365ef1 100644 --- a/.github/workflows/sql-release-workflow.yml +++ b/.github/workflows/sql-release-workflow.yml @@ -32,7 +32,7 @@ jobs: - name: Run build run: | - ./gradlew build buildDeb buildRpm --no-daemon --refresh-dependencies --console=plain -Dbuild.snapshot=false + ./gradlew build buildDeb buildRpm --no-daemon --refresh-dependencies --console=plain -Dbuild.snapshot=false -x doctest:doctest artifact=`ls plugin/build/distributions/*.zip` rpm_artifact=`ls plugin/build/distributions/*.rpm` deb_artifact=`ls plugin/build/distributions/*.deb` diff --git a/build.gradle b/build.gradle index f45ea2b1a7..0940c40df7 100644 --- a/build.gradle +++ b/build.gradle @@ -15,7 +15,7 @@ buildscript { ext { - es_version = "7.8.0" + 
es_version = "7.9.0" } repositories { @@ -43,12 +43,12 @@ repositories { } ext { - opendistroVersion = '1.9.0' + opendistroVersion = '1.10.0' isSnapshot = "true" == System.getProperty("build.snapshot", "true") } allprojects { - version = "${opendistroVersion}.1" + version = "${opendistroVersion}.0" plugins.withId('java') { sourceCompatibility = targetCompatibility = "1.8" diff --git a/common/src/main/java/com/amazon/opendistroforelasticsearch/sql/common/setting/Settings.java b/common/src/main/java/com/amazon/opendistroforelasticsearch/sql/common/setting/Settings.java index c90b975a82..55e3b7b1ca 100644 --- a/common/src/main/java/com/amazon/opendistroforelasticsearch/sql/common/setting/Settings.java +++ b/common/src/main/java/com/amazon/opendistroforelasticsearch/sql/common/setting/Settings.java @@ -26,7 +26,17 @@ public abstract class Settings { @RequiredArgsConstructor public enum Key { - PPL_QUERY_MEMORY_LIMIT("opendistro.ppl.query.memory_limit"); + /** + * PPL Setting. + */ + PPL_QUERY_MEMORY_LIMIT("opendistro.ppl.query.memory_limit"), + + PPL_ENABLED("opendistro.ppl.enabled"), + + /** + * Common Setting for SQL and PPL. + */ + QUERY_SIZE_LIMIT("opendistro.query.size_limit"); @Getter private final String keyValue; diff --git a/common/src/main/java/com/amazon/opendistroforelasticsearch/sql/common/utils/StringUtils.java b/common/src/main/java/com/amazon/opendistroforelasticsearch/sql/common/utils/StringUtils.java index 1df4b26758..51a757506e 100644 --- a/common/src/main/java/com/amazon/opendistroforelasticsearch/sql/common/utils/StringUtils.java +++ b/common/src/main/java/com/amazon/opendistroforelasticsearch/sql/common/utils/StringUtils.java @@ -19,13 +19,13 @@ public class StringUtils { /** - * Unquote Identifier with mark. + * Unquote any string with mark specified. 
* @param text string * @param mark quotation mark * @return An unquoted string whose outer pair of (single/double/back-tick) quotes have been * removed */ - public static String unquoteIdentifier(String text, String mark) { + public static String unquote(String text, String mark) { if (isQuoted(text, mark)) { return text.substring(mark.length(), text.length() - mark.length()); } @@ -38,7 +38,7 @@ public static String unquoteIdentifier(String text, String mark) { * @return An unquoted string whose outer pair of (single/double/back-tick) quotes have been * removed */ - public static String unquoteIdentifier(String text) { + public static String unquoteText(String text) { if (isQuoted(text, "\"") || isQuoted(text, "'") || isQuoted(text, "`")) { return text.substring(1, text.length() - 1); } else { @@ -46,6 +46,20 @@ public static String unquoteIdentifier(String text) { } } + /** + * Unquote Identifier which has " or ` as mark. + * @param identifier identifier that possibly enclosed by double quotes or back ticks + * @return An unquoted string whose outer pair of (double/back-tick) quotes have been + * removed + */ + public static String unquoteIdentifier(String identifier) { + if (isQuoted(identifier, "\"") || isQuoted(identifier, "`")) { + return identifier.substring(1, identifier.length() - 1); + } else { + return identifier; + } + } + private static boolean isQuoted(String text, String mark) { return !Strings.isNullOrEmpty(text) && text.startsWith(mark) && text.endsWith(mark); } diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/analysis/Analyzer.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/analysis/Analyzer.java index 03202b34ff..bc40def04f 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/analysis/Analyzer.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/analysis/Analyzer.java @@ -40,6 +40,7 @@ import com.amazon.opendistroforelasticsearch.sql.expression.DSL; import 
com.amazon.opendistroforelasticsearch.sql.expression.Expression; import com.amazon.opendistroforelasticsearch.sql.expression.LiteralExpression; +import com.amazon.opendistroforelasticsearch.sql.expression.NamedExpression; import com.amazon.opendistroforelasticsearch.sql.expression.ReferenceExpression; import com.amazon.opendistroforelasticsearch.sql.expression.aggregation.Aggregator; import com.amazon.opendistroforelasticsearch.sql.planner.logical.LogicalAggregation; @@ -62,7 +63,6 @@ import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; -import lombok.RequiredArgsConstructor; import org.apache.commons.lang3.tuple.ImmutablePair; import org.apache.commons.lang3.tuple.Pair; @@ -70,11 +70,25 @@ * Analyze the {@link UnresolvedPlan} in the {@link AnalysisContext} to construct the {@link * LogicalPlan}. */ -@RequiredArgsConstructor public class Analyzer extends AbstractNodeVisitor { + private final ExpressionAnalyzer expressionAnalyzer; + + private final SelectExpressionAnalyzer selectExpressionAnalyzer; + private final StorageEngine storageEngine; + /** + * Constructor. 
+ */ + public Analyzer( + ExpressionAnalyzer expressionAnalyzer, + StorageEngine storageEngine) { + this.expressionAnalyzer = expressionAnalyzer; + this.storageEngine = storageEngine; + this.selectExpressionAnalyzer = new SelectExpressionAnalyzer(expressionAnalyzer); + } + public LogicalPlan analyze(UnresolvedPlan unresolved, AnalysisContext context) { return unresolved.accept(this, context); } @@ -110,8 +124,11 @@ public LogicalPlan visitRename(Rename node, AnalysisContext context) { ReferenceExpression target = new ReferenceExpression(((Field) renameMap.getTarget()).getField().toString(), origin.type()); - context.peek().define(target); - renameMapBuilder.put(DSL.ref(origin.toString(), origin.type()), target); + ReferenceExpression originExpr = DSL.ref(origin.toString(), origin.type()); + TypeEnvironment curEnv = context.peek(); + curEnv.remove(originExpr); + curEnv.define(target); + renameMapBuilder.put(originExpr, target); } else { throw new SemanticCheckException( String.format("the target expected to be field, but is %s", renameMap.getTarget())); @@ -126,17 +143,27 @@ public LogicalPlan visitRename(Rename node, AnalysisContext context) { */ @Override public LogicalPlan visitAggregation(Aggregation node, AnalysisContext context) { - LogicalPlan child = node.getChild().get(0).accept(this, context); + final LogicalPlan child = node.getChild().get(0).accept(this, context); ImmutableList.Builder aggregatorBuilder = new ImmutableList.Builder<>(); for (UnresolvedExpression expr : node.getAggExprList()) { aggregatorBuilder.add((Aggregator) expressionAnalyzer.analyze(expr, context)); } + ImmutableList aggregators = aggregatorBuilder.build(); ImmutableList.Builder groupbyBuilder = new ImmutableList.Builder<>(); for (UnresolvedExpression expr : node.getGroupExprList()) { groupbyBuilder.add(expressionAnalyzer.analyze(expr, context)); } - return new LogicalAggregation(child, aggregatorBuilder.build(), groupbyBuilder.build()); + ImmutableList groupBys = 
groupbyBuilder.build(); + + // new context + context.push(); + TypeEnvironment newEnv = context.peek(); + aggregators.forEach(aggregator -> newEnv.define(new Symbol(Namespace.FIELD_NAME, + aggregator.toString()), aggregator.type())); + groupBys.forEach(group -> newEnv.define(new Symbol(Namespace.FIELD_NAME, + group.toString()), group.type())); + return new LogicalAggregation(child, aggregators, groupBys); } /** @@ -158,18 +185,24 @@ public LogicalPlan visitProject(Project node, AnalysisContext context) { Argument argument = node.getArgExprList().get(0); Boolean exclude = (Boolean) argument.getValue().getValue(); if (exclude) { + TypeEnvironment curEnv = context.peek(); List referenceExpressions = node.getProjectList().stream() .map(expr -> (ReferenceExpression) expressionAnalyzer.analyze(expr, context)) .collect(Collectors.toList()); + referenceExpressions.forEach(ref -> curEnv.remove(ref)); return new LogicalRemove(child, ImmutableSet.copyOf(referenceExpressions)); } } - List expressions = node.getProjectList().stream() - .map(expr -> expressionAnalyzer.analyze(expr, context)) - .collect(Collectors.toList()); - return new LogicalProject(child, expressions); + List namedExpressions = + selectExpressionAnalyzer.analyze(node.getProjectList(), context); + // new context + context.push(); + TypeEnvironment newEnv = context.peek(); + namedExpressions.forEach(expr -> newEnv.define(new Symbol(Namespace.FIELD_NAME, + expr.getName()), expr.type())); + return new LogicalProject(child, namedExpressions); } /** diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/analysis/ExpressionAnalyzer.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/analysis/ExpressionAnalyzer.java index 2f9710b7b6..30936150aa 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/analysis/ExpressionAnalyzer.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/analysis/ExpressionAnalyzer.java @@ -19,6 +19,7 @@ import 
com.amazon.opendistroforelasticsearch.sql.analysis.symbol.Symbol; import com.amazon.opendistroforelasticsearch.sql.ast.AbstractNodeVisitor; import com.amazon.opendistroforelasticsearch.sql.ast.expression.AggregateFunction; +import com.amazon.opendistroforelasticsearch.sql.ast.expression.Alias; import com.amazon.opendistroforelasticsearch.sql.ast.expression.And; import com.amazon.opendistroforelasticsearch.sql.ast.expression.Compare; import com.amazon.opendistroforelasticsearch.sql.ast.expression.EqualTo; @@ -27,10 +28,13 @@ import com.amazon.opendistroforelasticsearch.sql.ast.expression.Literal; import com.amazon.opendistroforelasticsearch.sql.ast.expression.Not; import com.amazon.opendistroforelasticsearch.sql.ast.expression.Or; +import com.amazon.opendistroforelasticsearch.sql.ast.expression.QualifiedName; import com.amazon.opendistroforelasticsearch.sql.ast.expression.UnresolvedAttribute; import com.amazon.opendistroforelasticsearch.sql.ast.expression.UnresolvedExpression; import com.amazon.opendistroforelasticsearch.sql.ast.expression.Xor; +import com.amazon.opendistroforelasticsearch.sql.common.antlr.SyntaxCheckException; import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils; +import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; import com.amazon.opendistroforelasticsearch.sql.exception.SemanticCheckException; import com.amazon.opendistroforelasticsearch.sql.expression.DSL; import com.amazon.opendistroforelasticsearch.sql.expression.Expression; @@ -149,10 +153,33 @@ public Expression visitField(Field node, AnalysisContext context) { return visitIdentifier(attr, context); } + @Override + public Expression visitQualifiedName(QualifiedName node, AnalysisContext context) { + // Name with qualifier (index.field, index_alias.field, object/nested.inner_field + // text.keyword) is not supported for now + if (node.getParts().size() > 1) { + throw new SyntaxCheckException(String.format( + "Qualified name [%s] is not supported yet", 
node)); + } + return visitIdentifier(node.toString(), context); + } + private Expression visitIdentifier(String ident, AnalysisContext context) { TypeEnvironment typeEnv = context.peek(); ReferenceExpression ref = DSL.ref(ident, typeEnv.resolve(new Symbol(Namespace.FIELD_NAME, ident))); + + // Fall back to old engine too if type is not supported semantically + if (isTypeNotSupported(ref.type())) { + throw new SyntaxCheckException(String.format( + "Identifier [%s] of type [%s] is not supported yet", ident, ref.type())); + } return ref; } + + private boolean isTypeNotSupported(ExprType type) { + return "struct".equalsIgnoreCase(type.typeName()) + || "array".equalsIgnoreCase(type.typeName()); + } + } diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/analysis/SelectExpressionAnalyzer.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/analysis/SelectExpressionAnalyzer.java new file mode 100644 index 0000000000..bfb9216020 --- /dev/null +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/analysis/SelectExpressionAnalyzer.java @@ -0,0 +1,79 @@ +/* + * + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ * + */ + +package com.amazon.opendistroforelasticsearch.sql.analysis; + +import com.amazon.opendistroforelasticsearch.sql.analysis.symbol.Namespace; +import com.amazon.opendistroforelasticsearch.sql.ast.AbstractNodeVisitor; +import com.amazon.opendistroforelasticsearch.sql.ast.expression.Alias; +import com.amazon.opendistroforelasticsearch.sql.ast.expression.AllFields; +import com.amazon.opendistroforelasticsearch.sql.ast.expression.Field; +import com.amazon.opendistroforelasticsearch.sql.ast.expression.UnresolvedExpression; +import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; +import com.amazon.opendistroforelasticsearch.sql.expression.DSL; +import com.amazon.opendistroforelasticsearch.sql.expression.NamedExpression; +import com.amazon.opendistroforelasticsearch.sql.expression.ReferenceExpression; +import com.google.common.collect.ImmutableList; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; +import lombok.RequiredArgsConstructor; + +/** + * Analyze the select list in the {@link AnalysisContext} to construct the list of + * {@link NamedExpression}. + */ +@RequiredArgsConstructor +public class SelectExpressionAnalyzer + extends + AbstractNodeVisitor, AnalysisContext> { + private final ExpressionAnalyzer expressionAnalyzer; + + /** + * Analyze Select fields. 
+ */ + public List analyze(List selectList, + AnalysisContext analysisContext) { + ImmutableList.Builder builder = new ImmutableList.Builder<>(); + for (UnresolvedExpression unresolvedExpression : selectList) { + builder.addAll(unresolvedExpression.accept(this, analysisContext)); + } + return builder.build(); + } + + @Override + public List visitField(Field node, AnalysisContext context) { + return Collections.singletonList(DSL.named(node.accept(expressionAnalyzer, context))); + } + + @Override + public List visitAlias(Alias node, AnalysisContext context) { + return Collections.singletonList(DSL.named(node.getName(), + node.getDelegated().accept(expressionAnalyzer, context), + node.getAlias())); + } + + @Override + public List visitAllFields(AllFields node, + AnalysisContext context) { + TypeEnvironment environment = context.peek(); + Map lookupAllFields = environment.lookupAllFields(Namespace.FIELD_NAME); + return lookupAllFields.entrySet().stream().map(entry -> DSL.named(entry.getKey(), + new ReferenceExpression(entry.getKey(), entry.getValue()))).collect(Collectors.toList()); + } +} diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/analysis/TypeEnvironment.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/analysis/TypeEnvironment.java index 2262b05598..07849f92d9 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/analysis/TypeEnvironment.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/analysis/TypeEnvironment.java @@ -23,6 +23,8 @@ import com.amazon.opendistroforelasticsearch.sql.expression.Expression; import com.amazon.opendistroforelasticsearch.sql.expression.ReferenceExpression; import com.amazon.opendistroforelasticsearch.sql.expression.env.Environment; +import java.util.HashMap; +import java.util.Map; import java.util.Optional; import lombok.Getter; @@ -62,6 +64,17 @@ public ExprType resolve(Symbol symbol) { String.format("can't resolve %s in type env", symbol)); } + 
/** + * Resolve all fields in the current environment. + * @param namespace a namespace + * @return all symbols in the namespace + */ + public Map lookupAllFields(Namespace namespace) { + Map result = new HashMap<>(); + symbolTable.lookupAllFields(namespace).forEach(result::putIfAbsent); + return result; + } + /** * Define symbol with the type. * @@ -81,4 +94,14 @@ public void define(ReferenceExpression ref) { define(new Symbol(Namespace.FIELD_NAME, ref.getAttr()), ref.type()); } + public void remove(Symbol symbol) { + symbolTable.remove(symbol); + } + + /** + * Remove ref. + */ + public void remove(ReferenceExpression ref) { + remove(new Symbol(Namespace.FIELD_NAME, ref.getAttr())); + } } diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/analysis/symbol/SymbolTable.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/analysis/symbol/SymbolTable.java index 35acd06163..18daa49b10 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/analysis/symbol/SymbolTable.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/analysis/symbol/SymbolTable.java @@ -24,6 +24,7 @@ import java.util.NavigableMap; import java.util.Optional; import java.util.TreeMap; +import java.util.stream.Collectors; /** * Symbol table for symbol definition and resolution. @@ -49,6 +50,19 @@ public void store(Symbol symbol, ExprType type) { ).put(symbol.getName(), type); } + /** + * Remove a symbol from SymbolTable. + */ + public void remove(Symbol symbol) { + tableByNamespace.computeIfPresent( + symbol.getNamespace(), + (k, v) -> { + v.remove(symbol.getName()); + return v; + } + ); + } + /** * Look up symbol in the namespace map. * @@ -78,6 +92,21 @@ public Map lookupByPrefix(Symbol prefix) { return emptyMap(); } + /** + * Look up all top level symbols in the namespace. + * this function is mainly used by SELECT * use case to get the top level fields + * Todo. 
currently, the top level fields is the field which doesn't include "." in the name. + * + * @param namespace a namespace + * @return all symbols in the namespace map + */ + public Map lookupAllFields(Namespace namespace) { + return tableByNamespace.getOrDefault(namespace, emptyNavigableMap()) + .entrySet().stream() + .filter(entry -> !entry.getKey().contains(".")) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + } + /** * Check if namespace map in empty (none definition). * diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/ast/AbstractNodeVisitor.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/ast/AbstractNodeVisitor.java index cb962c241a..02806fbcfd 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/ast/AbstractNodeVisitor.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/ast/AbstractNodeVisitor.java @@ -16,6 +16,8 @@ package com.amazon.opendistroforelasticsearch.sql.ast; import com.amazon.opendistroforelasticsearch.sql.ast.expression.AggregateFunction; +import com.amazon.opendistroforelasticsearch.sql.ast.expression.Alias; +import com.amazon.opendistroforelasticsearch.sql.ast.expression.AllFields; import com.amazon.opendistroforelasticsearch.sql.ast.expression.And; import com.amazon.opendistroforelasticsearch.sql.ast.expression.Argument; import com.amazon.opendistroforelasticsearch.sql.ast.expression.AttributeList; @@ -178,4 +180,12 @@ public T visitDedupe(Dedupe node, C context) { public T visitValues(Values node, C context) { return visitChildren(node, context); } + + public T visitAlias(Alias node, C context) { + return visitChildren(node, context); + } + + public T visitAllFields(AllFields node, C context) { + return visitChildren(node, context); + } } diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/ast/dsl/AstDSL.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/ast/dsl/AstDSL.java index 
4c62c119d4..4b85c9b2bf 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/ast/dsl/AstDSL.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/ast/dsl/AstDSL.java @@ -16,6 +16,8 @@ package com.amazon.opendistroforelasticsearch.sql.ast.dsl; import com.amazon.opendistroforelasticsearch.sql.ast.expression.AggregateFunction; +import com.amazon.opendistroforelasticsearch.sql.ast.expression.Alias; +import com.amazon.opendistroforelasticsearch.sql.ast.expression.AllFields; import com.amazon.opendistroforelasticsearch.sql.ast.expression.And; import com.amazon.opendistroforelasticsearch.sql.ast.expression.Argument; import com.amazon.opendistroforelasticsearch.sql.ast.expression.Compare; @@ -43,7 +45,6 @@ import com.amazon.opendistroforelasticsearch.sql.ast.tree.Sort; import com.amazon.opendistroforelasticsearch.sql.ast.tree.UnresolvedPlan; import com.amazon.opendistroforelasticsearch.sql.ast.tree.Values; -import com.google.common.collect.ImmutableList; import java.util.Arrays; import java.util.List; import lombok.experimental.UtilityClass; @@ -226,6 +227,14 @@ public static Field field(String field, List fieldArgs) { return new Field(field, fieldArgs); } + public Alias alias(String name, UnresolvedExpression expr) { + return new Alias(name, expr); + } + + public Alias alias(String name, UnresolvedExpression expr, String alias) { + return new Alias(name, expr, alias); + } + public static List exprList(UnresolvedExpression... exprList) { return Arrays.asList(exprList); } diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/ast/expression/Alias.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/ast/expression/Alias.java new file mode 100644 index 0000000000..bcdac6e607 --- /dev/null +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/ast/expression/Alias.java @@ -0,0 +1,58 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + * + */ + +package com.amazon.opendistroforelasticsearch.sql.ast.expression; + +import com.amazon.opendistroforelasticsearch.sql.ast.AbstractNodeVisitor; +import lombok.AllArgsConstructor; +import lombok.EqualsAndHashCode; +import lombok.Getter; +import lombok.RequiredArgsConstructor; +import lombok.ToString; + +/** + * Alias abstraction that associate an unnamed expression with a name and an optional alias. + * The name and alias information preserved is useful for semantic analysis and response + * formatting eventually. This can avoid restoring the info in toString() method which is + * inaccurate because original info is already lost. + */ +@AllArgsConstructor +@EqualsAndHashCode(callSuper = false) +@Getter +@RequiredArgsConstructor +@ToString +public class Alias extends UnresolvedExpression { + + /** + * Original field name. + */ + private final String name; + + /** + * Expression aliased. + */ + private final UnresolvedExpression delegated; + + /** + * Optional field alias. 
+ */ + private String alias; + + @Override + public T accept(AbstractNodeVisitor nodeVisitor, C context) { + return nodeVisitor.visitAlias(this, context); + } +} diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/ast/expression/AllFields.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/ast/expression/AllFields.java new file mode 100644 index 0000000000..13d487a3f8 --- /dev/null +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/ast/expression/AllFields.java @@ -0,0 +1,43 @@ +/* + * + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + * + */ + +package com.amazon.opendistroforelasticsearch.sql.ast.expression; + +import com.amazon.opendistroforelasticsearch.sql.ast.AbstractNodeVisitor; +import lombok.EqualsAndHashCode; +import lombok.ToString; + +/** + * Represent the All fields which is been used in SELECT *. 
+ */ +@ToString +@EqualsAndHashCode(callSuper = false) +public class AllFields extends UnresolvedExpression { + public static final AllFields INSTANCE = new AllFields(); + + private AllFields() { + } + + public static AllFields of() { + return INSTANCE; + } + + @Override + public R accept(AbstractNodeVisitor nodeVisitor, C context) { + return nodeVisitor.visitAllFields(this, context); + } +} diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/ast/tree/Project.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/ast/tree/Project.java index c11018192f..e7f58ddd1e 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/ast/tree/Project.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/ast/tree/Project.java @@ -52,6 +52,17 @@ public boolean hasArgument() { return !argExprList.isEmpty(); } + /** + * The Project could been used to exclude fields from the source. + */ + public boolean isExcluded() { + if (hasArgument()) { + Argument argument = argExprList.get(0); + return (Boolean) argument.getValue().getValue(); + } + return false; + } + @Override public Project attach(UnresolvedPlan child) { this.child = child; diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/AbstractExprNumberValue.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/AbstractExprNumberValue.java index e5214aa8a5..fdf68b9ad8 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/AbstractExprNumberValue.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/AbstractExprNumberValue.java @@ -27,6 +27,11 @@ public abstract class AbstractExprNumberValue extends AbstractExprValue { private final Number value; + @Override + public boolean isNumber() { + return true; + } + @Override public Integer integerValue() { return value.intValue(); diff --git 
a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/AbstractExprValue.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/AbstractExprValue.java index 1e6e51a336..5774c314b4 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/AbstractExprValue.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/AbstractExprValue.java @@ -28,18 +28,18 @@ public abstract class AbstractExprValue implements ExprValue { */ @Override public int compareTo(ExprValue other) { - if (this.isNull() || this.isMissing()) { - return this.compare(other); - } else if (other.isNull() || other.isMissing()) { - return -other.compareTo(this); + if (this.isNull() || this.isMissing() || other.isNull() || other.isMissing()) { + throw new IllegalStateException( + String.format("[BUG] Unreachable, Comparing with NULL or MISSING is undefined")); } - if (!this.type().equals(other.type())) { + if ((this.isNumber() && other.isNumber()) || this.type() == other.type()) { + return compare(other); + } else { throw new ExpressionEvaluationException( String.format( "compare expected value have same type, but with [%s, %s]", this.type(), other.type())); } - return compare(other); } /** diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprMissingValue.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprMissingValue.java index 7eb29ab8f0..635d8f2fd4 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprMissingValue.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprMissingValue.java @@ -15,32 +15,32 @@ package com.amazon.opendistroforelasticsearch.sql.data.model; +import com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType; import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; import 
com.amazon.opendistroforelasticsearch.sql.exception.ExpressionEvaluationException; import java.util.Objects; /** * Expression Missing Value. - * Missing value only equal to missing value, and is smaller than any other value. */ public class ExprMissingValue extends AbstractExprValue { - private static final ExprValue instance = new ExprMissingValue(); + private static final ExprMissingValue instance = new ExprMissingValue(); private ExprMissingValue() { } - public static ExprValue of() { + public static ExprMissingValue of() { return instance; } @Override public Object value() { - throw new ExpressionEvaluationException("invalid to call value operation on missing value"); + return null; } @Override public ExprType type() { - throw new ExpressionEvaluationException("invalid to call type operation on missing value"); + return ExprCoreType.UNKNOWN; } @Override @@ -48,18 +48,15 @@ public boolean isMissing() { return true; } - /** - * When MISSING value compare to other expression value. - * 1) MISSING is equal to MISSING. - * 2) MISSING is less than all other expression values. - */ @Override public int compare(ExprValue other) { - return other.isMissing() ? 0 : -1; + throw new IllegalStateException(String.format("[BUG] Unreachable, Comparing with MISSING is " + + "undefined")); } /** * Missing value is equal to Missing value. + * Notes, this function should only used for Java Object Compare. 
*/ @Override public boolean equal(ExprValue other) { @@ -70,4 +67,9 @@ public boolean equal(ExprValue other) { public int hashCode() { return Objects.hashCode("MISSING"); } + + @Override + public String toString() { + return "MISSING"; + } } \ No newline at end of file diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprNullValue.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprNullValue.java index 64d035e82f..2638e62537 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprNullValue.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprNullValue.java @@ -15,19 +15,15 @@ package com.amazon.opendistroforelasticsearch.sql.data.model; +import com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType; import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; -import com.amazon.opendistroforelasticsearch.sql.exception.ExpressionEvaluationException; import java.util.Objects; /** * Expression Null Value. - * Null value - *
  • equal to null value. - *
  • large than missing value. - *
  • less than any other value. */ public class ExprNullValue extends AbstractExprValue { - private static final ExprValue instance = new ExprNullValue(); + private static final ExprNullValue instance = new ExprNullValue(); private ExprNullValue() { } @@ -37,7 +33,12 @@ public int hashCode() { return Objects.hashCode("NULL"); } - public static ExprValue of() { + @Override + public String toString() { + return "NULL"; + } + + public static ExprNullValue of() { return instance; } @@ -48,7 +49,7 @@ public Object value() { @Override public ExprType type() { - throw new ExpressionEvaluationException("invalid to call type operation on null value"); + return ExprCoreType.UNKNOWN; } @Override @@ -56,23 +57,18 @@ public boolean isNull() { return true; } - /** - * When NULL value compare to other expression value. - * 1) NULL is equal to NULL. - * 2) NULL is large than MISSING. - * 3) NULL is less than all other expression values. - */ @Override public int compare(ExprValue other) { - return other.isNull() ? 0 : other.isMissing() ? 1 : -1; + throw new IllegalStateException( + String.format("[BUG] Unreachable, Comparing with NULL is undefined")); } /** * NULL value is equal to NULL value. + * Notes, this function should only used for Java Object Compare. */ @Override public boolean equal(ExprValue other) { return other.isNull(); } - } diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprValue.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprValue.java index fc005a3301..f902e4d7a8 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprValue.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprValue.java @@ -58,6 +58,15 @@ default boolean isMissing() { return false; } + /** + * Is Number value. + * + * @return true: is number value, otherwise false + */ + default boolean isNumber() { + return false; + } + /** * Get the {@link BindingTuple}. 
*/ diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/type/WideningTypeRule.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/type/WideningTypeRule.java index 26d4950d1f..df4033ec0b 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/type/WideningTypeRule.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/type/WideningTypeRule.java @@ -52,10 +52,10 @@ public static int distance(ExprType type1, ExprType type2) { } private static int distance(ExprType type1, ExprType type2, int distance) { - if (type1 == UNKNOWN) { - return IMPOSSIBLE_WIDENING; - } else if (type1 == type2) { + if (type1 == type2) { return distance; + } else if (type1 == UNKNOWN) { + return IMPOSSIBLE_WIDENING; } else { return type1.getParent().stream() .map(parentOfType1 -> distance(parentOfType1, type2, distance + 1)) diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/executor/ExecutionEngine.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/executor/ExecutionEngine.java index ec6e5bfbf2..d75379ee1e 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/executor/ExecutionEngine.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/executor/ExecutionEngine.java @@ -18,6 +18,7 @@ import com.amazon.opendistroforelasticsearch.sql.common.response.ResponseListener; import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValue; +import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; import com.amazon.opendistroforelasticsearch.sql.planner.physical.PhysicalPlan; import java.util.List; import lombok.Data; @@ -40,7 +41,20 @@ public interface ExecutionEngine { */ @Data class QueryResponse { + private final Schema schema; private final List results; } + @Data + class Schema { + private final List columns; + + @Data + public static class Column { + private final String name; + private final String alias; + 
private final ExprType exprType; + } + } + } diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/DSL.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/DSL.java index 34a648b385..76a0087557 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/DSL.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/DSL.java @@ -75,6 +75,30 @@ public static ReferenceExpression ref(String ref, ExprType type) { return new ReferenceExpression(ref, type); } + /** + * Wrap a named expression if not yet. The intent is that different languages may use + * Alias or not when building AST. This caused either named or unnamed expression + * is resolved by analyzer. To make unnamed expression acceptable for logical project, + * it is required to wrap it by named expression here before passing to logical project. + * + * @param expression expression + * @return expression if named already or expression wrapped by named expression. + */ + public static NamedExpression named(Expression expression) { + if (expression instanceof NamedExpression) { + return (NamedExpression) expression; + } + return named(expression.toString(), expression); + } + + public static NamedExpression named(String name, Expression expression) { + return new NamedExpression(name, expression); + } + + public static NamedExpression named(String name, Expression expression, String alias) { + return new NamedExpression(name, expression, alias); + } + public FunctionExpression abs(Expression... expressions) { return function(BuiltinFunctionName.ABS, expressions); } @@ -268,6 +292,10 @@ public FunctionExpression like(Expression... expressions) { return function(BuiltinFunctionName.LIKE, expressions); } + public FunctionExpression notLike(Expression... expressions) { + return function(BuiltinFunctionName.NOT_LIKE, expressions); + } + public Aggregator avg(Expression... 
expressions) { return aggregate(BuiltinFunctionName.AVG, expressions); } @@ -289,4 +317,12 @@ private Aggregator aggregate(BuiltinFunctionName functionName, Expression... exp return (Aggregator) repository.compile( functionName.getName(), Arrays.asList(expressions)); } + + public FunctionExpression isnull(Expression... expressions) { + return function(BuiltinFunctionName.IS_NULL, expressions); + } + + public FunctionExpression isnotnull(Expression... expressions) { + return function(BuiltinFunctionName.IS_NOT_NULL, expressions); + } } diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/Expression.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/Expression.java index c4348ddfcf..90e411c5bf 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/Expression.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/Expression.java @@ -34,4 +34,15 @@ public interface Expression extends Serializable { * The type of the expression. */ ExprType type(); + + /** + * Accept a visitor to visit current expression node. + * @param visitor visitor + * @param context context + * @param result type + * @param context type + * @return result accumulated by visitor when visiting + */ + T accept(ExpressionNodeVisitor visitor, C context); + } diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/ExpressionNodeVisitor.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/ExpressionNodeVisitor.java new file mode 100644 index 0000000000..f2b1618357 --- /dev/null +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/ExpressionNodeVisitor.java @@ -0,0 +1,77 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. 
+ * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + * + */ + +package com.amazon.opendistroforelasticsearch.sql.expression; + +import com.amazon.opendistroforelasticsearch.sql.expression.aggregation.Aggregator; +import com.amazon.opendistroforelasticsearch.sql.expression.function.FunctionImplementation; + +/** + * Abstract visitor for expression tree nodes. + * @param type of return value to accumulate when visiting. + * @param type of context. + */ +public abstract class ExpressionNodeVisitor { + + public T visitNode(Expression node, C context) { + return null; + } + + /** + * Visit children nodes in function arguments. + * @param node function node + * @param context context + * @return result + */ + public T visitChildren(FunctionImplementation node, C context) { + T result = defaultResult(); + + for (Expression child : node.getArguments()) { + T childResult = child.accept(this, context); + result = aggregateResult(result, childResult); + } + return result; + } + + private T defaultResult() { + return null; + } + + private T aggregateResult(T aggregate, T nextResult) { + return nextResult; + } + + public T visitLiteral(LiteralExpression node, C context) { + return visitNode(node, context); + } + + public T visitNamed(NamedExpression node, C context) { + return visitNode(node, context); + } + + public T visitReference(ReferenceExpression node, C context) { + return visitNode(node, context); + } + + public T visitFunction(FunctionExpression node, C context) { + return visitChildren(node, context); + } + + public T visitAggregator(Aggregator node, C context) { + return visitChildren(node, context); + } + +} diff --git 
a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/FunctionExpression.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/FunctionExpression.java index 2969ee677f..d9ef4d4ba3 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/FunctionExpression.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/FunctionExpression.java @@ -35,4 +35,10 @@ public abstract class FunctionExpression implements Expression, FunctionImplemen @Getter private final List arguments; + + @Override + public T accept(ExpressionNodeVisitor visitor, C context) { + return visitor.visitFunction(this, context); + } + } diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/LiteralExpression.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/LiteralExpression.java index 685639e655..3515e54507 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/LiteralExpression.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/LiteralExpression.java @@ -39,6 +39,11 @@ public ExprType type() { return exprValue.type(); } + @Override + public T accept(ExpressionNodeVisitor visitor, C context) { + return visitor.visitLiteral(this, context); + } + @Override public String toString() { return exprValue.toString(); diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/NamedExpression.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/NamedExpression.java new file mode 100644 index 0000000000..da38d4b9cc --- /dev/null +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/NamedExpression.java @@ -0,0 +1,79 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). 
+ * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + * + */ + +package com.amazon.opendistroforelasticsearch.sql.expression; + +import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValue; +import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; +import com.amazon.opendistroforelasticsearch.sql.expression.env.Environment; +import com.google.common.base.Strings; +import lombok.AllArgsConstructor; +import lombok.EqualsAndHashCode; +import lombok.Getter; +import lombok.RequiredArgsConstructor; +import lombok.ToString; + +/** + * Named expression that represents expression with name. + * Please see more details in associated unresolved expression operator + * {@link com.amazon.opendistroforelasticsearch.sql.ast.expression.Alias}. + */ +@AllArgsConstructor +@EqualsAndHashCode +@RequiredArgsConstructor +@ToString +public class NamedExpression implements Expression { + + /** + * Expression name. + */ + private final String name; + + /** + * Expression that being named. + */ + private final Expression delegated; + + /** + * Optional alias. + */ + @Getter + private String alias; + + @Override + public ExprValue valueOf(Environment valueEnv) { + return delegated.valueOf(valueEnv); + } + + @Override + public ExprType type() { + return delegated.type(); + } + + /** + * Get expression name using name or its alias (if it's present). + * @return expression name + */ + public String getName() { + return Strings.isNullOrEmpty(alias) ? 
name : alias; + } + + @Override + public T accept(ExpressionNodeVisitor visitor, C context) { + return visitor.visitNamed(this, context); + } + +} diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/ReferenceExpression.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/ReferenceExpression.java index 6d5ba349e7..ba3a8f4e87 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/ReferenceExpression.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/ReferenceExpression.java @@ -40,6 +40,11 @@ public ExprType type() { return type; } + @Override + public T accept(ExpressionNodeVisitor visitor, C context) { + return visitor.visitReference(this, context); + } + @Override public String toString() { return attr; diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/aggregation/Aggregator.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/aggregation/Aggregator.java index 9d3653b4c7..c55f81f09e 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/aggregation/Aggregator.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/aggregation/Aggregator.java @@ -21,6 +21,7 @@ import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; import com.amazon.opendistroforelasticsearch.sql.exception.ExpressionEvaluationException; import com.amazon.opendistroforelasticsearch.sql.expression.Expression; +import com.amazon.opendistroforelasticsearch.sql.expression.ExpressionNodeVisitor; import com.amazon.opendistroforelasticsearch.sql.expression.env.Environment; import com.amazon.opendistroforelasticsearch.sql.expression.function.FunctionImplementation; import com.amazon.opendistroforelasticsearch.sql.expression.function.FunctionName; @@ -70,4 +71,10 @@ public ExprValue valueOf(Environment valueEnv) { public ExprType type() { return returnType; } + 
+ @Override + public T accept(ExpressionNodeVisitor visitor, C context) { + return visitor.visitAggregator(this, context); + } + } diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/function/BuiltinFunctionName.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/function/BuiltinFunctionName.java index a17b5d65d2..f38f3c5848 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/function/BuiltinFunctionName.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/function/BuiltinFunctionName.java @@ -76,6 +76,7 @@ public enum BuiltinFunctionName { GREATER(FunctionName.of(">")), GTE(FunctionName.of(">=")), LIKE(FunctionName.of("like")), + NOT_LIKE(FunctionName.of("not like")), /** * Date and Time Functions. @@ -87,7 +88,13 @@ public enum BuiltinFunctionName { */ AVG(FunctionName.of("avg")), SUM(FunctionName.of("sum")), - COUNT(FunctionName.of("count")); + COUNT(FunctionName.of("count")), + + /** + * NULL Test. + */ + IS_NULL(FunctionName.of("is null")), + IS_NOT_NULL(FunctionName.of("is not null")); private final FunctionName name; diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/function/FunctionDSL.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/function/FunctionDSL.java index 841027656f..39107361f4 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/function/FunctionDSL.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/function/FunctionDSL.java @@ -167,4 +167,20 @@ public SerializableFunction nullMissingHandling( } }; } + + /** + * Wrapper the binary ExprValue function with default NULL and MISSING handling. 
+ */ + public SerializableBiFunction nullMissingHandling( + SerializableBiFunction function) { + return (v1, v2) -> { + if (v1.isMissing() || v2.isMissing()) { + return ExprValueUtils.missingValue(); + } else if (v1.isNull() || v2.isNull()) { + return ExprValueUtils.nullValue(); + } else { + return function.apply(v1, v2); + } + }; + } } diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/predicate/BinaryPredicateOperator.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/predicate/BinaryPredicateOperator.java index edfd60c153..a45699546a 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/predicate/BinaryPredicateOperator.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/predicate/BinaryPredicateOperator.java @@ -20,35 +20,18 @@ import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.LITERAL_NULL; import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.LITERAL_TRUE; import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.BOOLEAN; -import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.DOUBLE; -import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.FLOAT; -import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.INTEGER; -import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.LONG; import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.STRING; -import static com.amazon.opendistroforelasticsearch.sql.expression.operator.OperatorUtils.binaryOperator; -import static com.amazon.opendistroforelasticsearch.sql.utils.OperatorUtils.matches; import com.amazon.opendistroforelasticsearch.sql.data.model.ExprBooleanValue; import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValue; -import 
com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils; import com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType; -import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; -import com.amazon.opendistroforelasticsearch.sql.expression.Expression; -import com.amazon.opendistroforelasticsearch.sql.expression.FunctionExpression; -import com.amazon.opendistroforelasticsearch.sql.expression.env.Environment; import com.amazon.opendistroforelasticsearch.sql.expression.function.BuiltinFunctionName; import com.amazon.opendistroforelasticsearch.sql.expression.function.BuiltinFunctionRepository; -import com.amazon.opendistroforelasticsearch.sql.expression.function.FunctionBuilder; import com.amazon.opendistroforelasticsearch.sql.expression.function.FunctionDSL; -import com.amazon.opendistroforelasticsearch.sql.expression.function.FunctionName; import com.amazon.opendistroforelasticsearch.sql.expression.function.FunctionResolver; -import com.amazon.opendistroforelasticsearch.sql.expression.function.FunctionSignature; -import com.google.common.collect.ImmutableMap; +import com.amazon.opendistroforelasticsearch.sql.utils.OperatorUtils; import com.google.common.collect.ImmutableTable; import com.google.common.collect.Table; -import java.util.Arrays; -import java.util.Map; -import java.util.function.BiFunction; import java.util.stream.Collectors; import lombok.experimental.UtilityClass; @@ -77,6 +60,7 @@ public static void register(BuiltinFunctionRepository repository) { repository.register(greater()); repository.register(gte()); repository.register(like()); + repository.register(notLike()); } /** @@ -164,39 +148,28 @@ public static void register(BuiltinFunctionRepository repository) { .build(); private static FunctionResolver and() { - FunctionName functionName = BuiltinFunctionName.AND.getName(); - return FunctionResolver.builder() - .functionName(functionName) - .functionBundle(new FunctionSignature(functionName, - Arrays.asList(BOOLEAN, 
BOOLEAN)), binaryPredicate(functionName, - andTable, BOOLEAN)) - .build(); + return FunctionDSL.define(BuiltinFunctionName.AND.getName(), FunctionDSL + .impl((v1, v2) -> lookupTableFunction(v1, v2, andTable), BOOLEAN, BOOLEAN, + BOOLEAN)); } private static FunctionResolver or() { - FunctionName functionName = BuiltinFunctionName.OR.getName(); - return FunctionResolver.builder() - .functionName(functionName) - .functionBundle(new FunctionSignature(functionName, - Arrays.asList(BOOLEAN, BOOLEAN)), binaryPredicate(functionName, - orTable, BOOLEAN)) - .build(); + return FunctionDSL.define(BuiltinFunctionName.OR.getName(), FunctionDSL + .impl((v1, v2) -> lookupTableFunction(v1, v2, orTable), BOOLEAN, BOOLEAN, + BOOLEAN)); } private static FunctionResolver xor() { - FunctionName functionName = BuiltinFunctionName.XOR.getName(); - return FunctionResolver.builder() - .functionName(functionName) - .functionBundle(new FunctionSignature(functionName, - Arrays.asList(BOOLEAN, BOOLEAN)), binaryPredicate(functionName, - xorTable, BOOLEAN)) - .build(); + return FunctionDSL.define(BuiltinFunctionName.XOR.getName(), FunctionDSL + .impl((v1, v2) -> lookupTableFunction(v1, v2, xorTable), BOOLEAN, BOOLEAN, + BOOLEAN)); } private static FunctionResolver equal() { return FunctionDSL.define(BuiltinFunctionName.EQUAL.getName(), ExprCoreType.coreTypes().stream() - .map(type -> FunctionDSL.impl((v1, v2) -> ExprBooleanValue.of(v1.equals(v2)), + .map(type -> FunctionDSL.impl( + FunctionDSL.nullMissingHandling((v1, v2) -> ExprBooleanValue.of(v1.equals(v2))), BOOLEAN, type, type)) .collect( Collectors.toList())); @@ -206,160 +179,87 @@ private static FunctionResolver notEqual() { return FunctionDSL .define(BuiltinFunctionName.NOTEQUAL.getName(), ExprCoreType.coreTypes().stream() .map(type -> FunctionDSL - .impl((v1, v2) -> ExprBooleanValue.of(!v1.equals(v2)), BOOLEAN, type, type)) + .impl( + FunctionDSL + .nullMissingHandling((v1, v2) -> ExprBooleanValue.of(!v1.equals(v2))), + BOOLEAN, + type, 
+ type)) .collect( Collectors.toList())); } private static FunctionResolver less() { - return new FunctionResolver( - BuiltinFunctionName.LESS.getName(), - predicate( - BuiltinFunctionName.LESS.getName(), - (v1, v2) -> v1 < v2, - (v1, v2) -> v1 < v2, - (v1, v2) -> v1 < v2, - (v1, v2) -> v1 < v2, - (v1, v2) -> v1.compareTo(v2) < 0 - ) - ); + return FunctionDSL + .define(BuiltinFunctionName.LESS.getName(), ExprCoreType.coreTypes().stream() + .map(type -> FunctionDSL + .impl(FunctionDSL + .nullMissingHandling((v1, v2) -> ExprBooleanValue.of(v1.compareTo(v2) < 0)), + BOOLEAN, + type, type)) + .collect( + Collectors.toList())); } private static FunctionResolver lte() { - return new FunctionResolver( - BuiltinFunctionName.LTE.getName(), - predicate( - BuiltinFunctionName.LTE.getName(), - (v1, v2) -> v1 <= v2, - (v1, v2) -> v1 <= v2, - (v1, v2) -> v1 <= v2, - (v1, v2) -> v1 <= v2, - (v1, v2) -> v1.compareTo(v2) <= 0 - ) - ); + return FunctionDSL + .define(BuiltinFunctionName.LTE.getName(), ExprCoreType.coreTypes().stream() + .map(type -> FunctionDSL + .impl( + FunctionDSL + .nullMissingHandling( + (v1, v2) -> ExprBooleanValue.of(v1.compareTo(v2) <= 0)), + BOOLEAN, + type, type)) + .collect( + Collectors.toList())); } private static FunctionResolver greater() { - return new FunctionResolver( - BuiltinFunctionName.GREATER.getName(), - predicate( - BuiltinFunctionName.GREATER.getName(), - (v1, v2) -> v1 > v2, - (v1, v2) -> v1 > v2, - (v1, v2) -> v1 > v2, - (v1, v2) -> v1 > v2, - (v1, v2) -> v1.compareTo(v2) > 0 - ) - ); + return FunctionDSL + .define(BuiltinFunctionName.GREATER.getName(), ExprCoreType.coreTypes().stream() + .map(type -> FunctionDSL + .impl(FunctionDSL + .nullMissingHandling((v1, v2) -> ExprBooleanValue.of(v1.compareTo(v2) > 0)), + BOOLEAN, type, type)) + .collect( + Collectors.toList())); } private static FunctionResolver gte() { - return new FunctionResolver( - BuiltinFunctionName.GTE.getName(), - predicate( - BuiltinFunctionName.GTE.getName(), - (v1, v2) 
-> v1 >= v2, - (v1, v2) -> v1 >= v2, - (v1, v2) -> v1 >= v2, - (v1, v2) -> v1 >= v2, - (v1, v2) -> v1.compareTo(v2) >= 0 - ) - ); + return FunctionDSL + .define(BuiltinFunctionName.GTE.getName(), ExprCoreType.coreTypes().stream() + .map(type -> FunctionDSL + .impl( + FunctionDSL.nullMissingHandling( + (v1, v2) -> ExprBooleanValue.of(v1.compareTo(v2) >= 0)), + BOOLEAN, + type, type)) + .collect( + Collectors.toList())); } private static FunctionResolver like() { - return new FunctionResolver( - BuiltinFunctionName.LIKE.getName(), - predicate( - BuiltinFunctionName.LIKE.getName(), - (v1, v2) -> matches(v2, v1) - ) - ); + return FunctionDSL.define(BuiltinFunctionName.LIKE.getName(), FunctionDSL + .impl(FunctionDSL.nullMissingHandling(OperatorUtils::matches), BOOLEAN, STRING, + STRING)); } - /** - * Util method to generate binary predicate bundles. - * Applicable for integer, long, float, double, string types of operands - * Missing/Null value operands follow as {@param table} lists - */ - private static Map predicate( - FunctionName functionName, - BiFunction integerFunc, - BiFunction longFunc, - BiFunction floatFunc, - BiFunction doubleFunc, - BiFunction stringFunc) { - ImmutableMap.Builder builder = new ImmutableMap.Builder<>(); - return builder - .put( - new FunctionSignature(functionName, Arrays.asList(INTEGER, INTEGER)), - binaryOperator( - functionName, integerFunc, ExprValueUtils::getIntegerValue, BOOLEAN)) - .put( - new FunctionSignature(functionName, Arrays.asList(LONG, LONG)), - binaryOperator( - functionName, longFunc, ExprValueUtils::getLongValue, BOOLEAN)) - .put( - new FunctionSignature(functionName, Arrays.asList(FLOAT, FLOAT)), - binaryOperator( - functionName, floatFunc, ExprValueUtils::getFloatValue, BOOLEAN)) - .put( - new FunctionSignature(functionName, Arrays.asList(DOUBLE, DOUBLE)), - binaryOperator( - functionName, doubleFunc, ExprValueUtils::getDoubleValue, BOOLEAN)) - .put( - new FunctionSignature(functionName, Arrays.asList(STRING, STRING)), 
- binaryOperator( - functionName, stringFunc, ExprValueUtils::getStringValue, BOOLEAN)) - .build(); + private static FunctionResolver notLike() { + return FunctionDSL.define(BuiltinFunctionName.NOT_LIKE.getName(), FunctionDSL + .impl(FunctionDSL.nullMissingHandling( + (v1, v2) -> UnaryPredicateOperator.not(OperatorUtils.matches(v1, v2))), + BOOLEAN, + STRING, + STRING)); } - /** - * Util method to generate LIKE predicate bundles. - * Applicable for string operands. - */ - private static Map predicate( - FunctionName functionName, - BiFunction stringFunc) { - ImmutableMap.Builder builder = new ImmutableMap.Builder<>(); - return builder - .put(new FunctionSignature(functionName, Arrays.asList(STRING, STRING)), - binaryOperator(functionName, stringFunc, ExprValueUtils::getStringValue, - BOOLEAN)) - .build(); - } - - - /** - * Building method to construct binary logical predicates AND OR XOR - * Where operands order does not matter. - * Special cases for missing/null operands refer to {@param table}. 
- */ - private static FunctionBuilder binaryPredicate(FunctionName functionName, - Table table, - ExprCoreType returnType) { - return arguments -> new FunctionExpression(functionName, arguments) { - @Override - public ExprValue valueOf(Environment env) { - ExprValue arg1 = arguments.get(0).valueOf(env); - ExprValue arg2 = arguments.get(1).valueOf(env); - if (table.contains(arg1, arg2)) { - return table.get(arg1, arg2); - } else { - return table.get(arg2, arg1); - } - } - - @Override - public ExprType type() { - return returnType; - } - - @Override - public String toString() { - return String.format("%s %s %s", arguments.get(0).toString(), functionName, arguments - .get(1).toString()); - } - }; + private static ExprValue lookupTableFunction(ExprValue arg1, ExprValue arg2, + Table table) { + if (table.contains(arg1, arg2)) { + return table.get(arg1, arg2); + } else { + return table.get(arg2, arg1); + } } } diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/predicate/UnaryPredicateOperator.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/predicate/UnaryPredicateOperator.java index f849a72bff..18a621acb3 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/predicate/UnaryPredicateOperator.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/predicate/UnaryPredicateOperator.java @@ -15,26 +15,17 @@ package com.amazon.opendistroforelasticsearch.sql.expression.operator.predicate; -import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.LITERAL_FALSE; -import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.LITERAL_MISSING; -import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.LITERAL_NULL; -import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.LITERAL_TRUE; import static 
com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.BOOLEAN; +import com.amazon.opendistroforelasticsearch.sql.data.model.ExprBooleanValue; import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValue; -import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; -import com.amazon.opendistroforelasticsearch.sql.expression.Expression; -import com.amazon.opendistroforelasticsearch.sql.expression.FunctionExpression; -import com.amazon.opendistroforelasticsearch.sql.expression.env.Environment; +import com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType; import com.amazon.opendistroforelasticsearch.sql.expression.function.BuiltinFunctionName; import com.amazon.opendistroforelasticsearch.sql.expression.function.BuiltinFunctionRepository; -import com.amazon.opendistroforelasticsearch.sql.expression.function.FunctionBuilder; -import com.amazon.opendistroforelasticsearch.sql.expression.function.FunctionName; +import com.amazon.opendistroforelasticsearch.sql.expression.function.FunctionDSL; import com.amazon.opendistroforelasticsearch.sql.expression.function.FunctionResolver; -import com.amazon.opendistroforelasticsearch.sql.expression.function.FunctionSignature; -import com.google.common.collect.ImmutableMap; import java.util.Arrays; -import java.util.Map; +import java.util.stream.Collectors; import lombok.experimental.UtilityClass; /** @@ -43,8 +34,18 @@ */ @UtilityClass public class UnaryPredicateOperator { + /** + * Register Unary Predicate Function. 
+ */ public static void register(BuiltinFunctionRepository repository) { repository.register(not()); + repository.register(isNull()); + repository.register(isNotNull()); + } + + private static FunctionResolver not() { + return FunctionDSL.define(BuiltinFunctionName.NOT.getName(), FunctionDSL + .impl(UnaryPredicateOperator::not, BOOLEAN, BOOLEAN)); } /** @@ -55,41 +56,30 @@ public static void register(BuiltinFunctionRepository repository) { * NULL NULL * MISSING MISSING */ - private static Map notMap = - new ImmutableMap.Builder() - .put(LITERAL_TRUE, LITERAL_FALSE) - .put(LITERAL_FALSE, LITERAL_TRUE) - .put(LITERAL_NULL, LITERAL_NULL) - .put(LITERAL_MISSING, LITERAL_MISSING) - .build(); - - private static FunctionResolver not() { - FunctionName functionName = BuiltinFunctionName.NOT.getName(); - return FunctionResolver.builder() - .functionName(functionName) - .functionBundle(new FunctionSignature(functionName, - Arrays.asList(BOOLEAN)), predicateFunction(functionName, BOOLEAN)) - .build(); + public ExprValue not(ExprValue v) { + if (v.isMissing() || v.isNull()) { + return v; + } else { + return ExprBooleanValue.of(!v.booleanValue()); + } } - private static FunctionBuilder predicateFunction( - FunctionName functionName, - ExprType returnType) { - return arguments -> new FunctionExpression(functionName, arguments) { - @Override - public ExprValue valueOf(Environment env) { - return notMap.get(arguments.get(0).valueOf(env)); - } + private static FunctionResolver isNull() { - @Override - public ExprType type() { - return returnType; - } + return FunctionDSL + .define(BuiltinFunctionName.IS_NULL.getName(), Arrays.stream(ExprCoreType.values()) + .map(type -> FunctionDSL + .impl((v) -> ExprBooleanValue.of(v.isNull()), BOOLEAN, type)) + .collect( + Collectors.toList())); + } - @Override - public String toString() { - return String.format("%s %s", functionName, arguments.get(0).toString()); - } - }; + private static FunctionResolver isNotNull() { + return FunctionDSL + 
.define(BuiltinFunctionName.IS_NOT_NULL.getName(), Arrays.stream(ExprCoreType.values()) + .map(type -> FunctionDSL + .impl((v) -> ExprBooleanValue.of(!v.isNull()), BOOLEAN, type)) + .collect( + Collectors.toList())); } } diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/planner/logical/LogicalPlanDSL.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/planner/logical/LogicalPlanDSL.java index 2f828ad9e6..1066b279fc 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/planner/logical/LogicalPlanDSL.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/planner/logical/LogicalPlanDSL.java @@ -18,6 +18,7 @@ import com.amazon.opendistroforelasticsearch.sql.ast.tree.Sort.SortOption; import com.amazon.opendistroforelasticsearch.sql.expression.Expression; import com.amazon.opendistroforelasticsearch.sql.expression.LiteralExpression; +import com.amazon.opendistroforelasticsearch.sql.expression.NamedExpression; import com.amazon.opendistroforelasticsearch.sql.expression.ReferenceExpression; import com.amazon.opendistroforelasticsearch.sql.expression.aggregation.Aggregator; import com.google.common.collect.ImmutableSet; @@ -50,7 +51,7 @@ public static LogicalPlan rename( return new LogicalRename(input, renameMap); } - public static LogicalPlan project(LogicalPlan input, Expression... fields) { + public static LogicalPlan project(LogicalPlan input, NamedExpression... 
fields) { return new LogicalProject(input, Arrays.asList(fields)); } diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/planner/logical/LogicalProject.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/planner/logical/LogicalProject.java index edf179903c..a68b176d60 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/planner/logical/LogicalProject.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/planner/logical/LogicalProject.java @@ -15,7 +15,7 @@ package com.amazon.opendistroforelasticsearch.sql.planner.logical; -import com.amazon.opendistroforelasticsearch.sql.expression.Expression; +import com.amazon.opendistroforelasticsearch.sql.expression.NamedExpression; import java.util.Arrays; import java.util.List; import lombok.EqualsAndHashCode; @@ -32,7 +32,7 @@ public class LogicalProject extends LogicalPlan { private final LogicalPlan child; @Getter - private final List projectList; + private final List projectList; @Override public List getChild() { diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/planner/physical/FilterOperator.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/planner/physical/FilterOperator.java index be6d48be39..7dec9266b7 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/planner/physical/FilterOperator.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/planner/physical/FilterOperator.java @@ -1,7 +1,6 @@ package com.amazon.opendistroforelasticsearch.sql.planner.physical; import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValue; -import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils; import com.amazon.opendistroforelasticsearch.sql.expression.Expression; import com.amazon.opendistroforelasticsearch.sql.expression.operator.predicate.BinaryPredicateOperator; import com.amazon.opendistroforelasticsearch.sql.storage.bindingtuple.BindingTuple; @@ 
-42,7 +41,7 @@ public boolean hasNext() { while (input.hasNext()) { ExprValue inputValue = input.next(); ExprValue exprValue = conditions.valueOf(inputValue.bindingTuples()); - if (ExprValueUtils.getBooleanValue(exprValue)) { + if (!(exprValue.isNull() || exprValue.isMissing()) && (exprValue.booleanValue())) { next = inputValue; return true; } diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/planner/physical/PhysicalPlan.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/planner/physical/PhysicalPlan.java index e7f64c84ec..56d6fc6e83 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/planner/physical/PhysicalPlan.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/planner/physical/PhysicalPlan.java @@ -16,6 +16,7 @@ package com.amazon.opendistroforelasticsearch.sql.planner.physical; import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValue; +import com.amazon.opendistroforelasticsearch.sql.executor.ExecutionEngine; import com.amazon.opendistroforelasticsearch.sql.planner.PlanNode; import java.util.Iterator; @@ -43,4 +44,9 @@ public void open() { public void close() { getChild().forEach(PhysicalPlan::close); } + + public ExecutionEngine.Schema schema() { + throw new IllegalStateException(String.format("[BUG] schema can been only applied to " + + "ProjectOperator, instead of %s", toString())); + } } diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/planner/physical/PhysicalPlanDSL.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/planner/physical/PhysicalPlanDSL.java index 6dedd39e04..40a8348976 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/planner/physical/PhysicalPlanDSL.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/planner/physical/PhysicalPlanDSL.java @@ -18,6 +18,7 @@ import com.amazon.opendistroforelasticsearch.sql.ast.tree.Sort.SortOption; import 
com.amazon.opendistroforelasticsearch.sql.expression.Expression; import com.amazon.opendistroforelasticsearch.sql.expression.LiteralExpression; +import com.amazon.opendistroforelasticsearch.sql.expression.NamedExpression; import com.amazon.opendistroforelasticsearch.sql.expression.ReferenceExpression; import com.amazon.opendistroforelasticsearch.sql.expression.aggregation.Aggregator; import com.google.common.collect.ImmutableSet; @@ -47,7 +48,7 @@ public static RenameOperator rename( return new RenameOperator(input, renameMap); } - public static ProjectOperator project(PhysicalPlan input, Expression... fields) { + public static ProjectOperator project(PhysicalPlan input, NamedExpression... fields) { return new ProjectOperator(input, Arrays.asList(fields)); } diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/planner/physical/ProjectOperator.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/planner/physical/ProjectOperator.java index 04aa049e57..32a6906298 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/planner/physical/ProjectOperator.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/planner/physical/ProjectOperator.java @@ -17,11 +17,13 @@ import com.amazon.opendistroforelasticsearch.sql.data.model.ExprTupleValue; import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValue; -import com.amazon.opendistroforelasticsearch.sql.expression.Expression; +import com.amazon.opendistroforelasticsearch.sql.executor.ExecutionEngine; +import com.amazon.opendistroforelasticsearch.sql.expression.NamedExpression; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMap.Builder; import java.util.Collections; import java.util.List; +import java.util.stream.Collectors; import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.RequiredArgsConstructor; @@ -37,7 +39,7 @@ public class ProjectOperator extends PhysicalPlan { @Getter private final 
PhysicalPlan input; @Getter - private final List projectList; + private final List projectList; @Override public R accept(PhysicalPlanNodeVisitor visitor, C context) { @@ -58,13 +60,17 @@ public boolean hasNext() { public ExprValue next() { ExprValue inputValue = input.next(); ImmutableMap.Builder mapBuilder = new Builder<>(); - for (Expression expr : projectList) { + for (NamedExpression expr : projectList) { ExprValue exprValue = expr.valueOf(inputValue.bindingTuples()); - // missing value is ignored. - if (!exprValue.isMissing()) { - mapBuilder.put(expr.toString(), exprValue); - } + mapBuilder.put(expr.getName(), exprValue); } return ExprTupleValue.fromExprValueMap(mapBuilder.build()); } + + @Override + public ExecutionEngine.Schema schema() { + return new ExecutionEngine.Schema(getProjectList().stream() + .map(expr -> new ExecutionEngine.Schema.Column(expr.getName(), + expr.getAlias(), expr.type())).collect(Collectors.toList())); + } } diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/utils/OperatorUtils.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/utils/OperatorUtils.java index 2b5479ebf9..d9b3352c69 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/utils/OperatorUtils.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/utils/OperatorUtils.java @@ -15,6 +15,8 @@ package com.amazon.opendistroforelasticsearch.sql.utils; +import com.amazon.opendistroforelasticsearch.sql.data.model.ExprBooleanValue; +import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValue; import java.util.regex.Pattern; import lombok.experimental.UtilityClass; @@ -27,8 +29,10 @@ public class OperatorUtils { * @param pattern string pattern to match. * @return if text matches pattern returns true; else return false. 
*/ - public static boolean matches(String pattern, String text) { - return Pattern.compile(patternToRegex(pattern)).matcher(text).matches(); + public static ExprBooleanValue matches(ExprValue text, ExprValue pattern) { + return ExprBooleanValue + .of(Pattern.compile(patternToRegex(pattern.stringValue())).matcher(text.stringValue()) + .matches()); } private static final char DEFAULT_ESCAPE = '\\'; diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/analysis/AnalyzerTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/analysis/AnalyzerTest.java index b7ffc28387..df804cd262 100644 --- a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/analysis/AnalyzerTest.java +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/analysis/AnalyzerTest.java @@ -32,13 +32,21 @@ import com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL; import com.amazon.opendistroforelasticsearch.sql.exception.SemanticCheckException; import com.amazon.opendistroforelasticsearch.sql.expression.DSL; +import com.amazon.opendistroforelasticsearch.sql.expression.config.ExpressionConfig; import com.amazon.opendistroforelasticsearch.sql.planner.logical.LogicalPlanDSL; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import java.util.Collections; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.context.annotation.Configuration; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit.jupiter.SpringExtension; +@Configuration +@ExtendWith(SpringExtension.class) +@ContextConfiguration(classes = {ExpressionConfig.class, AnalyzerTest.class}) class AnalyzerTest extends AnalyzerTestBase { @Test public void filter_relation() { @@ -131,13 +139,15 @@ public void rename_to_invalid_expression() { public void project_source() { assertAnalyzeEqual( 
LogicalPlanDSL.project( - LogicalPlanDSL.relation("schema"), DSL.ref("integer_value", INTEGER), DSL.ref( - "double_value", DOUBLE)), + LogicalPlanDSL.relation("schema"), + DSL.named("integer_value", DSL.ref("integer_value", INTEGER)), + DSL.named("double_value", DSL.ref("double_value", DOUBLE)) + ), AstDSL.projectWithArg( AstDSL.relation("schema"), AstDSL.defaultFieldsArgs(), - AstDSL.field("integer_value"), - AstDSL.field("double_value"))); + AstDSL.field("integer_value"), // Field not wrapped by Alias + AstDSL.alias("double_value", AstDSL.field("double_value")))); } @Test @@ -176,15 +186,15 @@ public void project_values() { assertAnalyzeEqual( LogicalPlanDSL.project( LogicalPlanDSL.values(ImmutableList.of(DSL.literal(123))), - DSL.literal(123), - DSL.literal("hello"), - DSL.literal(false) + DSL.named("123", DSL.literal(123)), + DSL.named("hello", DSL.literal("hello")), + DSL.named("false", DSL.literal(false)) ), AstDSL.project( AstDSL.values(ImmutableList.of(AstDSL.intLiteral(123))), - AstDSL.intLiteral(123), - AstDSL.stringLiteral("hello"), - AstDSL.booleanLiteral(false) + AstDSL.alias("123", AstDSL.intLiteral(123)), + AstDSL.alias("hello", AstDSL.stringLiteral("hello")), + AstDSL.alias("false", AstDSL.booleanLiteral(false)) ) ); } diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/analysis/AnalyzerTestBase.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/analysis/AnalyzerTestBase.java index 18e23cf1ba..1a7173fca5 100644 --- a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/analysis/AnalyzerTestBase.java +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/analysis/AnalyzerTestBase.java @@ -17,29 +17,77 @@ import static org.junit.jupiter.api.Assertions.assertEquals; +import com.amazon.opendistroforelasticsearch.sql.analysis.symbol.Namespace; +import com.amazon.opendistroforelasticsearch.sql.analysis.symbol.Symbol; import com.amazon.opendistroforelasticsearch.sql.analysis.symbol.SymbolTable; 
import com.amazon.opendistroforelasticsearch.sql.ast.tree.UnresolvedPlan; import com.amazon.opendistroforelasticsearch.sql.config.TestConfig; import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; +import com.amazon.opendistroforelasticsearch.sql.exception.ExpressionEvaluationException; import com.amazon.opendistroforelasticsearch.sql.expression.DSL; import com.amazon.opendistroforelasticsearch.sql.expression.Expression; -import com.amazon.opendistroforelasticsearch.sql.expression.config.ExpressionConfig; +import com.amazon.opendistroforelasticsearch.sql.expression.ReferenceExpression; import com.amazon.opendistroforelasticsearch.sql.expression.env.Environment; import com.amazon.opendistroforelasticsearch.sql.expression.function.BuiltinFunctionRepository; import com.amazon.opendistroforelasticsearch.sql.planner.logical.LogicalPlan; +import com.amazon.opendistroforelasticsearch.sql.planner.physical.PhysicalPlan; import com.amazon.opendistroforelasticsearch.sql.storage.StorageEngine; -import org.junit.jupiter.api.extension.ExtendWith; +import com.amazon.opendistroforelasticsearch.sql.storage.Table; +import java.util.Map; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit.jupiter.SpringExtension; -@Configuration -@ExtendWith(SpringExtension.class) -@ContextConfiguration(classes = {ExpressionConfig.class, AnalyzerTestBase.class, TestConfig.class}) + public class AnalyzerTestBase { + protected Map typeMapping() { + return TestConfig.typeMapping; + } + + @Bean + protected StorageEngine storageEngine() { + return new StorageEngine() { + @Override + public Table getTable(String name) { + return new Table() { + @Override + public Map getFieldTypes() { + return typeMapping(); + } + + @Override + public PhysicalPlan 
implement(LogicalPlan plan) { + throw new UnsupportedOperationException(); + } + }; + } + }; + } + + + @Bean + protected SymbolTable symbolTable() { + SymbolTable symbolTable = new SymbolTable(); + typeMapping().entrySet() + .forEach( + entry -> symbolTable + .store(new Symbol(Namespace.FIELD_NAME, entry.getKey()), entry.getValue())); + return symbolTable; + } + + @Bean + protected Environment typeEnv() { + return var -> { + if (var instanceof ReferenceExpression) { + ReferenceExpression refExpr = (ReferenceExpression) var; + if (typeMapping().containsKey(refExpr.getAttr())) { + return typeMapping().get(refExpr.getAttr()); + } + } + throw new ExpressionEvaluationException("type resolved failed"); + }; + } + @Autowired protected DSL dsl; diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/analysis/ExpressionAnalyzerTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/analysis/ExpressionAnalyzerTest.java index b9ff216110..68b74e5f38 100644 --- a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/analysis/ExpressionAnalyzerTest.java +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/analysis/ExpressionAnalyzerTest.java @@ -25,12 +25,20 @@ import com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL; import com.amazon.opendistroforelasticsearch.sql.ast.expression.UnresolvedExpression; +import com.amazon.opendistroforelasticsearch.sql.common.antlr.SyntaxCheckException; import com.amazon.opendistroforelasticsearch.sql.exception.SemanticCheckException; import com.amazon.opendistroforelasticsearch.sql.expression.DSL; import com.amazon.opendistroforelasticsearch.sql.expression.Expression; +import com.amazon.opendistroforelasticsearch.sql.expression.config.ExpressionConfig; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.context.annotation.Configuration; +import org.springframework.test.context.ContextConfiguration; +import 
org.springframework.test.context.junit.jupiter.SpringExtension; - +@Configuration +@ExtendWith(SpringExtension.class) +@ContextConfiguration(classes = {ExpressionConfig.class, AnalyzerTestBase.class}) class ExpressionAnalyzerTest extends AnalyzerTestBase { @Test @@ -73,6 +81,48 @@ public void not() { ); } + @Test + public void qualified_name() { + assertAnalyzeEqual( + DSL.ref("integer_value", INTEGER), + AstDSL.qualifiedName("integer_value") + ); + } + + @Test + public void skip_identifier_with_qualifier() { + SyntaxCheckException exception = + assertThrows(SyntaxCheckException.class, + () -> analyze(AstDSL.qualifiedName("index_alias", "integer_value"))); + + assertEquals( + "Qualified name [index_alias.integer_value] is not supported yet", + exception.getMessage() + ); + } + + @Test + public void skip_struct_data_type() { + SyntaxCheckException exception = + assertThrows(SyntaxCheckException.class, + () -> analyze(AstDSL.qualifiedName("struct_value"))); + assertEquals( + "Identifier [struct_value] of type [STRUCT] is not supported yet", + exception.getMessage() + ); + } + + @Test + public void skip_array_data_type() { + SyntaxCheckException exception = + assertThrows(SyntaxCheckException.class, + () -> analyze(AstDSL.qualifiedName("array_value"))); + assertEquals( + "Identifier [array_value] of type [ARRAY] is not supported yet", + exception.getMessage() + ); + } + @Test public void undefined_var_semantic_check_failed() { SemanticCheckException exception = assertThrows(SemanticCheckException.class, diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/analysis/SelectAnalyzeTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/analysis/SelectAnalyzeTest.java new file mode 100644 index 0000000000..376e7538c9 --- /dev/null +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/analysis/SelectAnalyzeTest.java @@ -0,0 +1,161 @@ +/* + * + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + * + */ + +package com.amazon.opendistroforelasticsearch.sql.analysis; + +import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.argument; +import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.booleanLiteral; +import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.field; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.DOUBLE; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.INTEGER; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.STRING; + +import com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL; +import com.amazon.opendistroforelasticsearch.sql.ast.expression.AllFields; +import com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType; +import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; +import com.amazon.opendistroforelasticsearch.sql.expression.DSL; +import com.amazon.opendistroforelasticsearch.sql.expression.config.ExpressionConfig; +import com.amazon.opendistroforelasticsearch.sql.planner.logical.LogicalPlanDSL; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import java.util.Map; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.context.annotation.Configuration; +import org.springframework.test.context.ContextConfiguration; +import 
org.springframework.test.context.junit.jupiter.SpringExtension; + +@Configuration +@ExtendWith(SpringExtension.class) +@ContextConfiguration(classes = {ExpressionConfig.class, SelectAnalyzeTest.class}) +public class SelectAnalyzeTest extends AnalyzerTestBase { + + @Override + protected Map typeMapping() { + return new ImmutableMap.Builder() + .put("integer_value", ExprCoreType.INTEGER) + .put("double_value", ExprCoreType.DOUBLE) + .put("string_value", ExprCoreType.STRING) + .build(); + } + + @Test + public void project_all_from_source() { + assertAnalyzeEqual( + LogicalPlanDSL.project( + LogicalPlanDSL.relation("schema"), + DSL.named("integer_value", DSL.ref("integer_value", INTEGER)), + DSL.named("double_value", DSL.ref("double_value", DOUBLE)), + DSL.named("string_value", DSL.ref("string_value", STRING)), + DSL.named("integer_value", DSL.ref("integer_value", INTEGER)), + DSL.named("double_value", DSL.ref("double_value", DOUBLE)) + ), + AstDSL.projectWithArg( + AstDSL.relation("schema"), + AstDSL.defaultFieldsArgs(), + AstDSL.field("integer_value"), // Field not wrapped by Alias + AstDSL.alias("double_value", AstDSL.field("double_value")), + AllFields.of())); + } + + @Test + public void select_and_project_all() { + assertAnalyzeEqual( + LogicalPlanDSL.project( + LogicalPlanDSL.project( + LogicalPlanDSL.relation("schema"), + DSL.named("integer_value", DSL.ref("integer_value", INTEGER)), + DSL.named("double_value", DSL.ref("double_value", DOUBLE)) + ), + DSL.named("integer_value", DSL.ref("integer_value", INTEGER)), + DSL.named("double_value", DSL.ref("double_value", DOUBLE)) + ), + AstDSL.projectWithArg( + AstDSL.projectWithArg( + AstDSL.relation("schema"), + AstDSL.defaultFieldsArgs(), + AstDSL.field("integer_value"), + AstDSL.field("double_value")), + AstDSL.defaultFieldsArgs(), + AllFields.of() + )); + } + + @Test + public void remove_and_project_all() { + assertAnalyzeEqual( + LogicalPlanDSL.project( + LogicalPlanDSL.remove( + LogicalPlanDSL.relation("schema"), 
+ DSL.ref("integer_value", INTEGER), + DSL.ref("double_value", DOUBLE) + ), + DSL.named("string_value", DSL.ref("string_value", STRING)) + ), + AstDSL.projectWithArg( + AstDSL.projectWithArg( + AstDSL.relation("schema"), + AstDSL.exprList(argument("exclude", booleanLiteral(true))), + AstDSL.field("integer_value"), + AstDSL.field("double_value")), + AstDSL.defaultFieldsArgs(), + AllFields.of() + )); + } + + @Test + public void stats_and_project_all() { + assertAnalyzeEqual( + LogicalPlanDSL.project( + LogicalPlanDSL.aggregation( + LogicalPlanDSL.relation("schema"), + ImmutableList.of(dsl.avg(DSL.ref("integer_value", INTEGER))), + ImmutableList.of(DSL.ref("string_value", STRING))), + DSL.named("string_value", DSL.ref("string_value", STRING)), + DSL.named("avg(integer_value)", DSL.ref("avg(integer_value)", DOUBLE)) + ), + AstDSL.projectWithArg( + AstDSL.agg( + AstDSL.relation("schema"), + AstDSL.exprList(AstDSL.aggregate("avg", field("integer_value"))), + null, + ImmutableList.of(field("string_value")), + AstDSL.defaultStatsArgs()), AstDSL.defaultFieldsArgs(), + AllFields.of())); + } + + @Test + public void rename_and_project_all() { + assertAnalyzeEqual( + LogicalPlanDSL.project( + LogicalPlanDSL.rename( + LogicalPlanDSL.relation("schema"), + ImmutableMap.of(DSL.ref("integer_value", INTEGER), DSL.ref("ivalue", INTEGER))), + DSL.named("ivalue", DSL.ref("ivalue", INTEGER)), + DSL.named("string_value", DSL.ref("string_value", STRING)), + DSL.named("double_value", DSL.ref("double_value", DOUBLE)) + ), + AstDSL.projectWithArg( + AstDSL.rename( + AstDSL.relation("schema"), + AstDSL.map(AstDSL.field("integer_value"), AstDSL.field("ivalue"))), + AstDSL.defaultFieldsArgs(), + AllFields.of() + )); + } +} diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/analysis/SelectExpressionAnalyzerTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/analysis/SelectExpressionAnalyzerTest.java new file mode 100644 index 0000000000..42aa1d06f5 --- 
/dev/null +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/analysis/SelectExpressionAnalyzerTest.java @@ -0,0 +1,69 @@ +/* + * + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + * + */ + +package com.amazon.opendistroforelasticsearch.sql.analysis; + +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.INTEGER; +import static org.junit.jupiter.api.Assertions.assertEquals; + +import com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL; +import com.amazon.opendistroforelasticsearch.sql.ast.expression.UnresolvedExpression; +import com.amazon.opendistroforelasticsearch.sql.expression.DSL; +import com.amazon.opendistroforelasticsearch.sql.expression.NamedExpression; +import com.amazon.opendistroforelasticsearch.sql.expression.config.ExpressionConfig; +import java.util.Arrays; +import java.util.List; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.context.annotation.Configuration; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit.jupiter.SpringExtension; + +@Configuration +@ExtendWith(SpringExtension.class) +@ContextConfiguration(classes = {ExpressionConfig.class, SelectExpressionAnalyzerTest.class}) +public class SelectExpressionAnalyzerTest extends AnalyzerTestBase { + + + @Test + public void named_expression() { + assertAnalyzeEqual( + DSL.named("int", 
DSL.ref("integer_value", INTEGER)), + AstDSL.alias("int", AstDSL.qualifiedName("integer_value")) + ); + } + + @Test + public void named_expression_with_alias() { + assertAnalyzeEqual( + DSL.named("integer", DSL.ref("integer_value", INTEGER), "int"), + AstDSL.alias("integer", AstDSL.qualifiedName("integer_value"), "int") + ); + } + + protected List analyze(UnresolvedExpression unresolvedExpression) { + + return new SelectExpressionAnalyzer(expressionAnalyzer) + .analyze(Arrays.asList(unresolvedExpression), + analysisContext); + } + + protected void assertAnalyzeEqual(NamedExpression expected, + UnresolvedExpression unresolvedExpression) { + assertEquals(Arrays.asList(expected), analyze(unresolvedExpression)); + } +} diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/analysis/symbol/SymbolTableTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/analysis/symbol/SymbolTableTest.java index 93cc481667..f2557edbf3 100644 --- a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/analysis/symbol/SymbolTableTest.java +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/analysis/symbol/SymbolTableTest.java @@ -30,18 +30,35 @@ import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; import java.util.Map; import java.util.Optional; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; public class SymbolTableTest { - private final SymbolTable symbolTable = new SymbolTable(); + private SymbolTable symbolTable; + + @BeforeEach + public void setup() { + symbolTable = new SymbolTable(); + } @Test public void defineFieldSymbolShouldBeAbleToResolve() { defineSymbolShouldBeAbleToResolve(new Symbol(Namespace.FIELD_NAME, "age"), INTEGER); } + @Test + public void removeSymbolCannotBeResolve() { + symbolTable.store(new Symbol(Namespace.FIELD_NAME, "age"), INTEGER); + + Optional age = symbolTable.lookup(new Symbol(Namespace.FIELD_NAME, "age")); + assertTrue(age.isPresent()); + + 
symbolTable.remove(new Symbol(Namespace.FIELD_NAME, "age")); + age = symbolTable.lookup(new Symbol(Namespace.FIELD_NAME, "age")); + assertFalse(age.isPresent()); + } @Test public void defineFieldSymbolShouldBeAbleToResolveByPrefix() { @@ -61,6 +78,24 @@ public void defineFieldSymbolShouldBeAbleToResolveByPrefix() { ); } + @Test + public void lookupAllFieldsReturnFieldsWithoutDot() { + symbolTable.store(new Symbol(Namespace.FIELD_NAME, "active"), BOOLEAN); + symbolTable.store(new Symbol(Namespace.FIELD_NAME, "s.address"), STRING); + symbolTable.store(new Symbol(Namespace.FIELD_NAME, "s.manager.name"), STRING); + + Map typeByName = + symbolTable.lookupAllFields(Namespace.FIELD_NAME); + + assertThat( + typeByName, + allOf( + aMapWithSize(1), + hasEntry("active", BOOLEAN) + ) + ); + } + @Test public void failedToResolveSymbolNoNamespaceMatched() { symbolTable.store(new Symbol(Namespace.FUNCTION_NAME, "customFunction"), BOOLEAN); diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/config/TestConfig.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/config/TestConfig.java index e3eb0b043c..59a13fb8e2 100644 --- a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/config/TestConfig.java +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/config/TestConfig.java @@ -47,7 +47,7 @@ public class TestConfig { public static final String STRING_TYPE_NULL_VALUE_FILED = "string_null_value"; public static final String STRING_TYPE_MISSING_VALUE_FILED = "string_missing_value"; - private static Map typeMapping = new ImmutableMap.Builder() + public static Map typeMapping = new ImmutableMap.Builder() .put("integer_value", ExprCoreType.INTEGER) .put(INT_TYPE_NULL_VALUE_FIELD, ExprCoreType.INTEGER) .put(INT_TYPE_MISSING_VALUE_FIELD, ExprCoreType.INTEGER) diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprMissingValueTest.java 
b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprMissingValueTest.java index 13cdd5f24f..26f2ea3699 100644 --- a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprMissingValueTest.java +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprMissingValueTest.java @@ -15,12 +15,16 @@ package com.amazon.opendistroforelasticsearch.sql.data.model; +import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.LITERAL_FALSE; import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.LITERAL_MISSING; import static com.amazon.opendistroforelasticsearch.sql.utils.ComparisonUtil.compare; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; +import com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType; import com.amazon.opendistroforelasticsearch.sql.exception.ExpressionEvaluationException; import org.junit.jupiter.api.Test; @@ -34,16 +38,24 @@ public void test_is_missing() { @Test public void getValue() { - ExpressionEvaluationException exception = assertThrows(ExpressionEvaluationException.class, - () -> LITERAL_MISSING.value()); - assertEquals("invalid to call value operation on missing value", exception.getMessage()); + assertNull(LITERAL_MISSING.value()); } @Test public void getType() { - ExpressionEvaluationException exception = assertThrows(ExpressionEvaluationException.class, - () -> LITERAL_MISSING.type()); - assertEquals("invalid to call type operation on missing value", exception.getMessage()); + assertEquals(ExprCoreType.UNKNOWN, LITERAL_MISSING.type()); + } + + @Test + public void toStringTest() { + assertEquals("MISSING", LITERAL_MISSING.toString()); + } + + @Test + public void equal() 
{ + assertTrue(LITERAL_MISSING.equals(LITERAL_MISSING)); + assertFalse(LITERAL_FALSE.equals(LITERAL_MISSING)); + assertFalse(LITERAL_MISSING.equals(LITERAL_FALSE)); } @Test diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprNullValueTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprNullValueTest.java index 8b9127fe56..ca4c85cb6e 100644 --- a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprNullValueTest.java +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprNullValueTest.java @@ -15,14 +15,16 @@ package com.amazon.opendistroforelasticsearch.sql.data.model; -import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.LITERAL_MISSING; +import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.LITERAL_FALSE; import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.LITERAL_NULL; import static com.amazon.opendistroforelasticsearch.sql.utils.ComparisonUtil.compare; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; +import com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType; import com.amazon.opendistroforelasticsearch.sql.exception.ExpressionEvaluationException; import org.junit.jupiter.api.Test; @@ -40,9 +42,19 @@ public void getValue() { @Test public void getType() { - ExpressionEvaluationException exception = assertThrows(ExpressionEvaluationException.class, - () -> LITERAL_NULL.type()); - assertEquals("invalid to call type operation on null value", exception.getMessage()); + assertEquals(ExprCoreType.UNKNOWN, LITERAL_NULL.type()); + } + + @Test + public void toStringTest() { + assertEquals("NULL", 
LITERAL_NULL.toString()); + } + + @Test + public void equal() { + assertTrue(LITERAL_NULL.equals(LITERAL_NULL)); + assertFalse(LITERAL_FALSE.equals(LITERAL_NULL)); + assertFalse(LITERAL_NULL.equals(LITERAL_FALSE)); } @Test diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprTupleValueTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprTupleValueTest.java index cd79cc13d5..e78c830058 100644 --- a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprTupleValueTest.java +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprTupleValueTest.java @@ -18,6 +18,7 @@ import static com.amazon.opendistroforelasticsearch.sql.utils.ComparisonUtil.compare; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; @@ -39,6 +40,15 @@ public void tuple_compare_int() { assertFalse(tupleValue.equals(intValue)); } + @Test + public void compare_tuple_with_different_key() { + ExprValue tupleValue1 = ExprValueUtils.tupleValue(ImmutableMap.of("value", 2)); + ExprValue tupleValue2 = + ExprValueUtils.tupleValue(ImmutableMap.of("integer_value", 2, "float_value", 1f)); + assertNotEquals(tupleValue1, tupleValue2); + assertNotEquals(tupleValue2, tupleValue1); + } + @Test public void compare_tuple_with_different_size() { ExprValue tupleValue1 = ExprValueUtils.tupleValue(ImmutableMap.of("integer_value", 2)); diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprValueCompareTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprValueCompareTest.java index e5d6d877b3..1ef39a79bd 100644 --- 
a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprValueCompareTest.java +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprValueCompareTest.java @@ -17,7 +17,11 @@ package com.amazon.opendistroforelasticsearch.sql.data.model; +import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.LITERAL_FALSE; +import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.LITERAL_MISSING; +import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.LITERAL_NULL; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; import org.junit.jupiter.api.Test; @@ -51,33 +55,38 @@ public void timestampValueCompare() { } @Test - public void nullValueEqualToNullValue() { - assertEquals(0, ExprNullValue.of().compareTo(ExprNullValue.of())); - } + public void missingCompareToMethodShouldNotBeenCalledDirectly() { + IllegalStateException exception = assertThrows(IllegalStateException.class, + () -> LITERAL_MISSING.compareTo(LITERAL_FALSE)); + assertEquals("[BUG] Unreachable, Comparing with NULL or MISSING is undefined", + exception.getMessage()); - @Test - public void nullValueLessThanNotNullValue() { - assertEquals(-1, ExprNullValue.of().compareTo(ExprBooleanValue.of(true))); - assertEquals(1, ExprBooleanValue.of(true).compareTo(ExprNullValue.of())); - } + exception = assertThrows(IllegalStateException.class, + () -> LITERAL_FALSE.compareTo(LITERAL_MISSING)); + assertEquals("[BUG] Unreachable, Comparing with NULL or MISSING is undefined", + exception.getMessage()); - @Test - public void missingValueEqualToMissingValue() { - assertEquals(0, ExprMissingValue.of().compareTo(ExprMissingValue.of())); + exception = assertThrows(IllegalStateException.class, + () -> ExprMissingValue.of().compare(LITERAL_MISSING)); + assertEquals("[BUG] Unreachable, Comparing with MISSING is undefined", + exception.getMessage()); 
} @Test - public void missingValueLessThanNotMissingValue() { - assertEquals(-1, ExprMissingValue.of().compareTo(ExprBooleanValue.of(true))); - assertEquals(1, ExprBooleanValue.of(true).compareTo(ExprMissingValue.of())); + public void nullCompareToMethodShouldNotBeenCalledDirectly() { + IllegalStateException exception = assertThrows(IllegalStateException.class, + () -> LITERAL_NULL.compareTo(LITERAL_FALSE)); + assertEquals("[BUG] Unreachable, Comparing with NULL or MISSING is undefined", + exception.getMessage()); - assertEquals(-1, ExprMissingValue.of().compareTo(ExprNullValue.of())); - assertEquals(1, ExprNullValue.of().compareTo(ExprMissingValue.of())); - } + exception = assertThrows(IllegalStateException.class, + () -> LITERAL_FALSE.compareTo(LITERAL_NULL)); + assertEquals("[BUG] Unreachable, Comparing with NULL or MISSING is undefined", + exception.getMessage()); - @Test - public void missingValueLessThanNullValue() { - assertEquals(-1, ExprMissingValue.of().compareTo(ExprNullValue.of())); - assertEquals(1, ExprNullValue.of().compareTo(ExprMissingValue.of())); + exception = assertThrows(IllegalStateException.class, + () -> ExprNullValue.of().compare(LITERAL_MISSING)); + assertEquals("[BUG] Unreachable, Comparing with NULL is undefined", + exception.getMessage()); } } diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/utils/ExprValueOrderingTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/utils/ExprValueOrderingTest.java index 755501f86c..e80668ee57 100644 --- a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/utils/ExprValueOrderingTest.java +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/utils/ExprValueOrderingTest.java @@ -194,15 +194,21 @@ public void natural_order_tuple_value() { tupleValue(ImmutableMap.of("v1", 3)), tupleValue(ImmutableMap.of("v1", 1, "v2", 2)))); } + @Test + public void order_compare_value_with_compatible_number_type() { + ExprValueOrdering 
ordering = ExprValueOrdering.natural(); + assertEquals(1, ordering.compare(integerValue(2), doubleValue(1d))); + } + @Test public void order_compare_value_with_different_type() { ExprValueOrdering ordering = ExprValueOrdering.natural(); ExpressionEvaluationException exception = assertThrows( ExpressionEvaluationException.class, - () -> ordering.compare(integerValue(1), doubleValue(2d))); + () -> ordering.compare(integerValue(1), stringValue("2"))); assertEquals( - "compare expected value have same type, but with [INTEGER, DOUBLE]", + "compare expected value have same type, but with [INTEGER, STRING]", exception.getMessage()); } } diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/ExpressionNodeVisitorTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/ExpressionNodeVisitorTest.java new file mode 100644 index 0000000000..58a4eff006 --- /dev/null +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/ExpressionNodeVisitorTest.java @@ -0,0 +1,89 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ * + */ + +package com.amazon.opendistroforelasticsearch.sql.expression; + +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.INTEGER; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.STRING; +import static com.amazon.opendistroforelasticsearch.sql.expression.DSL.literal; +import static com.amazon.opendistroforelasticsearch.sql.expression.DSL.named; +import static com.amazon.opendistroforelasticsearch.sql.expression.DSL.ref; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; + +import com.amazon.opendistroforelasticsearch.sql.expression.aggregation.Aggregator; +import com.amazon.opendistroforelasticsearch.sql.expression.config.ExpressionConfig; +import java.util.List; +import java.util.stream.Collectors; +import org.junit.jupiter.api.DisplayNameGeneration; +import org.junit.jupiter.api.DisplayNameGenerator; +import org.junit.jupiter.api.Test; + +@DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) +class ExpressionNodeVisitorTest { + + private final DSL dsl = new ExpressionConfig().dsl(new ExpressionConfig().functionRepository()); + + @Test + void should_return_null_by_default() { + ExpressionNodeVisitor visitor = new ExpressionNodeVisitor(){}; + assertNull(literal(10).accept(visitor, null)); + assertNull(ref("name", STRING).accept(visitor, null)); + assertNull(named("bool", literal(true)).accept(visitor, null)); + assertNull(dsl.abs(literal(-10)).accept(visitor, null)); + assertNull(dsl.sum(literal(10)).accept(visitor, null)); + } + + @Test + void can_visit_all_types_of_expression_node() { + Expression expr = + dsl.sum( + dsl.add( + ref("balance", INTEGER), + literal(10))); + + Expression actual = expr.accept(new ExpressionNodeVisitor() { + @Override + public Expression visitLiteral(LiteralExpression node, Object context) { + return node; + } + + @Override + public Expression visitReference(ReferenceExpression node, 
Object context) { + return node; + } + + @Override + public Expression visitFunction(FunctionExpression node, Object context) { + return dsl.add(visitArguments(node.getArguments(), context)); + } + + @Override + public Expression visitAggregator(Aggregator node, Object context) { + return dsl.sum(visitArguments(node.getArguments(), context)); + } + + private Expression[] visitArguments(List arguments, Object context) { + return arguments.stream() + .map(arg -> arg.accept(this, context)) + .toArray(Expression[]::new); + } + }, null); + + assertEquals(expr, actual); + } + +} \ No newline at end of file diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/NamedExpressionTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/NamedExpressionTest.java new file mode 100644 index 0000000000..c8330d2fba --- /dev/null +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/NamedExpressionTest.java @@ -0,0 +1,54 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ * + */ + +package com.amazon.opendistroforelasticsearch.sql.expression; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import org.junit.jupiter.api.DisplayNameGeneration; +import org.junit.jupiter.api.DisplayNameGenerator; +import org.junit.jupiter.api.Test; + +@DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) +class NamedExpressionTest extends ExpressionTestBase { + + @Test + void name_an_expression() { + LiteralExpression delegated = DSL.literal(10); + NamedExpression namedExpression = DSL.named("10", delegated); + + assertEquals("10", namedExpression.getName()); + assertEquals(delegated.type(), namedExpression.type()); + assertEquals(delegated.valueOf(valueEnv()), namedExpression.valueOf(valueEnv())); + } + + @Test + void name_an_expression_with_alias() { + LiteralExpression delegated = DSL.literal(10); + NamedExpression namedExpression = DSL.named("10", delegated, "ten"); + assertEquals("ten", namedExpression.getName()); + } + + @Test + void name_an_named_expression() { + LiteralExpression delegated = DSL.literal(10); + Expression expression = DSL.named("10", delegated, "ten"); + + NamedExpression namedExpression = DSL.named(expression); + assertEquals("ten", namedExpression.getName()); + } + +} \ No newline at end of file diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/predicate/BinaryPredicateOperatorTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/predicate/BinaryPredicateOperatorTest.java index 5194d70278..84b756643a 100644 --- a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/predicate/BinaryPredicateOperatorTest.java +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/predicate/BinaryPredicateOperatorTest.java @@ -34,6 +34,7 @@ import static com.amazon.opendistroforelasticsearch.sql.utils.ComparisonUtil.compare; import static 
com.amazon.opendistroforelasticsearch.sql.utils.OperatorUtils.matches; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertTrue; import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValue; @@ -356,38 +357,38 @@ public void test_null_equal_missing() { FunctionExpression equal = dsl.equal(DSL.ref(BOOL_TYPE_MISSING_VALUE_FIELD, BOOLEAN), DSL.ref(BOOL_TYPE_MISSING_VALUE_FIELD, BOOLEAN)); assertEquals(BOOLEAN, equal.type()); - assertEquals(LITERAL_TRUE, equal.valueOf(valueEnv())); + assertEquals(LITERAL_MISSING, equal.valueOf(valueEnv())); equal = dsl.equal(DSL.ref(BOOL_TYPE_NULL_VALUE_FIELD, BOOLEAN), DSL.ref(BOOL_TYPE_NULL_VALUE_FIELD, BOOLEAN)); assertEquals(BOOLEAN, equal.type()); - assertEquals(LITERAL_TRUE, equal.valueOf(valueEnv())); + assertEquals(LITERAL_NULL, equal.valueOf(valueEnv())); equal = dsl.equal(DSL.ref(BOOL_TYPE_NULL_VALUE_FIELD, BOOLEAN), DSL.ref(BOOL_TYPE_MISSING_VALUE_FIELD, BOOLEAN)); assertEquals(BOOLEAN, equal.type()); - assertEquals(LITERAL_FALSE, equal.valueOf(valueEnv())); + assertEquals(LITERAL_MISSING, equal.valueOf(valueEnv())); equal = dsl.equal(DSL.ref(BOOL_TYPE_MISSING_VALUE_FIELD, BOOLEAN), DSL.ref(BOOL_TYPE_NULL_VALUE_FIELD, BOOLEAN)); assertEquals(BOOLEAN, equal.type()); - assertEquals(LITERAL_FALSE, equal.valueOf(valueEnv())); + assertEquals(LITERAL_MISSING, equal.valueOf(valueEnv())); equal = dsl.equal(DSL.literal(LITERAL_TRUE), DSL.ref(BOOL_TYPE_MISSING_VALUE_FIELD, BOOLEAN)); assertEquals(BOOLEAN, equal.type()); - assertEquals(LITERAL_FALSE, equal.valueOf(valueEnv())); + assertEquals(LITERAL_MISSING, equal.valueOf(valueEnv())); equal = dsl.equal(DSL.literal(LITERAL_TRUE), DSL.ref(BOOL_TYPE_NULL_VALUE_FIELD, BOOLEAN)); assertEquals(BOOLEAN, equal.type()); - assertEquals(LITERAL_FALSE, equal.valueOf(valueEnv())); + assertEquals(LITERAL_NULL, equal.valueOf(valueEnv())); equal = 
dsl.equal(DSL.ref(BOOL_TYPE_MISSING_VALUE_FIELD, BOOLEAN), DSL.literal(LITERAL_TRUE)); assertEquals(BOOLEAN, equal.type()); - assertEquals(LITERAL_FALSE, equal.valueOf(valueEnv())); + assertEquals(LITERAL_MISSING, equal.valueOf(valueEnv())); equal = dsl.equal(DSL.ref(BOOL_TYPE_NULL_VALUE_FIELD, BOOLEAN), DSL.literal(LITERAL_TRUE)); assertEquals(BOOLEAN, equal.type()); - assertEquals(LITERAL_FALSE, equal.valueOf(valueEnv())); + assertEquals(LITERAL_NULL, equal.valueOf(valueEnv())); } @ParameterizedTest(name = "equal({0}, {1})") @@ -405,42 +406,42 @@ public void test_null_notequal_missing() { FunctionExpression notequal = dsl.notequal(DSL.ref(BOOL_TYPE_MISSING_VALUE_FIELD, BOOLEAN), DSL.ref(BOOL_TYPE_MISSING_VALUE_FIELD, BOOLEAN)); assertEquals(BOOLEAN, notequal.type()); - assertEquals(LITERAL_FALSE, notequal.valueOf(valueEnv())); + assertEquals(LITERAL_MISSING, notequal.valueOf(valueEnv())); notequal = dsl.notequal(DSL.ref(BOOL_TYPE_NULL_VALUE_FIELD, BOOLEAN), DSL.ref(BOOL_TYPE_NULL_VALUE_FIELD, BOOLEAN)); assertEquals(BOOLEAN, notequal.type()); - assertEquals(LITERAL_FALSE, notequal.valueOf(valueEnv())); + assertEquals(LITERAL_NULL, notequal.valueOf(valueEnv())); notequal = dsl.notequal(DSL.ref(BOOL_TYPE_NULL_VALUE_FIELD, BOOLEAN), DSL.ref(BOOL_TYPE_MISSING_VALUE_FIELD, BOOLEAN)); assertEquals(BOOLEAN, notequal.type()); - assertEquals(LITERAL_TRUE, notequal.valueOf(valueEnv())); + assertEquals(LITERAL_MISSING, notequal.valueOf(valueEnv())); notequal = dsl.notequal(DSL.ref(BOOL_TYPE_MISSING_VALUE_FIELD, BOOLEAN), DSL.ref(BOOL_TYPE_NULL_VALUE_FIELD, BOOLEAN)); assertEquals(BOOLEAN, notequal.type()); - assertEquals(LITERAL_TRUE, notequal.valueOf(valueEnv())); + assertEquals(LITERAL_MISSING, notequal.valueOf(valueEnv())); notequal = dsl.notequal(DSL.literal(LITERAL_TRUE), DSL.ref(BOOL_TYPE_MISSING_VALUE_FIELD, BOOLEAN)); assertEquals(BOOLEAN, notequal.type()); - assertEquals(LITERAL_TRUE, notequal.valueOf(valueEnv())); + assertEquals(LITERAL_MISSING, 
notequal.valueOf(valueEnv())); notequal = dsl.notequal(DSL.literal(LITERAL_TRUE), DSL.ref(BOOL_TYPE_NULL_VALUE_FIELD, BOOLEAN)); assertEquals(BOOLEAN, notequal.type()); - assertEquals(LITERAL_TRUE, notequal.valueOf(valueEnv())); + assertEquals(LITERAL_NULL, notequal.valueOf(valueEnv())); notequal = dsl.notequal(DSL.ref(BOOL_TYPE_MISSING_VALUE_FIELD, BOOLEAN), DSL.literal(LITERAL_TRUE)); assertEquals(BOOLEAN, notequal.type()); - assertEquals(LITERAL_TRUE, notequal.valueOf(valueEnv())); + assertEquals(LITERAL_MISSING, notequal.valueOf(valueEnv())); notequal = dsl.notequal(DSL.ref(BOOL_TYPE_NULL_VALUE_FIELD, BOOLEAN), DSL.literal(LITERAL_TRUE)); assertEquals(BOOLEAN, notequal.type()); - assertEquals(LITERAL_TRUE, notequal.valueOf(valueEnv())); + assertEquals(LITERAL_NULL, notequal.valueOf(valueEnv())); } @ParameterizedTest(name = "less({0}, {1})") @@ -676,8 +677,7 @@ public void test_null_gte_missing() { public void test_like(ExprValue v1, ExprValue v2) { FunctionExpression like = dsl.like(DSL.literal(v1), DSL.literal(v2)); assertEquals(BOOLEAN, like.type()); - assertEquals(matches(((String) v2.value()), (String) v1.value()), - ExprValueUtils.getBooleanValue(like.valueOf(valueEnv()))); + assertEquals(matches(v1, v2), like.valueOf(valueEnv())); assertEquals(String.format("%s like %s", v1.toString(), v2.toString()), like.toString()); } @@ -728,6 +728,18 @@ public void test_null_like_missing() { assertEquals(LITERAL_MISSING, like.valueOf(valueEnv())); } + @Test + public void test_not_like() { + FunctionExpression notLike = dsl.notLike(DSL.literal("bob"), DSL.literal("tom")); + assertEquals(BOOLEAN, notLike.type()); + assertTrue(notLike.valueOf(valueEnv()).booleanValue()); + assertEquals(String.format("\"%s\" not like \"%s\"", "bob", "tom"), notLike.toString()); + + notLike = dsl.notLike(DSL.literal("bob"), DSL.literal("bo%")); + assertFalse(notLike.valueOf(valueEnv()).booleanValue()); + assertEquals(String.format("\"%s\" not like \"%s\"", "bob", "bo%"), 
notLike.toString()); + } + /** * Todo. remove this test cases after script serilization implemented. */ diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/predicate/UnaryPredicateOperatorTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/predicate/UnaryPredicateOperatorTest.java index 374f8e0fc8..bfdc097080 100644 --- a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/predicate/UnaryPredicateOperatorTest.java +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/predicate/UnaryPredicateOperatorTest.java @@ -17,12 +17,15 @@ import static com.amazon.opendistroforelasticsearch.sql.config.TestConfig.BOOL_TYPE_MISSING_VALUE_FIELD; import static com.amazon.opendistroforelasticsearch.sql.config.TestConfig.BOOL_TYPE_NULL_VALUE_FIELD; +import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.LITERAL_FALSE; import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.LITERAL_MISSING; import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.LITERAL_NULL; +import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.LITERAL_TRUE; import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.booleanValue; import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.BOOLEAN; import static org.junit.jupiter.api.Assertions.assertEquals; +import com.amazon.opendistroforelasticsearch.sql.data.model.ExprNullValue; import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils; import com.amazon.opendistroforelasticsearch.sql.expression.DSL; import com.amazon.opendistroforelasticsearch.sql.expression.ExpressionTestBase; @@ -38,20 +41,42 @@ public void test_not(Boolean v) { FunctionExpression not = dsl.not(DSL.literal(booleanValue(v))); assertEquals(BOOLEAN, not.type()); assertEquals(!v, 
ExprValueUtils.getBooleanValue(not.valueOf(valueEnv()))); - assertEquals(String.format("not %s", v.toString()), not.toString()); + assertEquals(String.format("not(%s)", v.toString()), not.toString()); } @Test public void test_not_null() { - FunctionExpression and = dsl.not(DSL.ref(BOOL_TYPE_NULL_VALUE_FIELD, BOOLEAN)); - assertEquals(BOOLEAN, and.type()); - assertEquals(LITERAL_NULL, and.valueOf(valueEnv())); + FunctionExpression expression = dsl.not(DSL.ref(BOOL_TYPE_NULL_VALUE_FIELD, BOOLEAN)); + assertEquals(BOOLEAN, expression.type()); + assertEquals(LITERAL_NULL, expression.valueOf(valueEnv())); } @Test public void test_not_missing() { - FunctionExpression and = dsl.not(DSL.ref(BOOL_TYPE_MISSING_VALUE_FIELD, BOOLEAN)); - assertEquals(BOOLEAN, and.type()); - assertEquals(LITERAL_MISSING, and.valueOf(valueEnv())); + FunctionExpression expression = dsl.not(DSL.ref(BOOL_TYPE_MISSING_VALUE_FIELD, BOOLEAN)); + assertEquals(BOOLEAN, expression.type()); + assertEquals(LITERAL_MISSING, expression.valueOf(valueEnv())); + } + + @Test + public void is_null_predicate() { + FunctionExpression expression = dsl.isnull(DSL.literal(1)); + assertEquals(BOOLEAN, expression.type()); + assertEquals(LITERAL_FALSE, expression.valueOf(valueEnv())); + + expression = dsl.isnull(DSL.literal(ExprNullValue.of())); + assertEquals(BOOLEAN, expression.type()); + assertEquals(LITERAL_TRUE, expression.valueOf(valueEnv())); + } + + @Test + public void is_not_null_predicate() { + FunctionExpression expression = dsl.isnotnull(DSL.literal(1)); + assertEquals(BOOLEAN, expression.type()); + assertEquals(LITERAL_TRUE, expression.valueOf(valueEnv())); + + expression = dsl.isnotnull(DSL.literal(ExprNullValue.of())); + assertEquals(BOOLEAN, expression.type()); + assertEquals(LITERAL_FALSE, expression.valueOf(valueEnv())); } } \ No newline at end of file diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/planner/DefaultImplementorTest.java 
b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/planner/DefaultImplementorTest.java index 9931304c8e..427883c1d9 100644 --- a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/planner/DefaultImplementorTest.java +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/planner/DefaultImplementorTest.java @@ -19,6 +19,7 @@ import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.INTEGER; import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.STRING; import static com.amazon.opendistroforelasticsearch.sql.expression.DSL.literal; +import static com.amazon.opendistroforelasticsearch.sql.expression.DSL.named; import static com.amazon.opendistroforelasticsearch.sql.expression.DSL.ref; import static com.amazon.opendistroforelasticsearch.sql.planner.logical.LogicalPlanDSL.aggregation; import static com.amazon.opendistroforelasticsearch.sql.planner.logical.LogicalPlanDSL.eval; @@ -36,6 +37,7 @@ import com.amazon.opendistroforelasticsearch.sql.data.model.ExprBooleanValue; import com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType; import com.amazon.opendistroforelasticsearch.sql.expression.Expression; +import com.amazon.opendistroforelasticsearch.sql.expression.NamedExpression; import com.amazon.opendistroforelasticsearch.sql.expression.ReferenceExpression; import com.amazon.opendistroforelasticsearch.sql.expression.aggregation.Aggregator; import com.amazon.opendistroforelasticsearch.sql.expression.aggregation.AvgAggregator; @@ -59,7 +61,7 @@ class DefaultImplementorTest { @Test public void visitShouldReturnDefaultPhysicalOperator() { String indexName = "test"; - ReferenceExpression include = ref("age", INTEGER); + NamedExpression include = named("age", ref("age", INTEGER)); ReferenceExpression exclude = ref("name", STRING); ReferenceExpression dedupeField = ref("name", STRING); Expression filterExpr = literal(ExprBooleanValue.of(true)); diff --git 
a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/planner/PlannerTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/planner/PlannerTest.java index eddd35493e..33e14cf902 100644 --- a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/planner/PlannerTest.java +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/planner/PlannerTest.java @@ -98,15 +98,15 @@ public void plan_a_query_without_relation_involved() { assertPhysicalPlan( PhysicalPlanDSL.project( PhysicalPlanDSL.values(emptyList()), - DSL.literal(123), - DSL.literal("hello"), - DSL.literal(false) + DSL.named("123", DSL.literal(123)), + DSL.named("hello", DSL.literal("hello")), + DSL.named("false", DSL.literal(false)) ), LogicalPlanDSL.project( LogicalPlanDSL.values(emptyList()), - DSL.literal(123), - DSL.literal("hello"), - DSL.literal(false) + DSL.named("123", DSL.literal(123)), + DSL.named("hello", DSL.literal("hello")), + DSL.named("false", DSL.literal(false)) ) ); } diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/planner/logical/LogicalDedupeTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/planner/logical/LogicalDedupeTest.java index ffb8becb04..ce856d3cbe 100644 --- a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/planner/logical/LogicalDedupeTest.java +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/planner/logical/LogicalDedupeTest.java @@ -28,8 +28,16 @@ import com.amazon.opendistroforelasticsearch.sql.analysis.AnalyzerTestBase; import com.amazon.opendistroforelasticsearch.sql.expression.DSL; +import com.amazon.opendistroforelasticsearch.sql.expression.config.ExpressionConfig; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.context.annotation.Configuration; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit.jupiter.SpringExtension; 
+@Configuration +@ExtendWith(SpringExtension.class) +@ContextConfiguration(classes = {ExpressionConfig.class, AnalyzerTestBase.class}) class LogicalDedupeTest extends AnalyzerTestBase { @Test public void analyze_dedup_with_two_field_with_default_option() { diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/planner/logical/LogicalEvalTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/planner/logical/LogicalEvalTest.java index cb985f8d6b..df0c7aa0cf 100644 --- a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/planner/logical/LogicalEvalTest.java +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/planner/logical/LogicalEvalTest.java @@ -21,11 +21,18 @@ import com.amazon.opendistroforelasticsearch.sql.analysis.AnalyzerTestBase; import com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL; import com.amazon.opendistroforelasticsearch.sql.expression.DSL; +import com.amazon.opendistroforelasticsearch.sql.expression.config.ExpressionConfig; import org.apache.commons.lang3.tuple.ImmutablePair; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.context.annotation.Configuration; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit.jupiter.SpringExtension; +@Configuration +@ExtendWith(SpringExtension.class) +@ContextConfiguration(classes = {ExpressionConfig.class, AnalyzerTestBase.class}) @ExtendWith(MockitoExtension.class) public class LogicalEvalTest extends AnalyzerTestBase { diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/planner/logical/LogicalPlanNodeVisitorTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/planner/logical/LogicalPlanNodeVisitorTest.java index 4f9dcb2107..fd360850c6 100644 --- 
a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/planner/logical/LogicalPlanNodeVisitorTest.java +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/planner/logical/LogicalPlanNodeVisitorTest.java @@ -15,6 +15,7 @@ package com.amazon.opendistroforelasticsearch.sql.planner.logical; +import static com.amazon.opendistroforelasticsearch.sql.expression.DSL.named; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNull; @@ -77,7 +78,7 @@ public void testAbstractPlanNodeVisitorShouldReturnNull() { assertNull(rename.accept(new LogicalPlanNodeVisitor() { }, null)); - LogicalPlan project = LogicalPlanDSL.project(relation, ref); + LogicalPlan project = LogicalPlanDSL.project(relation, named("ref", ref)); assertNull(project.accept(new LogicalPlanNodeVisitor() { }, null)); diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/planner/logical/LogicalSortTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/planner/logical/LogicalSortTest.java index a194fa62e6..15bb012c6f 100644 --- a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/planner/logical/LogicalSortTest.java +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/planner/logical/LogicalSortTest.java @@ -31,9 +31,17 @@ import com.amazon.opendistroforelasticsearch.sql.analysis.AnalyzerTestBase; import com.amazon.opendistroforelasticsearch.sql.ast.tree.Sort.SortOption; import com.amazon.opendistroforelasticsearch.sql.expression.DSL; +import com.amazon.opendistroforelasticsearch.sql.expression.config.ExpressionConfig; import org.apache.commons.lang3.tuple.ImmutablePair; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.context.annotation.Configuration; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit.jupiter.SpringExtension; +@Configuration 
+@ExtendWith(SpringExtension.class) +@ContextConfiguration(classes = {ExpressionConfig.class, AnalyzerTestBase.class}) class LogicalSortTest extends AnalyzerTestBase { @Test public void analyze_sort_with_two_field_with_default_option() { diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/planner/physical/FilterOperatorTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/planner/physical/FilterOperatorTest.java index b95e039917..b831d3ba53 100644 --- a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/planner/physical/FilterOperatorTest.java +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/planner/physical/FilterOperatorTest.java @@ -15,19 +15,31 @@ package com.amazon.opendistroforelasticsearch.sql.planner.physical; +import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.LITERAL_MISSING; +import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.LITERAL_NULL; import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.INTEGER; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.Mockito.when; +import com.amazon.opendistroforelasticsearch.sql.data.model.ExprTupleValue; import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValue; import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils; import com.amazon.opendistroforelasticsearch.sql.expression.DSL; import com.google.common.collect.ImmutableMap; +import java.util.LinkedHashMap; import java.util.List; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +@ExtendWith(MockitoExtension.class) class FilterOperatorTest extends PhysicalPlanTestBase { + 
@Mock + private PhysicalPlan inputPlan; @Test public void filterTest() { @@ -40,4 +52,30 @@ public void filterTest() { .of("ip", "209.160.24.63", "action", "GET", "response", 404, "referer", "www.amazon.com")))); } + + @Test + public void nullValueShouldBeenIgnored() { + LinkedHashMap value = new LinkedHashMap<>(); + value.put("response", LITERAL_NULL); + when(inputPlan.hasNext()).thenReturn(true, false); + when(inputPlan.next()).thenReturn(new ExprTupleValue(value)); + + FilterOperator plan = new FilterOperator(inputPlan, + dsl.equal(DSL.ref("response", INTEGER), DSL.literal(404))); + List result = execute(plan); + assertEquals(0, result.size()); + } + + @Test + public void missingValueShouldBeenIgnored() { + LinkedHashMap value = new LinkedHashMap<>(); + value.put("response", LITERAL_MISSING); + when(inputPlan.hasNext()).thenReturn(true, false); + when(inputPlan.next()).thenReturn(new ExprTupleValue(value)); + + FilterOperator plan = new FilterOperator(inputPlan, + dsl.equal(DSL.ref("response", INTEGER), DSL.literal(404))); + List result = execute(plan); + assertEquals(0, result.size()); + } } \ No newline at end of file diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/planner/physical/PhysicalPlanNodeVisitorTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/planner/physical/PhysicalPlanNodeVisitorTest.java index 4a6de485cc..2feaf09f7f 100644 --- a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/planner/physical/PhysicalPlanNodeVisitorTest.java +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/planner/physical/PhysicalPlanNodeVisitorTest.java @@ -17,6 +17,7 @@ import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.DOUBLE; import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.INTEGER; +import static com.amazon.opendistroforelasticsearch.sql.expression.DSL.named; import static org.junit.jupiter.api.Assertions.assertEquals; import static 
org.junit.jupiter.api.Assertions.assertNull; @@ -56,7 +57,7 @@ public void print_physical_plan() { ImmutableList.of(dsl.avg(DSL.ref("response", INTEGER))), ImmutableList.of()), ImmutableMap.of(DSL.ref("ivalue", INTEGER), DSL.ref("avg(response)", DOUBLE))), - ref), + named("ref", ref)), ref); PhysicalPlanPrinter printer = new PhysicalPlanPrinter(); @@ -90,7 +91,7 @@ public void test_PhysicalPlanVisitor_should_return_null() { assertNull(rename.accept(new PhysicalPlanNodeVisitor() { }, null)); - PhysicalPlan project = PhysicalPlanDSL.project(plan, ref); + PhysicalPlan project = PhysicalPlanDSL.project(plan, named("ref", ref)); assertNull(project.accept(new PhysicalPlanNodeVisitor() { }, null)); diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/planner/physical/ProjectOperatorTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/planner/physical/ProjectOperatorTest.java index 09d97b19d8..5baf541f62 100644 --- a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/planner/physical/ProjectOperatorTest.java +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/planner/physical/ProjectOperatorTest.java @@ -15,17 +15,22 @@ package com.amazon.opendistroforelasticsearch.sql.planner.physical; +import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.LITERAL_MISSING; +import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.stringValue; import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.INTEGER; import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.STRING; import static com.amazon.opendistroforelasticsearch.sql.planner.physical.PhysicalPlanDSL.project; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.hasItems; import static org.hamcrest.Matchers.iterableWithSize; import static 
org.mockito.Mockito.when; +import com.amazon.opendistroforelasticsearch.sql.data.model.ExprTupleValue; import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValue; import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils; +import com.amazon.opendistroforelasticsearch.sql.executor.ExecutionEngine; import com.amazon.opendistroforelasticsearch.sql.expression.DSL; import com.google.common.collect.ImmutableMap; import java.util.List; @@ -45,7 +50,7 @@ public void project_one_field() { when(inputPlan.hasNext()).thenReturn(true, false); when(inputPlan.next()) .thenReturn(ExprValueUtils.tupleValue(ImmutableMap.of("action", "GET", "response", 200))); - PhysicalPlan plan = project(inputPlan, DSL.ref("action", STRING)); + PhysicalPlan plan = project(inputPlan, DSL.named("action", DSL.ref("action", STRING))); List result = execute(plan); assertThat( @@ -60,7 +65,9 @@ public void project_two_field_follow_the_project_order() { when(inputPlan.hasNext()).thenReturn(true, false); when(inputPlan.next()) .thenReturn(ExprValueUtils.tupleValue(ImmutableMap.of("action", "GET", "response", 200))); - PhysicalPlan plan = project(inputPlan, DSL.ref("response", INTEGER), DSL.ref("action", STRING)); + PhysicalPlan plan = project(inputPlan, + DSL.named("response", DSL.ref("response", INTEGER)), + DSL.named("action", DSL.ref("action", STRING))); List result = execute(plan); assertThat( @@ -72,12 +79,14 @@ public void project_two_field_follow_the_project_order() { } @Test - public void project_ignore_missing_value() { + public void project_keep_missing_value() { when(inputPlan.hasNext()).thenReturn(true, true, false); when(inputPlan.next()) .thenReturn(ExprValueUtils.tupleValue(ImmutableMap.of("action", "GET", "response", 200))) .thenReturn(ExprValueUtils.tupleValue(ImmutableMap.of("action", "POST"))); - PhysicalPlan plan = project(inputPlan, DSL.ref("response", INTEGER), DSL.ref("action", STRING)); + PhysicalPlan plan = project(inputPlan, + DSL.named("response", 
DSL.ref("response", INTEGER)), + DSL.named("action", DSL.ref("action", STRING))); List result = execute(plan); assertThat( @@ -86,6 +95,20 @@ public void project_ignore_missing_value() { iterableWithSize(2), hasItems( ExprValueUtils.tupleValue(ImmutableMap.of("response", 200, "action", "GET")), - ExprValueUtils.tupleValue(ImmutableMap.of("action", "POST"))))); + ExprTupleValue.fromExprValueMap(ImmutableMap.of("response", + LITERAL_MISSING, + "action", stringValue("POST")))))); + } + + @Test + public void project_schema() { + PhysicalPlan project = project(inputPlan, + DSL.named("response", DSL.ref("response", INTEGER)), + DSL.named("action", DSL.ref("action", STRING))); + + assertThat(project.schema().getColumns(), contains( + new ExecutionEngine.Schema.Column("response", null, INTEGER), + new ExecutionEngine.Schema.Column("action", null, STRING) + )); } } diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/planner/physical/RemoveOperatorTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/planner/physical/RemoveOperatorTest.java index 9d6c9680d3..4962b4c320 100644 --- a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/planner/physical/RemoveOperatorTest.java +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/planner/physical/RemoveOperatorTest.java @@ -22,6 +22,8 @@ import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.hasItems; import static org.hamcrest.Matchers.iterableWithSize; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; import static org.mockito.Mockito.when; import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValue; @@ -117,4 +119,15 @@ public void remove_nothing_with_none_tuple_value() { assertThat(result, allOf(iterableWithSize(1), hasItems(ExprValueUtils.integerValue(1)))); } + + @Test + public void invalid_to_retrieve_schema_from_remove() { + PhysicalPlan plan = 
remove(inputPlan, DSL.ref("response", STRING), DSL.ref("referer", STRING)); + IllegalStateException exception = + assertThrows(IllegalStateException.class, () -> plan.schema()); + assertEquals( + "[BUG] schema can been only applied to ProjectOperator, " + + "instead of RemoveOperator(input=inputPlan, removeList=[response, referer])", + exception.getMessage()); + } } diff --git a/docs/category.json b/docs/category.json index 595121a7d4..684ff35873 100644 --- a/docs/category.json +++ b/docs/category.json @@ -17,6 +17,7 @@ "sql_cli": [ "user/dql/expressions.rst", "user/general/identifiers.rst", + "user/general/values.rst", "user/dql/functions.rst", "user/beyond/partiql.rst" ] diff --git a/docs/experiment/ppl/admin/settings.rst b/docs/experiment/ppl/admin/settings.rst index afcf1428de..17b0eb70f3 100644 --- a/docs/experiment/ppl/admin/settings.rst +++ b/docs/experiment/ppl/admin/settings.rst @@ -16,6 +16,76 @@ Introduction When Elasticsearch bootstraps, PPL plugin will register a few settings in Elasticsearch cluster settings. Most of the settings are able to change dynamically so you can control the behavior of PPL plugin without need to bounce your cluster. +opendistro.ppl.enabled +====================== + +Description +----------- + +You can disable SQL plugin to reject all coming requests. + +1. The default value is true. +2. This setting is node scope. +3. This setting can be updated dynamically. + +Notes. Calls to _opendistro/_ppl include index names in the request body, so they have the same access policy considerations as the bulk, mget, and msearch operations. if rest.action.multi.allow_explicit_index set to false, PPL plugin will be disabled. + +Example 1 +--------- + +You can update the setting with a new value like this. + +PPL query:: + + sh$ curl -sS -H 'Content-Type: application/json' \ + ... -X PUT localhost:9200/_cluster/settings \ + ... 
-d '{"transient" : {"opendistro.ppl.enabled" : "false"}}' + { + "acknowledged": true, + "persistent": {}, + "transient": { + "opendistro": { + "ppl": { + "enabled": "false" + } + } + } + } + +Example 2 +--------- + +Query result after the setting updated is like: + +PPL query:: + + sh$ curl -sS -H 'Content-Type: application/json' \ + ... -X POST localhost:9200/_opendistro/_ppl \ + { + "error": { + "reason": "Invalid Query", + "details": "Either opendistro.ppl.enabled or rest.action.multi.allow_explicit_index setting is false", + "type": "IllegalAccessException" + }, + "status": 400 + } + +Example 3 +--------- + +You can reset the setting to default value like this. + +PPL query:: + + sh$ curl -sS -H 'Content-Type: application/json' \ + ... -X PUT localhost:9200/_cluster/settings \ + ... -d '{"transient" : {"opendistro.ppl.enabled" : null}}' + { + "acknowledged": true, + "persistent": {}, + "transient": {} + } + opendistro.ppl.query.memory_limit ================================= @@ -46,3 +116,33 @@ PPL query:: "transient": {} } +opendistro.query.size_limit +================================= + +Description +----------- + +The size configure the maximum amount of documents to be pull from Elasticsearch. The default value is: 200 + +Notes: This setting will impact the correctness of the aggregation operation, for example, there are 1000 docs in the index, by default, only 200 docs will be extract from index and do aggregation. + +Example +------- + +PPL query:: + + sh$ curl -sS -H 'Content-Type: application/json' \ + ... -X PUT localhost:9200/_cluster/settings \ + ... 
-d '{"persistent" : {"opendistro.query.size_limit" : "1000"}}' + { + "acknowledged": true, + "persistent": { + "opendistro": { + "query": { + "size_limit": "1000" + } + } + }, + "transient": {} + } + diff --git a/docs/experiment/ppl/cmd/search.rst b/docs/experiment/ppl/cmd/search.rst index eacc6eb966..d1a943f24f 100644 --- a/docs/experiment/ppl/cmd/search.rst +++ b/docs/experiment/ppl/cmd/search.rst @@ -32,14 +32,14 @@ PPL query:: od> source=accounts; fetched rows / total rows = 4/4 - +------------------+-----------+-------------+------------+-------+----------+----------------------+------------+-----------------------+--------+---------+ - | account_number | balance | firstname | lastname | age | gender | address | employer | email | city | state | - |------------------+-----------+-------------+------------+-------+----------+----------------------+------------+-----------------------+--------+---------| - | 1 | 39225 | Amber | Duke | 32 | M | 880 Holmes Lane | Pyrami | amberduke@pyrami.com | Brogan | IL | - | 6 | 5686 | Hattie | Bond | 36 | M | 671 Bristol Street | Netagy | hattiebond@netagy.com | Dante | TN | - | 13 | 32838 | Nanette | Bates | 28 | F | 789 Madison Street | Quility | null | Nogal | VA | - | 18 | 4180 | Dale | Adams | 33 | M | 467 Hutchinson Court | null | daleadams@boink.com | Orick | MD | - +------------------+-----------+-------------+------------+-------+----------+----------------------+------------+-----------------------+--------+---------+ + +------------------+-------------+----------------------+-----------+----------+--------+------------+---------+-------+-----------------------+------------+ + | account_number | firstname | address | balance | gender | city | employer | state | age | email | lastname | + |------------------+-------------+----------------------+-----------+----------+--------+------------+---------+-------+-----------------------+------------| + | 1 | Amber | 880 Holmes Lane | 39225 | M | Brogan | Pyrami | IL | 32 | 
amberduke@pyrami.com | Duke | + | 6 | Hattie | 671 Bristol Street | 5686 | M | Dante | Netagy | TN | 36 | hattiebond@netagy.com | Bond | + | 13 | Nanette | 789 Madison Street | 32838 | F | Nogal | Quility | VA | 28 | null | Bates | + | 18 | Dale | 467 Hutchinson Court | 4180 | M | Orick | null | MD | 33 | daleadams@boink.com | Adams | + +------------------+-------------+----------------------+-----------+----------+--------+------------+---------+-------+-----------------------+------------+ Example 2: Fetch data with condition ==================================== @@ -50,10 +50,10 @@ PPL query:: od> source=accounts account_number=1 or gender="F"; fetched rows / total rows = 2/2 - +------------------+-----------+-------------+------------+-------+----------+--------------------+------------+----------------------+--------+---------+ - | account_number | balance | firstname | lastname | age | gender | address | employer | email | city | state | - |------------------+-----------+-------------+------------+-------+----------+--------------------+------------+----------------------+--------+---------| - | 1 | 39225 | Amber | Duke | 32 | M | 880 Holmes Lane | Pyrami | amberduke@pyrami.com | Brogan | IL | - | 13 | 32838 | Nanette | Bates | 28 | F | 789 Madison Street | Quility | null | Nogal | VA | - +------------------+-----------+-------------+------------+-------+----------+--------------------+------------+----------------------+--------+---------+ + +------------------+-------------+--------------------+-----------+----------+--------+------------+---------+-------+----------------------+------------+ + | account_number | firstname | address | balance | gender | city | employer | state | age | email | lastname | + |------------------+-------------+--------------------+-----------+----------+--------+------------+---------+-------+----------------------+------------| + | 1 | Amber | 880 Holmes Lane | 39225 | M | Brogan | Pyrami | IL | 32 | amberduke@pyrami.com | Duke 
| + | 13 | Nanette | 789 Madison Street | 32838 | F | Nogal | Quility | VA | 28 | null | Bates | + +------------------+-------------+--------------------+-----------+----------+--------+------------+---------+-------+----------------------+------------+ diff --git a/docs/experiment/ppl/interfaces/protocol.rst b/docs/experiment/ppl/interfaces/protocol.rst index 05ed849fca..0623823c57 100644 --- a/docs/experiment/ppl/interfaces/protocol.rst +++ b/docs/experiment/ppl/interfaces/protocol.rst @@ -127,7 +127,11 @@ PPL query:: ... -X POST localhost:9200/_opendistro/_ppl \ ... -d '{"query" : "source=unknown | fields firstname, lastname"}' { - "reason": "no such index [unknown]", - "type": "IndexNotFoundException" + "error": { + "reason": "Error occurred in Elasticsearch engine: no such index [unknown]", + "details": "org.elasticsearch.index.IndexNotFoundException: no such index [unknown]\nFor more details, please send request for Json format to see the raw response from elasticsearch engine.", + "type": "IndexNotFoundException" + }, + "status": 404 } diff --git a/docs/user/dql/expressions.rst b/docs/user/dql/expressions.rst index dca1c6463b..8b2fc88dce 100644 --- a/docs/user/dql/expressions.rst +++ b/docs/user/dql/expressions.rst @@ -37,7 +37,7 @@ Here is an example for different type of literals:: od> SELECT 123, 'hello', false, -4.567, DATE '2020-07-07', TIME '01:01:01', TIMESTAMP '2020-07-07 01:01:01'; fetched rows / total rows = 1/1 +-------+-----------+---------+----------+---------------------+-------------------+-----------------------------------+ - | 123 | "hello" | false | -4.567 | DATE '2020-07-07' | TIME '01:01:01' | TIMESTAMP '2020-07-07 01:01:01' | + | 123 | 'hello' | false | -4.567 | DATE '2020-07-07' | TIME '01:01:01' | TIMESTAMP '2020-07-07 01:01:01' | |-------+-----------+---------+----------+---------------------+-------------------+-----------------------------------| | 123 | hello | False | -4.567 | 2020-07-07 | 01:01:01 | 2020-07-07 01:01:01 | 
+-------+-----------+---------+----------+---------------------+-------------------+-----------------------------------+ @@ -86,12 +86,88 @@ Here is an example for different type of arithmetic expressions:: od> SELECT 1 + 2, (9 - 1) % 3, 2 * 4 / 3; fetched rows / total rows = 1/1 - +---------+-------------+-------------+ - | 1 + 2 | 9 - 1 % 3 | 2 * 4 / 3 | - |---------+-------------+-------------| - | 3 | 2 | 2 | - +---------+-------------+-------------+ + +---------+---------------+-------------+ + | 1 + 2 | (9 - 1) % 3 | 2 * 4 / 3 | + |---------+---------------+-------------| + | 3 | 2 | 2 | + +---------+---------------+-------------+ +Comparison Operators +================================== + +Description +----------- + +Comparison operators are used to compare values. The MISSING and NULL value comparison has following the rule. MISSING value only equal to MISSING value and less than all the other values. NULL value equals to NULL value, large than MISSING value, but less than all the other values. 
+ +Operators +````````` + ++----------------+--------------------------------+ +| name | description | ++----------------+--------------------------------+ +| > | Greater than operator | ++----------------+--------------------------------+ +| >= | Greater than or equal operator | ++----------------+--------------------------------+ +| < | Less than operator | ++----------------+--------------------------------+ +| != | Not equal operator | ++----------------+--------------------------------+ +| <= | Less than or equal operator | ++----------------+--------------------------------+ +| = | Equal operator | ++----------------+--------------------------------+ +| LIKE | Simple pattern matching | ++----------------+--------------------------------+ +| IS NULL | NULL value test | ++----------------+--------------------------------+ +| IS NOT NULL | NOT NULL value test | ++----------------+--------------------------------+ +| IS MISSING | MISSING value test | ++----------------+--------------------------------+ +| IS NOT MISSING | NOT MISSING value test | ++----------------+--------------------------------+ + + +Basic Comparison Operator +------------------------- + +Here is an example for different type of comparison operators:: + + od> SELECT 2 > 1, 2 >= 1, 2 < 1, 2 != 1, 2 <= 1, 2 = 1; + fetched rows / total rows = 1/1 + +---------+----------+---------+----------+----------+---------+ + | 2 > 1 | 2 >= 1 | 2 < 1 | 2 != 1 | 2 <= 1 | 2 = 1 | + |---------+----------+---------+----------+----------+---------| + | True | True | False | True | False | False | + +---------+----------+---------+----------+----------+---------+ + +LIKE +---- + +expr LIKE pattern. 
The expr is string value, pattern is supports literal text, a percent ( % ) character for a wildcard, and an underscore ( _ ) character for a single character match:: + + od> SELECT 'axyzb' LIKE 'a%b', 'acb' LIKE 'a_b', 'axyzb' NOT LIKE 'a%b', 'acb' NOT LIKE 'a_b'; + fetched rows / total rows = 1/1 + +----------------------+--------------------+--------------------------+------------------------+ + | 'axyzb' LIKE 'a%b' | 'acb' LIKE 'a_b' | 'axyzb' NOT LIKE 'a%b' | 'acb' NOT LIKE 'a_b' | + |----------------------+--------------------+--------------------------+------------------------| + | True | True | False | False | + +----------------------+--------------------+--------------------------+------------------------+ + +NULL value test +--------------- + +Here is an example for null value test:: + + od> SELECT 0 IS NULL, 0 IS NOT NULL, NULL IS NULL, NULL IS NOT NULL; + fetched rows / total rows = 1/1 + +-------------+-----------------+----------------+--------------------+ + | 0 IS NULL | 0 IS NOT NULL | NULL IS NULL | NULL IS NOT NULL | + |-------------+-----------------+----------------+--------------------| + | False | True | True | False | + +-------------+-----------------+----------------+--------------------+ Function Call ============= diff --git a/docs/user/dql/functions.rst b/docs/user/dql/functions.rst index a8d97fde7c..85ed109425 100644 --- a/docs/user/dql/functions.rst +++ b/docs/user/dql/functions.rst @@ -44,7 +44,7 @@ Example:: od> SELECT ACOS(0) fetched rows / total rows = 1/1 +--------------------+ - | acos(0) | + | ACOS(0) | |--------------------| | 1.5707963267948966 | +--------------------+ @@ -89,7 +89,7 @@ Example:: od> SELECT ASIN(0) fetched rows / total rows = 1/1 +-----------+ - | asin(0) | + | ASIN(0) | |-----------| | 0 | +-----------+ @@ -112,7 +112,7 @@ Example:: od> SELECT ATAN(2), ATAN(2, 3) fetched rows / total rows = 1/1 +--------------------+--------------------+ - | atan(2) | atan(2, 3) | + | ATAN(2) | ATAN(2, 3) | 
|--------------------+--------------------| | 1.1071487177940904 | 0.5880026035475675 | +--------------------+--------------------+ @@ -135,7 +135,7 @@ Example:: od> SELECT ATAN2(2, 3) fetched rows / total rows = 1/1 +--------------------+ - | atan2(2, 3) | + | ATAN2(2, 3) | |--------------------| | 0.5880026035475675 | +--------------------+ @@ -205,7 +205,7 @@ Example:: od> SELECT CONV('12', 10, 16), CONV('2C', 16, 10), CONV(12, 10, 2), CONV(1111, 2, 10) fetched rows / total rows = 1/1 +----------------------+----------------------+-------------------+---------------------+ - | conv("12", 10, 16) | conv("2C", 16, 10) | conv(12, 10, 2) | conv(1111, 2, 10) | + | CONV('12', 10, 16) | CONV('2C', 16, 10) | CONV(12, 10, 2) | CONV(1111, 2, 10) | |----------------------+----------------------+-------------------+---------------------| | c | 44 | 1100 | 15 | +----------------------+----------------------+-------------------+---------------------+ @@ -227,7 +227,7 @@ Example:: od> SELECT COS(0) fetched rows / total rows = 1/1 +----------+ - | cos(0) | + | COS(0) | |----------| | 1 | +----------+ @@ -261,7 +261,7 @@ Example:: od> SELECT COT(1) fetched rows / total rows = 1/1 +--------------------+ - | cot(1) | + | COT(1) | |--------------------| | 0.6420926159343306 | +--------------------+ @@ -284,7 +284,7 @@ Example:: od> SELECT CRC32('MySQL') fetched rows / total rows = 1/1 +------------------+ - | crc32("MySQL") | + | CRC32('MySQL') | |------------------| | 3259397556 | +------------------+ @@ -352,7 +352,7 @@ Example:: od> SELECT DEGREES(1.57) fetched rows / total rows = 1/1 +-------------------+ - | degrees(1.57) | + | DEGREES(1.57) | |-------------------| | 89.95437383553924 | +-------------------+ @@ -384,7 +384,7 @@ Example:: od> SELECT E() fetched rows / total rows = 1/1 +-------------------+ - | e() | + | E() | |-------------------| | 2.718281828459045 | +-------------------+ @@ -586,7 +586,7 @@ Example:: od> SELECT MOD(3, 2), MOD(3.1, 2) fetched rows / total 
rows = 1/1 +-------------+---------------+ - | mod(3, 2) | mod(3.1, 2) | + | MOD(3, 2) | MOD(3.1, 2) | |-------------+---------------| | 1 | 1.1 | +-------------+---------------+ @@ -651,7 +651,7 @@ Example:: od> SELECT PI() fetched rows / total rows = 1/1 +-------------------+ - | pi() | + | PI() | |-------------------| | 3.141592653589793 | +-------------------+ @@ -674,7 +674,7 @@ Example:: od> SELECT POW(3, 2), POW(-3, 2), POW(3, -2) fetched rows / total rows = 1/1 +-------------+--------------+--------------------+ - | pow(3, 2) | pow(-3, 2) | pow(3, -2) | + | POW(3, 2) | POW(-3, 2) | POW(3, -2) | |-------------+--------------+--------------------| | 9 | 9 | 0.1111111111111111 | +-------------+--------------+--------------------+ @@ -697,7 +697,7 @@ Example:: od> SELECT POWER(3, 2), POWER(-3, 2), POWER(3, -2) fetched rows / total rows = 1/1 +---------------+----------------+--------------------+ - | power(3, 2) | power(-3, 2) | power(3, -2) | + | POWER(3, 2) | POWER(-3, 2) | POWER(3, -2) | |---------------+----------------+--------------------| | 9 | 9 | 0.1111111111111111 | +---------------+----------------+--------------------+ @@ -720,7 +720,7 @@ Example:: od> SELECT RADIANS(90) fetched rows / total rows = 1/1 +--------------------+ - | radians(90) | + | RADIANS(90) | |--------------------| | 1.5707963267948966 | +--------------------+ @@ -743,7 +743,7 @@ Example:: od> SELECT RAND(3) fetched rows / total rows = 1/1 +------------+ - | rand(3) | + | RAND(3) | |------------| | 0.73105735 | +------------+ @@ -802,7 +802,7 @@ Example:: od> SELECT ROUND(12.34), ROUND(12.34, 1), ROUND(12.34, -1), ROUND(12, 1) fetched rows / total rows = 1/1 +----------------+-------------------+--------------------+----------------+ - | round(12.34) | round(12.34, 1) | round(12.34, -1) | round(12, 1) | + | ROUND(12.34) | ROUND(12.34, 1) | ROUND(12.34, -1) | ROUND(12, 1) | |----------------+-------------------+--------------------+----------------| | 12 | 12.3 | 10 | 12 | 
+----------------+-------------------+--------------------+----------------+ @@ -836,7 +836,7 @@ Example:: od> SELECT SIGN(1), SIGN(0), SIGN(-1.1) fetched rows / total rows = 1/1 +-----------+-----------+--------------+ - | sign(1) | sign(0) | sign(-1.1) | + | SIGN(1) | SIGN(0) | SIGN(-1.1) | |-----------+-----------+--------------| | 1 | 0 | -1 | +-----------+-----------+--------------+ @@ -870,7 +870,7 @@ Example:: od> SELECT SIN(0) fetched rows / total rows = 1/1 +----------+ - | sin(0) | + | SIN(0) | |----------| | 0 | +----------+ @@ -907,7 +907,7 @@ Example:: od> SELECT SQRT(4), SQRT(4.41) fetched rows / total rows = 1/1 +-----------+--------------+ - | sqrt(4) | sqrt(4.41) | + | SQRT(4) | SQRT(4.41) | |-----------+--------------| | 2 | 2.1 | +-----------+--------------+ @@ -952,7 +952,7 @@ Example:: od> SELECT TAN(0) fetched rows / total rows = 1/1 +----------+ - | tan(0) | + | TAN(0) | |----------| | 0 | +----------+ @@ -999,7 +999,7 @@ Example:: fetched rows / total rows = 1/1 +----------------------+-----------------------+-------------------+ - | truncate(56.78, 1) | truncate(56.78, -1) | truncate(56, 1) | + | TRUNCATE(56.78, 1) | TRUNCATE(56.78, -1) | TRUNCATE(56, 1) | |----------------------+-----------------------+-------------------| | 56.7 | 50 | 56 | +----------------------+-----------------------+-------------------+ diff --git a/docs/user/general/identifiers.rst b/docs/user/general/identifiers.rst index 5dbee26131..16b6a105c7 100644 --- a/docs/user/general/identifiers.rst +++ b/docs/user/general/identifiers.rst @@ -40,14 +40,14 @@ Here are examples for using index pattern directly without quotes:: od> SELECT * FROM *cc*nt*; fetched rows / total rows = 4/4 - +------------------+-----------+-------------+------------+-------+----------+----------------------+------------+-----------------------+--------+---------+ - | account_number | balance | firstname | lastname | age | gender | address | employer | email | city | state | - 
|------------------+-----------+-------------+------------+-------+----------+----------------------+------------+-----------------------+--------+---------| - | 1 | 39225 | Amber | Duke | 32 | M | 880 Holmes Lane | Pyrami | amberduke@pyrami.com | Brogan | IL | - | 6 | 5686 | Hattie | Bond | 36 | M | 671 Bristol Street | Netagy | hattiebond@netagy.com | Dante | TN | - | 13 | 32838 | Nanette | Bates | 28 | F | 789 Madison Street | Quility | null | Nogal | VA | - | 18 | 4180 | Dale | Adams | 33 | M | 467 Hutchinson Court | null | daleadams@boink.com | Orick | MD | - +------------------+-----------+-------------+------------+-------+----------+----------------------+------------+-----------------------+--------+---------+ + +------------------+-------------+----------------------+-----------+----------+--------+------------+---------+-------+-----------------------+------------+ + | account_number | firstname | address | balance | gender | city | employer | state | age | email | lastname | + |------------------+-------------+----------------------+-----------+----------+--------+------------+---------+-------+-----------------------+------------| + | 1 | Amber | 880 Holmes Lane | 39225 | M | Brogan | Pyrami | IL | 32 | amberduke@pyrami.com | Duke | + | 6 | Hattie | 671 Bristol Street | 5686 | M | Dante | Netagy | TN | 36 | hattiebond@netagy.com | Bond | + | 13 | Nanette | 789 Madison Street | 32838 | F | Nogal | Quility | VA | 28 | null | Bates | + | 18 | Dale | 467 Hutchinson Court | 4180 | M | Orick | null | MD | 33 | daleadams@boink.com | Adams | + +------------------+-------------+----------------------+-----------+----------+--------+------------+---------+-------+-----------------------+------------+ Delimited Identifiers @@ -76,14 +76,14 @@ Here are examples for quoting an index name by back ticks:: od> SELECT * FROM `accounts`; fetched rows / total rows = 4/4 - 
+------------------+-----------+-------------+------------+-------+----------+----------------------+------------+-----------------------+--------+---------+ - | account_number | balance | firstname | lastname | age | gender | address | employer | email | city | state | - |------------------+-----------+-------------+------------+-------+----------+----------------------+------------+-----------------------+--------+---------| - | 1 | 39225 | Amber | Duke | 32 | M | 880 Holmes Lane | Pyrami | amberduke@pyrami.com | Brogan | IL | - | 6 | 5686 | Hattie | Bond | 36 | M | 671 Bristol Street | Netagy | hattiebond@netagy.com | Dante | TN | - | 13 | 32838 | Nanette | Bates | 28 | F | 789 Madison Street | Quility | null | Nogal | VA | - | 18 | 4180 | Dale | Adams | 33 | M | 467 Hutchinson Court | null | daleadams@boink.com | Orick | MD | - +------------------+-----------+-------------+------------+-------+----------+----------------------+------------+-----------------------+--------+---------+ + +------------------+-------------+----------------------+-----------+----------+--------+------------+---------+-------+-----------------------+------------+ + | account_number | firstname | address | balance | gender | city | employer | state | age | email | lastname | + |------------------+-------------+----------------------+-----------+----------+--------+------------+---------+-------+-----------------------+------------| + | 1 | Amber | 880 Holmes Lane | 39225 | M | Brogan | Pyrami | IL | 32 | amberduke@pyrami.com | Duke | + | 6 | Hattie | 671 Bristol Street | 5686 | M | Dante | Netagy | TN | 36 | hattiebond@netagy.com | Bond | + | 13 | Nanette | 789 Madison Street | 32838 | F | Nogal | Quility | VA | 28 | null | Bates | + | 18 | Dale | 467 Hutchinson Court | 4180 | M | Orick | null | MD | 33 | daleadams@boink.com | Adams | + 
+------------------+-------------+----------------------+-----------+----------+--------+------------+---------+-------+-----------------------+------------+ Case Sensitivity diff --git a/docs/user/general/values.rst b/docs/user/general/values.rst new file mode 100644 index 0000000000..fead02a136 --- /dev/null +++ b/docs/user/general/values.rst @@ -0,0 +1,91 @@ +========== +Data Types +========== + +.. rubric:: Table of contents + +.. contents:: + :local: + :depth: 2 + + +NULL and MISSING Values +======================= +ODFE SQL has two ways to represent missing information. (1) The presence of the field with a NULL for its value. and (2) the absence of the filed. + +Please note, when response is in table format, the MISSING value is translate to NULL value. + +Here is an example, Nanette doesn't have email field and Dail has employer filed with NULL value:: + + od> SELECT firstname, employer, email FROM accounts; + fetched rows / total rows = 4/4 + +-------------+------------+-----------------------+ + | firstname | employer | email | + |-------------+------------+-----------------------| + | Amber | Pyrami | amberduke@pyrami.com | + | Hattie | Netagy | hattiebond@netagy.com | + | Nanette | Quility | null | + | Dale | null | daleadams@boink.com | + +-------------+------------+-----------------------+ + + +General NULL and MISSING Values Handling +---------------------------------------- +In general, if any operand evaluates to a MISSING value, the enclosing operator will return MISSING; if none of operands evaluates to a MISSING value but there is an operand evaluates to a NULL value, the enclosing operator will return NULL. 
+ +Here is an example:: + + od> SELECT firstname, employer LIKE 'Quility', email LIKE '%com' FROM accounts; + fetched rows / total rows = 4/4 + +-------------+---------------------------+---------------------+ + | firstname | employer LIKE 'Quility' | email LIKE '%com' | + |-------------+---------------------------+---------------------| + | Amber | False | True | + | Hattie | False | True | + | Nanette | True | null | + | Dale | null | True | + +-------------+---------------------------+---------------------+ + +Special NULL and MISSING Values Handling +---------------------------------------- +THe AND, OR and NOT have special logic to handling NULL and MISSING value. + +The following table is the truth table for AND and OR. + ++---------+---------+---------+---------+ +| A | B | A AND B | A OR B | ++---------+---------+---------+---------+ +| TRUE | TRUE | TRUE | TRUE | ++---------+---------+---------+---------+ +| TRUE | FALSE | FALSE | TRUE | ++---------+---------+---------+---------+ +| TRUE | NULL | NULL | TRUE | ++---------+---------+---------+---------+ +| TRUE | MISSING | MISSING | TRUE | ++---------+---------+---------+---------+ +| FALSE | FALSE | FALSE | FALSE | ++---------+---------+---------+---------+ +| FALSE | NULL | FALSE | NULL | ++---------+---------+---------+---------+ +| FALSE | MISSING | FALSE | MISSING | ++---------+---------+---------+---------+ +| NULL | NULL | NULL | NULL | ++---------+---------+---------+---------+ +| NULL | MISSING | MISSING | NULL | ++---------+---------+---------+---------+ +| MISSING | MISSING | MISSING | MISSING | ++---------+---------+---------+---------+ + +The following table is the truth table for NOT. 
+ ++---------+---------+ +| A | NOT A | ++---------+---------+ +| TRUE | FALSE | ++---------+---------+ +| FALSE | TRUE | ++---------+---------+ +| NULL | NULL | ++---------+---------+ +| MISSING | MISSING | ++---------+---------+ \ No newline at end of file diff --git a/docs/user/index.rst b/docs/user/index.rst index d5f668e9f3..39d087297c 100644 --- a/docs/user/index.rst +++ b/docs/user/index.rst @@ -20,6 +20,7 @@ Open Distro for Elasticsearch SQL enables you to extract insights out of Elastic * **Language Structure** - `Identifiers `_ + - `Data Types `_ * **Data Query Language** @@ -48,3 +49,7 @@ Open Distro for Elasticsearch SQL enables you to extract insights out of Elastic - `Troubleshooting `_ +* **Limitations** + + - `Limitations `_ + diff --git a/docs/user/limitations/limitations.rst b/docs/user/limitations/limitations.rst new file mode 100644 index 0000000000..15e43a750d --- /dev/null +++ b/docs/user/limitations/limitations.rst @@ -0,0 +1,98 @@ + +=========== +Limitations +=========== + +.. rubric:: Table of contents + +.. contents:: + :local: + :depth: 2 + + +Introduction +============ + +In this doc, the restrictions and limitations of SQL plugin is covered as follows. + + +Limitations on Aggregations +=========================== + +Aggregation over expression is not supported for now. You can only apply aggregation on fields, aggregations can't accept an expression as a parameter. For example, `avg(log(age))` is not supported. + +Here's a link to the Github issue - [Issue #288](https://github.com/opendistro-for-elasticsearch/sql/issues/288). + + +Limitations on Subqueries +========================= + +Subqueries in the FROM clause +----------------------------- + +Subquery in the `FROM` clause in this format: `SELECT outer FROM (SELECT inner)` is supported only when the query is merged into one query. 
For example, the following query is supported:: + + SELECT t.f, t.d + FROM ( + SELECT FlightNum as f, DestCountry as d + FROM kibana_sample_data_flights + WHERE OriginCountry = 'US') t + +But, if the outer query has `GROUP BY` or `ORDER BY`, then it's not supported. + + +Limitations on JOINs +==================== + +JOIN does not support aggregations on the joined result. The `join` query does not support aggregations on the joined result. +For example, e.g. `SELECT depo.name, avg(empo.age) FROM empo JOIN depo WHERE empo.id == depo.id GROUP BY depo.name` is not supported. + +Here's a link to the Github issue - [Issue 110](https://github.com/opendistro-for-elasticsearch/sql/issues/110). + + +Limitations on Pagination +========================= + +Pagination only supports basic queries for now. The pagination query enables you to get back paginated responses. +Currently, the pagination only supports basic queries. For example, the following query returns the data with cursor id:: + + POST _opendistro/_sql/ + { + "fetch_size" : 5, + "query" : "SELECT OriginCountry, DestCountry FROM kibana_sample_data_flights ORDER BY OriginCountry ASC" + } + +The response in JDBC format with cursor id:: + + { + "schema": [ + { + "name": "OriginCountry", + "type": "keyword" + }, + { + "name": "DestCountry", + "type": "keyword" + } + ], + "cursor": "d:eyJhIjp7fSwicyI6IkRYRjFaWEo1UVc1a1JtVjBZMmdCQUFBQUFBQUFCSllXVTJKVU4yeExiWEJSUkhsNFVrdDVXVEZSYkVKSmR3PT0iLCJjIjpbeyJuYW1lIjoiT3JpZ2luQ291bnRyeSIsInR5cGUiOiJrZXl3b3JkIn0seyJuYW1lIjoiRGVzdENvdW50cnkiLCJ0eXBlIjoia2V5d29yZCJ9XSwiZiI6MSwiaSI6ImtpYmFuYV9zYW1wbGVfZGF0YV9mbGlnaHRzIiwibCI6MTMwNTh9", + "total": 13059, + "datarows": [[ + "AE", + "CN" + ]], + "size": 1, + "status": 200 + } + +The query with `aggregation` and `join` does not support pagination for now. 
+ + +Limitations on Query Optimizations +================================== + +Multi-fields in WHERE Conditions +-------------------------------- + +The filter expressions in ``WHERE`` clause may be pushed down to Elasticsearch DSL queries to avoid large amounts of data retrieved. In this case, for Elasticsearch multi-field (a text field with another keyword field inside), assumption is made that the keyword field name is always "keyword" which is true by default. + diff --git a/doctest/test_data/accounts.json b/doctest/test_data/accounts.json index 2ea2b602fd..3ff2b7b3c7 100644 --- a/doctest/test_data/accounts.json +++ b/doctest/test_data/accounts.json @@ -1,4 +1,4 @@ {"account_number":1,"balance":39225,"firstname":"Amber","lastname":"Duke","age":32,"gender":"M","address":"880 Holmes Lane","employer":"Pyrami","email":"amberduke@pyrami.com","city":"Brogan","state":"IL"} {"account_number":6,"balance":5686,"firstname":"Hattie","lastname":"Bond","age":36,"gender":"M","address":"671 Bristol Street","employer":"Netagy","email":"hattiebond@netagy.com","city":"Dante","state":"TN"} -{"account_number":13,"balance":32838,"firstname":"Nanette","lastname":"Bates","age":28,"gender":"F","address":"789 Madison Street","employer":"Quility","email":null,"city":"Nogal","state":"VA"} +{"account_number":13,"balance":32838,"firstname":"Nanette","lastname":"Bates","age":28,"gender":"F","address":"789 Madison Street","employer":"Quility","city":"Nogal","state":"VA"} {"account_number":18,"balance":4180,"firstname":"Dale","lastname":"Adams","age":33,"gender":"M","address":"467 Hutchinson Court","employer":null,"email":"daleadams@boink.com","city":"Orick","state":"MD"} diff --git a/elasticsearch/build.gradle b/elasticsearch/build.gradle index 9adb955957..7aeebc5ab6 100644 --- a/elasticsearch/build.gradle +++ b/elasticsearch/build.gradle @@ -14,6 +14,7 @@ dependencies { compile "io.github.resilience4j:resilience4j-retry:1.5.0" compile group: 'com.fasterxml.jackson.core', name: 
'jackson-core', version: '2.10.4' compile group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.10.4' + compile group: 'org.json', name: 'json', version:'20180813' compileOnly group: 'org.elasticsearch.client', name: 'elasticsearch-rest-high-level-client', version: "${es_version}" testImplementation('org.junit.jupiter:junit-jupiter:5.6.2') diff --git a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/client/ElasticsearchNodeClient.java b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/client/ElasticsearchNodeClient.java index b5df2af5cf..03e21d99e1 100644 --- a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/client/ElasticsearchNodeClient.java +++ b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/client/ElasticsearchNodeClient.java @@ -28,7 +28,6 @@ import java.util.function.Predicate; import lombok.RequiredArgsConstructor; import org.apache.logging.log4j.ThreadContext; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.ClusterState; @@ -88,23 +87,15 @@ public Map getIndexMappings(String indexExpression) { /** TODO: Scroll doesn't work for aggregation. Support aggregation later. 
*/ @Override public ElasticsearchResponse search(ElasticsearchRequest request) { - SearchResponse esResponse; - if (request.isScrollStarted()) { - esResponse = client.searchScroll(request.scrollRequest()).actionGet(); - } else { - esResponse = client.search(request.searchRequest()).actionGet(); - } - request.setScrollId(esResponse.getScrollId()); - - return new ElasticsearchResponse(esResponse); + return request.search( + req -> client.search(req).actionGet(), + req -> client.searchScroll(req).actionGet() + ); } @Override public void cleanup(ElasticsearchRequest request) { - if (request.isScrollStarted()) { - client.prepareClearScroll().addScrollId(request.getScrollId()).get(); - request.reset(); - } + request.clean(scrollId -> client.prepareClearScroll().addScrollId(scrollId).get()); } @Override diff --git a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/client/ElasticsearchRestClient.java b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/client/ElasticsearchRestClient.java index ec66dbca75..7a068b38c0 100644 --- a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/client/ElasticsearchRestClient.java +++ b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/client/ElasticsearchRestClient.java @@ -24,7 +24,6 @@ import java.util.stream.Collectors; import lombok.RequiredArgsConstructor; import org.elasticsearch.action.search.ClearScrollRequest; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.client.indices.GetMappingsRequest; @@ -55,38 +54,39 @@ public Map getIndexMappings(String indexExpression) { @Override public ElasticsearchResponse search(ElasticsearchRequest request) { - try { - SearchResponse esResponse; - if (request.isScrollStarted()) { - esResponse = client.scroll(request.scrollRequest(), 
RequestOptions.DEFAULT); - } else { - esResponse = client.search(request.searchRequest(), RequestOptions.DEFAULT); - } - request.setScrollId(esResponse.getScrollId()); - - return new ElasticsearchResponse(esResponse); - } catch (IOException e) { - throw new IllegalStateException( - "Failed to perform search operation with request " + request, e); - } + return request.search( + req -> { + try { + return client.search(req, RequestOptions.DEFAULT); + } catch (IOException e) { + throw new IllegalStateException( + "Failed to perform search operation with request " + req, e); + } + }, + req -> { + try { + return client.scroll(req, RequestOptions.DEFAULT); + } catch (IOException e) { + throw new IllegalStateException( + "Failed to perform scroll operation with request " + req, e); + } + } + ); } @Override public void cleanup(ElasticsearchRequest request) { - try { - if (!request.isScrollStarted()) { - return; + request.clean(scrollId -> { + try { + ClearScrollRequest clearRequest = new ClearScrollRequest(); + clearRequest.addScrollId(scrollId); + client.clearScroll(clearRequest, RequestOptions.DEFAULT); + } catch (IOException e) { + throw new IllegalStateException( + "Failed to clean up resources for search request " + request, e); } + }); - ClearScrollRequest clearRequest = new ClearScrollRequest(); - clearRequest.addScrollId(request.getScrollId()); - client.clearScroll(clearRequest, RequestOptions.DEFAULT); - } catch (IOException e) { - throw new IllegalStateException( - "Failed to clean up resources for search request " + request, e); - } finally { - request.reset(); - } } @Override diff --git a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/data/value/ElasticsearchExprTextKeywordValue.java b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/data/value/ElasticsearchExprTextKeywordValue.java new file mode 100644 index 0000000000..d1c66e75c9 --- /dev/null +++ 
b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/data/value/ElasticsearchExprTextKeywordValue.java @@ -0,0 +1,40 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + * + */ + +package com.amazon.opendistroforelasticsearch.sql.elasticsearch.data.value; + +import static com.amazon.opendistroforelasticsearch.sql.elasticsearch.data.type.ElasticsearchDataType.ES_TEXT_KEYWORD; + +import com.amazon.opendistroforelasticsearch.sql.data.model.ExprStringValue; +import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; + +/** + * Expression Text Keyword Value, it is an extension of the ExprValue by Elasticsearch. + * This mostly represents a multi-field in Elasticsearch which has a text field and a + * keyword field inside to preserve the original text. 
+ */ +public class ElasticsearchExprTextKeywordValue extends ExprStringValue { + + public ElasticsearchExprTextKeywordValue(String value) { + super(value); + } + + @Override + public ExprType type() { + return ES_TEXT_KEYWORD; + } + +} diff --git a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/data/value/ElasticsearchExprValueFactory.java b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/data/value/ElasticsearchExprValueFactory.java index 82832ac034..db05161ef9 100644 --- a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/data/value/ElasticsearchExprValueFactory.java +++ b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/data/value/ElasticsearchExprValueFactory.java @@ -28,6 +28,7 @@ import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.STRUCT; import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.TIMESTAMP; import static com.amazon.opendistroforelasticsearch.sql.elasticsearch.data.type.ElasticsearchDataType.ES_TEXT; +import static com.amazon.opendistroforelasticsearch.sql.elasticsearch.data.type.ElasticsearchDataType.ES_TEXT_KEYWORD; import static com.amazon.opendistroforelasticsearch.sql.elasticsearch.data.value.ElasticsearchDateFormatters.SQL_LITERAL_DATE_TIME_FORMAT; import static com.amazon.opendistroforelasticsearch.sql.elasticsearch.data.value.ElasticsearchDateFormatters.STRICT_DATE_OPTIONAL_TIME_FORMATTER; @@ -45,7 +46,6 @@ import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.node.JsonNodeType; import java.time.Instant; import java.time.format.DateTimeFormatter; import java.time.format.DateTimeFormatterBuilder; @@ -95,25 +95,31 @@ private ExprValue construct(String field, JsonNode value) { ExprType type = type(field); if 
(type.equals(INTEGER)) { - return constructInteger(value); + return constructInteger(value.intValue()); } else if (type.equals(LONG)) { - return constructLong(value); + return constructLong(value.longValue()); } else if (type.equals(FLOAT)) { - return constructFloat(value); + return constructFloat(value.floatValue()); } else if (type.equals(DOUBLE)) { - return constructDouble(value); + return constructDouble(value.doubleValue()); } else if (type.equals(STRING)) { - return constructString(value); + return constructString(value.textValue()); } else if (type.equals(BOOLEAN)) { - return constructBoolean(value); + return constructBoolean(value.booleanValue()); } else if (type.equals(STRUCT)) { return constructStruct(value, field); } else if (type.equals(ARRAY)) { return constructArray(value, field); } else if (type.equals(TIMESTAMP)) { - return constructTimestamp(value); + if (value.isNumber()) { + return constructTimestamp(value.longValue()); + } else { + return constructTimestamp(value.asText()); + } } else if (type.equals(ES_TEXT)) { return new ElasticsearchExprTextValue(value.asText()); + } else if (type.equals(ES_TEXT_KEYWORD)) { + return new ElasticsearchExprTextKeywordValue(value.asText()); } else { throw new IllegalStateException( String.format( @@ -121,6 +127,51 @@ private ExprValue construct(String field, JsonNode value) { } } + /** + * Construct ExprValue from field and its value object. Throw exception if trying + * to construct from field of unsupported type. 
+ * + * @param field field name + * @param value value object + * @return ExprValue + */ + public ExprValue construct(String field, Object value) { + if (value == null) { + return nullValue(); + } + + ExprType type = type(field); + if (type.equals(INTEGER)) { + return constructInteger((Integer) value); + } else if (type.equals(LONG)) { + return constructLong((Long) value); + } else if (type.equals(FLOAT)) { + return constructFloat((Float) value); + } else if (type.equals(DOUBLE)) { + return constructDouble((Double) value); + } else if (type.equals(STRING)) { + return constructString((String) value); + } else if (type.equals(BOOLEAN)) { + return constructBoolean((Boolean) value); + } else if (type.equals(TIMESTAMP)) { + if (value instanceof Number) { + return constructTimestamp((Long) value); + } else if (value instanceof Instant) { + return constructTimestamp((Instant) value); + } else { + return constructTimestamp(String.valueOf(value)); + } + } else if (type.equals(ES_TEXT)) { + return new ElasticsearchExprTextValue((String) value); + } else if (type.equals(ES_TEXT_KEYWORD)) { + return new ElasticsearchExprTextKeywordValue((String) value); + } else { + throw new IllegalStateException(String.format( + "Unsupported type %s to construct expression value from object for " + + "field: %s, value: %s.", type.typeName(), field, value)); + } + } + private ExprType type(String field) { if (typeMapping.containsKey(field)) { return typeMapping.get(field); @@ -129,28 +180,36 @@ private ExprType type(String field) { } } - private ExprIntegerValue constructInteger(JsonNode value) { - return new ExprIntegerValue(value.intValue()); + private ExprIntegerValue constructInteger(Integer value) { + return new ExprIntegerValue(value); + } + + private ExprLongValue constructLong(Long value) { + return new ExprLongValue(value); } - private ExprLongValue constructLong(JsonNode value) { - return new ExprLongValue(value.longValue()); + private ExprFloatValue constructFloat(Float value) { + 
return new ExprFloatValue(value); } - private ExprFloatValue constructFloat(JsonNode value) { - return new ExprFloatValue(value.floatValue()); + private ExprDoubleValue constructDouble(Double value) { + return new ExprDoubleValue(value); } - private ExprDoubleValue constructDouble(JsonNode value) { - return new ExprDoubleValue(value.doubleValue()); + private ExprStringValue constructString(String value) { + return new ExprStringValue(value); } - private ExprStringValue constructString(JsonNode value) { - return new ExprStringValue(value.textValue()); + private ExprBooleanValue constructBoolean(Boolean value) { + return ExprBooleanValue.of(value); } - private ExprBooleanValue constructBoolean(JsonNode value) { - return ExprBooleanValue.of(value.booleanValue()); + private ExprValue constructTimestamp(Long value) { + return constructTimestamp(Instant.ofEpochMilli(value)); + } + + private ExprValue constructTimestamp(Instant instant) { + return new ExprTimestampValue(instant); } /** @@ -158,19 +217,15 @@ private ExprBooleanValue constructBoolean(JsonNode value) { * https://www.elastic.co/guide/en/elasticsearch/reference/current/date.html * The customized date_format is not supported. */ - private ExprValue constructTimestamp(JsonNode value) { + private ExprValue constructTimestamp(String value) { try { - if (value.getNodeType().equals(JsonNodeType.NUMBER)) { - return new ExprTimestampValue(Instant.ofEpochMilli(value.asLong())); - } else { - return new ExprTimestampValue( - // Using Elasticsearch DateFormatters for now. - DateFormatters.from(DATE_TIME_FORMATTER.parse(value.asText())).toInstant()); - } + return new ExprTimestampValue( + // Using Elasticsearch DateFormatters for now. 
+ DateFormatters.from(DATE_TIME_FORMATTER.parse(value)).toInstant()); } catch (DateTimeParseException e) { throw new IllegalStateException( String.format( - "Construct ExprTimestampValue from %s failed, unsupported date format.", value), + "Construct ExprTimestampValue from \"%s\" failed, unsupported date format.", value), e); } } diff --git a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/ElasticsearchExecutionEngine.java b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/ElasticsearchExecutionEngine.java index b3caa33b83..97163f7e5b 100644 --- a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/ElasticsearchExecutionEngine.java +++ b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/ElasticsearchExecutionEngine.java @@ -47,7 +47,7 @@ public void execute(PhysicalPlan physicalPlan, ResponseListener l result.add(plan.next()); } - QueryResponse response = new QueryResponse(result); + QueryResponse response = new QueryResponse(physicalPlan.schema(), result); listener.onResponse(response); } catch (Exception e) { listener.onFailure(e); diff --git a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/protector/ElasticsearchExecutionProtector.java b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/protector/ElasticsearchExecutionProtector.java index db82cb3b7d..6b8942b9b1 100644 --- a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/protector/ElasticsearchExecutionProtector.java +++ b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/protector/ElasticsearchExecutionProtector.java @@ -23,7 +23,6 @@ import com.amazon.opendistroforelasticsearch.sql.planner.physical.EvalOperator; import 
com.amazon.opendistroforelasticsearch.sql.planner.physical.FilterOperator; import com.amazon.opendistroforelasticsearch.sql.planner.physical.PhysicalPlan; -import com.amazon.opendistroforelasticsearch.sql.planner.physical.PhysicalPlanNodeVisitor; import com.amazon.opendistroforelasticsearch.sql.planner.physical.ProjectOperator; import com.amazon.opendistroforelasticsearch.sql.planner.physical.RemoveOperator; import com.amazon.opendistroforelasticsearch.sql.planner.physical.RenameOperator; diff --git a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/mapping/IndexMapping.java b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/mapping/IndexMapping.java index db176f1975..4440d9854d 100644 --- a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/mapping/IndexMapping.java +++ b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/mapping/IndexMapping.java @@ -93,10 +93,15 @@ private void flatMappings( (fieldName, mappingObject) -> { Map mapping = (Map) mappingObject; String fullFieldName = path.isEmpty() ? fieldName : path + "." 
+ fieldName; - String type = (String) mapping.getOrDefault("type", "object"); - func.accept(fullFieldName, type); - if (mapping.containsKey("fields")) { // Multi-field + if (isMultiField(mapping)) { + func.accept(fullFieldName, "text_keyword"); + } else { + String type = (String) mapping.getOrDefault("type", "object"); + func.accept(fullFieldName, type); + } + + if (isMultiField(mapping)) { ((Map>) mapping.get("fields")) .forEach( (innerFieldName, innerMapping) -> @@ -110,4 +115,9 @@ private void flatMappings( } }); } + + private boolean isMultiField(Map mapping) { + return mapping.containsKey("fields"); + } + } diff --git a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/request/ElasticsearchQueryRequest.java b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/request/ElasticsearchQueryRequest.java new file mode 100644 index 0000000000..a599b63ec8 --- /dev/null +++ b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/request/ElasticsearchQueryRequest.java @@ -0,0 +1,104 @@ +/* + * + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ * + */ + +package com.amazon.opendistroforelasticsearch.sql.elasticsearch.request; + +import com.amazon.opendistroforelasticsearch.sql.elasticsearch.response.ElasticsearchResponse; +import com.google.common.annotations.VisibleForTesting; +import java.util.function.Consumer; +import java.util.function.Function; +import lombok.EqualsAndHashCode; +import lombok.Getter; +import lombok.ToString; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.SearchScrollRequest; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.search.SearchHits; +import org.elasticsearch.search.builder.SearchSourceBuilder; + +/** + * Elasticsearch search request. This has to be stateful because it needs to: + * + *

    1) Accumulate search source builder when visiting logical plan to push down operation. 2) + * Indicate the search already done. + */ +@EqualsAndHashCode +@Getter +@ToString +public class ElasticsearchQueryRequest implements ElasticsearchRequest { + + /** + * Default query timeout in minutes. + */ + public static final TimeValue DEFAULT_QUERY_TIMEOUT = TimeValue.timeValueMinutes(1L); + + /** + * Index name. + */ + private final String indexName; + + /** + * Search request source builder. + */ + private final SearchSourceBuilder sourceBuilder; + + /** + * Indicate the search already done. + */ + private boolean searchDone = false; + + /** + * Constructor of ElasticsearchQueryRequest. + */ + public ElasticsearchQueryRequest(String indexName, int size) { + this.indexName = indexName; + this.sourceBuilder = new SearchSourceBuilder(); + sourceBuilder.from(0); + sourceBuilder.size(size); + sourceBuilder.timeout(DEFAULT_QUERY_TIMEOUT); + + } + + @Override + public ElasticsearchResponse search(Function searchAction, + Function scrollAction) { + if (searchDone) { + return new ElasticsearchResponse(SearchHits.empty()); + } else { + searchDone = true; + return new ElasticsearchResponse(searchAction.apply(searchRequest())); + } + } + + @Override + public void clean(Consumer cleanAction) { + //do nothing. + } + + /** + * Generate Elasticsearch search request. 
+ * + * @return search request + */ + @VisibleForTesting + protected SearchRequest searchRequest() { + return new SearchRequest() + .indices(indexName) + .source(sourceBuilder); + } +} diff --git a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/request/ElasticsearchRequest.java b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/request/ElasticsearchRequest.java index 7f2a151044..60f851b100 100644 --- a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/request/ElasticsearchRequest.java +++ b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/request/ElasticsearchRequest.java @@ -1,11 +1,12 @@ /* + * * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either @@ -16,80 +17,40 @@ package com.amazon.opendistroforelasticsearch.sql.elasticsearch.request; -import java.util.Objects; -import lombok.EqualsAndHashCode; -import lombok.Getter; -import lombok.RequiredArgsConstructor; -import lombok.Setter; -import lombok.ToString; +import com.amazon.opendistroforelasticsearch.sql.elasticsearch.response.ElasticsearchResponse; +import java.util.function.Consumer; +import java.util.function.Function; import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchScrollRequest; -import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.search.builder.SearchSourceBuilder; /** - * Elasticsearch search request. 
This has to be stateful because it needs to: - * - *

    1) Accumulate search source builder when visiting logical plan to push down operation 2) - * Maintain scroll ID between calls to client search method + * Elasticsearch search request. */ -@EqualsAndHashCode -@RequiredArgsConstructor -@Getter -@ToString -public class ElasticsearchRequest { - - /** Default scroll context timeout in minutes. */ - public static final TimeValue DEFAULT_SCROLL_TIMEOUT = TimeValue.timeValueMinutes(1L); - - /** Index name. */ - private final String indexName; +public interface ElasticsearchRequest { /** - * Scroll id which is set after first request issued. Because ElasticsearchClient is shared by - * multi-thread so this state has to be maintained here. - */ - @Setter private String scrollId; - - /** Search request source builder. */ - private final SearchSourceBuilder sourceBuilder = new SearchSourceBuilder(); - - /** - * Generate Elasticsearch search request. + * Apply the search action or scroll action on request based on context. * - * @return search request + * @param searchAction search action. + * @param scrollAction scroll search action. + * @return ElasticsearchResponse. */ - public SearchRequest searchRequest() { - return new SearchRequest() - .indices(indexName) - .scroll(DEFAULT_SCROLL_TIMEOUT) - .source(sourceBuilder); - } + ElasticsearchResponse search(Function searchAction, + Function scrollAction); /** - * Is scroll started which means pages after first is being requested. + * Apply the cleanAction on request. * - * @return true if scroll started + * @param cleanAction clean action. */ - public boolean isScrollStarted() { - return (scrollId != null); - } + void clean(Consumer cleanAction); /** - * Generate Elasticsearch scroll request by scroll id maintained. + * Get the SearchSourceBuilder. 
* - * @return scroll request - */ - public SearchScrollRequest scrollRequest() { - Objects.requireNonNull(scrollId, "Scroll id cannot be null"); - return new SearchScrollRequest().scroll(DEFAULT_SCROLL_TIMEOUT).scrollId(scrollId); - } - - /** - * Reset internal state in case any stale data. However, ideally the same instance is not supposed - * to be reused across different physical plan. + * @return SearchSourceBuilder. */ - public void reset() { - scrollId = null; - } + SearchSourceBuilder getSourceBuilder(); } diff --git a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/request/ElasticsearchScrollRequest.java b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/request/ElasticsearchScrollRequest.java new file mode 100644 index 0000000000..87cb9ae2fd --- /dev/null +++ b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/request/ElasticsearchScrollRequest.java @@ -0,0 +1,125 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ * + */ + +package com.amazon.opendistroforelasticsearch.sql.elasticsearch.request; + +import com.amazon.opendistroforelasticsearch.sql.elasticsearch.response.ElasticsearchResponse; +import java.util.Objects; +import java.util.function.Consumer; +import java.util.function.Function; +import lombok.EqualsAndHashCode; +import lombok.Getter; +import lombok.RequiredArgsConstructor; +import lombok.Setter; +import lombok.ToString; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.SearchScrollRequest; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.search.builder.SearchSourceBuilder; + +/** + * Elasticsearch scroll search request. This has to be stateful because it needs to: + * + *

    1) Accumulate search source builder when visiting logical plan to push down operation 2) + * Maintain scroll ID between calls to client search method + */ +@EqualsAndHashCode +@RequiredArgsConstructor +@Getter +@ToString +public class ElasticsearchScrollRequest implements ElasticsearchRequest { + + /** Default scroll context timeout in minutes. */ + public static final TimeValue DEFAULT_SCROLL_TIMEOUT = TimeValue.timeValueMinutes(1L); + + /** Index name. */ + private final String indexName; + + /** + * Scroll id which is set after first request issued. Because ElasticsearchClient is shared by + * multi-thread so this state has to be maintained here. + */ + @Setter private String scrollId; + + /** Search request source builder. */ + private final SearchSourceBuilder sourceBuilder = new SearchSourceBuilder(); + + + @Override + public ElasticsearchResponse search(Function searchAction, + Function scrollAction) { + SearchResponse esResponse; + if (isScrollStarted()) { + esResponse = scrollAction.apply(scrollRequest()); + } else { + esResponse = searchAction.apply(searchRequest()); + } + setScrollId(esResponse.getScrollId()); + + return new ElasticsearchResponse(esResponse); + } + + @Override + public void clean(Consumer cleanAction) { + try { + if (isScrollStarted()) { + cleanAction.accept(getScrollId()); + } + } finally { + reset(); + } + } + + /** + * Generate Elasticsearch search request. + * + * @return search request + */ + public SearchRequest searchRequest() { + return new SearchRequest() + .indices(indexName) + .scroll(DEFAULT_SCROLL_TIMEOUT) + .source(sourceBuilder); + } + + /** + * Is scroll started which means pages after first is being requested. + * + * @return true if scroll started + */ + public boolean isScrollStarted() { + return (scrollId != null); + } + + /** + * Generate Elasticsearch scroll request by scroll id maintained. 
+ * + * @return scroll request + */ + public SearchScrollRequest scrollRequest() { + Objects.requireNonNull(scrollId, "Scroll id cannot be null"); + return new SearchScrollRequest().scroll(DEFAULT_SCROLL_TIMEOUT).scrollId(scrollId); + } + + /** + * Reset internal state in case any stale data. However, ideally the same instance is not supposed + * to be reused across different physical plan. + */ + public void reset() { + scrollId = null; + } +} diff --git a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/response/ElasticsearchResponse.java b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/response/ElasticsearchResponse.java index e3382799ff..11f88b7029 100644 --- a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/response/ElasticsearchResponse.java +++ b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/response/ElasticsearchResponse.java @@ -35,6 +35,10 @@ public ElasticsearchResponse(SearchResponse esResponse) { this.hits = esResponse.getHits(); // TODO: aggregation result is separate and not in SearchHit[] } + public ElasticsearchResponse(SearchHits hits) { + this.hits = hits; + } + /** * Is response empty. As ES doc says, "Each call to the scroll API returns the next batch of * results until there are no more results left to return, ie the hits array is empty." diff --git a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/response/error/ElasticsearchErrorMessage.java b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/response/error/ElasticsearchErrorMessage.java new file mode 100644 index 0000000000..9cd3d2f6f8 --- /dev/null +++ b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/response/error/ElasticsearchErrorMessage.java @@ -0,0 +1,77 @@ +/* + * + * Copyright 2020 Amazon.com, Inc. or its affiliates. 
All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + * + */ + +package com.amazon.opendistroforelasticsearch.sql.elasticsearch.response.error; + +import java.util.Locale; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.search.SearchPhaseExecutionException; +import org.elasticsearch.action.search.ShardSearchFailure; + +/** + * Elasticsearch Error Message. + */ +public class ElasticsearchErrorMessage extends ErrorMessage { + + ElasticsearchErrorMessage(ElasticsearchException exception, int status) { + super(exception, status); + } + + @Override + protected String fetchReason() { + return "Error occurred in Elasticsearch engine: " + exception.getMessage(); + } + + /** + * Currently Sql-Jdbc plugin only supports string type as reason and details in the error + * messages. + */ + @Override + protected String fetchDetails() { + StringBuilder details = new StringBuilder(); + if (exception instanceof SearchPhaseExecutionException) { + details.append( + fetchSearchPhaseExecutionExceptionDetails((SearchPhaseExecutionException) exception)); + } else { + details.append(((ElasticsearchException) exception).getDetailedMessage()); + } + details.append( + "\nFor more details, please send request for Json format to see the raw response from " + + "elasticsearch engine."); + return details.toString(); + } + + /** + * Could not deliver the exactly same error messages due to the limit of JDBC types. 
+ * Currently our cases occurred only SearchPhaseExecutionException instances + * among all types of ES exceptions + * according to the survey, see all types: ElasticsearchException.ElasticsearchExceptionHandle. + * Either add methods of fetching details for different types, or re-make a consistent + * message by not giving + * detailed messages/root causes but only a suggestion message. + */ + private String fetchSearchPhaseExecutionExceptionDetails( + SearchPhaseExecutionException exception) { + StringBuilder details = new StringBuilder(); + ShardSearchFailure[] shardFailures = exception.shardFailures(); + for (ShardSearchFailure failure : shardFailures) { + details.append(String.format(Locale.ROOT, "Shard[%d]: %s\n", failure.shardId(), + failure.getCause().toString())); + } + return details.toString(); + } +} diff --git a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/response/error/ErrorMessage.java b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/response/error/ErrorMessage.java new file mode 100644 index 0000000000..d09b043dbd --- /dev/null +++ b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/response/error/ErrorMessage.java @@ -0,0 +1,85 @@ +/* + * + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ * + */ + +package com.amazon.opendistroforelasticsearch.sql.elasticsearch.response.error; + +import org.elasticsearch.rest.RestStatus; +import org.json.JSONObject; + +/** + * Error Message. + */ +public class ErrorMessage { + + protected Exception exception; + + private int status; + private String type; + private String reason; + private String details; + + /** + * Error Message Constructor. + */ + public ErrorMessage(Exception exception, int status) { + this.exception = exception; + this.status = status; + + this.type = fetchType(); + this.reason = fetchReason(); + this.details = fetchDetails(); + } + + private String fetchType() { + return exception.getClass().getSimpleName(); + } + + protected String fetchReason() { + return status == RestStatus.BAD_REQUEST.getStatus() + ? "Invalid Query" + : "There was internal problem at backend"; + } + + protected String fetchDetails() { + // Some exception prints internal information (full class name) which is security concern + return emptyStringIfNull(exception.getLocalizedMessage()); + } + + private String emptyStringIfNull(String str) { + return str != null ? 
str : ""; + } + + @Override + public String toString() { + JSONObject output = new JSONObject(); + + output.put("status", status); + output.put("error", getErrorAsJson()); + + return output.toString(2); + } + + private JSONObject getErrorAsJson() { + JSONObject errorJson = new JSONObject(); + + errorJson.put("type", type); + errorJson.put("reason", reason); + errorJson.put("details", details); + + return errorJson; + } +} diff --git a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/response/error/ErrorMessageFactory.java b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/response/error/ErrorMessageFactory.java new file mode 100644 index 0000000000..e0538b9cf1 --- /dev/null +++ b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/response/error/ErrorMessageFactory.java @@ -0,0 +1,54 @@ +/* + * + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + * + */ + +package com.amazon.opendistroforelasticsearch.sql.elasticsearch.response.error; + +import lombok.experimental.UtilityClass; +import org.elasticsearch.ElasticsearchException; + +@UtilityClass +public class ErrorMessageFactory { + /** + * Create error message based on the exception type. 
+ * Exceptions of ES exception type and exceptions with wrapped ES exception causes + * should create {@link ElasticsearchErrorMessage} + * + * @param e exception to create error message + * @param status exception status code + * @return error message + */ + public static ErrorMessage createErrorMessage(Exception e, int status) { + Throwable cause = unwrapCause(e); + if (cause instanceof ElasticsearchException) { + ElasticsearchException exception = (ElasticsearchException) cause; + return new ElasticsearchErrorMessage(exception, exception.status().getStatus()); + } + return new ErrorMessage(e, status); + } + + protected static Throwable unwrapCause(Throwable t) { + Throwable result = t; + if (result instanceof ElasticsearchException) { + return result; + } + if (result.getCause() == null) { + return result; + } + result = unwrapCause(result.getCause()); + return result; + } +} diff --git a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/setting/ElasticsearchSettings.java b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/setting/ElasticsearchSettings.java index da3d463592..37e8cc4571 100644 --- a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/setting/ElasticsearchSettings.java +++ b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/setting/ElasticsearchSettings.java @@ -53,6 +53,18 @@ public class ElasticsearchSettings extends Settings { Setting.Property.NodeScope, Setting.Property.Dynamic); + private static final Setting QUERY_SIZE_LIMIT_SETTINGS = Setting.intSetting( + Key.QUERY_SIZE_LIMIT.getKeyValue(), + 200, + Setting.Property.NodeScope, + Setting.Property.Dynamic); + + private static final Setting PPL_ENABLED_SETTINGS = Setting.boolSetting( + Key.PPL_ENABLED.getKeyValue(), + true, + Setting.Property.NodeScope, + Setting.Property.Dynamic); + /** * Construct ElasticsearchSetting. 
* The ElasticsearchSetting must be singleton. @@ -61,6 +73,10 @@ public ElasticsearchSettings(ClusterSettings clusterSettings) { ImmutableMap.Builder> settingBuilder = new ImmutableMap.Builder<>(); register(settingBuilder, clusterSettings, Key.PPL_QUERY_MEMORY_LIMIT, PPL_QUERY_MEMORY_LIMIT_SETTINGS, new Updater(Key.PPL_QUERY_MEMORY_LIMIT)); + register(settingBuilder, clusterSettings, Key.PPL_ENABLED, + PPL_ENABLED_SETTINGS, new Updater(Key.PPL_ENABLED)); + register(settingBuilder, clusterSettings, Key.QUERY_SIZE_LIMIT, + QUERY_SIZE_LIMIT_SETTINGS, new Updater(Key.QUERY_SIZE_LIMIT)); defaultSettings = settingBuilder.build(); } @@ -102,6 +118,10 @@ public void accept(Object newValue) { * Used by Plugin to init Setting. */ public static List> pluginSettings() { - return new ImmutableList.Builder>().add(PPL_QUERY_MEMORY_LIMIT_SETTINGS).build(); + return new ImmutableList.Builder>() + .add(PPL_QUERY_MEMORY_LIMIT_SETTINGS) + .add(PPL_ENABLED_SETTINGS) + .add(QUERY_SIZE_LIMIT_SETTINGS) + .build(); } } diff --git a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/ElasticsearchIndex.java b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/ElasticsearchIndex.java index accc3bf52d..ec5ce33e63 100644 --- a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/ElasticsearchIndex.java +++ b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/ElasticsearchIndex.java @@ -16,13 +16,17 @@ package com.amazon.opendistroforelasticsearch.sql.elasticsearch.storage; +import com.amazon.opendistroforelasticsearch.sql.common.setting.Settings; import com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType; import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; import com.amazon.opendistroforelasticsearch.sql.elasticsearch.client.ElasticsearchClient; import 
com.amazon.opendistroforelasticsearch.sql.elasticsearch.data.type.ElasticsearchDataType; import com.amazon.opendistroforelasticsearch.sql.elasticsearch.data.value.ElasticsearchExprValueFactory; import com.amazon.opendistroforelasticsearch.sql.elasticsearch.mapping.IndexMapping; +import com.amazon.opendistroforelasticsearch.sql.elasticsearch.storage.script.filter.FilterQueryBuilder; +import com.amazon.opendistroforelasticsearch.sql.elasticsearch.storage.serialization.DefaultExpressionSerializer; import com.amazon.opendistroforelasticsearch.sql.planner.DefaultImplementor; +import com.amazon.opendistroforelasticsearch.sql.planner.logical.LogicalFilter; import com.amazon.opendistroforelasticsearch.sql.planner.logical.LogicalPlan; import com.amazon.opendistroforelasticsearch.sql.planner.logical.LogicalRelation; import com.amazon.opendistroforelasticsearch.sql.planner.physical.PhysicalPlan; @@ -31,6 +35,7 @@ import java.util.HashMap; import java.util.Map; import lombok.RequiredArgsConstructor; +import org.elasticsearch.index.query.QueryBuilder; /** Elasticsearch table (index) implementation. */ @RequiredArgsConstructor @@ -43,6 +48,7 @@ public class ElasticsearchIndex implements Table { private static final Map ES_TYPE_TO_EXPR_TYPE_MAPPING = ImmutableMap.builder() .put("text", ElasticsearchDataType.ES_TEXT) + .put("text_keyword", ElasticsearchDataType.ES_TEXT_KEYWORD) .put("keyword", ExprCoreType.STRING) .put("integer", ExprCoreType.INTEGER) .put("long", ExprCoreType.LONG) @@ -57,6 +63,8 @@ public class ElasticsearchIndex implements Table { /** Elasticsearch client connection. */ private final ElasticsearchClient client; + private final Settings settings; + /** Current Elasticsearch index name. */ private final String indexName; @@ -78,7 +86,7 @@ public Map getFieldTypes() { /** TODO: Push down operations to index scan operator as much as possible in future. 
*/ @Override public PhysicalPlan implement(LogicalPlan plan) { - ElasticsearchIndexScan indexScan = new ElasticsearchIndexScan(client, indexName, + ElasticsearchIndexScan indexScan = new ElasticsearchIndexScan(client, settings, indexName, new ElasticsearchExprValueFactory(getFieldTypes())); /* @@ -87,6 +95,25 @@ public PhysicalPlan implement(LogicalPlan plan) { * index scan. */ return plan.accept(new DefaultImplementor() { + @Override + public PhysicalPlan visitFilter(LogicalFilter node, ElasticsearchIndexScan context) { + // For now (without optimizer), only push down filter close to relation + if (!(node.getChild().get(0) instanceof LogicalRelation)) { + return super.visitFilter(node, context); + } + + FilterQueryBuilder queryBuilder = + new FilterQueryBuilder(new DefaultExpressionSerializer()); + + QueryBuilder query = queryBuilder.build(node.getCondition()); + if (query == null) { // Use default filter operator if unable to push down + return super.visitFilter(node, context); + } + + context.pushDown(query); + return visitChild(node, context); + } + @Override public PhysicalPlan visitRelation(LogicalRelation node, ElasticsearchIndexScan context) { return indexScan; diff --git a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/ElasticsearchIndexScan.java b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/ElasticsearchIndexScan.java index bf140b5461..ccc31291f9 100644 --- a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/ElasticsearchIndexScan.java +++ b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/ElasticsearchIndexScan.java @@ -16,9 +16,14 @@ package com.amazon.opendistroforelasticsearch.sql.elasticsearch.storage; +import static org.elasticsearch.search.sort.FieldSortBuilder.DOC_FIELD_NAME; +import static org.elasticsearch.search.sort.SortOrder.ASC; + +import 
com.amazon.opendistroforelasticsearch.sql.common.setting.Settings; import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValue; import com.amazon.opendistroforelasticsearch.sql.elasticsearch.client.ElasticsearchClient; import com.amazon.opendistroforelasticsearch.sql.elasticsearch.data.value.ElasticsearchExprValueFactory; +import com.amazon.opendistroforelasticsearch.sql.elasticsearch.request.ElasticsearchQueryRequest; import com.amazon.opendistroforelasticsearch.sql.elasticsearch.request.ElasticsearchRequest; import com.amazon.opendistroforelasticsearch.sql.elasticsearch.response.ElasticsearchResponse; import com.amazon.opendistroforelasticsearch.sql.storage.TableScanOperator; @@ -28,9 +33,15 @@ import java.util.List; import lombok.EqualsAndHashCode; import lombok.ToString; +import org.elasticsearch.index.query.BoolQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.builder.SearchSourceBuilder; -/** Elasticsearch index scan operator. */ +/** + * Elasticsearch index scan operator. + */ @EqualsAndHashCode(onlyExplicitlyIncluded = true, callSuper = false) @ToString(onlyExplicitlyIncluded = true) public class ElasticsearchIndexScan extends TableScanOperator { @@ -51,10 +62,12 @@ public class ElasticsearchIndexScan extends TableScanOperator { /** * Todo. 
*/ - public ElasticsearchIndexScan(ElasticsearchClient client, String indexName, + public ElasticsearchIndexScan(ElasticsearchClient client, + Settings settings, String indexName, ElasticsearchExprValueFactory exprValueFactory) { this.client = client; - this.request = new ElasticsearchRequest(indexName); + this.request = new ElasticsearchQueryRequest(indexName, + settings.getSettingValue(Settings.Key.QUERY_SIZE_LIMIT)); this.exprValueFactory = exprValueFactory; } @@ -82,10 +95,40 @@ public ExprValue next() { return exprValueFactory.construct(hits.next().getSourceAsString()); } + /** + * Push down query to DSL request. + * @param query query request + */ + public void pushDown(QueryBuilder query) { + SearchSourceBuilder source = request.getSourceBuilder(); + QueryBuilder current = source.query(); + if (current == null) { + source.query(query); + } else { + if (isBoolFilterQuery(current)) { + ((BoolQueryBuilder) current).filter(query); + } else { + source.query(QueryBuilders.boolQuery() + .filter(current) + .filter(query)); + } + } + + if (source.sorts() == null) { + source.sort(DOC_FIELD_NAME, ASC); // Make sure consistent order + } + } + @Override public void close() { super.close(); client.cleanup(request); } + + private boolean isBoolFilterQuery(QueryBuilder current) { + return (current instanceof BoolQueryBuilder) + && !((BoolQueryBuilder) current).filter().isEmpty(); + } + } diff --git a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/ElasticsearchStorageEngine.java b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/ElasticsearchStorageEngine.java index 181d55db9c..a82bc98cb8 100644 --- a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/ElasticsearchStorageEngine.java +++ b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/ElasticsearchStorageEngine.java @@ -16,6 +16,7 @@ package 
com.amazon.opendistroforelasticsearch.sql.elasticsearch.storage; +import com.amazon.opendistroforelasticsearch.sql.common.setting.Settings; import com.amazon.opendistroforelasticsearch.sql.elasticsearch.client.ElasticsearchClient; import com.amazon.opendistroforelasticsearch.sql.storage.StorageEngine; import com.amazon.opendistroforelasticsearch.sql.storage.Table; @@ -28,8 +29,10 @@ public class ElasticsearchStorageEngine implements StorageEngine { /** Elasticsearch client connection. */ private final ElasticsearchClient client; + private final Settings settings; + @Override public Table getTable(String name) { - return new ElasticsearchIndex(client, name); + return new ElasticsearchIndex(client, settings, name); } } diff --git a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/script/ExpressionScriptEngine.java b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/script/ExpressionScriptEngine.java new file mode 100644 index 0000000000..abebafc294 --- /dev/null +++ b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/script/ExpressionScriptEngine.java @@ -0,0 +1,86 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ * + */ + +package com.amazon.opendistroforelasticsearch.sql.elasticsearch.storage.script; + +import com.amazon.opendistroforelasticsearch.sql.elasticsearch.storage.script.filter.ExpressionFilterScriptFactory; +import com.amazon.opendistroforelasticsearch.sql.elasticsearch.storage.serialization.ExpressionSerializer; +import com.amazon.opendistroforelasticsearch.sql.expression.Expression; +import com.google.common.collect.ImmutableMap; +import java.util.Map; +import java.util.Set; +import java.util.function.Function; +import lombok.RequiredArgsConstructor; +import org.elasticsearch.script.FilterScript; +import org.elasticsearch.script.ScriptContext; +import org.elasticsearch.script.ScriptEngine; + +/** + * Custom expression script engine that supports using core engine expression code in DSL + * as a new script language just like built-in Painless language. + */ +@RequiredArgsConstructor +public class ExpressionScriptEngine implements ScriptEngine { + + /** + * Expression script language name. + */ + public static final String EXPRESSION_LANG_NAME = "opendistro_expression"; + + /** + * All supported script contexts and function to create factory from expression. + */ + private static final Map, Function> CONTEXTS = + ImmutableMap.of( + FilterScript.CONTEXT, + ExpressionFilterScriptFactory::new + ); + + /** + * Expression serializer that (de-)serializes expression. + */ + private final ExpressionSerializer serializer; + + @Override + public String getType() { + return EXPRESSION_LANG_NAME; + } + + @Override + public T compile(String scriptName, + String scriptCode, + ScriptContext context, + Map params) { + /* + * Note that in fact the expression source is already compiled in query engine. + * The "code" is actually a serialized expression tree by our serializer. + * Therefore the compilation here is simply to deserialize the expression tree. 
+ */ + Expression expression = serializer.deserialize(scriptCode); + + if (CONTEXTS.containsKey(context)) { + return context.factoryClazz.cast(CONTEXTS.get(context).apply(expression)); + } + throw new IllegalStateException(String.format("Script context is currently not supported: " + + "all supported contexts [%s], given context [%s] ", CONTEXTS, context)); + } + + @Override + public Set> getSupportedContexts() { + return CONTEXTS.keySet(); + } + +} diff --git a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/script/filter/ExpressionFilterScript.java b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/script/filter/ExpressionFilterScript.java new file mode 100644 index 0000000000..31908cd1f4 --- /dev/null +++ b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/script/filter/ExpressionFilterScript.java @@ -0,0 +1,168 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ * + */ + +package com.amazon.opendistroforelasticsearch.sql.elasticsearch.storage.script.filter; + +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.FLOAT; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.INTEGER; +import static com.amazon.opendistroforelasticsearch.sql.elasticsearch.data.type.ElasticsearchDataType.ES_TEXT_KEYWORD; +import static java.util.stream.Collectors.toMap; + +import com.amazon.opendistroforelasticsearch.sql.data.model.ExprBooleanValue; +import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValue; +import com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType; +import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; +import com.amazon.opendistroforelasticsearch.sql.elasticsearch.data.value.ElasticsearchExprValueFactory; +import com.amazon.opendistroforelasticsearch.sql.expression.Expression; +import com.amazon.opendistroforelasticsearch.sql.expression.ExpressionNodeVisitor; +import com.amazon.opendistroforelasticsearch.sql.expression.ReferenceExpression; +import com.amazon.opendistroforelasticsearch.sql.expression.env.Environment; +import java.security.AccessController; +import java.security.PrivilegedAction; +import java.time.chrono.ChronoZonedDateTime; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; +import lombok.EqualsAndHashCode; +import org.apache.lucene.index.LeafReaderContext; +import org.elasticsearch.SpecialPermission; +import org.elasticsearch.index.fielddata.ScriptDocValues; +import org.elasticsearch.script.FilterScript; +import org.elasticsearch.search.lookup.SearchLookup; + +/** + * Expression script executor that executes the expression on each document + * and determine if the document is supposed to be filtered out or not. + */ +@EqualsAndHashCode(callSuper = false) +class ExpressionFilterScript extends FilterScript { + + /** + * Expression to execute. 
+ */ + private final Expression expression; + + public ExpressionFilterScript(Expression expression, + SearchLookup lookup, + LeafReaderContext context, + Map params) { + super(params, lookup, context); + this.expression = expression; + } + + @Override + public boolean execute() { + // Check current script are not being called by unprivileged code. + SpecialPermission.check(); + + return AccessController.doPrivileged((PrivilegedAction) () -> { + Set fields = extractFields(expression); + ElasticsearchExprValueFactory valueFactory = buildValueFactory(fields); + Environment valueEnv = buildValueEnv(fields, valueFactory); + ExprValue result = evaluateExpression(valueEnv); + return (Boolean) result.value(); + }); + } + + private Set extractFields(Expression expr) { + Set fields = new HashSet<>(); + expr.accept(new ExpressionNodeVisitor>() { + @Override + public Object visitReference(ReferenceExpression node, Set context) { + context.add(node); + return null; + } + }, fields); + return fields; + } + + private ElasticsearchExprValueFactory buildValueFactory(Set fields) { + Map typeEnv = fields.stream() + .collect(toMap( + ReferenceExpression::getAttr, + ReferenceExpression::type)); + return new ElasticsearchExprValueFactory(typeEnv); + } + + private Environment buildValueEnv( + Set fields, ElasticsearchExprValueFactory valueFactory) { + + Map valueEnv = new HashMap<>(); + for (ReferenceExpression field : fields) { + String fieldName = field.getAttr(); + ExprValue exprValue = valueFactory.construct(fieldName, getDocValue(field)); + valueEnv.put(field, exprValue); + } + return valueEnv::get; // Encapsulate map data structure into anonymous Environment class + } + + private Object getDocValue(ReferenceExpression field) { + String fieldName = getDocValueName(field); + ScriptDocValues docValue = getDoc().get(fieldName); + if (docValue == null || docValue.isEmpty()) { + return null; + } + + Object value = docValue.get(0); + if (value instanceof ChronoZonedDateTime) { + return 
((ChronoZonedDateTime) value).toInstant(); + } + return castNumberToFieldType(value, field.type()); + } + + /** + * Text field doesn't have doc value (exception thrown even when you call "get") + * Limitation: assume inner field name is always "keyword". + */ + private String getDocValueName(ReferenceExpression field) { + String fieldName = field.getAttr(); + if (field.type() == ES_TEXT_KEYWORD) { + fieldName += ".keyword"; + } + return fieldName; + } + + /** + * DocValue only support long and double so cast to integer and float if needed. + * The doc value must be Long and Double for expr type Long/Integer and Double/Float respectively. + * Otherwise there must be bugs in our engine that causes the mismatch. + */ + private Object castNumberToFieldType(Object value, ExprType type) { + if (type == INTEGER) { + return ((Long) value).intValue(); + } else if (type == FLOAT) { + return ((Double) value).floatValue(); + } else { + return value; + } + } + + private ExprValue evaluateExpression(Environment valueEnv) { + ExprValue result = expression.valueOf(valueEnv); + if (result.isNull() || result.isMissing()) { + return ExprBooleanValue.of(false); + } + + if (result.type() != ExprCoreType.BOOLEAN) { + throw new IllegalStateException(String.format( + "Expression has wrong result type instead of boolean: " + + "expression [%s], result [%s]", expression, result)); + } + return result; + } + +} diff --git a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/script/filter/ExpressionFilterScriptFactory.java b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/script/filter/ExpressionFilterScriptFactory.java new file mode 100644 index 0000000000..b47ae0740e --- /dev/null +++ b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/script/filter/ExpressionFilterScriptFactory.java @@ -0,0 +1,51 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. 
All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + * + */ + +package com.amazon.opendistroforelasticsearch.sql.elasticsearch.storage.script.filter; + +import com.amazon.opendistroforelasticsearch.sql.expression.Expression; +import java.util.Map; +import lombok.EqualsAndHashCode; +import org.elasticsearch.script.FilterScript; +import org.elasticsearch.search.lookup.SearchLookup; + +/** + * Expression script factory that generates leaf factory. + */ +@EqualsAndHashCode +public class ExpressionFilterScriptFactory implements FilterScript.Factory { + + /** + * Expression to execute. 
+ */ + private final Expression expression; + + public ExpressionFilterScriptFactory(Expression expression) { + this.expression = expression; + } + + @Override + public boolean isResultDeterministic() { + // This implies the results are cacheable + return true; + } + + @Override + public FilterScript.LeafFactory newFactory(Map params, SearchLookup lookup) { + return new ExpressionFilterScriptLeafFactory(expression, params, lookup); + } + +} diff --git a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/script/filter/ExpressionFilterScriptLeafFactory.java b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/script/filter/ExpressionFilterScriptLeafFactory.java new file mode 100644 index 0000000000..f34b7650a6 --- /dev/null +++ b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/script/filter/ExpressionFilterScriptLeafFactory.java @@ -0,0 +1,58 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ * + */ + +package com.amazon.opendistroforelasticsearch.sql.elasticsearch.storage.script.filter; + +import com.amazon.opendistroforelasticsearch.sql.expression.Expression; +import java.util.Map; +import org.apache.lucene.index.LeafReaderContext; +import org.elasticsearch.script.FilterScript; +import org.elasticsearch.search.lookup.SearchLookup; + +/** + * Expression script leaf factory that produces script executor for each leaf. + */ +class ExpressionFilterScriptLeafFactory implements FilterScript.LeafFactory { + + /** + * Expression to execute. + */ + private final Expression expression; + + /** + * Parameters for the expression. + */ + private final Map params; + + /** + * Document lookup that returns doc values. + */ + private final SearchLookup lookup; + + public ExpressionFilterScriptLeafFactory(Expression expression, + Map params, + SearchLookup lookup) { + this.expression = expression; + this.params = params; + this.lookup = lookup; + } + + @Override + public FilterScript newInstance(LeafReaderContext ctx) { + return new ExpressionFilterScript(expression, lookup, ctx, params); + } + +} diff --git a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/script/filter/FilterQueryBuilder.java b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/script/filter/FilterQueryBuilder.java new file mode 100644 index 0000000000..79fca1982a --- /dev/null +++ b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/script/filter/FilterQueryBuilder.java @@ -0,0 +1,115 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. 
This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + * + */ + +package com.amazon.opendistroforelasticsearch.sql.elasticsearch.storage.script.filter; + +import static com.amazon.opendistroforelasticsearch.sql.elasticsearch.storage.script.ExpressionScriptEngine.EXPRESSION_LANG_NAME; +import static java.util.Collections.emptyMap; +import static org.elasticsearch.script.Script.DEFAULT_SCRIPT_TYPE; + +import com.amazon.opendistroforelasticsearch.sql.elasticsearch.storage.script.filter.lucene.LuceneQuery; +import com.amazon.opendistroforelasticsearch.sql.elasticsearch.storage.script.filter.lucene.RangeQuery; +import com.amazon.opendistroforelasticsearch.sql.elasticsearch.storage.script.filter.lucene.RangeQuery.Comparison; +import com.amazon.opendistroforelasticsearch.sql.elasticsearch.storage.script.filter.lucene.TermQuery; +import com.amazon.opendistroforelasticsearch.sql.elasticsearch.storage.script.filter.lucene.WildcardQuery; +import com.amazon.opendistroforelasticsearch.sql.elasticsearch.storage.serialization.ExpressionSerializer; +import com.amazon.opendistroforelasticsearch.sql.expression.Expression; +import com.amazon.opendistroforelasticsearch.sql.expression.ExpressionNodeVisitor; +import com.amazon.opendistroforelasticsearch.sql.expression.FunctionExpression; +import com.amazon.opendistroforelasticsearch.sql.expression.function.BuiltinFunctionName; +import com.amazon.opendistroforelasticsearch.sql.expression.function.FunctionName; +import com.google.common.collect.ImmutableMap; +import java.util.Map; +import java.util.function.BiFunction; +import lombok.RequiredArgsConstructor; +import org.elasticsearch.index.query.BoolQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; +import 
org.elasticsearch.index.query.ScriptQueryBuilder; +import org.elasticsearch.script.Script; + +@RequiredArgsConstructor +public class FilterQueryBuilder extends ExpressionNodeVisitor { + + /** + * Serializer that serializes expression for build DSL query. + */ + private final ExpressionSerializer serializer; + + /** + * Mapping from function name to lucene query builder. + */ + private final Map luceneQueries = + ImmutableMap.builder() + .put(BuiltinFunctionName.EQUAL.getName(), new TermQuery()) + .put(BuiltinFunctionName.LESS.getName(), new RangeQuery(Comparison.LT)) + .put(BuiltinFunctionName.GREATER.getName(), new RangeQuery(Comparison.GT)) + .put(BuiltinFunctionName.LTE.getName(), new RangeQuery(Comparison.LTE)) + .put(BuiltinFunctionName.GTE.getName(), new RangeQuery(Comparison.GTE)) + .put(BuiltinFunctionName.LIKE.getName(), new WildcardQuery()) + .build(); + + /** + * Build Elasticsearch filter query from expression. + * @param expr expression + * @return query + */ + public QueryBuilder build(Expression expr) { + try { + return expr.accept(this, null); + } catch (IllegalStateException e) { + //TODO: remove this try-catch once arithmetic and all expressions are serializable + return null; + } + } + + @Override + public QueryBuilder visitFunction(FunctionExpression func, Object context) { + FunctionName name = func.getFunctionName(); + switch (name.getFunctionName()) { + case "and": + return buildBoolQuery(func, context, BoolQueryBuilder::filter); + case "or": + return buildBoolQuery(func, context, BoolQueryBuilder::should); + case "not": + return buildBoolQuery(func, context, BoolQueryBuilder::mustNot); + default: { + LuceneQuery query = luceneQueries.get(name); + if (query != null && query.canSupport(func)) { + return query.build(func); + } + return buildScriptQuery(func); + } + } + } + + private BoolQueryBuilder buildBoolQuery(FunctionExpression node, + Object context, + BiFunction accumulator) { + BoolQueryBuilder boolQuery = QueryBuilders.boolQuery(); + 
for (Expression arg : node.getArguments()) { + accumulator.apply(boolQuery, arg.accept(this, context)); + } + return boolQuery; + } + + private ScriptQueryBuilder buildScriptQuery(FunctionExpression node) { + return new ScriptQueryBuilder(new Script( + DEFAULT_SCRIPT_TYPE, EXPRESSION_LANG_NAME, serializer.serialize(node), emptyMap())); + } + +} diff --git a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/script/filter/lucene/LuceneQuery.java b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/script/filter/lucene/LuceneQuery.java new file mode 100644 index 0000000000..379995ce5c --- /dev/null +++ b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/script/filter/lucene/LuceneQuery.java @@ -0,0 +1,89 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ * + */ + +package com.amazon.opendistroforelasticsearch.sql.elasticsearch.storage.script.filter.lucene; + +import static com.amazon.opendistroforelasticsearch.sql.elasticsearch.data.type.ElasticsearchDataType.ES_TEXT_KEYWORD; + +import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValue; +import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; +import com.amazon.opendistroforelasticsearch.sql.expression.FunctionExpression; +import com.amazon.opendistroforelasticsearch.sql.expression.LiteralExpression; +import com.amazon.opendistroforelasticsearch.sql.expression.ReferenceExpression; +import org.elasticsearch.index.query.QueryBuilder; + +/** + * Lucene query abstraction that builds Lucene query from function expression. + */ +public abstract class LuceneQuery { + + /** + * Check if function expression supported by current Lucene query. + * Default behavior is that report supported if: + * 1. Left is a reference + * 2. Right side is a literal + * + * @param func function + * @return return true if supported, otherwise false. + */ + public boolean canSupport(FunctionExpression func) { + return (func.getArguments().size() == 2) + && (func.getArguments().get(0) instanceof ReferenceExpression) + && (func.getArguments().get(1) instanceof LiteralExpression); + } + + /** + * Build Lucene query from function expression. + * + * @param func function + * @return query + */ + public QueryBuilder build(FunctionExpression func) { + ReferenceExpression ref = (ReferenceExpression) func.getArguments().get(0); + LiteralExpression literal = (LiteralExpression) func.getArguments().get(1); + return doBuild(ref.getAttr(), ref.type(), literal.valueOf(null)); + } + + /** + * Build method that subclass implements by default which is to build query + * from reference and literal in function arguments. 
+ * + * @param fieldName field name + * @param fieldType field type + * @param literal field value literal + * @return query + */ + protected QueryBuilder doBuild(String fieldName, ExprType fieldType, ExprValue literal) { + throw new UnsupportedOperationException( + "Subclass doesn't implement this and build method either"); + } + + /** + * Convert multi-field text field name to its inner keyword field. The limitation and assumption + * is that the keyword field name is always "keyword" which is true by default. + * + * @param fieldName field name + * @param fieldType field type + * @return keyword field name for multi-field, otherwise original field name returned + */ + protected String convertTextToKeyword(String fieldName, ExprType fieldType) { + if (fieldType == ES_TEXT_KEYWORD) { + return fieldName + ".keyword"; + } + return fieldName; + } + +} diff --git a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/script/filter/lucene/RangeQuery.java b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/script/filter/lucene/RangeQuery.java new file mode 100644 index 0000000000..4d156311e3 --- /dev/null +++ b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/script/filter/lucene/RangeQuery.java @@ -0,0 +1,60 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ * + */ + +package com.amazon.opendistroforelasticsearch.sql.elasticsearch.storage.script.filter.lucene; + +import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValue; +import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; +import lombok.RequiredArgsConstructor; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.query.RangeQueryBuilder; + +/** + * Lucene query that builds range query for non-quality comparison. + */ +@RequiredArgsConstructor +public class RangeQuery extends LuceneQuery { + + public enum Comparison { + LT, GT, LTE, GTE, BETWEEN + } + + /** + * Comparison that range query build for. + */ + private final Comparison comparison; + + @Override + protected QueryBuilder doBuild(String fieldName, ExprType fieldType, ExprValue literal) { + Object value = literal.value(); + + RangeQueryBuilder query = QueryBuilders.rangeQuery(fieldName); + switch (comparison) { + case LT: + return query.lt(value); + case GT: + return query.gt(value); + case LTE: + return query.lte(value); + case GTE: + return query.gte(value); + default: + throw new IllegalStateException("Comparison is supported by range query: " + comparison); + } + } + +} diff --git a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/script/filter/lucene/TermQuery.java b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/script/filter/lucene/TermQuery.java new file mode 100644 index 0000000000..db07116a25 --- /dev/null +++ b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/script/filter/lucene/TermQuery.java @@ -0,0 +1,35 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. 
+ * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + * + */ + +package com.amazon.opendistroforelasticsearch.sql.elasticsearch.storage.script.filter.lucene; + +import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValue; +import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; + +/** + * Lucene query that build term query for equality comparison. + */ +public class TermQuery extends LuceneQuery { + + @Override + protected QueryBuilder doBuild(String fieldName, ExprType fieldType, ExprValue literal) { + fieldName = convertTextToKeyword(fieldName, fieldType); + return QueryBuilders.termQuery(fieldName, literal.value()); + } + +} diff --git a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/script/filter/lucene/WildcardQuery.java b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/script/filter/lucene/WildcardQuery.java new file mode 100644 index 0000000000..7b5c109924 --- /dev/null +++ b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/script/filter/lucene/WildcardQuery.java @@ -0,0 +1,41 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. 
This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + * + */ + +package com.amazon.opendistroforelasticsearch.sql.elasticsearch.storage.script.filter.lucene; + +import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValue; +import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; + +/** + * Lucene query that builds wildcard query. + */ +public class WildcardQuery extends LuceneQuery { + + @Override + protected QueryBuilder doBuild(String fieldName, ExprType fieldType, ExprValue literal) { + fieldName = convertTextToKeyword(fieldName, fieldType); + String matchText = convertSqlWildcardToLucene(literal.stringValue()); + return QueryBuilders.wildcardQuery(fieldName, matchText); + } + + private String convertSqlWildcardToLucene(String text) { + return text.replace('%', '*') + .replace('_', '?'); + } + +} diff --git a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/serialization/DefaultExpressionSerializer.java b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/serialization/DefaultExpressionSerializer.java new file mode 100644 index 0000000000..7697973cd4 --- /dev/null +++ b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/serialization/DefaultExpressionSerializer.java @@ -0,0 +1,56 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. 
+ * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + * + */ + +package com.amazon.opendistroforelasticsearch.sql.elasticsearch.storage.serialization; + +import com.amazon.opendistroforelasticsearch.sql.expression.Expression; +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.ObjectInputStream; +import java.io.ObjectOutputStream; +import java.util.Base64; + +/** + * Default serializer that (de-)serialize expressions by JDK serialization. + */ +public class DefaultExpressionSerializer implements ExpressionSerializer { + + @Override + public String serialize(Expression expr) { + try { + ByteArrayOutputStream output = new ByteArrayOutputStream(); + ObjectOutputStream objectOutput = new ObjectOutputStream(output); + objectOutput.writeObject(expr); + objectOutput.flush(); + return Base64.getEncoder().encodeToString(output.toByteArray()); + } catch (IOException e) { + throw new IllegalStateException("Failed to serialize expression: " + expr, e); + } + } + + @Override + public Expression deserialize(String code) { + try { + ByteArrayInputStream input = new ByteArrayInputStream(Base64.getDecoder().decode(code)); + ObjectInputStream objectInput = new ObjectInputStream(input); + return (Expression) objectInput.readObject(); + } catch (Exception e) { + throw new IllegalStateException("Failed to deserialize expression code: " + code, e); + } + } + +} diff --git a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/serialization/ExpressionSerializer.java 
b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/serialization/ExpressionSerializer.java new file mode 100644 index 0000000000..fcf101c639 --- /dev/null +++ b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/serialization/ExpressionSerializer.java @@ -0,0 +1,40 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + * + */ + +package com.amazon.opendistroforelasticsearch.sql.elasticsearch.storage.serialization; + +import com.amazon.opendistroforelasticsearch.sql.expression.Expression; + +/** + * Expression serializer that (de-)serializes expression object. + */ +public interface ExpressionSerializer { + + /** + * Serialize an expression. + * @param expr expression + * @return serialized string + */ + String serialize(Expression expr); + + /** + * Deserialize an expression. 
+ * @param code serialized code + * @return original expression object + */ + Expression deserialize(String code); + +} diff --git a/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/client/ElasticsearchNodeClientTest.java b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/client/ElasticsearchNodeClientTest.java index f9e3e36842..e53fa1a08e 100644 --- a/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/client/ElasticsearchNodeClientTest.java +++ b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/client/ElasticsearchNodeClientTest.java @@ -29,7 +29,7 @@ import static org.mockito.Mockito.when; import com.amazon.opendistroforelasticsearch.sql.elasticsearch.mapping.IndexMapping; -import com.amazon.opendistroforelasticsearch.sql.elasticsearch.request.ElasticsearchRequest; +import com.amazon.opendistroforelasticsearch.sql.elasticsearch.request.ElasticsearchScrollRequest; import com.amazon.opendistroforelasticsearch.sql.elasticsearch.response.ElasticsearchResponse; import com.google.common.base.Charsets; import com.google.common.collect.ImmutableSortedMap; @@ -93,7 +93,7 @@ public void getIndexMappings() throws IOException { assertEquals("geo_point", indexMapping.getFieldType("location")); assertEquals("some_new_es_type_outside_type_system", indexMapping.getFieldType("new_field")); assertEquals("text", indexMapping.getFieldType("field with spaces")); - assertEquals("text", indexMapping.getFieldType("employer")); + assertEquals("text_keyword", indexMapping.getFieldType("employer")); assertEquals("keyword", indexMapping.getFieldType("employer.raw")); assertEquals("nested", indexMapping.getFieldType("projects")); assertEquals("boolean", indexMapping.getFieldType("projects.active")); @@ -101,7 +101,7 @@ public void getIndexMappings() throws IOException { assertEquals("nested", indexMapping.getFieldType("projects.members")); 
assertEquals("text", indexMapping.getFieldType("projects.members.name")); assertEquals("object", indexMapping.getFieldType("manager")); - assertEquals("text", indexMapping.getFieldType("manager.name")); + assertEquals("text_keyword", indexMapping.getFieldType("manager.name")); assertEquals("keyword", indexMapping.getFieldType("manager.name.keyword")); assertEquals("keyword", indexMapping.getFieldType("manager.address")); assertEquals("long", indexMapping.getFieldType("manager.salary")); @@ -164,7 +164,7 @@ public void search() { when(scrollResponse.getHits()).thenReturn(SearchHits.empty()); // Verify response for first scroll request - ElasticsearchRequest request = new ElasticsearchRequest("test"); + ElasticsearchScrollRequest request = new ElasticsearchScrollRequest("test"); ElasticsearchResponse response1 = client.search(request); assertFalse(response1.isEmpty()); @@ -208,7 +208,7 @@ void cleanup() { ElasticsearchNodeClient client = new ElasticsearchNodeClient(mock(ClusterService.class), nodeClient); - ElasticsearchRequest request = new ElasticsearchRequest("test"); + ElasticsearchScrollRequest request = new ElasticsearchScrollRequest("test"); request.setScrollId("scroll123"); client.cleanup(request); assertFalse(request.isScrollStarted()); @@ -224,7 +224,7 @@ void cleanupWithoutScrollId() { ElasticsearchNodeClient client = new ElasticsearchNodeClient(mock(ClusterService.class), nodeClient); - ElasticsearchRequest request = new ElasticsearchRequest("test"); + ElasticsearchScrollRequest request = new ElasticsearchScrollRequest("test"); client.cleanup(request); verify(nodeClient, never()).prepareClearScroll(); } diff --git a/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/client/ElasticsearchRestClientTest.java b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/client/ElasticsearchRestClientTest.java index 21b8da5acc..a91a2e861c 100644 --- 
a/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/client/ElasticsearchRestClientTest.java +++ b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/client/ElasticsearchRestClientTest.java @@ -28,7 +28,7 @@ import static org.mockito.Mockito.when; import com.amazon.opendistroforelasticsearch.sql.elasticsearch.mapping.IndexMapping; -import com.amazon.opendistroforelasticsearch.sql.elasticsearch.request.ElasticsearchRequest; +import com.amazon.opendistroforelasticsearch.sql.elasticsearch.request.ElasticsearchScrollRequest; import com.amazon.opendistroforelasticsearch.sql.elasticsearch.response.ElasticsearchResponse; import com.google.common.base.Charsets; import com.google.common.collect.ImmutableMap; @@ -96,7 +96,7 @@ void getIndexMappings() throws IOException { assertEquals("geo_point", indexMapping.getFieldType("location")); assertEquals("some_new_es_type_outside_type_system", indexMapping.getFieldType("new_field")); assertEquals("text", indexMapping.getFieldType("field with spaces")); - assertEquals("text", indexMapping.getFieldType("employer")); + assertEquals("text_keyword", indexMapping.getFieldType("employer")); assertEquals("keyword", indexMapping.getFieldType("employer.raw")); assertEquals("nested", indexMapping.getFieldType("projects")); assertEquals("boolean", indexMapping.getFieldType("projects.active")); @@ -104,7 +104,7 @@ void getIndexMappings() throws IOException { assertEquals("nested", indexMapping.getFieldType("projects.members")); assertEquals("text", indexMapping.getFieldType("projects.members.name")); assertEquals("object", indexMapping.getFieldType("manager")); - assertEquals("text", indexMapping.getFieldType("manager.name")); + assertEquals("text_keyword", indexMapping.getFieldType("manager.name")); assertEquals("keyword", indexMapping.getFieldType("manager.name.keyword")); assertEquals("keyword", indexMapping.getFieldType("manager.address")); assertEquals("long", 
indexMapping.getFieldType("manager.salary")); @@ -137,7 +137,7 @@ void search() throws IOException { when(scrollResponse.getHits()).thenReturn(SearchHits.empty()); // Verify response for first scroll request - ElasticsearchRequest request = new ElasticsearchRequest("test"); + ElasticsearchScrollRequest request = new ElasticsearchScrollRequest("test"); ElasticsearchResponse response1 = client.search(request); assertFalse(response1.isEmpty()); @@ -155,7 +155,30 @@ void search() throws IOException { void searchWithIOException() throws IOException { when(restClient.search(any(), any())).thenThrow(new IOException()); assertThrows( - IllegalStateException.class, () -> client.search(new ElasticsearchRequest("test"))); + IllegalStateException.class, () -> client.search(new ElasticsearchScrollRequest("test"))); + } + + @Test + void scrollWithIOException() throws IOException { + // Mock first scroll request + SearchResponse searchResponse = mock(SearchResponse.class); + when(restClient.search(any(), any())).thenReturn(searchResponse); + when(searchResponse.getScrollId()).thenReturn("scroll123"); + when(searchResponse.getHits()) + .thenReturn( + new SearchHits( + new SearchHit[] {new SearchHit(1)}, + new TotalHits(1L, TotalHits.Relation.EQUAL_TO), + 1.0F)); + + // Mock second scroll request followed + when(restClient.scroll(any(), any())).thenThrow(new IOException()); + + // First request run successfully + ElasticsearchScrollRequest scrollRequest = new ElasticsearchScrollRequest("test"); + client.search(scrollRequest); + assertThrows( + IllegalStateException.class, () -> client.search(scrollRequest)); } @Test @@ -170,7 +193,7 @@ void schedule() { @Test void cleanup() throws IOException { - ElasticsearchRequest request = new ElasticsearchRequest("test"); + ElasticsearchScrollRequest request = new ElasticsearchScrollRequest("test"); request.setScrollId("scroll123"); client.cleanup(request); verify(restClient).clearScroll(any(), any()); @@ -179,7 +202,7 @@ void cleanup() throws 
IOException { @Test void cleanupWithoutScrollId() throws IOException { - ElasticsearchRequest request = new ElasticsearchRequest("test"); + ElasticsearchScrollRequest request = new ElasticsearchScrollRequest("test"); client.cleanup(request); verify(restClient, never()).clearScroll(any(), any()); } @@ -188,7 +211,7 @@ void cleanupWithoutScrollId() throws IOException { void cleanupWithIOException() throws IOException { when(restClient.clearScroll(any(), any())).thenThrow(new IOException()); - ElasticsearchRequest request = new ElasticsearchRequest("test"); + ElasticsearchScrollRequest request = new ElasticsearchScrollRequest("test"); request.setScrollId("scroll123"); assertThrows(IllegalStateException.class, () -> client.cleanup(request)); } diff --git a/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/data/value/ElasticsearchExprTextKeywordValueTest.java b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/data/value/ElasticsearchExprTextKeywordValueTest.java new file mode 100644 index 0000000000..d52e0459c8 --- /dev/null +++ b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/data/value/ElasticsearchExprTextKeywordValueTest.java @@ -0,0 +1,31 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ * + */ + +package com.amazon.opendistroforelasticsearch.sql.elasticsearch.data.value; + +import static com.amazon.opendistroforelasticsearch.sql.elasticsearch.data.type.ElasticsearchDataType.ES_TEXT_KEYWORD; +import static org.junit.jupiter.api.Assertions.assertEquals; + +import org.junit.jupiter.api.Test; + +class ElasticsearchExprTextKeywordValueTest { + + @Test + public void testTypeOfExprTextKeywordValue() { + assertEquals(ES_TEXT_KEYWORD, new ElasticsearchExprTextKeywordValue("A").type()); + } + +} \ No newline at end of file diff --git a/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/data/value/ElasticsearchExprValueFactoryTest.java b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/data/value/ElasticsearchExprValueFactoryTest.java index f83b323d3f..23d7427361 100644 --- a/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/data/value/ElasticsearchExprValueFactoryTest.java +++ b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/data/value/ElasticsearchExprValueFactoryTest.java @@ -34,6 +34,7 @@ import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.STRUCT; import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.TIMESTAMP; import static com.amazon.opendistroforelasticsearch.sql.elasticsearch.data.type.ElasticsearchDataType.ES_TEXT; +import static com.amazon.opendistroforelasticsearch.sql.elasticsearch.data.type.ElasticsearchDataType.ES_TEXT_KEYWORD; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertThrows; @@ -69,6 +70,7 @@ class ElasticsearchExprValueFactoryTest { .put("arrayV.info", STRING) .put("arrayV.author", STRING) .put("textV", ES_TEXT) + .put("textKeywordV", ES_TEXT_KEYWORD) .build(); private ElasticsearchExprValueFactory exprValueFactory = new ElasticsearchExprValueFactory(MAPPING); @@ -76,42 +78,56 @@ 
class ElasticsearchExprValueFactoryTest { @Test public void constructNullValue() { assertEquals(nullValue(), tupleValue("{\"intV\":null}").get("intV")); + assertEquals(nullValue(), constructFromObject("intV", null)); } @Test public void constructInteger() { assertEquals(integerValue(1), tupleValue("{\"intV\":1}").get("intV")); + assertEquals(integerValue(1), constructFromObject("intV", 1)); } @Test public void constructLong() { assertEquals(longValue(1L), tupleValue("{\"longV\":1}").get("longV")); + assertEquals(longValue(1L), constructFromObject("longV", 1L)); } @Test public void constructFloat() { assertEquals(floatValue(1f), tupleValue("{\"floatV\":1.0}").get("floatV")); + assertEquals(floatValue(1f), constructFromObject("floatV", 1f)); } @Test public void constructDouble() { assertEquals(doubleValue(1d), tupleValue("{\"doubleV\":1.0}").get("doubleV")); + assertEquals(doubleValue(1d), constructFromObject("doubleV", 1d)); } @Test public void constructString() { assertEquals(stringValue("text"), tupleValue("{\"stringV\":\"text\"}").get("stringV")); + assertEquals(stringValue("text"), constructFromObject("stringV", "text")); } @Test public void constructBoolean() { assertEquals(booleanValue(true), tupleValue("{\"boolV\":true}").get("boolV")); + assertEquals(booleanValue(true), constructFromObject("boolV", true)); } @Test public void constructText() { - assertEquals(new ElasticsearchExprTextValue("text"), tupleValue("{\"textV\":\"text\"}").get( - "textV")); + assertEquals(new ElasticsearchExprTextValue("text"), + tupleValue("{\"textV\":\"text\"}").get("textV")); + assertEquals(new ElasticsearchExprTextValue("text"), + constructFromObject("textV", "text")); + + assertEquals(new ElasticsearchExprTextKeywordValue("text"), + tupleValue("{\"textKeywordV\":\"text\"}").get("textKeywordV")); + assertEquals(new ElasticsearchExprTextKeywordValue("text"), + constructFromObject("textKeywordV", "text")); } @Test @@ -131,6 +147,16 @@ public void constructDate() { assertEquals( 
new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), tupleValue("{\"dateV\":1420070400001}").get("dateV")); + + assertEquals( + new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), + constructFromObject("dateV", 1420070400001L)); + assertEquals( + new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), + constructFromObject("dateV", Instant.ofEpochMilli(1420070400001L))); + assertEquals( + new ExprTimestampValue("2015-01-01 12:10:30"), + constructFromObject("dateV", "2015-01-01 12:10:30")); } @Test @@ -191,12 +217,23 @@ public void constructUnsupportedTypeThrowException() { IllegalStateException exception = assertThrows(IllegalStateException.class, () -> exprValueFactory.construct("{\"type\":1}")); assertEquals("Unsupported type: TEST_TYPE for field: type, value: 1.", exception.getMessage()); + + exception = + assertThrows(IllegalStateException.class, () -> exprValueFactory.construct("type", 1)); + assertEquals( + "Unsupported type TEST_TYPE to construct expression value " + + "from object for field: type, value: 1.", + exception.getMessage()); } public Map tupleValue(String jsonString) { return (Map) exprValueFactory.construct(jsonString).value(); } + private ExprValue constructFromObject(String fieldName, Object value) { + return exprValueFactory.construct(fieldName, value); + } + @EqualsAndHashCode @ToString private static class TestType implements ExprType { diff --git a/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/ElasticsearchExecutionEngineTest.java b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/ElasticsearchExecutionEngineTest.java index f1ca7ac1f8..c91ecd062d 100644 --- a/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/ElasticsearchExecutionEngineTest.java +++ b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/ElasticsearchExecutionEngineTest.java @@ 
-32,6 +32,7 @@ import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValue; import com.amazon.opendistroforelasticsearch.sql.elasticsearch.client.ElasticsearchClient; import com.amazon.opendistroforelasticsearch.sql.elasticsearch.executor.protector.ElasticsearchExecutionProtector; +import com.amazon.opendistroforelasticsearch.sql.executor.ExecutionEngine; import com.amazon.opendistroforelasticsearch.sql.planner.physical.PhysicalPlan; import com.amazon.opendistroforelasticsearch.sql.storage.TableScanOperator; import java.util.ArrayList; @@ -53,6 +54,8 @@ class ElasticsearchExecutionEngineTest { @Mock private ElasticsearchExecutionProtector protector; + @Mock private static ExecutionEngine.Schema schema; + @BeforeEach void setUp() { doAnswer( @@ -148,5 +151,10 @@ public boolean hasNext() { public ExprValue next() { return it.next(); } + + @Override + public ExecutionEngine.Schema schema() { + return schema; + } } } diff --git a/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/ElasticsearchExecutionProtectorTest.java b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/ElasticsearchExecutionProtectorTest.java index c5dcf97b55..a4225c5235 100644 --- a/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/ElasticsearchExecutionProtectorTest.java +++ b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/ElasticsearchExecutionProtectorTest.java @@ -21,18 +21,23 @@ import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.INTEGER; import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.STRING; import static com.amazon.opendistroforelasticsearch.sql.expression.DSL.literal; +import static com.amazon.opendistroforelasticsearch.sql.expression.DSL.named; import static com.amazon.opendistroforelasticsearch.sql.expression.DSL.ref; import static 
com.amazon.opendistroforelasticsearch.sql.planner.physical.PhysicalPlanDSL.filter; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.Mockito.when; import com.amazon.opendistroforelasticsearch.sql.ast.tree.Sort; +import com.amazon.opendistroforelasticsearch.sql.common.setting.Settings; import com.amazon.opendistroforelasticsearch.sql.data.model.ExprBooleanValue; import com.amazon.opendistroforelasticsearch.sql.elasticsearch.client.ElasticsearchClient; import com.amazon.opendistroforelasticsearch.sql.elasticsearch.data.value.ElasticsearchExprValueFactory; import com.amazon.opendistroforelasticsearch.sql.elasticsearch.executor.protector.ElasticsearchExecutionProtector; import com.amazon.opendistroforelasticsearch.sql.elasticsearch.executor.protector.ResourceMonitorPlan; +import com.amazon.opendistroforelasticsearch.sql.elasticsearch.setting.ElasticsearchSettings; import com.amazon.opendistroforelasticsearch.sql.elasticsearch.storage.ElasticsearchIndexScan; import com.amazon.opendistroforelasticsearch.sql.expression.Expression; +import com.amazon.opendistroforelasticsearch.sql.expression.NamedExpression; import com.amazon.opendistroforelasticsearch.sql.expression.ReferenceExpression; import com.amazon.opendistroforelasticsearch.sql.expression.aggregation.Aggregator; import com.amazon.opendistroforelasticsearch.sql.expression.aggregation.AvgAggregator; @@ -62,6 +67,9 @@ class ElasticsearchExecutionProtectorTest { @Mock private ElasticsearchExprValueFactory exprValueFactory; + @Mock + private ElasticsearchSettings settings; + private ElasticsearchExecutionProtector executionProtector; @BeforeEach @@ -71,8 +79,10 @@ public void setup() { @Test public void testProtectIndexScan() { + when(settings.getSettingValue(Settings.Key.QUERY_SIZE_LIMIT)).thenReturn(200); + String indexName = "test"; - ReferenceExpression include = ref("age", INTEGER); + NamedExpression include = named("age", ref("age", INTEGER)); ReferenceExpression exclude = 
ref("name", STRING); ReferenceExpression dedupeField = ref("name", STRING); Expression filterExpr = literal(ExprBooleanValue.of(true)); @@ -97,7 +107,7 @@ public void testProtectIndexScan() { filter( resourceMonitor( new ElasticsearchIndexScan( - client, indexName, exprValueFactory)), + client, settings, indexName, exprValueFactory)), filterExpr), aggregators, groupByExprs), @@ -118,7 +128,7 @@ public void testProtectIndexScan() { PhysicalPlanDSL.agg( filter( new ElasticsearchIndexScan( - client, indexName, exprValueFactory), + client, settings, indexName, exprValueFactory), filterExpr), aggregators, groupByExprs), diff --git a/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/request/ElasticsearchQueryRequestTest.java b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/request/ElasticsearchQueryRequestTest.java new file mode 100644 index 0000000000..de6ab040f3 --- /dev/null +++ b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/request/ElasticsearchQueryRequestTest.java @@ -0,0 +1,102 @@ +/* + * + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ * + */ + +package com.amazon.opendistroforelasticsearch.sql.elasticsearch.request; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.doNothing; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import com.amazon.opendistroforelasticsearch.sql.elasticsearch.response.ElasticsearchResponse; +import java.util.function.Consumer; +import java.util.function.Function; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.SearchScrollRequest; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchHits; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +public class ElasticsearchQueryRequestTest { + + @Mock + private Function searchAction; + + @Mock + private Function scrollAction; + + @Mock + private Consumer cleanAction; + + @Mock + private SearchResponse searchResponse; + + @Mock + private SearchHits searchHits; + + @Mock + private SearchHit searchHit; + + private final ElasticsearchQueryRequest request = new ElasticsearchQueryRequest("test", 200); + + @Test + void search() { + when(searchAction.apply(any())).thenReturn(searchResponse); + when(searchResponse.getHits()).thenReturn(searchHits); + when(searchHits.getHits()).thenReturn(new SearchHit[]{searchHit}); + + ElasticsearchResponse 
searchResponse = request.search(searchAction, scrollAction); + assertFalse(searchResponse.isEmpty()); + searchResponse = request.search(searchAction, scrollAction); + assertTrue(searchResponse.isEmpty()); + verify(searchAction, times(1)).apply(any()); + } + + @Test + void clean() { + request.clean(cleanAction); + verify(cleanAction, never()).accept(any()); + } + + @Test + void searchRequest() { + request.getSourceBuilder().query(QueryBuilders.termQuery("name", "John")); + + assertEquals( + new SearchRequest() + .indices("test") + .source(new SearchSourceBuilder() + .timeout(ElasticsearchQueryRequest.DEFAULT_QUERY_TIMEOUT) + .from(0) + .size(200) + .query(QueryBuilders.termQuery("name", "John"))), + request.searchRequest()); + } +} diff --git a/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/request/ElasticsearchRequestTest.java b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/request/ElasticsearchScrollRequestTest.java similarity index 86% rename from elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/request/ElasticsearchRequestTest.java rename to elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/request/ElasticsearchScrollRequestTest.java index cd6600d100..b836c48f7b 100644 --- a/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/request/ElasticsearchRequestTest.java +++ b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/request/ElasticsearchScrollRequestTest.java @@ -26,9 +26,9 @@ import org.elasticsearch.search.builder.SearchSourceBuilder; import org.junit.jupiter.api.Test; -class ElasticsearchRequestTest { +class ElasticsearchScrollRequestTest { - private final ElasticsearchRequest request = new ElasticsearchRequest("test"); + private final ElasticsearchScrollRequest request = new ElasticsearchScrollRequest("test"); @Test void searchRequest() { @@ -37,7 
+37,7 @@ void searchRequest() { assertEquals( new SearchRequest() .indices("test") - .scroll(ElasticsearchRequest.DEFAULT_SCROLL_TIMEOUT) + .scroll(ElasticsearchScrollRequest.DEFAULT_SCROLL_TIMEOUT) .source(new SearchSourceBuilder().query(QueryBuilders.termQuery("name", "John"))), request.searchRequest()); } @@ -55,7 +55,7 @@ void scrollRequest() { request.setScrollId("scroll123"); assertEquals( new SearchScrollRequest() - .scroll(ElasticsearchRequest.DEFAULT_SCROLL_TIMEOUT) + .scroll(ElasticsearchScrollRequest.DEFAULT_SCROLL_TIMEOUT) .scrollId("scroll123"), request.scrollRequest()); } diff --git a/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/response/error/ElasticsearchErrorMessageTest.java b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/response/error/ElasticsearchErrorMessageTest.java new file mode 100644 index 0000000000..f5386a1914 --- /dev/null +++ b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/response/error/ElasticsearchErrorMessageTest.java @@ -0,0 +1,81 @@ +/* + * + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ * + */ + +package com.amazon.opendistroforelasticsearch.sql.elasticsearch.response.error; + +import static org.elasticsearch.rest.RestStatus.SERVICE_UNAVAILABLE; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.Mockito.when; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.search.SearchPhaseExecutionException; +import org.elasticsearch.action.search.ShardSearchFailure; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +class ElasticsearchErrorMessageTest { + + @Mock + private ElasticsearchException elasticsearchException; + + @Mock + private SearchPhaseExecutionException searchPhaseExecutionException; + + @Mock + private ShardSearchFailure shardSearchFailure; + + @Test + public void fetchReason() { + when(elasticsearchException.getMessage()).thenReturn("error"); + + ElasticsearchErrorMessage errorMessage = + new ElasticsearchErrorMessage(elasticsearchException, SERVICE_UNAVAILABLE.getStatus()); + assertEquals("Error occurred in Elasticsearch engine: error", errorMessage.fetchReason()); + } + + @Test + public void fetchDetailsWithElasticsearchException() { + when(elasticsearchException.getDetailedMessage()).thenReturn("detail error"); + + ElasticsearchErrorMessage errorMessage = + new ElasticsearchErrorMessage(elasticsearchException, SERVICE_UNAVAILABLE.getStatus()); + assertEquals("detail error\n" + + "For more details, please send request for " + + "Json format to see the raw response from elasticsearch engine.", + errorMessage.fetchDetails()); + } + + @Test + public void fetchDetailsWithSearchPhaseExecutionException() { + when(searchPhaseExecutionException.shardFailures()) + .thenReturn(new ShardSearchFailure[] {shardSearchFailure}); + when(shardSearchFailure.shardId()).thenReturn(1); + 
when(shardSearchFailure.getCause()).thenReturn(new IllegalStateException("illegal state")); + + ElasticsearchErrorMessage errorMessage = + new ElasticsearchErrorMessage(searchPhaseExecutionException, + SERVICE_UNAVAILABLE.getStatus()); + assertEquals("Shard[1]: java.lang.IllegalStateException: illegal state\n" + + "\n" + + "For more details, please send request for Json format to see the " + + "raw response from elasticsearch engine.", + errorMessage.fetchDetails()); + } +} \ No newline at end of file diff --git a/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/response/error/ErrorMessageFactoryTest.java b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/response/error/ErrorMessageFactoryTest.java new file mode 100644 index 0000000000..ea582f77df --- /dev/null +++ b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/response/error/ErrorMessageFactoryTest.java @@ -0,0 +1,62 @@ +/* + * + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ * + */ + +package com.amazon.opendistroforelasticsearch.sql.elasticsearch.response.error; + +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.rest.RestStatus; +import org.junit.jupiter.api.Test; + +public class ErrorMessageFactoryTest { + private Throwable nonEsThrowable = new Throwable(); + private Throwable esThrowable = new ElasticsearchException(nonEsThrowable); + + @Test + public void esExceptionShouldCreateEsErrorMessage() { + Exception exception = new ElasticsearchException(nonEsThrowable); + ErrorMessage msg = + ErrorMessageFactory.createErrorMessage(exception, RestStatus.BAD_REQUEST.getStatus()); + assertTrue(msg instanceof ElasticsearchErrorMessage); + } + + @Test + public void nonEsExceptionShouldCreateGenericErrorMessage() { + Exception exception = new Exception(nonEsThrowable); + ErrorMessage msg = + ErrorMessageFactory.createErrorMessage(exception, RestStatus.BAD_REQUEST.getStatus()); + assertFalse(msg instanceof ElasticsearchErrorMessage); + } + + @Test + public void nonEsExceptionWithWrappedEsExceptionCauseShouldCreateEsErrorMessage() { + Exception exception = (Exception) esThrowable; + ErrorMessage msg = + ErrorMessageFactory.createErrorMessage(exception, RestStatus.BAD_REQUEST.getStatus()); + assertTrue(msg instanceof ElasticsearchErrorMessage); + } + + @Test + public void nonEsExceptionWithMultiLayerWrappedEsExceptionCauseShouldCreateEsErrorMessage() { + Exception exception = new Exception(new Throwable(new Throwable(esThrowable))); + ErrorMessage msg = + ErrorMessageFactory.createErrorMessage(exception, RestStatus.BAD_REQUEST.getStatus()); + assertTrue(msg instanceof ElasticsearchErrorMessage); + } +} diff --git a/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/response/error/ErrorMessageTest.java 
b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/response/error/ErrorMessageTest.java new file mode 100644 index 0000000000..8ef13bfc70 --- /dev/null +++ b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/response/error/ErrorMessageTest.java @@ -0,0 +1,75 @@ +/* + * + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + * + */ + +package com.amazon.opendistroforelasticsearch.sql.elasticsearch.response.error; + +import static org.elasticsearch.rest.RestStatus.BAD_REQUEST; +import static org.elasticsearch.rest.RestStatus.SERVICE_UNAVAILABLE; +import static org.junit.jupiter.api.Assertions.assertEquals; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +class ErrorMessageTest { + + @Test + public void testToString() { + ErrorMessage errorMessage = + new ErrorMessage(new IllegalStateException("illegal state"), + SERVICE_UNAVAILABLE.getStatus()); + assertEquals("{\n" + + " \"error\": {\n" + + " \"reason\": \"There was internal problem at backend\",\n" + + " \"details\": \"illegal state\",\n" + + " \"type\": \"IllegalStateException\"\n" + + " },\n" + + " \"status\": 503\n" + + "}", errorMessage.toString()); + } + + @Test + public void testBadRequestToString() { + ErrorMessage errorMessage = + new ErrorMessage(new IllegalStateException(), + 
BAD_REQUEST.getStatus()); + assertEquals("{\n" + + " \"error\": {\n" + + " \"reason\": \"Invalid Query\",\n" + + " \"details\": \"\",\n" + + " \"type\": \"IllegalStateException\"\n" + + " },\n" + + " \"status\": 400\n" + + "}", errorMessage.toString()); + } + + @Test + public void testToStringWithEmptyErrorMessage() { + ErrorMessage errorMessage = + new ErrorMessage(new IllegalStateException(), + SERVICE_UNAVAILABLE.getStatus()); + assertEquals("{\n" + + " \"error\": {\n" + + " \"reason\": \"There was internal problem at backend\",\n" + + " \"details\": \"\",\n" + + " \"type\": \"IllegalStateException\"\n" + + " },\n" + + " \"status\": 503\n" + + "}", errorMessage.toString()); + } +} \ No newline at end of file diff --git a/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/ElasticsearchIndexScanTest.java b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/ElasticsearchIndexScanTest.java index 2ace5a4cc7..5a41dfd2fe 100644 --- a/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/ElasticsearchIndexScanTest.java +++ b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/ElasticsearchIndexScanTest.java @@ -17,6 +17,8 @@ package com.amazon.opendistroforelasticsearch.sql.elasticsearch.storage; import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.STRING; +import static org.elasticsearch.search.sort.FieldSortBuilder.DOC_FIELD_NAME; +import static org.elasticsearch.search.sort.SortOrder.ASC; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertTrue; @@ -26,15 +28,21 @@ import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; +import com.amazon.opendistroforelasticsearch.sql.common.setting.Settings; import 
com.amazon.opendistroforelasticsearch.sql.data.model.ExprValue; import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils; import com.amazon.opendistroforelasticsearch.sql.elasticsearch.client.ElasticsearchClient; import com.amazon.opendistroforelasticsearch.sql.elasticsearch.data.value.ElasticsearchExprValueFactory; +import com.amazon.opendistroforelasticsearch.sql.elasticsearch.request.ElasticsearchQueryRequest; +import com.amazon.opendistroforelasticsearch.sql.elasticsearch.request.ElasticsearchRequest; import com.amazon.opendistroforelasticsearch.sql.elasticsearch.response.ElasticsearchResponse; import com.google.common.collect.ImmutableMap; import java.util.Arrays; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.SearchHit; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; @@ -48,14 +56,22 @@ class ElasticsearchIndexScanTest { @Mock private ElasticsearchClient client; + @Mock + private Settings settings; + private ElasticsearchExprValueFactory exprValueFactory = new ElasticsearchExprValueFactory( - ImmutableMap.of("name", STRING, "department", STRING)); + ImmutableMap.of("name", STRING, "department", STRING)); + + @BeforeEach + void setup() { + when(settings.getSettingValue(Settings.Key.QUERY_SIZE_LIMIT)).thenReturn(200); + } @Test void queryEmptyResult() { mockResponse(); try (ElasticsearchIndexScan indexScan = - new ElasticsearchIndexScan(client, "test", exprValueFactory)) { + new ElasticsearchIndexScan(client, settings, "test", exprValueFactory)) { indexScan.open(); assertFalse(indexScan.hasNext()); } @@ -69,7 +85,7 @@ void queryAllResults() { new SearchHit[]{employee(3, "Allen", "IT")}); try (ElasticsearchIndexScan indexScan = - new ElasticsearchIndexScan(client, "employees", exprValueFactory)) { + new 
ElasticsearchIndexScan(client, settings, "employees", exprValueFactory)) { indexScan.open(); assertTrue(indexScan.hasNext()); @@ -86,6 +102,59 @@ void queryAllResults() { verify(client).cleanup(any()); } + @Test + void pushDownFilters() { + assertThat() + .pushDown(QueryBuilders.termQuery("name", "John")) + .shouldQuery(QueryBuilders.termQuery("name", "John")) + .pushDown(QueryBuilders.termQuery("age", 30)) + .shouldQuery( + QueryBuilders.boolQuery() + .filter(QueryBuilders.termQuery("name", "John")) + .filter(QueryBuilders.termQuery("age", 30))) + .pushDown(QueryBuilders.rangeQuery("balance").gte(10000)) + .shouldQuery( + QueryBuilders.boolQuery() + .filter(QueryBuilders.termQuery("name", "John")) + .filter(QueryBuilders.termQuery("age", 30)) + .filter(QueryBuilders.rangeQuery("balance").gte(10000))); + } + + private PushDownAssertion assertThat() { + return new PushDownAssertion(client, exprValueFactory, settings); + } + + private static class PushDownAssertion { + private final ElasticsearchClient client; + private final ElasticsearchIndexScan indexScan; + private final ElasticsearchResponse response; + + public PushDownAssertion(ElasticsearchClient client, + ElasticsearchExprValueFactory valueFactory, + Settings settings) { + this.client = client; + this.indexScan = new ElasticsearchIndexScan(client, settings, "test", valueFactory); + this.response = mock(ElasticsearchResponse.class); + when(response.isEmpty()).thenReturn(true); + } + + PushDownAssertion pushDown(QueryBuilder query) { + indexScan.pushDown(query); + return this; + } + + PushDownAssertion shouldQuery(QueryBuilder expected) { + ElasticsearchRequest request = new ElasticsearchQueryRequest("test", 200); + request.getSourceBuilder() + .query(expected) + .sort(DOC_FIELD_NAME, ASC); + when(client.search(request)).thenReturn(response); + indexScan.open(); + return this; + } + + } + private void mockResponse(SearchHit[]... 
searchHitBatches) { when(client.search(any())) .thenAnswer( diff --git a/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/ElasticsearchIndexTest.java b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/ElasticsearchIndexTest.java index 458c2f2ecd..d2dc73156d 100644 --- a/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/ElasticsearchIndexTest.java +++ b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/ElasticsearchIndexTest.java @@ -20,6 +20,7 @@ import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.INTEGER; import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.STRING; import static com.amazon.opendistroforelasticsearch.sql.expression.DSL.literal; +import static com.amazon.opendistroforelasticsearch.sql.expression.DSL.named; import static com.amazon.opendistroforelasticsearch.sql.expression.DSL.ref; import static com.amazon.opendistroforelasticsearch.sql.planner.logical.LogicalPlanDSL.aggregation; import static com.amazon.opendistroforelasticsearch.sql.planner.logical.LogicalPlanDSL.eval; @@ -34,9 +35,11 @@ import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.hasEntry; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.when; import com.amazon.opendistroforelasticsearch.sql.ast.tree.Sort; +import com.amazon.opendistroforelasticsearch.sql.common.setting.Settings; import com.amazon.opendistroforelasticsearch.sql.data.model.ExprBooleanValue; import com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType; import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; @@ -44,13 +47,19 @@ import com.amazon.opendistroforelasticsearch.sql.elasticsearch.data.type.ElasticsearchDataType; import 
com.amazon.opendistroforelasticsearch.sql.elasticsearch.data.value.ElasticsearchExprValueFactory; import com.amazon.opendistroforelasticsearch.sql.elasticsearch.mapping.IndexMapping; +import com.amazon.opendistroforelasticsearch.sql.expression.DSL; import com.amazon.opendistroforelasticsearch.sql.expression.Expression; +import com.amazon.opendistroforelasticsearch.sql.expression.NamedExpression; import com.amazon.opendistroforelasticsearch.sql.expression.ReferenceExpression; import com.amazon.opendistroforelasticsearch.sql.expression.aggregation.Aggregator; import com.amazon.opendistroforelasticsearch.sql.expression.aggregation.AvgAggregator; +import com.amazon.opendistroforelasticsearch.sql.expression.config.ExpressionConfig; import com.amazon.opendistroforelasticsearch.sql.planner.logical.LogicalPlan; import com.amazon.opendistroforelasticsearch.sql.planner.logical.LogicalPlanDSL; +import com.amazon.opendistroforelasticsearch.sql.planner.physical.FilterOperator; +import com.amazon.opendistroforelasticsearch.sql.planner.physical.PhysicalPlan; import com.amazon.opendistroforelasticsearch.sql.planner.physical.PhysicalPlanDSL; +import com.amazon.opendistroforelasticsearch.sql.planner.physical.ProjectOperator; import com.amazon.opendistroforelasticsearch.sql.storage.Table; import com.google.common.collect.ImmutableMap; import java.util.Arrays; @@ -66,12 +75,17 @@ @ExtendWith(MockitoExtension.class) class ElasticsearchIndexTest { + private final DSL dsl = new ExpressionConfig().dsl(new ExpressionConfig().functionRepository()); + @Mock private ElasticsearchClient client; @Mock private ElasticsearchExprValueFactory exprValueFactory; + @Mock + private Settings settings; + @Test void getFieldTypes() { when(client.getIndexMappings("test")) @@ -92,7 +106,7 @@ void getFieldTypes() { .put("birthday", "date") .build()))); - Table index = new ElasticsearchIndex(client, "test"); + Table index = new ElasticsearchIndex(client, settings, "test"); Map fieldTypes = 
index.getFieldTypes(); assertThat( fieldTypes, @@ -112,17 +126,22 @@ void getFieldTypes() { @Test void implementRelationOperatorOnly() { + when(settings.getSettingValue(Settings.Key.QUERY_SIZE_LIMIT)).thenReturn(200); + String indexName = "test"; LogicalPlan plan = relation(indexName); - Table index = new ElasticsearchIndex(client, indexName); + Table index = new ElasticsearchIndex(client, settings, indexName); assertEquals( - new ElasticsearchIndexScan(client, indexName, exprValueFactory), index.implement(plan)); + new ElasticsearchIndexScan(client, settings, indexName, exprValueFactory), + index.implement(plan)); } @Test void implementOtherLogicalOperators() { + when(settings.getSettingValue(Settings.Key.QUERY_SIZE_LIMIT)).thenReturn(200); + String indexName = "test"; - ReferenceExpression include = ref("age", INTEGER); + NamedExpression include = named("age", ref("age", INTEGER)); ReferenceExpression exclude = ref("name", STRING); ReferenceExpression dedupeField = ref("name", STRING); Expression filterExpr = literal(ExprBooleanValue.of(true)); @@ -155,7 +174,7 @@ void implementOtherLogicalOperators() { dedupeField), include); - Table index = new ElasticsearchIndex(client, indexName); + Table index = new ElasticsearchIndex(client, settings, indexName); assertEquals( PhysicalPlanDSL.project( PhysicalPlanDSL.dedupe( @@ -164,10 +183,10 @@ void implementOtherLogicalOperators() { PhysicalPlanDSL.remove( PhysicalPlanDSL.rename( PhysicalPlanDSL.agg( - PhysicalPlanDSL.filter( - new ElasticsearchIndexScan( - client, indexName, exprValueFactory), - filterExpr), + PhysicalPlanDSL.filter( + new ElasticsearchIndexScan( + client, settings, indexName, exprValueFactory), + filterExpr), aggregators, groupByExprs), mappings), @@ -179,4 +198,50 @@ void implementOtherLogicalOperators() { include), index.implement(plan)); } + + @Test + void shouldDiscardPhysicalFilterIfConditionPushedDown() { + when(settings.getSettingValue(Settings.Key.QUERY_SIZE_LIMIT)).thenReturn(200); + + 
ReferenceExpression field = ref("name", STRING); + NamedExpression named = named("n", field); + Expression filterExpr = dsl.equal(field, literal("John")); + + String indexName = "test"; + ElasticsearchIndex index = new ElasticsearchIndex(client, settings, indexName); + PhysicalPlan plan = index.implement( + project( + filter( + relation(indexName), + filterExpr + ), + named)); + + assertTrue(plan instanceof ProjectOperator); + assertTrue(((ProjectOperator) plan).getInput() instanceof ElasticsearchIndexScan); + } + + @Test + void shouldNotPushDownFilterFarFromRelation() { + when(settings.getSettingValue(Settings.Key.QUERY_SIZE_LIMIT)).thenReturn(200); + + ReferenceExpression field = ref("name", STRING); + Expression filterExpr = dsl.equal(field, literal("John")); + List groupByExprs = Arrays.asList(ref("age", INTEGER)); + List aggregators = Arrays.asList(new AvgAggregator(groupByExprs, DOUBLE)); + + String indexName = "test"; + ElasticsearchIndex index = new ElasticsearchIndex(client, settings, indexName); + PhysicalPlan plan = index.implement( + filter( + aggregation( + relation(indexName), + aggregators, + groupByExprs + ), + filterExpr)); + + assertTrue(plan instanceof FilterOperator); + } + } diff --git a/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/ElasticsearchStorageEngineTest.java b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/ElasticsearchStorageEngineTest.java index f17455f988..51f62c2d29 100644 --- a/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/ElasticsearchStorageEngineTest.java +++ b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/ElasticsearchStorageEngineTest.java @@ -18,7 +18,9 @@ import static org.junit.jupiter.api.Assertions.assertNotNull; +import com.amazon.opendistroforelasticsearch.sql.common.setting.Settings; import 
com.amazon.opendistroforelasticsearch.sql.elasticsearch.client.ElasticsearchClient; +import com.amazon.opendistroforelasticsearch.sql.elasticsearch.setting.ElasticsearchSettings; import com.amazon.opendistroforelasticsearch.sql.storage.Table; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; @@ -30,9 +32,11 @@ class ElasticsearchStorageEngineTest { @Mock private ElasticsearchClient client; + @Mock private Settings settings; + @Test public void getTable() { - ElasticsearchStorageEngine engine = new ElasticsearchStorageEngine(client); + ElasticsearchStorageEngine engine = new ElasticsearchStorageEngine(client, settings); Table table = engine.getTable("test"); assertNotNull(table); } diff --git a/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/script/ExpressionScriptEngineTest.java b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/script/ExpressionScriptEngineTest.java new file mode 100644 index 0000000000..9be9087527 --- /dev/null +++ b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/script/ExpressionScriptEngineTest.java @@ -0,0 +1,82 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ * + */ + +package com.amazon.opendistroforelasticsearch.sql.elasticsearch.storage.script; + +import static java.util.Collections.emptyMap; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.contains; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import com.amazon.opendistroforelasticsearch.sql.elasticsearch.storage.script.filter.ExpressionFilterScriptFactory; +import com.amazon.opendistroforelasticsearch.sql.elasticsearch.storage.serialization.ExpressionSerializer; +import com.amazon.opendistroforelasticsearch.sql.expression.DSL; +import com.amazon.opendistroforelasticsearch.sql.expression.Expression; +import org.elasticsearch.script.FilterScript; +import org.elasticsearch.script.ScriptContext; +import org.elasticsearch.script.ScriptEngine; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.DisplayNameGeneration; +import org.junit.jupiter.api.DisplayNameGenerator; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +@DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) +@ExtendWith(MockitoExtension.class) +class ExpressionScriptEngineTest { + + @Mock + private ExpressionSerializer serializer; + + private ScriptEngine scriptEngine; + + private final Expression expression = DSL.literal(true); + + @BeforeEach + void set_up() { + scriptEngine = new ExpressionScriptEngine(serializer); + } + + @Test + void should_return_custom_script_language_name() { + assertEquals(ExpressionScriptEngine.EXPRESSION_LANG_NAME, scriptEngine.getType()); + } + + @Test + void can_initialize_filter_script_factory_by_compiled_script() { + when(serializer.deserialize("test code")).thenReturn(expression); + + 
assertThat(scriptEngine.getSupportedContexts(), + contains(FilterScript.CONTEXT)); + + Object actualFactory = scriptEngine.compile( + "test", "test code", FilterScript.CONTEXT, emptyMap()); + assertEquals(new ExpressionFilterScriptFactory(expression), actualFactory); + } + + @Test + void should_throw_exception_for_unsupported_script_context() { + ScriptContext unknownCtx = mock(ScriptContext.class); + assertThrows(IllegalStateException.class, () -> + scriptEngine.compile("test", "test code", unknownCtx, emptyMap())); + } + +} \ No newline at end of file diff --git a/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/script/filter/ExpressionFilterScriptFactoryTest.java b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/script/filter/ExpressionFilterScriptFactoryTest.java new file mode 100644 index 0000000000..041ff9a694 --- /dev/null +++ b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/script/filter/ExpressionFilterScriptFactoryTest.java @@ -0,0 +1,76 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ * + */ + +package com.amazon.opendistroforelasticsearch.sql.elasticsearch.storage.script.filter; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.Mockito.when; + +import com.amazon.opendistroforelasticsearch.sql.expression.DSL; +import com.amazon.opendistroforelasticsearch.sql.expression.Expression; +import java.io.IOException; +import java.util.Collections; +import java.util.Map; +import org.apache.lucene.index.LeafReaderContext; +import org.elasticsearch.script.FilterScript; +import org.elasticsearch.search.lookup.LeafSearchLookup; +import org.elasticsearch.search.lookup.SearchLookup; +import org.junit.jupiter.api.DisplayNameGeneration; +import org.junit.jupiter.api.DisplayNameGenerator; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +@DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) +@ExtendWith(MockitoExtension.class) +class ExpressionFilterScriptFactoryTest { + + @Mock + private SearchLookup searchLookup; + + @Mock + private LeafSearchLookup leafSearchLookup; + + @Mock + private LeafReaderContext leafReaderContext; + + private final Expression expression = DSL.literal(true); + + private final Map params = Collections.emptyMap(); + + private final FilterScript.Factory factory = new ExpressionFilterScriptFactory(expression); + + @Test + void should_return_deterministic_result() { + assertTrue(factory.isResultDeterministic()); + } + + @Test + void can_initialize_expression_filter_script() throws IOException { + when(searchLookup.getLeafSearchLookup(leafReaderContext)).thenReturn(leafSearchLookup); + + FilterScript.LeafFactory leafFactory = factory.newFactory(params, searchLookup); + FilterScript actualFilterScript = leafFactory.newInstance(leafReaderContext); + + assertEquals( + new ExpressionFilterScript(expression, 
searchLookup, leafReaderContext, params), + actualFilterScript + ); + } + +} \ No newline at end of file diff --git a/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/script/filter/ExpressionFilterScriptTest.java b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/script/filter/ExpressionFilterScriptTest.java new file mode 100644 index 0000000000..cd9499386f --- /dev/null +++ b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/script/filter/ExpressionFilterScriptTest.java @@ -0,0 +1,233 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ * + */ + +package com.amazon.opendistroforelasticsearch.sql.elasticsearch.storage.script.filter; + +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.FLOAT; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.INTEGER; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.STRING; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.TIMESTAMP; +import static com.amazon.opendistroforelasticsearch.sql.elasticsearch.data.type.ElasticsearchDataType.ES_TEXT_KEYWORD; +import static com.amazon.opendistroforelasticsearch.sql.expression.DSL.literal; +import static com.amazon.opendistroforelasticsearch.sql.expression.DSL.ref; +import static java.util.Collections.emptyMap; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import com.amazon.opendistroforelasticsearch.sql.data.model.ExprTimestampValue; +import com.amazon.opendistroforelasticsearch.sql.expression.DSL; +import com.amazon.opendistroforelasticsearch.sql.expression.Expression; +import com.amazon.opendistroforelasticsearch.sql.expression.LiteralExpression; +import com.amazon.opendistroforelasticsearch.sql.expression.config.ExpressionConfig; +import com.google.common.collect.ImmutableMap; +import java.time.ZonedDateTime; +import java.util.Map; +import lombok.RequiredArgsConstructor; +import org.apache.lucene.index.LeafReaderContext; +import org.elasticsearch.index.fielddata.ScriptDocValues; +import org.elasticsearch.search.lookup.LeafDocLookup; +import org.elasticsearch.search.lookup.LeafSearchLookup; +import org.elasticsearch.search.lookup.SearchLookup; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.DisplayNameGeneration; 
+import org.junit.jupiter.api.DisplayNameGenerator; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +@DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) +@ExtendWith(MockitoExtension.class) +class ExpressionFilterScriptTest { + + private final DSL dsl = new ExpressionConfig().dsl(new ExpressionConfig().functionRepository()); + + @Mock + private SearchLookup lookup; + + @Mock + private LeafSearchLookup leafLookup; + + @Mock + private LeafReaderContext context; + + @Test + void should_match_if_true_literal() { + assertThat() + .docValues() + .filterBy(literal(true)) + .shouldMatch(); + } + + @Test + void should_not_match_if_false_literal() { + assertThat() + .docValues() + .filterBy(literal(false)) + .shouldNotMatch(); + } + + @Test + void can_execute_expression_with_integer_field() { + assertThat() + .docValues("age", 30L) // DocValue only supports long + .filterBy( + dsl.greater(ref("age", INTEGER), literal(20))) + .shouldMatch(); + } + + @Test + void can_execute_expression_with_text_keyword_field() { + assertThat() + .docValues("name.keyword", "John") + .filterBy( + dsl.equal(ref("name", ES_TEXT_KEYWORD), literal("John"))) + .shouldMatch(); + } + + @Test + void can_execute_expression_with_float_field() { + assertThat() + .docValues( + "balance", 100.0, // DocValue only supports double + "name", "John") + .filterBy( + dsl.and( + dsl.less(ref("balance", FLOAT), literal(150.0F)), + dsl.equal(ref("name", STRING), literal("John")))) + .shouldMatch(); + } + + @Test + void can_execute_expression_with_date_field() { + ExprTimestampValue ts = new ExprTimestampValue("2020-08-04 10:00:00"); + assertThat() + .docValues("birthday", ZonedDateTime.parse("2020-08-04T10:00:00Z")) + .filterBy(dsl.equal(ref("birthday", TIMESTAMP), new LiteralExpression(ts))) + .shouldMatch(); + } + + @Test + void can_execute_expression_with_missing_field() { + 
assertThat() + .docValues("age", 30) + .filterBy(ref("name", STRING)) + .shouldNotMatch(); + } + + @Test + void cannot_execute_non_predicate_expression() { + assertThrow(IllegalStateException.class, + "Expression has wrong result type instead of boolean: expression [10], result [10]") + .docValues() + .filterBy(literal(10)); + } + + private ExprScriptAssertion assertThat() { + return new ExprScriptAssertion(lookup, leafLookup, context); + } + + private ExprScriptAssertion assertThrow(Class clazz, + String message) { + return new ExprScriptAssertion(lookup, leafLookup, context) { + @Override + ExprScriptAssertion filterBy(Expression expr) { + Throwable t = assertThrows(clazz, () -> super.filterBy(expr)); + assertEquals(message, t.getMessage()); + return null; + } + }; + } + + @RequiredArgsConstructor + private static class ExprScriptAssertion { + private final SearchLookup lookup; + private final LeafSearchLookup leafLookup; + private final LeafReaderContext context; + private boolean isMatched; + + ExprScriptAssertion docValues() { + return this; + } + + ExprScriptAssertion docValues(String name, Object value) { + LeafDocLookup leafDocLookup = mockLeafDocLookup( + ImmutableMap.of(name, new FakeScriptDocValues<>(value))); + + when(lookup.getLeafSearchLookup(any())).thenReturn(leafLookup); + when(leafLookup.doc()).thenReturn(leafDocLookup); + return this; + } + + ExprScriptAssertion docValues(String name1, Object value1, + String name2, Object value2) { + LeafDocLookup leafDocLookup = mockLeafDocLookup( + ImmutableMap.of( + name1, new FakeScriptDocValues<>(value1), + name2, new FakeScriptDocValues<>(value2))); + + when(lookup.getLeafSearchLookup(any())).thenReturn(leafLookup); + when(leafLookup.doc()).thenReturn(leafDocLookup); + return this; + } + + ExprScriptAssertion filterBy(Expression expr) { + ExpressionFilterScript script = new ExpressionFilterScript(expr, lookup, context, emptyMap()); + isMatched = script.execute(); + return this; + } + + void shouldMatch() { 
+ Assertions.assertTrue(isMatched); + } + + void shouldNotMatch() { + Assertions.assertFalse(isMatched); + } + + private LeafDocLookup mockLeafDocLookup(Map> docValueByNames) { + LeafDocLookup leafDocLookup = mock(LeafDocLookup.class); + when(leafDocLookup.get(anyString())) + .thenAnswer(invocation -> docValueByNames.get(invocation.getArgument(0))); + return leafDocLookup; + } + } + + @RequiredArgsConstructor + private static class FakeScriptDocValues extends ScriptDocValues { + private final T value; + + @Override + public void setNextDocId(int docId) { + throw new UnsupportedOperationException("Fake script doc values doesn't implement this yet"); + } + + @Override + public T get(int index) { + return value; + } + + @Override + public int size() { + return 1; + } + } + +} \ No newline at end of file diff --git a/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/script/filter/FilterQueryBuilderTest.java b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/script/filter/FilterQueryBuilderTest.java new file mode 100644 index 0000000000..aabbd857dc --- /dev/null +++ b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/script/filter/FilterQueryBuilderTest.java @@ -0,0 +1,272 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ * + */ + +package com.amazon.opendistroforelasticsearch.sql.elasticsearch.storage.script.filter; + +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.INTEGER; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.STRING; +import static com.amazon.opendistroforelasticsearch.sql.elasticsearch.data.type.ElasticsearchDataType.ES_TEXT_KEYWORD; +import static com.amazon.opendistroforelasticsearch.sql.expression.DSL.literal; +import static com.amazon.opendistroforelasticsearch.sql.expression.DSL.ref; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.when; + +import com.amazon.opendistroforelasticsearch.sql.elasticsearch.storage.serialization.ExpressionSerializer; +import com.amazon.opendistroforelasticsearch.sql.expression.DSL; +import com.amazon.opendistroforelasticsearch.sql.expression.Expression; +import com.amazon.opendistroforelasticsearch.sql.expression.FunctionExpression; +import com.amazon.opendistroforelasticsearch.sql.expression.config.ExpressionConfig; +import com.google.common.collect.ImmutableMap; +import java.util.Map; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.DisplayNameGeneration; +import org.junit.jupiter.api.DisplayNameGenerator; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +@DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) +@ExtendWith(MockitoExtension.class) +class FilterQueryBuilderTest { + + private final DSL dsl = new ExpressionConfig().dsl(new ExpressionConfig().functionRepository()); + + @Mock + private ExpressionSerializer serializer; + + private FilterQueryBuilder filterQueryBuilder; + + @BeforeEach + void set_up() { + 
filterQueryBuilder = new FilterQueryBuilder(serializer); + } + + @Test + void should_return_null_if_exception() { + when(serializer.serialize(any())).thenThrow(IllegalStateException.class); + + assertNull( + filterQueryBuilder.build( + dsl.equal(dsl.abs(ref("age", INTEGER)), literal(30)))); + } + + @Test + void should_build_term_query_for_equality_expression() { + assertEquals( + "{\n" + + " \"term\" : {\n" + + " \"name\" : {\n" + + " \"value\" : \"John\",\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}", + buildQuery( + dsl.equal( + ref("name", STRING), literal("John")))); + } + + @Test + void should_build_range_query_for_comparison_expression() { + Expression[] params = {ref("age", INTEGER), literal(30)}; + Map ranges = ImmutableMap.of( + dsl.less(params), new Object[]{null, 30, true, false}, + dsl.greater(params), new Object[]{30, null, false, true}, + dsl.lte(params), new Object[]{null, 30, true, true}, + dsl.gte(params), new Object[]{30, null, true, true}); + + ranges.forEach((expr, range) -> + assertEquals( + "{\n" + + " \"range\" : {\n" + + " \"age\" : {\n" + + " \"from\" : " + range[0] + ",\n" + + " \"to\" : " + range[1] + ",\n" + + " \"include_lower\" : " + range[2] + ",\n" + + " \"include_upper\" : " + range[3] + ",\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}", + buildQuery(expr))); + } + + @Test + void should_build_wildcard_query_for_like_expression() { + assertEquals( + "{\n" + + " \"wildcard\" : {\n" + + " \"name\" : {\n" + + " \"wildcard\" : \"*John?\",\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}", + buildQuery( + dsl.like( + ref("name", STRING), literal("%John_")))); + } + + @Test + void should_build_script_query_for_function_expression() { + doAnswer(invocation -> { + Expression expr = invocation.getArgument(0); + return expr.toString(); + }).when(serializer).serialize(any()); + + assertEquals( + "{\n" + + " \"script\" : {\n" + + " \"script\" : {\n" + + " \"source\" : \"abs(age) = 30\",\n" + + " \"lang\" : 
\"opendistro_expression\"\n" + + " },\n" + + " \"boost\" : 1.0\n" + + " }\n" + + "}", + buildQuery( + dsl.equal( + dsl.abs(ref("age", INTEGER)), literal(30)))); + } + + @Test + void should_build_script_query_for_comparison_between_fields() { + doAnswer(invocation -> { + Expression expr = invocation.getArgument(0); + return expr.toString(); + }).when(serializer).serialize(any()); + + assertEquals( + "{\n" + + " \"script\" : {\n" + + " \"script\" : {\n" + + " \"source\" : \"age1 = age2\",\n" + + " \"lang\" : \"opendistro_expression\"\n" + + " },\n" + + " \"boost\" : 1.0\n" + + " }\n" + + "}", + buildQuery( + dsl.equal( + ref("age1", INTEGER), ref("age2", INTEGER)))); + } + + @Test + void should_build_bool_query_for_and_or_expression() { + String[] names = { "filter", "should" }; + FunctionExpression expr1 = dsl.equal(ref("name", STRING), literal("John")); + FunctionExpression expr2 = dsl.equal(ref("age", INTEGER), literal(30)); + Expression[] exprs = { + dsl.and(expr1, expr2), + dsl.or(expr1, expr2) + }; + + for (int i = 0; i < names.length; i++) { + assertEquals( + "{\n" + + " \"bool\" : {\n" + + " \"" + names[i] + "\" : [\n" + + " {\n" + + " \"term\" : {\n" + + " \"name\" : {\n" + + " \"value\" : \"John\",\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + " },\n" + + " {\n" + + " \"term\" : {\n" + + " \"age\" : {\n" + + " \"value\" : 30,\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + " }\n" + + " ],\n" + + " \"adjust_pure_negative\" : true,\n" + + " \"boost\" : 1.0\n" + + " }\n" + + "}", + buildQuery(exprs[i])); + } + } + + @Test + void should_build_bool_query_for_not_expression() { + assertEquals( + "{\n" + + " \"bool\" : {\n" + + " \"must_not\" : [\n" + + " {\n" + + " \"term\" : {\n" + + " \"age\" : {\n" + + " \"value\" : 30,\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + " }\n" + + " ],\n" + + " \"adjust_pure_negative\" : true,\n" + + " \"boost\" : 1.0\n" + + " }\n" + + "}", + buildQuery( + dsl.not( + dsl.equal( + ref("age", INTEGER), 
literal(30))))); + } + + @Test + void should_use_keyword_for_multi_field_in_equality_expression() { + assertEquals( + "{\n" + + " \"term\" : {\n" + + " \"name.keyword\" : {\n" + + " \"value\" : \"John\",\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}", + buildQuery( + dsl.equal( + ref("name", ES_TEXT_KEYWORD), literal("John")))); + } + + @Test + void should_use_keyword_for_multi_field_in_like_expression() { + assertEquals( + "{\n" + + " \"wildcard\" : {\n" + + " \"name.keyword\" : {\n" + + " \"wildcard\" : \"John*\",\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}", + buildQuery( + dsl.like( + ref("name", ES_TEXT_KEYWORD), literal("John%")))); + } + + private String buildQuery(Expression expr) { + return filterQueryBuilder.build(expr).toString(); + } + +} \ No newline at end of file diff --git a/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/script/filter/lucene/LuceneQueryTest.java b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/script/filter/lucene/LuceneQueryTest.java new file mode 100644 index 0000000000..33c47cd461 --- /dev/null +++ b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/script/filter/lucene/LuceneQueryTest.java @@ -0,0 +1,34 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ * + */ + +package com.amazon.opendistroforelasticsearch.sql.elasticsearch.storage.script.filter.lucene; + +import static org.junit.jupiter.api.Assertions.assertThrows; + +import org.junit.jupiter.api.DisplayNameGeneration; +import org.junit.jupiter.api.DisplayNameGenerator; +import org.junit.jupiter.api.Test; + +@DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) +class LuceneQueryTest { + + @Test + void should_throw_exception_if_not_implemented() { + assertThrows(UnsupportedOperationException.class, () -> + new LuceneQuery(){}.doBuild(null, null, null)); + } + +} \ No newline at end of file diff --git a/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/script/filter/lucene/RangeQueryTest.java b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/script/filter/lucene/RangeQueryTest.java new file mode 100644 index 0000000000..c1ed7e5393 --- /dev/null +++ b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/script/filter/lucene/RangeQueryTest.java @@ -0,0 +1,39 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ * + */ + +package com.amazon.opendistroforelasticsearch.sql.elasticsearch.storage.script.filter.lucene; + +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.STRING; +import static org.junit.jupiter.api.Assertions.assertThrows; + +import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils; +import com.amazon.opendistroforelasticsearch.sql.elasticsearch.storage.script.filter.lucene.RangeQuery.Comparison; +import org.junit.jupiter.api.DisplayNameGeneration; +import org.junit.jupiter.api.DisplayNameGenerator; +import org.junit.jupiter.api.Test; + +@DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) +class RangeQueryTest { + + @Test + void should_throw_exception_for_unsupported_comparison() { + // Note that since we do switch check on enum comparison, this should'be impossible + assertThrows(IllegalStateException.class, () -> + new RangeQuery(Comparison.BETWEEN) + .doBuild("name", STRING, ExprValueUtils.stringValue("John"))); + } + +} \ No newline at end of file diff --git a/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/serialization/DefaultExpressionSerializerTest.java b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/serialization/DefaultExpressionSerializerTest.java new file mode 100644 index 0000000000..0691c5e0d2 --- /dev/null +++ b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/serialization/DefaultExpressionSerializerTest.java @@ -0,0 +1,103 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. 
This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + * + */ + +package com.amazon.opendistroforelasticsearch.sql.elasticsearch.storage.serialization; + +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.STRING; +import static com.amazon.opendistroforelasticsearch.sql.expression.DSL.literal; +import static com.amazon.opendistroforelasticsearch.sql.expression.DSL.ref; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; + +import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValue; +import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; +import com.amazon.opendistroforelasticsearch.sql.expression.DSL; +import com.amazon.opendistroforelasticsearch.sql.expression.Expression; +import com.amazon.opendistroforelasticsearch.sql.expression.ExpressionNodeVisitor; +import com.amazon.opendistroforelasticsearch.sql.expression.config.ExpressionConfig; +import com.amazon.opendistroforelasticsearch.sql.expression.env.Environment; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.DisplayNameGeneration; +import org.junit.jupiter.api.DisplayNameGenerator; +import org.junit.jupiter.api.Test; + +@DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) +class DefaultExpressionSerializerTest { + + /** + * Initialize function repository manually to avoid dependency on Spring container. 
+ */ + private final DSL dsl = new ExpressionConfig().dsl(new ExpressionConfig().functionRepository()); + + private final ExpressionSerializer serializer = new DefaultExpressionSerializer(); + + @Test + public void can_serialize_and_deserialize_literals() { + Expression original = literal(10); + Expression actual = serializer.deserialize(serializer.serialize(original)); + assertEquals(original, actual); + } + + @Test + public void can_serialize_and_deserialize_references() { + Expression original = ref("name", STRING); + Expression actual = serializer.deserialize(serializer.serialize(original)); + assertEquals(original, actual); + } + + @Test + public void can_serialize_and_deserialize_predicates() { + Expression original = dsl.or(literal(true), dsl.less(literal(1), literal(2))); + Expression actual = serializer.deserialize(serializer.serialize(original)); + assertEquals(original, actual); + } + + @Disabled("Bypass until all functions become serializable") + @Test + public void can_serialize_and_deserialize_functions() { + Expression original = dsl.abs(literal(30.0)); + Expression actual = serializer.deserialize(serializer.serialize(original)); + assertEquals(original, actual); + } + + @Test + public void cannot_serialize_illegal_expression() { + Expression illegalExpr = new Expression() { + private final Object object = new Object(); // non-serializable + @Override + public ExprValue valueOf(Environment valueEnv) { + return null; + } + + @Override + public ExprType type() { + return null; + } + + @Override + public T accept(ExpressionNodeVisitor visitor, C context) { + return null; + } + }; + assertThrows(IllegalStateException.class, () -> serializer.serialize(illegalExpr)); + } + + @Test + public void cannot_deserialize_illegal_expression_code() { + assertThrows(IllegalStateException.class, () -> serializer.deserialize("hello world")); + } + +} \ No newline at end of file diff --git a/gradle.properties b/gradle.properties index 5896cf54df..97601ba5d5 100644 --- 
a/gradle.properties +++ b/gradle.properties @@ -13,4 +13,4 @@ # permissions and limitations under the License. # -version=1.9.0 +version=1.10.0 diff --git a/integ-test/build.gradle b/integ-test/build.gradle index 472eda4b82..622497dcf2 100644 --- a/integ-test/build.gradle +++ b/integ-test/build.gradle @@ -39,7 +39,10 @@ dependencies { testRuntimeOnly('org.junit.jupiter:junit-jupiter-engine:5.6.2') // JDBC drivers for comparison test. Somehow Apache Derby throws security permission exception. - testCompile group: 'com.amazon.opendistroforelasticsearch.client', name: 'opendistro-sql-jdbc', version: '1.8.0.0' + testCompile fileTree('../sql-jdbc/build/libs') { + include '*.jar' + builtBy 'compileJdbc' + } testCompile group: 'com.h2database', name: 'h2', version: '1.4.200' testCompile group: 'org.xerial', name: 'sqlite-jdbc', version: '3.28.0' //testCompile group: 'org.apache.derby', name: 'derby', version: '10.15.1.3' @@ -111,6 +114,9 @@ task integTestWithNewEngine(type: RestIntegTestTask) { exclude 'com/amazon/opendistroforelasticsearch/sql/doctest/**/*IT.class' exclude 'com/amazon/opendistroforelasticsearch/sql/correctness/**' + + // Skip old semantic analyzer IT because analyzer in new engine has different behavior + exclude 'com/amazon/opendistroforelasticsearch/sql/legacy/QueryAnalysisIT.class' } } @@ -183,3 +189,9 @@ testClusters.comparisonTest { plugin file(tasks.getByPath(':plugin:bundlePlugin').archiveFile) } +task compileJdbc(type:Exec) { + workingDir '../sql-jdbc/' + + commandLine './gradlew', 'build' + commandLine './gradlew', 'shadowJar' +} diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/correctness/runner/ComparisonTest.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/correctness/runner/ComparisonTest.java index 333d864b06..53d040dd6b 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/correctness/runner/ComparisonTest.java +++ 
b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/correctness/runner/ComparisonTest.java @@ -156,9 +156,9 @@ private int nextId() { return testCaseId++; } - private void insertTestDataInBatch(DBConnection conn, String tableName, List testData) { - Iterator iterator = testData.iterator(); - String[] fieldNames = iterator.next(); // first row is header of column names + private void insertTestDataInBatch(DBConnection conn, String tableName, List testData) { + Iterator iterator = testData.iterator(); + String[] fieldNames = (String[]) iterator.next(); // first row is header of column names Iterators.partition(iterator, 100). forEachRemaining(batch -> conn.insert(tableName, fieldNames, batch)); } diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/correctness/runner/connection/DBConnection.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/correctness/runner/connection/DBConnection.java index 6779398be4..46eff96b6f 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/correctness/runner/connection/DBConnection.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/correctness/runner/connection/DBConnection.java @@ -48,7 +48,7 @@ public interface DBConnection { * @param columnNames column names * @param batch batch of rows */ - void insert(String tableName, String[] columnNames, List batch); + void insert(String tableName, String[] columnNames, List batch); /** * Fetch data from database. 
diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/correctness/runner/connection/ESConnection.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/correctness/runner/connection/ESConnection.java index 4ce4a7bde2..04dbecae0f 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/correctness/runner/connection/ESConnection.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/correctness/runner/connection/ESConnection.java @@ -66,7 +66,7 @@ public void drop(String tableName) { } @Override - public void insert(String tableName, String[] columnNames, List batch) { + public void insert(String tableName, String[] columnNames, List batch) { Request request = new Request("POST", "/" + tableName + "/_bulk?refresh=true"); request.setJsonEntity(buildBulkBody(columnNames, batch)); performRequest(request); @@ -96,9 +96,9 @@ private void performRequest(Request request) { } } - private String buildBulkBody(String[] columnNames, List batch) { + private String buildBulkBody(String[] columnNames, List batch) { StringBuilder body = new StringBuilder(); - for (String[] fieldValues : batch) { + for (Object[] fieldValues : batch) { JSONObject json = new JSONObject(); for (int i = 0; i < columnNames.length; i++) { json.put(columnNames[i], fieldValues[i]); diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/correctness/runner/connection/JDBCConnection.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/correctness/runner/connection/JDBCConnection.java index ed731330b1..2aff6b982c 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/correctness/runner/connection/JDBCConnection.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/correctness/runner/connection/JDBCConnection.java @@ -93,10 +93,10 @@ public void drop(String tableName) { } @Override - public void insert(String tableName, String[] 
columnNames, List batch) { + public void insert(String tableName, String[] columnNames, List batch) { try (Statement stmt = connection.createStatement()) { String names = String.join(",", columnNames); - for (String[] fieldValues : batch) { + for (Object[] fieldValues : batch) { stmt.addBatch(StringUtils.format( "INSERT INTO %s(%s) VALUES (%s)", tableName, names, getValueList(fieldValues))); } @@ -139,8 +139,9 @@ private String parseColumnNameAndTypesInSchemaJson(String schema) { collect(joining(",")); } - private String getValueList(String[] fieldValues) { + private String getValueList(Object[] fieldValues) { return Arrays.stream(fieldValues). + map(String::valueOf). map(val -> val.replace(SINGLE_QUOTE, DOUBLE_QUOTE)). map(val -> SINGLE_QUOTE + val + SINGLE_QUOTE). collect(joining(",")); diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/correctness/tests/TestDataSetTest.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/correctness/tests/TestDataSetTest.java index 6ca49fa34c..6077e18f47 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/correctness/tests/TestDataSetTest.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/correctness/tests/TestDataSetTest.java @@ -29,41 +29,81 @@ public class TestDataSetTest { @Test public void testDataSetWithSingleColumnData() { - TestDataSet dataSet = new TestDataSet("test", "mappings", "hello\nworld\n123"); + String mappings = + "{\n" + + " \"mappings\": {\n" + + " \"properties\": {\n" + + " \"field\": {\n" + + " \"type\": \"text\"\n" + + " }\n" + + " }\n" + + " }\n" + + "}"; + + TestDataSet dataSet = new TestDataSet("test", mappings, "field\nhello\nworld\n123"); assertEquals("test", dataSet.getTableName()); - assertEquals("mappings", dataSet.getSchema()); + assertEquals(mappings, dataSet.getSchema()); assertThat( dataSet.getDataRows(), contains( - new String[] {"hello"}, - new String[] {"world"}, - new String[] {"123"} + 
new Object[] {"field"}, + new Object[] {"hello"}, + new Object[] {"world"}, + new Object[] {"123"} ) ); } @Test public void testDataSetWithMultiColumnsData() { - TestDataSet dataSet = new TestDataSet("test", "mappings", "hello,world\n123"); + String mappings = + "{\n" + + " \"mappings\": {\n" + + " \"properties\": {\n" + + " \"field1\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"field2\": {\n" + + " \"type\": \"integer\"\n" + + " }\n" + + " }\n" + + " }\n" + + "}"; + + TestDataSet dataSet = new TestDataSet("test", mappings, + "field1,field2\nhello,123\nworld,456"); assertThat( dataSet.getDataRows(), contains( - new String[] {"hello", "world"}, - new String[] {"123"} + new Object[] {"field1", "field2"}, + new Object[] {"hello", 123}, + new Object[] {"world", 456} ) ); } @Test public void testDataSetWithEscapedComma() { - TestDataSet dataSet = new TestDataSet("test", "mappings", - "hello,\"hello,world,123\"\n123\n\"[abc,def,ghi]\",456"); + String mappings = + "{\n" + + " \"mappings\": {\n" + + " \"properties\": {\n" + + " \"field\": {\n" + + " \"type\": \"text\"\n" + + " }\n" + + " }\n" + + " }\n" + + "}"; + + TestDataSet dataSet = new TestDataSet("test", mappings, + "field\n\"hello,world,123\"\n123\n\"[abc,def,ghi]\""); assertThat( dataSet.getDataRows(), contains( - new String[] {"hello", "hello,world,123"}, - new String[] {"123"}, - new String[] {"[abc,def,ghi]", "456"} + new Object[] {"field"}, + new Object[] {"hello,world,123"}, + new Object[] {"123"}, + new Object[] {"[abc,def,ghi]"} ) ); } diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/correctness/testset/TestDataSet.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/correctness/testset/TestDataSet.java index ab5b77dc09..42036a2f2a 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/correctness/testset/TestDataSet.java +++ 
b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/correctness/testset/TestDataSet.java @@ -18,9 +18,11 @@ import static com.amazon.opendistroforelasticsearch.sql.legacy.utils.StringUtils.unquoteSingleField; import static java.util.stream.Collectors.joining; +import com.amazon.opendistroforelasticsearch.sql.legacy.utils.StringUtils; import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import org.json.JSONObject; /** * Test data set @@ -29,12 +31,12 @@ public class TestDataSet { private final String tableName; private final String schema; - private final List dataRows; + private final List dataRows; public TestDataSet(String tableName, String schemaFileContent, String dataFileContent) { this.tableName = tableName; this.schema = schemaFileContent; - this.dataRows = splitColumns(dataFileContent, ','); + this.dataRows = convertStringDataToActualType(splitColumns(dataFileContent, ',')); } public String getTableName() { @@ -45,7 +47,7 @@ public String getSchema() { return schema; } - public List getDataRows() { + public List getDataRows() { return dataRows; } @@ -82,6 +84,56 @@ private List splitColumns(String content, char separator) { return result; } + /** + * Convert column string values (read from CSV file) to objects of its real type + * based on the type information in index mapping file. 
+ */ + private List convertStringDataToActualType(List rows) { + JSONObject types = new JSONObject(schema); + String[] columnNames = rows.get(0); + + List result = new ArrayList<>(); + result.add(columnNames); + + rows.stream() + .skip(1) + .map(row -> convertStringArrayToObjectArray(types, columnNames, row)) + .forEach(result::add); + return result; + } + + private Object[] convertStringArrayToObjectArray(JSONObject types, String[] columnNames, String[] row) { + Object[] result = new Object[row.length]; + for (int i = 0; i < row.length; i++) { + String colName = columnNames[i]; + String colTypePath = "/mappings/properties/" + colName; + String colType = ((JSONObject) types.query(colTypePath)).getString("type"); + result[i] = convertStringToObject(colType, row[i]); + } + return result; + } + + private Object convertStringToObject(String type, String str) { + switch (type.toLowerCase()) { + case "text": + case "keyword": + case "date": + return str; + case "integer": + return Integer.valueOf(str); + case "float": + case "half_float": + return Float.valueOf(str); + case "double": + return Double.valueOf(str); + case "boolean": + return Boolean.valueOf(str); + default: + throw new IllegalStateException(StringUtils.format( + "Data type %s is not supported yet for value: %s", type, str)); + } + } + @Override public String toString() { int total = dataRows.size(); diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/PrettyFormatResponseIT.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/PrettyFormatResponseIT.java index cb5be363dc..273f366171 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/PrettyFormatResponseIT.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/PrettyFormatResponseIT.java @@ -433,6 +433,7 @@ public void aggregationFunctionInHaving() throws IOException { // public void nestedAggregationFunctionInSelect() { // String 
query = String.format(Locale.ROOT, "SELECT SUM(SQRT(age)) FROM age GROUP BY age", TEST_INDEX_ACCOUNT); // } + @Ignore("New engine returns string type") @Test public void fieldsWithAlias() throws IOException { JSONObject response = executeQuery( diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/QueryAnalysisIT.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/QueryAnalysisIT.java index 4a5e6d4ccc..edfee9c82a 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/QueryAnalysisIT.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/QueryAnalysisIT.java @@ -133,7 +133,7 @@ public void nonExistingIndexAliasShouldThrowSemanticException() { public void indexJoinNonNestedFieldShouldThrowSemanticException() { queryShouldThrowSemanticException( "SELECT * FROM elasticsearch-sql_test_index_bank b1, b1.firstname f1", - "Operator [JOIN] cannot work with [INDEX, TEXT]." + "Operator [JOIN] cannot work with [INDEX, KEYWORD]." 
); } diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/QueryIT.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/QueryIT.java index bdf5e09435..1f5c644f98 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/QueryIT.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/QueryIT.java @@ -1648,6 +1648,7 @@ public void fieldCollapsingTest() throws IOException { Assert.assertEquals(21, hits.length()); } + @Ignore("New engine doesn't have 'alias' field in schema in response") @Test public void backticksQuotedIndexNameTest() throws Exception { TestUtils.createIndexByRestClient(client(), "bank_unquote", null); @@ -1754,6 +1755,7 @@ public void functionInCaseFieldShouldThrowESExceptionDueToIllegalScriptInJdbc() "For more details, please send request for Json format"); } + @Ignore("This is already supported in our new query engine") @Test public void functionCallWithIllegalScriptShouldThrowESExceptionInJdbc() { String response = executeQuery("select log(balance + 2) from " + TEST_INDEX_BANK, diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/RestIntegTestCase.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/RestIntegTestCase.java index 961485bd85..b75b618dc3 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/RestIntegTestCase.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/RestIntegTestCase.java @@ -80,7 +80,6 @@ public void setUpIndices() throws Exception { initClient(); } - increaseScriptMaxCompilationsRate(); init(); } @@ -154,14 +153,6 @@ protected synchronized void loadIndex(Index index) throws IOException { } } - /** - * Increase script.max_compilations_rate to large enough, which is only 75/5min by default. - * This issue is due to our painless script not using params passed to compiled script. 
- */ - private void increaseScriptMaxCompilationsRate() throws IOException { - updateClusterSetting("script.max_compilations_rate", "10000/1m", false); - } - /** * Provide for each test to load test index, data and other setup work */ diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/SQLIntegTestCase.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/SQLIntegTestCase.java index aac3497165..1d157d7774 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/SQLIntegTestCase.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/SQLIntegTestCase.java @@ -81,7 +81,6 @@ public void setUpIndices() throws Exception { initClient(); } - increaseScriptMaxCompilationsRate(); enableNewQueryEngine(); init(); } @@ -141,15 +140,6 @@ public static void cleanUpIndices() throws IOException { wipeAllClusterSettings(); } - /** - * Increase script.max_compilations_rate to large enough, which is only 75/5min by default. - * This issue is due to our painless script not using params passed to compiled script. 
- */ - private void increaseScriptMaxCompilationsRate() throws IOException { - updateClusterSettings( - new ClusterSetting("transient", "script.max_compilations_rate", "10000/1m")); - } - private void enableNewQueryEngine() throws IOException { boolean isEnabled = Boolean.parseBoolean(System.getProperty("enableNewEngine", "false")); if (isEnabled) { diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/DedupCommandIT.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/DedupCommandIT.java index 39e6d128e7..9a1d031046 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/DedupCommandIT.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/DedupCommandIT.java @@ -66,11 +66,11 @@ public void testKeepEmptyDedup() throws IOException { verifyDataRows( result, rows("Amber JOHnny", 39225), - rows("Hattie"), + rows("Hattie", null), rows("Nanette", 32838), rows("Dale", 4180), - rows("Elinor"), - rows("Virginia"), + rows("Elinor", null), + rows("Virginia", null), rows("Dillard", 48086)); } } diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/OperatorIT.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/OperatorIT.java index 5e632ce3a0..d8a74e1025 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/OperatorIT.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/OperatorIT.java @@ -22,7 +22,6 @@ import java.io.IOException; import org.elasticsearch.client.ResponseException; -import org.hamcrest.Matchers; import org.json.JSONObject; import org.junit.jupiter.api.Test; @@ -118,7 +117,8 @@ public void testArithmeticOperatorWithMissingValue() throws IOException { String.format( "source=%s | eval f = balance * 1 | fields f", TEST_INDEX_BANK_WITH_NULL_VALUES)); verifyDataRows( - result, rows(39225), rows(32838), rows(4180), rows(48086), rows(), rows(), 
rows()); + result, rows(39225), rows(32838), rows(4180), rows(48086), rows(JSONObject.NULL), + rows(JSONObject.NULL), rows(JSONObject.NULL)); } @Test @@ -273,19 +273,21 @@ public void testLikeOperator() throws IOException { } @Test - public void testBinaryPredicateWithNullValue() { - queryExecutionShouldThrowExceptionDueToNullOrMissingValue( - String.format("source=%s | where age < 32", TEST_INDEX_BANK_WITH_NULL_VALUES), - "invalid to call type operation on null value" - ); + public void testBinaryPredicateWithNullValue() throws IOException { + JSONObject result = + executeQuery( + String.format("source=%s | where age >= 36 | fields age", + TEST_INDEX_BANK_WITH_NULL_VALUES)); + verifyDataRows(result, rows(36), rows(36)); } @Test - public void testBinaryPredicateWithMissingValue() { - queryExecutionShouldThrowExceptionDueToNullOrMissingValue( - String.format("source=%s | where balance > 3000", TEST_INDEX_BANK_WITH_NULL_VALUES), - "invalid to call type operation on missing value" - ); + public void testBinaryPredicateWithMissingValue() throws IOException { + JSONObject result = + executeQuery( + String.format("source=%s | where balance > 40000 | fields balance", + TEST_INDEX_BANK_WITH_NULL_VALUES)); + verifyDataRows(result, rows(48086)); } private void queryExecutionShouldThrowExceptionDueToNullOrMissingValue( diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/PPLIntegTestCase.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/PPLIntegTestCase.java index 87c7cb9bd7..40c2c604d0 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/PPLIntegTestCase.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/PPLIntegTestCase.java @@ -15,7 +15,12 @@ package com.amazon.opendistroforelasticsearch.sql.ppl; +import static com.amazon.opendistroforelasticsearch.sql.legacy.TestUtils.getResponseBody; +import static 
com.amazon.opendistroforelasticsearch.sql.plugin.rest.RestPPLQueryAction.QUERY_API_ENDPOINT; + import com.amazon.opendistroforelasticsearch.sql.legacy.SQLIntegTestCase; +import java.io.IOException; +import java.util.Locale; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; @@ -23,14 +28,8 @@ import org.json.JSONObject; import org.junit.Assert; -import java.io.IOException; -import java.util.Locale; - -import static com.amazon.opendistroforelasticsearch.sql.plugin.rest.RestPPLQueryAction.QUERY_API_ENDPOINT; -import static com.amazon.opendistroforelasticsearch.sql.legacy.TestUtils.getResponseBody; - /** - * ES Rest integration test base for PPL testing + * ES Rest integration test base for PPL testing. */ public abstract class PPLIntegTestCase extends SQLIntegTestCase { @@ -54,6 +53,49 @@ protected Request buildRequest(String query) { return request; } + + protected void setQuerySizeLimit(Integer limit) throws IOException { + updateClusterSettings( + new ClusterSetting("persistent", "opendistro.query.size_limit", + limit.toString())); + } + + protected static JSONObject updateClusterSettings(ClusterSetting setting) throws IOException { + Request request = new Request("PUT", "/_cluster/settings"); + String persistentSetting = String.format(Locale.ROOT, + "{\"%s\": {\"%s\": %s}}", setting.type, setting.name, setting.value); + request.setJsonEntity(persistentSetting); + RequestOptions.Builder restOptionsBuilder = RequestOptions.DEFAULT.toBuilder(); + restOptionsBuilder.addHeader("Content-Type", "application/json"); + request.setOptions(restOptionsBuilder); + return new JSONObject(executeRequest(request)); + } + + protected static class ClusterSetting { + private final String type; + private final String name; + private final String value; + + public ClusterSetting(String type, String name, String value) { + this.type = type; + this.name = name; + this.value = (value == null) ? 
"null" : ("\"" + value + "\""); + } + + SQLIntegTestCase.ClusterSetting nullify() { + return new SQLIntegTestCase.ClusterSetting(type, name, null); + } + + @Override + public String toString() { + return "ClusterSetting{" + + "type='" + type + '\'' + + ", path='" + name + '\'' + + ", value='" + value + '\'' + + '}'; + } + } + private JSONObject jsonify(String text) { try { return new JSONObject(text); diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/PPLPluginIT.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/PPLPluginIT.java index 86219cc5cd..b792c31bfb 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/PPLPluginIT.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/PPLPluginIT.java @@ -15,8 +15,13 @@ package com.amazon.opendistroforelasticsearch.sql.ppl; +import static com.amazon.opendistroforelasticsearch.sql.legacy.TestsConstants.TEST_INDEX_BANK; +import static com.amazon.opendistroforelasticsearch.sql.util.MatcherUtils.rows; +import static com.amazon.opendistroforelasticsearch.sql.util.MatcherUtils.verifyDataRows; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasProperty; +import com.amazon.opendistroforelasticsearch.sql.util.TestUtils; import java.io.IOException; import java.util.Locale; import org.elasticsearch.client.Request; @@ -24,16 +29,20 @@ import org.elasticsearch.client.ResponseException; import org.hamcrest.Description; import org.hamcrest.TypeSafeMatcher; +import org.json.JSONObject; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; public class PPLPluginIT extends PPLIntegTestCase { - @Rule public ExpectedException exceptionRule = ExpectedException.none(); + @Rule + public ExpectedException exceptionRule = ExpectedException.none(); + + private static final String PERSISTENT = "persistent"; @Override protected void init() throws Exception { - 
wipeAllClusterSettings(); + loadIndex(Index.BANK); } @Test @@ -59,11 +68,41 @@ public void testQueryEndpointShouldOK() throws IOException { @Test public void testQueryEndpointShouldFail() throws IOException { exceptionRule.expect(ResponseException.class); - exceptionRule.expect(hasProperty("response", statusCode(500))); + exceptionRule.expect(hasProperty("response", statusCode(400))); client().performRequest(makePPLRequest("search invalid")); } + @Test + public void sqlEnableSettingsTest() throws IOException { + String query = + String.format("search source=%s firstname='Hattie' | fields firstname", TEST_INDEX_BANK); + // enable by default + JSONObject result = executeQuery(query); + verifyDataRows(result, rows("Hattie")); + + // disable + updateClusterSettings(new ClusterSetting(PERSISTENT, "opendistro.ppl.enabled", "false")); + Response response = null; + try { + result = executeQuery(query); + } catch (ResponseException ex) { + response = ex.getResponse(); + } + + result = new JSONObject(TestUtils.getResponseBody(response)); + assertThat(result.getInt("status"), equalTo(400)); + JSONObject error = result.getJSONObject("error"); + assertThat(error.getString("reason"), equalTo("Invalid Query")); + assertThat(error.getString("details"), equalTo( + "Either opendistro.ppl.enabled or rest.action.multi.allow_explicit_index setting is " + + "false")); + assertThat(error.getString("type"), equalTo("IllegalAccessException")); + + // reset the setting + updateClusterSettings(new ClusterSetting(PERSISTENT, "opendistro.ppl.enabled", null)); + } + protected Request makePPLRequest(String query) { Request post = new Request("POST", "/_opendistro/_ppl"); post.setJsonEntity(String.format(Locale.ROOT, "{\n" + " \"query\": \"%s\"\n" + "}", query)); diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/ResourceMonitorIT.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/ResourceMonitorIT.java index 0514bb4320..7ae1b4d850 
100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/ResourceMonitorIT.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/ResourceMonitorIT.java @@ -41,9 +41,9 @@ public void queryExceedResourceLimitShouldFail() throws IOException { ResponseException exception = expectThrows(ResponseException.class, () -> executeQuery(query)); - assertEquals(500, exception.getResponse().getStatusLine().getStatusCode()); - assertThat(exception.getMessage(), Matchers.containsString("resource is not enough to run the" + - " query, quit.")); + assertEquals(503, exception.getResponse().getStatusLine().getStatusCode()); + assertThat(exception.getMessage(), Matchers.containsString("resource is not enough to run the" + + " query, quit.")); // update opendistro.ppl.query.memory_limit to default value 85% updateClusterSettings( diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/SettingsIT.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/SettingsIT.java new file mode 100644 index 0000000000..e372e135d9 --- /dev/null +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/SettingsIT.java @@ -0,0 +1,53 @@ +/* + * + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ * + */ + +package com.amazon.opendistroforelasticsearch.sql.ppl; + +import static com.amazon.opendistroforelasticsearch.sql.legacy.TestsConstants.TEST_INDEX_BANK; +import static com.amazon.opendistroforelasticsearch.sql.util.MatcherUtils.rows; +import static com.amazon.opendistroforelasticsearch.sql.util.MatcherUtils.verifyDataRows; + +import java.io.IOException; +import org.json.JSONObject; +import org.junit.jupiter.api.Test; + +public class SettingsIT extends PPLIntegTestCase { + + @Override + public void init() throws IOException { + loadIndex(Index.BANK); + loadIndex(Index.DOG); + } + + @Test + public void testQuerySizeLimit() throws IOException { + // Default setting, fetch 200 rows from source + JSONObject result = + executeQuery( + String.format( + "search source=%s age>35 | fields firstname", TEST_INDEX_BANK)); + verifyDataRows(result, rows("Hattie"), rows("Elinor"), rows("Virginia")); + + // Fetch 1 rows from source + setQuerySizeLimit(1); + result = + executeQuery( + String.format( + "search source=%s age>35 | fields firstname", TEST_INDEX_BANK)); + verifyDataRows(result, rows("Hattie")); + } +} diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/StandaloneIT.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/StandaloneIT.java index b017da6f63..a3e175fc4f 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/StandaloneIT.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/StandaloneIT.java @@ -19,6 +19,7 @@ import static com.amazon.opendistroforelasticsearch.sql.protocol.response.format.JsonResponseFormatter.Style.PRETTY; import com.amazon.opendistroforelasticsearch.sql.common.response.ResponseListener; +import com.amazon.opendistroforelasticsearch.sql.common.setting.Settings; import com.amazon.opendistroforelasticsearch.sql.elasticsearch.client.ElasticsearchClient; import 
com.amazon.opendistroforelasticsearch.sql.elasticsearch.client.ElasticsearchRestClient; import com.amazon.opendistroforelasticsearch.sql.elasticsearch.executor.ElasticsearchExecutionEngine; @@ -32,7 +33,9 @@ import com.amazon.opendistroforelasticsearch.sql.protocol.response.QueryResult; import com.amazon.opendistroforelasticsearch.sql.protocol.response.format.SimpleJsonResponseFormatter; import com.amazon.opendistroforelasticsearch.sql.storage.StorageEngine; +import com.google.common.collect.ImmutableMap; import java.io.IOException; +import java.util.Map; import java.util.concurrent.atomic.AtomicReference; import org.elasticsearch.client.Node; import org.elasticsearch.client.Request; @@ -61,7 +64,7 @@ public void init() { ElasticsearchClient client = new ElasticsearchRestClient(restClient); AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(); context.registerBean(StorageEngine.class, - () -> new ElasticsearchStorageEngine(client)); + () -> new ElasticsearchStorageEngine(client, defaultSettings())); context.registerBean(ExecutionEngine.class, () -> new ElasticsearchExecutionEngine(client, new ElasticsearchExecutionProtector(new AlwaysHealthyMonitor()))); context.register(PPLServiceConfig.class); @@ -110,7 +113,7 @@ private String executeByStandaloneQueryEngine(String query) { @Override public void onResponse(QueryResponse response) { - QueryResult result = new QueryResult(response.getResults()); + QueryResult result = new QueryResult(response.getSchema(), response.getResults()); String json = new SimpleJsonResponseFormatter(PRETTY).format(result); actual.set(json); } @@ -122,4 +125,17 @@ public void onFailure(Exception e) { }); return actual.get(); } + + private Settings defaultSettings() { + return new Settings() { + private final Map defaultSettings = new ImmutableMap.Builder() + .put(Key.QUERY_SIZE_LIMIT, 200) + .build(); + + @Override + public T getSettingValue(Key key) { + return (T) defaultSettings.get(key); + } + }; + } } 
diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/StatsCommandIT.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/StatsCommandIT.java index 819f752e22..f9474ab11f 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/StatsCommandIT.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/StatsCommandIT.java @@ -17,6 +17,7 @@ import static com.amazon.opendistroforelasticsearch.sql.legacy.TestsConstants.TEST_INDEX_ACCOUNT; +import com.amazon.opendistroforelasticsearch.sql.legacy.SQLIntegTestCase; import java.io.IOException; import org.junit.jupiter.api.Test; @@ -25,6 +26,7 @@ public class StatsCommandIT extends PPLIntegTestCase { @Override public void init() throws IOException { loadIndex(Index.ACCOUNT); + setQuerySizeLimit(2000); } @Test @@ -98,4 +100,32 @@ public void testStatsNested() throws IOException { + "}\n", result); } + + @Test + public void testStatsWhere() throws IOException { + String result = executeQueryToString(String.format( + "source=%s | stats sum(balance) as a by gender | where a > 13000000", TEST_INDEX_ACCOUNT)); + assertEquals( + "{\n" + + " \"schema\": [\n" + + " {\n" + + " \"name\": \"a\",\n" + + " \"type\": \"long\"\n" + + " },\n" + + " {\n" + + " \"name\": \"gender\",\n" + + " \"type\": \"string\"\n" + + " }\n" + + " ],\n" + + " \"total\": 1,\n" + + " \"datarows\": [[\n" + + " 13082527,\n" + + " \"M\"\n" + + " ]],\n" + + " \"size\": 1\n" + + "}\n", + result + ); + } + } diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/WhereCommandIT.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/WhereCommandIT.java index cc74018d1c..0260e5f97e 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/WhereCommandIT.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/WhereCommandIT.java @@ -15,15 +15,14 @@ package 
com.amazon.opendistroforelasticsearch.sql.ppl; -import org.json.JSONObject; -import org.junit.jupiter.api.Test; - -import java.io.IOException; - import static com.amazon.opendistroforelasticsearch.sql.legacy.TestsConstants.TEST_INDEX_ACCOUNT; import static com.amazon.opendistroforelasticsearch.sql.util.MatcherUtils.rows; import static com.amazon.opendistroforelasticsearch.sql.util.MatcherUtils.verifyDataRows; +import java.io.IOException; +import org.json.JSONObject; +import org.junit.jupiter.api.Test; + public class WhereCommandIT extends PPLIntegTestCase { @Override @@ -53,6 +52,22 @@ public void testWhereWithMultiLogicalExpr() throws IOException { verifyDataRows(result, rows("Amber", "Duke", 32)); } + @Test + public void testMultipleWhereCommands() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s " + + "| where firstname='Amber' " + + "| fields lastname, age" + + "| where lastname='Duke' " + + "| fields age " + + "| where age=32 " + + "| fields age", + TEST_INDEX_ACCOUNT)); + verifyDataRows(result, rows(32)); + } + @Test public void testWhereEquivalentSortCommand() throws IOException { assertEquals( diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/IdentifierIT.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/IdentifierIT.java index 21d9614e25..3b5dc26bc5 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/IdentifierIT.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/IdentifierIT.java @@ -50,6 +50,34 @@ public void testQuotedIndexNames() throws IOException { queryAndAssertTheDoc("SELECT * FROM \"logs.2020.01\""); } + @Test + public void testSpecialFieldName() throws IOException { + new Index("test") + .addDoc("{\"@timestamp\": 10, \"dimensions:major_version\": 30}"); + + assertEquals( + "{\n" + + " \"schema\": [\n" + + " {\n" + + " \"name\": \"@timestamp\",\n" + + " \"type\": \"long\"\n" + + " 
},\n" + + " {\n" + + " \"name\": \"dimensions:major_version\",\n" + + " \"type\": \"long\"\n" + + " }\n" + + " ],\n" + + " \"total\": 1,\n" + + " \"datarows\": [[\n" + + " 10,\n" + + " 30\n" + + " ]],\n" + + " \"size\": 1\n" + + "}\n", + executeQuery("SELECT @timestamp, `dimensions:major_version` FROM test", "jdbc") + ); + } + private void createIndexWithOneDoc(String... indexNames) throws IOException { for (String indexName : indexNames) { new Index(indexName).addDoc("{\"age\": 30}"); diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/MathematicalFunctionIT.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/MathematicalFunctionIT.java index 5bfadb3c6f..ffc33c1324 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/MathematicalFunctionIT.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/MathematicalFunctionIT.java @@ -54,7 +54,7 @@ public void testConv() throws IOException { @Test public void testCrc32() throws IOException { JSONObject result = executeQuery("select crc32('MySQL')"); - verifySchema(result, schema("crc32(\"MySQL\")", null, "long")); + verifySchema(result, schema("crc32('MySQL')", null, "long")); verifyDataRows(result, rows(3259397556L)); } diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/SQLCorrectnessIT.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/SQLCorrectnessIT.java index 58c10073a1..e069141d4b 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/SQLCorrectnessIT.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/SQLCorrectnessIT.java @@ -32,7 +32,7 @@ public class SQLCorrectnessIT extends CorrectnessTestBase { private static final String ROOT_DIR = "correctness/"; private static final String[] EXPR_TEST_DIR = { "expressions" }; - private static final String[] QUERY_TEST_DIR = { "queries"/*, 
"bugfixes"*/ }; //TODO: skip bugfixes folder for now since it fails + private static final String[] QUERY_TEST_DIR = { "queries", "bugfixes" }; @Override protected void init() throws Exception { @@ -71,6 +71,4 @@ private void verifyQueries(Path file, Function converter) { } } - - } diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/util/MatcherUtils.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/util/MatcherUtils.java index 50d803e020..ff3a2a6327 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/util/MatcherUtils.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/util/MatcherUtils.java @@ -244,6 +244,8 @@ protected boolean matchesSafely(JSONArray array) { isEqual = ((JSONObject) expected).similar(array.get(i)); } else if (expected instanceof JSONArray) { isEqual = ((JSONArray) expected).similar(array.get(i)); + } else if (null == expected) { + isEqual = JSONObject.NULL == array.get(i); } else { isEqual = expected.equals(array.get(i)); } diff --git a/integ-test/src/test/resources/correctness/bugfixes/234.txt b/integ-test/src/test/resources/correctness/bugfixes/234.txt new file mode 100644 index 0000000000..3056acb199 --- /dev/null +++ b/integ-test/src/test/resources/correctness/bugfixes/234.txt @@ -0,0 +1,2 @@ +SELECT FlightNum FROM kibana_sample_data_flights where (AvgTicketPrice + 100) <= 1000 +SELECT FlightNum FROM kibana_sample_data_flights where ROUND(FlightTimeMin) > ABS(FlightDelayMin) \ No newline at end of file diff --git a/integ-test/src/test/resources/correctness/bugfixes/237.txt b/integ-test/src/test/resources/correctness/bugfixes/237.txt new file mode 100644 index 0000000000..5ffd41287c --- /dev/null +++ b/integ-test/src/test/resources/correctness/bugfixes/237.txt @@ -0,0 +1,3 @@ +SELECT ((Origin = 'Munich Airport') AND (Dest = 'Venice Marco Polo Airport')) AS Calculation_462181953506873347 FROM kibana_sample_data_flights +SELECT ((Origin = 
'Munich Airport') OR (Origin = 'Itami Airport')) AS Calculation_462181953506873347 FROM kibana_sample_data_flights +SELECT NOT (Origin = 'Munich Airport') AS Calculation_462181953506873347 FROM kibana_sample_data_flights \ No newline at end of file diff --git a/integ-test/src/test/resources/correctness/bugfixes/368.txt b/integ-test/src/test/resources/correctness/bugfixes/368.txt new file mode 100644 index 0000000000..a81edeec97 --- /dev/null +++ b/integ-test/src/test/resources/correctness/bugfixes/368.txt @@ -0,0 +1,2 @@ +SELECT Origin FROM kibana_sample_data_flights WHERE Origin LIKE 'London Hea%' +SELECT Origin FROM kibana_sample_data_flights WHERE Origin LIKE '%International%' \ No newline at end of file diff --git a/integ-test/src/test/resources/correctness/bugfixes/521.txt b/integ-test/src/test/resources/correctness/bugfixes/521.txt index 72a27b01d7..f0ef35d198 100644 --- a/integ-test/src/test/resources/correctness/bugfixes/521.txt +++ b/integ-test/src/test/resources/correctness/bugfixes/521.txt @@ -1 +1 @@ -SELECT timestamp, COUNT(*) FROM kibana_sample_data_flights GROUP BY timestamp \ No newline at end of file +SELECT timestamp FROM kibana_sample_data_flights GROUP BY timestamp \ No newline at end of file diff --git a/integ-test/src/test/resources/correctness/bugfixes/690.txt b/integ-test/src/test/resources/correctness/bugfixes/690.txt new file mode 100644 index 0000000000..940087e79e --- /dev/null +++ b/integ-test/src/test/resources/correctness/bugfixes/690.txt @@ -0,0 +1,4 @@ +SELECT FlightNum, Origin FROM kibana_sample_data_flights WHERE NULL IS NULL +SELECT FlightNum, Origin FROM kibana_sample_data_flights WHERE NULL IS NOT NULL +SELECT FlightNum, Origin FROM kibana_sample_data_flights WHERE NULL IS NULL AND NULL IS NULL +SELECT FlightNum, Origin FROM kibana_sample_data_flights WHERE NULL IS NULL OR NULL IS NULL diff --git a/integ-test/src/test/resources/correctness/expressions/literals.txt 
b/integ-test/src/test/resources/correctness/expressions/literals.txt index 74710d917b..c56aa9ef05 100644 --- a/integ-test/src/test/resources/correctness/expressions/literals.txt +++ b/integ-test/src/test/resources/correctness/expressions/literals.txt @@ -2,3 +2,4 @@ true -4.567 -123,false +'ODFE' diff --git a/integ-test/src/test/resources/correctness/expressions/predicates.txt b/integ-test/src/test/resources/correctness/expressions/predicates.txt index e69de29bb2..9bc00c0be9 100644 --- a/integ-test/src/test/resources/correctness/expressions/predicates.txt +++ b/integ-test/src/test/resources/correctness/expressions/predicates.txt @@ -0,0 +1,7 @@ +true AND true AS bool +false AND true AS bool +false OR false AS bool +true or false AS bool +NOT true AS bool +NOT false AS bool +NOT (true AND false) AS bool \ No newline at end of file diff --git a/integ-test/src/test/resources/correctness/kibana_sample_data_flights.json b/integ-test/src/test/resources/correctness/kibana_sample_data_flights.json index d180b6283c..4ad67e88a6 100644 --- a/integ-test/src/test/resources/correctness/kibana_sample_data_flights.json +++ b/integ-test/src/test/resources/correctness/kibana_sample_data_flights.json @@ -11,7 +11,13 @@ "type": "keyword" }, "Dest": { - "type": "keyword" + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } }, "DestAirportID": { "type": "keyword" @@ -53,7 +59,13 @@ "type": "float" }, "Origin": { - "type": "keyword" + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } }, "OriginAirportID": { "type": "keyword" diff --git a/integ-test/src/test/resources/correctness/queries/select.txt b/integ-test/src/test/resources/correctness/queries/select.txt index adb7f40782..50372e4ff2 100644 --- a/integ-test/src/test/resources/correctness/queries/select.txt +++ b/integ-test/src/test/resources/correctness/queries/select.txt @@ -1,4 +1,15 @@ SELECT 1 + 2 FROM kibana_sample_data_flights -SELECT 
abs(-10) FROM kibana_sample_data_flights -SELECT DistanceMiles FROM kibana_sample_data_flights +SELECT Cancelled, AvgTicketPrice, FlightDelayMin, Carrier, timestamp FROM kibana_sample_data_flights +SELECT `Cancelled`, `AvgTicketPrice` FROM kibana_sample_data_flights +SELECT ABS(DistanceMiles), (FlightDelayMin * 2) - 3 FROM kibana_sample_data_flights +SELECT abs(DistanceMiles), Abs(FlightDelayMin) FROM kibana_sample_data_flights +SELECT Cancelled AS Cancel, AvgTicketPrice AS ATP FROM kibana_sample_data_flights +SELECT Cancelled AS `Cancel`, AvgTicketPrice AS "ATP" FROM kibana_sample_data_flights SELECT AvgTicketPrice, Carrier FROM kibana_sample_data_flights WHERE AvgTicketPrice <= 500 +SELECT AvgTicketPrice, Carrier FROM kibana_sample_data_flights WHERE NOT AvgTicketPrice <= 500 +SELECT AvgTicketPrice, Carrier FROM kibana_sample_data_flights WHERE AvgTicketPrice <= 500 AND FlightDelayMin = 0 +SELECT AvgTicketPrice, Carrier FROM kibana_sample_data_flights WHERE AvgTicketPrice <= 500 OR FlightDelayMin = 0 +SELECT AvgTicketPrice, Carrier FROM kibana_sample_data_flights WHERE AvgTicketPrice + 100 <= 500 +SELECT AvgTicketPrice, Carrier FROM kibana_sample_data_flights WHERE ABS(AvgTicketPrice * -2) > 1000 +SELECT AvgTicketPrice, Carrier FROM kibana_sample_data_flights WHERE Carrier LIKE 'JetBeat_' +SELECT AvgTicketPrice, Carrier FROM kibana_sample_data_flights WHERE Carrier LIKE '%Air%' diff --git a/integ-test/src/test/resources/indexDefinitions/bank_index_mapping.json b/integ-test/src/test/resources/indexDefinitions/bank_index_mapping.json index cd99b83800..a0609a04d2 100644 --- a/integ-test/src/test/resources/indexDefinitions/bank_index_mapping.json +++ b/integ-test/src/test/resources/indexDefinitions/bank_index_mapping.json @@ -26,7 +26,7 @@ "type": "text" }, "firstname": { - "type": "text" + "type": "keyword" }, "gender": { "type": "text", diff --git a/legacy/src/main/antlr/OpenDistroSqlLexer.g4 b/legacy/src/main/antlr/OpenDistroSqlLexer.g4 index 
af7e5eb0d0..6fdb8980a9 100644 --- a/legacy/src/main/antlr/OpenDistroSqlLexer.g4 +++ b/legacy/src/main/antlr/OpenDistroSqlLexer.g4 @@ -290,7 +290,7 @@ COLON_SYMB: ':'; // Literal Primitives START_NATIONAL_STRING_LITERAL: 'N' SQUOTA_STRING; -STRING_LITERAL: DQUOTA_STRING | SQUOTA_STRING | BQUOTA_STRING; +STRING_LITERAL: SQUOTA_STRING; DECIMAL_LITERAL: DEC_DIGIT+; HEXADECIMAL_LITERAL: 'X' '\'' (HEX_DIGIT HEX_DIGIT)+ '\'' | '0X' HEX_DIGIT+; @@ -317,6 +317,8 @@ DOT_ID: '.' ID_LITERAL; ID: ID_LITERAL; // DOUBLE_QUOTE_ID: '"' ~'"'+ '"'; REVERSE_QUOTE_ID: '`' ~'`'+ '`'; +DOUBLE_QUOTE_ID: DQUOTA_STRING; +BACKTICK_QUOTE_ID: BQUOTA_STRING; STRING_USER_NAME: ( SQUOTA_STRING | DQUOTA_STRING | BQUOTA_STRING | ID_LITERAL diff --git a/legacy/src/main/antlr/OpenDistroSqlParser.g4 b/legacy/src/main/antlr/OpenDistroSqlParser.g4 index 86c5c89d20..2fa33864e7 100644 --- a/legacy/src/main/antlr/OpenDistroSqlParser.g4 +++ b/legacy/src/main/antlr/OpenDistroSqlParser.g4 @@ -231,7 +231,8 @@ uid simpleId : ID | DOT_ID // note: the current scope by adding DOT_ID to simpleId is large, move DOT_ID upwards tablename if needed - | STRING_LITERAL + | DOUBLE_QUOTE_ID + | BACKTICK_QUOTE_ID | keywordsCanBeId | functionNameBase ; diff --git a/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/executor/join/ElasticJoinExecutor.java b/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/executor/join/ElasticJoinExecutor.java index c4826e0c14..fad5ba194f 100644 --- a/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/executor/join/ElasticJoinExecutor.java +++ b/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/executor/join/ElasticJoinExecutor.java @@ -35,6 +35,7 @@ import org.elasticsearch.common.text.Text; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.rest.BytesRestResponse; import 
org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestStatus; @@ -219,7 +220,8 @@ protected SearchHit createUnmachedResult(List secondTableReturnedFields, Map documentFields = new HashMap<>(); Map metaFields = new HashMap<>(); - SearchHit.splitFieldsByMetadata(hit.getFields(), documentFields, metaFields); + hit.getFields().forEach((fieldName, docField) -> + (MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) ? metaFields : documentFields).put(fieldName, docField)); SearchHit searchHit = new SearchHit(docId, unmatchedId, unamatchedType, documentFields, metaFields); searchHit.sourceRef(hit.getSourceRef()); diff --git a/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/executor/join/HashJoinElasticExecutor.java b/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/executor/join/HashJoinElasticExecutor.java index 471ec86ef9..c1656c5898 100644 --- a/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/executor/join/HashJoinElasticExecutor.java +++ b/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/executor/join/HashJoinElasticExecutor.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.SearchHit; @@ -185,7 +186,8 @@ private List createCombinedResults(TableInJoinRequestBuilder secondTa Map documentFields = new HashMap<>(); Map metaFields = new HashMap<>(); - SearchHit.splitFieldsByMetadata(matchingHit.getFields(), documentFields, metaFields); + matchingHit.getFields().forEach((fieldName, docField) -> + (MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) ? 
metaFields : documentFields).put(fieldName, docField)); SearchHit searchHit = new SearchHit(matchingHit.docId(), combinedId, new Text(matchingHit.getType() + "|" + secondTableHit.getType()), documentFields, metaFields); @@ -245,7 +247,8 @@ private void createKeyToResultsAndFillOptimizationStructure( //int docid , id Map documentFields = new HashMap<>(); Map metaFields = new HashMap<>(); - SearchHit.splitFieldsByMetadata(hit.getFields(), documentFields, metaFields); + hit.getFields().forEach((fieldName, docField) -> + (MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) ? metaFields : documentFields).put(fieldName, docField)); SearchHit searchHit = new SearchHit(resultIds, hit.getId(), new Text(hit.getType()), documentFields , metaFields); searchHit.sourceRef(hit.getSourceRef()); diff --git a/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/executor/join/NestedLoopsElasticExecutor.java b/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/executor/join/NestedLoopsElasticExecutor.java index 545d01db79..e87ffec9fd 100644 --- a/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/executor/join/NestedLoopsElasticExecutor.java +++ b/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/executor/join/NestedLoopsElasticExecutor.java @@ -36,6 +36,7 @@ import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; @@ -176,7 +177,8 @@ private SearchHit getMergedHit(int currentCombinedResults, String t1Alias, Strin nestedLoopsRequest.getSecondTable().getOriginalSelect().isSelectAll()); Map documentFields = new HashMap<>(); Map metaFields = new HashMap<>(); - SearchHit.splitFieldsByMetadata(hitFromFirstTable.getFields(), documentFields, metaFields); + 
matchedHit.getFields().forEach((fieldName, docField) -> + (MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) ? metaFields : documentFields).put(fieldName, docField)); SearchHit searchHit = new SearchHit(currentCombinedResults, hitFromFirstTable.getId() + "|" + matchedHit.getId(), new Text(hitFromFirstTable.getType() + "|" + matchedHit.getType()), documentFields, metaFields); diff --git a/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/executor/multi/MinusExecutor.java b/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/executor/multi/MinusExecutor.java index 3063544050..395331f3b8 100644 --- a/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/executor/multi/MinusExecutor.java +++ b/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/executor/multi/MinusExecutor.java @@ -34,6 +34,7 @@ import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; @@ -136,7 +137,8 @@ private void fillMinusHitsFromOneField(String fieldName, Set fieldValues fields.put(fieldName, new DocumentField(fieldName, values)); Map documentFields = new HashMap<>(); Map metaFields = new HashMap<>(); - SearchHit.splitFieldsByMetadata(fields, documentFields, metaFields); + someHit.getFields().forEach((field, docField) -> + (MapperService.META_FIELDS_BEFORE_7DOT8.contains(field) ? 
metaFields : documentFields).put(field, docField)); SearchHit searchHit = new SearchHit(currentId, currentId + "", new Text(someHit.getType()), documentFields, metaFields); searchHit.sourceRef(someHit.getSourceRef()); @@ -161,7 +163,8 @@ private void fillMinusHitsFromResults(Set comperableHitResu SearchHit originalHit = result.getOriginalHit(); Map documentFields = new HashMap<>(); Map metaFields = new HashMap<>(); - SearchHit.splitFieldsByMetadata(originalHit.getFields(), documentFields, metaFields); + originalHit.getFields().forEach((fieldName, docField) -> + (MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) ? metaFields : documentFields).put(fieldName, docField)); SearchHit searchHit = new SearchHit(currentId, originalHit.getId(), new Text(originalHit.getType()), documentFields, metaFields); searchHit.sourceRef(originalHit.getSourceRef()); diff --git a/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/executor/multi/UnionExecutor.java b/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/executor/multi/UnionExecutor.java index f133b71cb0..6a59bc0197 100644 --- a/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/executor/multi/UnionExecutor.java +++ b/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/executor/multi/UnionExecutor.java @@ -24,6 +24,7 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.text.Text; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; @@ -68,7 +69,8 @@ private void fillInternalSearchHits(List unionHits, SearchHit[] hits, for (SearchHit hit : hits) { Map documentFields = new HashMap<>(); Map metaFields = new HashMap<>(); - SearchHit.splitFieldsByMetadata(hit.getFields(), documentFields, metaFields); + hit.getFields().forEach((fieldName, docField) -> + 
(MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) ? metaFields : documentFields).put(fieldName, docField)); SearchHit searchHit = new SearchHit(currentId, hit.getId(), new Text(hit.getType()), documentFields, metaFields); searchHit.sourceRef(hit.getSourceRef()); searchHit.getSourceAsMap().clear(); diff --git a/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/plugin/ElasticsearchSQLPluginConfig.java b/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/plugin/ElasticsearchSQLPluginConfig.java index 45be68dc0a..2296fbd789 100644 --- a/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/plugin/ElasticsearchSQLPluginConfig.java +++ b/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/plugin/ElasticsearchSQLPluginConfig.java @@ -17,6 +17,7 @@ package com.amazon.opendistroforelasticsearch.sql.legacy.plugin; +import com.amazon.opendistroforelasticsearch.sql.common.setting.Settings; import com.amazon.opendistroforelasticsearch.sql.elasticsearch.client.ElasticsearchClient; import com.amazon.opendistroforelasticsearch.sql.elasticsearch.client.ElasticsearchNodeClient; import com.amazon.opendistroforelasticsearch.sql.elasticsearch.executor.ElasticsearchExecutionEngine; @@ -40,6 +41,9 @@ public class ElasticsearchSQLPluginConfig { @Autowired private NodeClient nodeClient; + @Autowired + private Settings settings; + @Bean public ElasticsearchClient client() { return new ElasticsearchNodeClient(clusterService, nodeClient); @@ -47,7 +51,7 @@ public ElasticsearchClient client() { @Bean public StorageEngine storageEngine() { - return new ElasticsearchStorageEngine(client()); + return new ElasticsearchStorageEngine(client(), settings); } @Bean diff --git a/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/plugin/RestSQLQueryAction.java b/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/plugin/RestSQLQueryAction.java index 6f243e684b..8683600d60 100644 --- 
a/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/plugin/RestSQLQueryAction.java +++ b/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/plugin/RestSQLQueryAction.java @@ -21,10 +21,12 @@ import static org.elasticsearch.rest.RestStatus.INTERNAL_SERVER_ERROR; import static org.elasticsearch.rest.RestStatus.OK; -import com.amazon.opendistroforelasticsearch.sql.ast.tree.UnresolvedPlan; import com.amazon.opendistroforelasticsearch.sql.common.antlr.SyntaxCheckException; import com.amazon.opendistroforelasticsearch.sql.common.response.ResponseListener; +import com.amazon.opendistroforelasticsearch.sql.common.setting.Settings; import com.amazon.opendistroforelasticsearch.sql.elasticsearch.security.SecurityAccess; +import com.amazon.opendistroforelasticsearch.sql.planner.logical.LogicalPlan; +import com.amazon.opendistroforelasticsearch.sql.planner.physical.PhysicalPlan; import com.amazon.opendistroforelasticsearch.sql.protocol.response.QueryResult; import com.amazon.opendistroforelasticsearch.sql.protocol.response.format.SimpleJsonResponseFormatter; import com.amazon.opendistroforelasticsearch.sql.sql.SQLService; @@ -57,9 +59,15 @@ public class RestSQLQueryAction extends BaseRestHandler { private final ClusterService clusterService; - public RestSQLQueryAction(ClusterService clusterService) { + /** + * Settings required by been initialization. 
+ */ + private final Settings pluginSettings; + + public RestSQLQueryAction(ClusterService clusterService, Settings pluginSettings) { super(); this.clusterService = clusterService; + this.pluginSettings = pluginSettings; } @Override @@ -89,13 +97,17 @@ public RestChannelConsumer prepareRequest(SQLQueryRequest request, NodeClient no } SQLService sqlService = createSQLService(nodeClient); - UnresolvedPlan ast; + PhysicalPlan plan; try { - ast = sqlService.parse(request.getQuery()); + // For now analyzing and planning stage may throw syntax exception as well + // which hints the fallback to legacy code is necessary here. + plan = sqlService.plan( + sqlService.analyze( + sqlService.parse(request.getQuery()))); } catch (SyntaxCheckException e) { return NOT_SUPPORTED_YET; } - return channel -> sqlService.execute(ast, createListener(channel)); + return channel -> sqlService.execute(plan, createListener(channel)); } private SQLService createSQLService(NodeClient client) { @@ -103,6 +115,7 @@ private SQLService createSQLService(NodeClient client) { AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(); context.registerBean(ClusterService.class, () -> clusterService); context.registerBean(NodeClient.class, () -> client); + context.registerBean(Settings.class, () -> pluginSettings); context.register(ElasticsearchSQLPluginConfig.class); context.register(SQLServiceConfig.class); context.refresh(); @@ -116,7 +129,8 @@ private ResponseListener createListener(RestChannel channel) { return new ResponseListener() { @Override public void onResponse(QueryResponse response) { - sendResponse(OK, formatter.format(new QueryResult(response.getResults()))); + sendResponse(OK, formatter.format(new QueryResult(response.getSchema(), + response.getResults()))); } @Override diff --git a/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/plugin/RestSqlAction.java 
b/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/plugin/RestSqlAction.java index b715dc8364..6269b85e9e 100644 --- a/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/plugin/RestSqlAction.java +++ b/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/plugin/RestSqlAction.java @@ -15,6 +15,16 @@ package com.amazon.opendistroforelasticsearch.sql.legacy.plugin; +import static com.amazon.opendistroforelasticsearch.sql.legacy.plugin.SqlSettings.CURSOR_ENABLED; +import static com.amazon.opendistroforelasticsearch.sql.legacy.plugin.SqlSettings.QUERY_ANALYSIS_ENABLED; +import static com.amazon.opendistroforelasticsearch.sql.legacy.plugin.SqlSettings.QUERY_ANALYSIS_SEMANTIC_SUGGESTION; +import static com.amazon.opendistroforelasticsearch.sql.legacy.plugin.SqlSettings.QUERY_ANALYSIS_SEMANTIC_THRESHOLD; +import static com.amazon.opendistroforelasticsearch.sql.legacy.plugin.SqlSettings.SQL_ENABLED; +import static com.amazon.opendistroforelasticsearch.sql.legacy.plugin.SqlSettings.SQL_NEW_ENGINE_ENABLED; +import static org.elasticsearch.rest.RestStatus.BAD_REQUEST; +import static org.elasticsearch.rest.RestStatus.OK; +import static org.elasticsearch.rest.RestStatus.SERVICE_UNAVAILABLE; + import com.alibaba.druid.sql.parser.ParserException; import com.amazon.opendistroforelasticsearch.sql.legacy.antlr.OpenDistroSqlAnalyzer; import com.amazon.opendistroforelasticsearch.sql.legacy.antlr.SqlAnalysisConfig; @@ -43,6 +53,16 @@ import com.amazon.opendistroforelasticsearch.sql.legacy.utils.QueryDataAnonymizer; import com.amazon.opendistroforelasticsearch.sql.sql.domain.SQLQueryRequest; import com.google.common.collect.ImmutableList; +import java.sql.SQLFeatureNotSupportedException; +import java.util.Arrays; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.function.Predicate; +import java.util.regex.Pattern; 
import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.client.Client; @@ -56,26 +76,6 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestStatus; -import java.sql.SQLFeatureNotSupportedException; -import java.util.Arrays; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.Set; -import java.util.function.Predicate; -import java.util.regex.Pattern; - -import static com.amazon.opendistroforelasticsearch.sql.legacy.plugin.SqlSettings.QUERY_ANALYSIS_ENABLED; -import static com.amazon.opendistroforelasticsearch.sql.legacy.plugin.SqlSettings.QUERY_ANALYSIS_SEMANTIC_SUGGESTION; -import static com.amazon.opendistroforelasticsearch.sql.legacy.plugin.SqlSettings.QUERY_ANALYSIS_SEMANTIC_THRESHOLD; -import static com.amazon.opendistroforelasticsearch.sql.legacy.plugin.SqlSettings.SQL_ENABLED; -import static com.amazon.opendistroforelasticsearch.sql.legacy.plugin.SqlSettings.SQL_NEW_ENGINE_ENABLED; -import static org.elasticsearch.rest.RestStatus.BAD_REQUEST; -import static org.elasticsearch.rest.RestStatus.OK; -import static org.elasticsearch.rest.RestStatus.SERVICE_UNAVAILABLE; - public class RestSqlAction extends BaseRestHandler { private static final Logger LOG = LogManager.getLogger(RestSqlAction.class); @@ -96,18 +96,19 @@ public class RestSqlAction extends BaseRestHandler { */ private final RestSQLQueryAction newSqlQueryHandler; - public RestSqlAction(Settings settings, ClusterService clusterService) { + public RestSqlAction(Settings settings, ClusterService clusterService, + com.amazon.opendistroforelasticsearch.sql.common.setting.Settings pluginSettings) { super(); this.allowExplicitIndex = MULTI_ALLOW_EXPLICIT_INDEX.get(settings); - this.newSqlQueryHandler = new RestSQLQueryAction(clusterService); + this.newSqlQueryHandler = new RestSQLQueryAction(clusterService, pluginSettings); } @Override 
public List routes() { return ImmutableList.of( - new Route(RestRequest.Method.POST, QUERY_API_ENDPOINT), - new Route(RestRequest.Method.POST, EXPLAIN_API_ENDPOINT), - new Route(RestRequest.Method.POST, CURSOR_CLOSE_ENDPOINT) + new Route(RestRequest.Method.POST, QUERY_API_ENDPOINT), + new Route(RestRequest.Method.POST, EXPLAIN_API_ENDPOINT), + new Route(RestRequest.Method.POST, CURSOR_CLOSE_ENDPOINT) ); } @@ -145,7 +146,7 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli Format format = SqlRequestParam.getFormat(request.params()); - if (isNewEngineEnabled()) { + if (isNewEngineEnabled() && isCursorDisabled()) { // Route request to new query engine if it's supported already SQLQueryRequest newSqlRequest = new SQLQueryRequest(sqlRequest.getJsonContent(), sqlRequest.getSql(), @@ -266,6 +267,11 @@ private boolean isNewEngineEnabled() { return LocalClusterState.state().getSettingValue(SQL_NEW_ENGINE_ENABLED); } + private boolean isCursorDisabled() { + Boolean isEnabled = LocalClusterState.state().getSettingValue(CURSOR_ENABLED); + return Boolean.FALSE.equals(isEnabled); + } + private static ColumnTypeProvider performAnalysis(String sql) { LocalClusterState clusterState = LocalClusterState.state(); SqlAnalysisConfig config = new SqlAnalysisConfig( diff --git a/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/query/planner/physical/node/scroll/SearchHitRow.java b/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/query/planner/physical/node/scroll/SearchHitRow.java index 4592bd45e6..7f162623df 100644 --- a/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/query/planner/physical/node/scroll/SearchHitRow.java +++ b/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/query/planner/physical/node/scroll/SearchHitRow.java @@ -19,6 +19,7 @@ import com.google.common.base.Strings; import org.elasticsearch.common.document.DocumentField; import 
org.elasticsearch.common.text.Text; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.search.SearchHit; import java.util.HashMap; @@ -153,7 +154,8 @@ private Object getValueOfPath(Object source, String path, boolean isIgnoreFirstD private SearchHit cloneHit(Row other) { Map documentFields = new HashMap<>(); Map metaFields = new HashMap<>(); - SearchHit.splitFieldsByMetadata(hit.getFields(), documentFields, metaFields); + hit.getFields().forEach((fieldName, docField) -> + (MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) ? metaFields : documentFields).put(fieldName, docField)); SearchHit combined = new SearchHit( hit.docId(), hit.getId() + "|" + (other == NULL ? "0" : ((SearchHitRow) other).hit.getId()), diff --git a/legacy/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/antlr/semantic/SemanticAnalyzerIdentifierTest.java b/legacy/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/antlr/semantic/SemanticAnalyzerIdentifierTest.java index 1ec2ff1f8a..a3e5d3f642 100644 --- a/legacy/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/antlr/semantic/SemanticAnalyzerIdentifierTest.java +++ b/legacy/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/antlr/semantic/SemanticAnalyzerIdentifierTest.java @@ -172,4 +172,9 @@ public void queryWithBackticksQuotedFieldNameShouldPass() { validate("SELECT s.`age` FROM semantics AS s"); validate("SELECT `s`.`age` FROM semantics AS `s`"); } + + @Test + public void queryWithBackticksQuotedFieldNameInFunctionShouldPass() { + validate("SELECT SUM(`age`) FROM semantics"); + } } diff --git a/legacy/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/plugin/RestSQLQueryActionTest.java b/legacy/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/plugin/RestSQLQueryActionTest.java index 9ed790bfcc..31e4b8845b 100644 --- a/legacy/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/plugin/RestSQLQueryActionTest.java +++ 
b/legacy/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/plugin/RestSQLQueryActionTest.java @@ -22,6 +22,7 @@ import static org.junit.Assert.assertNotSame; import static org.junit.Assert.assertSame; +import com.amazon.opendistroforelasticsearch.sql.common.setting.Settings; import com.amazon.opendistroforelasticsearch.sql.sql.domain.SQLQueryRequest; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.service.ClusterService; @@ -40,6 +41,9 @@ public class RestSQLQueryActionTest { @Mock private NodeClient nodeClient; + @Mock + private Settings settings; + @Test public void handleQueryThatCanSupport() { SQLQueryRequest request = new SQLQueryRequest( @@ -48,7 +52,7 @@ public void handleQueryThatCanSupport() { QUERY_API_ENDPOINT, ""); - RestSQLQueryAction queryAction = new RestSQLQueryAction(clusterService); + RestSQLQueryAction queryAction = new RestSQLQueryAction(clusterService, settings); assertNotSame(NOT_SUPPORTED_YET, queryAction.prepareRequest(request, nodeClient)); } @@ -60,19 +64,19 @@ public void skipExplainThatNotSupport() { EXPLAIN_API_ENDPOINT, ""); - RestSQLQueryAction queryAction = new RestSQLQueryAction(clusterService); + RestSQLQueryAction queryAction = new RestSQLQueryAction(clusterService, settings); assertSame(NOT_SUPPORTED_YET, queryAction.prepareRequest(request, nodeClient)); } @Test public void skipQueryThatNotSupport() { SQLQueryRequest request = new SQLQueryRequest( - new JSONObject("{\"query\": \"SELECT * FROM test WHERE age = 10\"}"), - "SELECT * FROM test WHERE age = 10", + new JSONObject("{\"query\": \"SELECT * FROM test WHERE age = 10 GROUP BY age\"}"), + "SELECT * FROM test WHERE age = 10 GROUP BY age", QUERY_API_ENDPOINT, ""); - RestSQLQueryAction queryAction = new RestSQLQueryAction(clusterService); + RestSQLQueryAction queryAction = new RestSQLQueryAction(clusterService, settings); assertSame(NOT_SUPPORTED_YET, queryAction.prepareRequest(request, nodeClient)); } diff --git 
a/legacy/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/util/CheckScriptContents.java b/legacy/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/util/CheckScriptContents.java index 900e40c499..d71b4fd18c 100644 --- a/legacy/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/util/CheckScriptContents.java +++ b/legacy/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/util/CheckScriptContents.java @@ -59,6 +59,7 @@ import static java.util.Collections.emptyList; import static org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField; import static org.junit.Assert.assertTrue; +import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.any; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; @@ -255,7 +256,7 @@ public static ClusterService mockClusterService(String mappings) { public static IndexNameExpressionResolver mockIndexNameExpressionResolver() { IndexNameExpressionResolver mockResolver = mock(IndexNameExpressionResolver.class); - when(mockResolver.concreteIndexNames(any(), any(), any())).thenAnswer( + when(mockResolver.concreteIndexNames(any(), any(), anyString())).thenAnswer( (Answer) invocation -> { // Return index expression directly without resolving Object indexExprs = invocation.getArguments()[2]; diff --git a/plugin/src/main/java/com/amazon/opendistroforelasticsearch/sql/plugin/SQLPlugin.java b/plugin/src/main/java/com/amazon/opendistroforelasticsearch/sql/plugin/SQLPlugin.java index 4fe9b71980..3c247fb634 100644 --- a/plugin/src/main/java/com/amazon/opendistroforelasticsearch/sql/plugin/SQLPlugin.java +++ b/plugin/src/main/java/com/amazon/opendistroforelasticsearch/sql/plugin/SQLPlugin.java @@ -16,6 +16,8 @@ package com.amazon.opendistroforelasticsearch.sql.plugin; import com.amazon.opendistroforelasticsearch.sql.elasticsearch.setting.ElasticsearchSettings; +import 
com.amazon.opendistroforelasticsearch.sql.elasticsearch.storage.script.ExpressionScriptEngine; +import com.amazon.opendistroforelasticsearch.sql.elasticsearch.storage.serialization.DefaultExpressionSerializer; import com.amazon.opendistroforelasticsearch.sql.legacy.esdomain.LocalClusterState; import com.amazon.opendistroforelasticsearch.sql.legacy.executor.AsyncRestExecutor; import com.amazon.opendistroforelasticsearch.sql.legacy.metrics.Metrics; @@ -47,16 +49,19 @@ import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.ScriptPlugin; import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; +import org.elasticsearch.script.ScriptContext; +import org.elasticsearch.script.ScriptEngine; import org.elasticsearch.script.ScriptService; import org.elasticsearch.threadpool.ExecutorBuilder; import org.elasticsearch.threadpool.FixedExecutorBuilder; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.watcher.ResourceWatcherService; -public class SQLPlugin extends Plugin implements ActionPlugin { +public class SQLPlugin extends Plugin implements ActionPlugin, ScriptPlugin { /** * Sql plugin specific settings in ES cluster settings. 
@@ -92,8 +97,8 @@ public List getRestHandlers(Settings settings, RestController restC Metrics.getInstance().registerDefaultMetrics(); return Arrays.asList( - new RestPPLQueryAction(restController, clusterService, pluginSettings), - new RestSqlAction(settings, clusterService), + new RestPPLQueryAction(restController, clusterService, pluginSettings, settings), + new RestSqlAction(settings, clusterService, pluginSettings), new RestSqlStatsAction(settings, restController), new RestSqlSettingsAction(settings, restController) ); @@ -144,4 +149,9 @@ public List> getSettings() { return settings; } + @Override + public ScriptEngine getScriptEngine(Settings settings, Collection> contexts) { + return new ExpressionScriptEngine(new DefaultExpressionSerializer()); + } + } diff --git a/plugin/src/main/java/com/amazon/opendistroforelasticsearch/sql/plugin/rest/ElasticsearchPluginConfig.java b/plugin/src/main/java/com/amazon/opendistroforelasticsearch/sql/plugin/rest/ElasticsearchPluginConfig.java index cc9de58760..a0b7cea26b 100644 --- a/plugin/src/main/java/com/amazon/opendistroforelasticsearch/sql/plugin/rest/ElasticsearchPluginConfig.java +++ b/plugin/src/main/java/com/amazon/opendistroforelasticsearch/sql/plugin/rest/ElasticsearchPluginConfig.java @@ -57,7 +57,7 @@ public ElasticsearchClient client() { @Bean public StorageEngine storageEngine() { - return new ElasticsearchStorageEngine(client()); + return new ElasticsearchStorageEngine(client(), settings); } @Bean diff --git a/plugin/src/main/java/com/amazon/opendistroforelasticsearch/sql/plugin/rest/RestPPLQueryAction.java b/plugin/src/main/java/com/amazon/opendistroforelasticsearch/sql/plugin/rest/RestPPLQueryAction.java index 7062b31ccb..5f3f9ac763 100644 --- a/plugin/src/main/java/com/amazon/opendistroforelasticsearch/sql/plugin/rest/RestPPLQueryAction.java +++ b/plugin/src/main/java/com/amazon/opendistroforelasticsearch/sql/plugin/rest/RestPPLQueryAction.java @@ -16,12 +16,18 @@ package 
com.amazon.opendistroforelasticsearch.sql.plugin.rest; import static com.amazon.opendistroforelasticsearch.sql.protocol.response.format.JsonResponseFormatter.Style.PRETTY; -import static org.elasticsearch.rest.RestStatus.INTERNAL_SERVER_ERROR; +import static org.elasticsearch.rest.RestStatus.BAD_REQUEST; import static org.elasticsearch.rest.RestStatus.OK; +import static org.elasticsearch.rest.RestStatus.SERVICE_UNAVAILABLE; +import com.amazon.opendistroforelasticsearch.sql.common.antlr.SyntaxCheckException; import com.amazon.opendistroforelasticsearch.sql.common.response.ResponseListener; import com.amazon.opendistroforelasticsearch.sql.common.setting.Settings; +import com.amazon.opendistroforelasticsearch.sql.elasticsearch.response.error.ErrorMessageFactory; import com.amazon.opendistroforelasticsearch.sql.elasticsearch.security.SecurityAccess; +import com.amazon.opendistroforelasticsearch.sql.exception.ExpressionEvaluationException; +import com.amazon.opendistroforelasticsearch.sql.exception.QueryEngineException; +import com.amazon.opendistroforelasticsearch.sql.exception.SemanticCheckException; import com.amazon.opendistroforelasticsearch.sql.executor.ExecutionEngine.QueryResponse; import com.amazon.opendistroforelasticsearch.sql.plugin.request.PPLQueryRequestFactory; import com.amazon.opendistroforelasticsearch.sql.ppl.PPLService; @@ -32,10 +38,12 @@ import java.security.PrivilegedExceptionAction; import java.util.Collections; import java.util.List; +import java.util.function.Supplier; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestChannel; @@ -59,14 +67,20 @@ public class RestPPLQueryAction extends BaseRestHandler { */ private final 
Settings pluginSettings; + private final Supplier pplEnabled; + /** * Constructor of RestPPLQueryAction. */ public RestPPLQueryAction(RestController restController, ClusterService clusterService, - Settings pluginSettings) { + Settings pluginSettings, + org.elasticsearch.common.settings.Settings clusterSettings) { super(); this.clusterService = clusterService; this.pluginSettings = pluginSettings; + this.pplEnabled = + () -> MULTI_ALLOW_EXPLICIT_INDEX.get(clusterSettings) + && (Boolean) pluginSettings.getSettingValue(Settings.Key.PPL_ENABLED); } @Override @@ -83,6 +97,11 @@ public String getName() { @Override protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient nodeClient) { + if (!pplEnabled.get()) { + return channel -> reportError(channel, new IllegalAccessException( + "Either opendistro.ppl.enabled or rest.action.multi.allow_explicit_index setting is false" + ), BAD_REQUEST); + } PPLService pplService = createPPLService(nodeClient); return channel -> pplService.execute( PPLQueryRequestFactory.getPPLRequest(request), createListener(channel)); @@ -119,13 +138,14 @@ private ResponseListener createListener(RestChannel channel) { return new ResponseListener() { @Override public void onResponse(QueryResponse response) { - sendResponse(OK, formatter.format(new QueryResult(response.getResults()))); + sendResponse(OK, formatter.format(new QueryResult(response.getSchema(), + response.getResults()))); } @Override public void onFailure(Exception e) { LOG.error("Error happened during query handling", e); - sendResponse(INTERNAL_SERVER_ERROR, formatter.format(e)); + reportError(channel, e, isClientError(e) ? 
BAD_REQUEST : SERVICE_UNAVAILABLE); } private void sendResponse(RestStatus status, String content) { @@ -143,4 +163,19 @@ private T doPrivileged(PrivilegedExceptionAction action) { } } + private void reportError(final RestChannel channel, final Exception e, final RestStatus status) { + channel.sendResponse(new BytesRestResponse(status, + ErrorMessageFactory.createErrorMessage(e, status.getStatus()).toString())); + } + + private static boolean isClientError(Exception e) { + return e instanceof NullPointerException + // NPE is hard to differentiate but more likely caused by bad query + || e instanceof IllegalArgumentException + || e instanceof IndexNotFoundException + || e instanceof SemanticCheckException + || e instanceof ExpressionEvaluationException + || e instanceof QueryEngineException + || e instanceof SyntaxCheckException; + } } diff --git a/ppl/build.gradle b/ppl/build.gradle index 6f55868cc2..062ddfd606 100644 --- a/ppl/build.gradle +++ b/ppl/build.gradle @@ -36,6 +36,7 @@ dependencies { compile project(':protocol') testCompile group: 'junit', name: 'junit', version: '4.12' + testCompile group: 'org.hamcrest', name: 'hamcrest-library', version: '2.1' testCompile group: 'org.mockito', name: 'mockito-core', version: '3.3.3' } diff --git a/ppl/src/main/java/com/amazon/opendistroforelasticsearch/sql/ppl/PPLService.java b/ppl/src/main/java/com/amazon/opendistroforelasticsearch/sql/ppl/PPLService.java index 0c3e437c68..08ca8f4383 100644 --- a/ppl/src/main/java/com/amazon/opendistroforelasticsearch/sql/ppl/PPLService.java +++ b/ppl/src/main/java/com/amazon/opendistroforelasticsearch/sql/ppl/PPLService.java @@ -29,6 +29,7 @@ import com.amazon.opendistroforelasticsearch.sql.ppl.domain.PPLQueryRequest; import com.amazon.opendistroforelasticsearch.sql.ppl.parser.AstBuilder; import com.amazon.opendistroforelasticsearch.sql.ppl.parser.AstExpressionBuilder; +import com.amazon.opendistroforelasticsearch.sql.ppl.utils.UnresolvedPlanHelper; import 
com.amazon.opendistroforelasticsearch.sql.storage.StorageEngine; import lombok.RequiredArgsConstructor; import org.antlr.v4.runtime.tree.ParseTree; @@ -55,7 +56,8 @@ public void execute(PPLQueryRequest request, ResponseListener lis UnresolvedPlan ast = cst.accept(new AstBuilder(new AstExpressionBuilder())); // 2.Analyze abstract syntax to generate logical plan - LogicalPlan logicalPlan = analyzer.analyze(ast, new AnalysisContext()); + LogicalPlan logicalPlan = analyzer.analyze(UnresolvedPlanHelper.addSelectAll(ast), + new AnalysisContext()); // 3.Generate optimal physical plan from logical plan PhysicalPlan physicalPlan = new Planner(storageEngine).plan(logicalPlan); diff --git a/ppl/src/main/java/com/amazon/opendistroforelasticsearch/sql/ppl/parser/AstExpressionBuilder.java b/ppl/src/main/java/com/amazon/opendistroforelasticsearch/sql/ppl/parser/AstExpressionBuilder.java index f8c4b351e0..1834f98196 100644 --- a/ppl/src/main/java/com/amazon/opendistroforelasticsearch/sql/ppl/parser/AstExpressionBuilder.java +++ b/ppl/src/main/java/com/amazon/opendistroforelasticsearch/sql/ppl/parser/AstExpressionBuilder.java @@ -15,7 +15,6 @@ package com.amazon.opendistroforelasticsearch.sql.ppl.parser; -import static com.amazon.opendistroforelasticsearch.sql.common.utils.StringUtils.unquoteIdentifier; import static com.amazon.opendistroforelasticsearch.sql.ppl.antlr.parser.OpenDistroPPLParser.BinaryArithmeticContext; import static com.amazon.opendistroforelasticsearch.sql.ppl.antlr.parser.OpenDistroPPLParser.BooleanLiteralContext; import static com.amazon.opendistroforelasticsearch.sql.ppl.antlr.parser.OpenDistroPPLParser.CompareExprContext; @@ -193,7 +192,7 @@ public UnresolvedExpression visitQualifiedName(QualifiedNameContext ctx) { ctx.ident() .stream() .map(ParserRuleContext::getText) - .map(StringUtils::unquoteIdentifier) + .map(StringUtils::unquoteText) .collect(Collectors.toList()) ); } @@ -204,14 +203,14 @@ public UnresolvedExpression 
visitWcQualifiedName(WcQualifiedNameContext ctx) { ctx.wildcard() .stream() .map(ParserRuleContext::getText) - .map(StringUtils::unquoteIdentifier) + .map(StringUtils::unquoteText) .collect(Collectors.toList()) ); } @Override public UnresolvedExpression visitStringLiteral(StringLiteralContext ctx) { - return new Literal(unquoteIdentifier(ctx.getText()), DataType.STRING); + return new Literal(StringUtils.unquoteText(ctx.getText()), DataType.STRING); } @Override diff --git a/ppl/src/main/java/com/amazon/opendistroforelasticsearch/sql/ppl/utils/ArgumentFactory.java b/ppl/src/main/java/com/amazon/opendistroforelasticsearch/sql/ppl/utils/ArgumentFactory.java index aaa839d0b2..6167639ed5 100644 --- a/ppl/src/main/java/com/amazon/opendistroforelasticsearch/sql/ppl/utils/ArgumentFactory.java +++ b/ppl/src/main/java/com/amazon/opendistroforelasticsearch/sql/ppl/utils/ArgumentFactory.java @@ -15,7 +15,6 @@ package com.amazon.opendistroforelasticsearch.sql.ppl.utils; -import static com.amazon.opendistroforelasticsearch.sql.common.utils.StringUtils.unquoteIdentifier; import static com.amazon.opendistroforelasticsearch.sql.ppl.antlr.parser.OpenDistroPPLParser.BooleanLiteralContext; import static com.amazon.opendistroforelasticsearch.sql.ppl.antlr.parser.OpenDistroPPLParser.DedupCommandContext; import static com.amazon.opendistroforelasticsearch.sql.ppl.antlr.parser.OpenDistroPPLParser.FieldsCommandContext; @@ -27,6 +26,7 @@ import com.amazon.opendistroforelasticsearch.sql.ast.expression.Argument; import com.amazon.opendistroforelasticsearch.sql.ast.expression.DataType; import com.amazon.opendistroforelasticsearch.sql.ast.expression.Literal; +import com.amazon.opendistroforelasticsearch.sql.common.utils.StringUtils; import java.util.Arrays; import java.util.Collections; import java.util.List; @@ -140,7 +140,7 @@ private static Literal getArgumentValue(ParserRuleContext ctx) { ? 
new Literal(Integer.parseInt(ctx.getText()), DataType.INTEGER) : ctx instanceof BooleanLiteralContext ? new Literal(Boolean.valueOf(ctx.getText()), DataType.BOOLEAN) - : new Literal(unquoteIdentifier(ctx.getText()), DataType.STRING); + : new Literal(StringUtils.unquoteText(ctx.getText()), DataType.STRING); } } diff --git a/ppl/src/main/java/com/amazon/opendistroforelasticsearch/sql/ppl/utils/UnresolvedPlanHelper.java b/ppl/src/main/java/com/amazon/opendistroforelasticsearch/sql/ppl/utils/UnresolvedPlanHelper.java new file mode 100644 index 0000000000..6c2f97671f --- /dev/null +++ b/ppl/src/main/java/com/amazon/opendistroforelasticsearch/sql/ppl/utils/UnresolvedPlanHelper.java @@ -0,0 +1,42 @@ +/* + * + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + * + */ + +package com.amazon.opendistroforelasticsearch.sql.ppl.utils; + +import com.amazon.opendistroforelasticsearch.sql.ast.expression.AllFields; +import com.amazon.opendistroforelasticsearch.sql.ast.tree.Project; +import com.amazon.opendistroforelasticsearch.sql.ast.tree.UnresolvedPlan; +import com.google.common.collect.ImmutableList; +import lombok.experimental.UtilityClass; + +/** + * The helper to add select to {@link UnresolvedPlan} if needed. + */ +@UtilityClass +public class UnresolvedPlanHelper { + + /** + * Attach Select All to PPL commands if required. 
+ */ + public UnresolvedPlan addSelectAll(UnresolvedPlan plan) { + if ((plan instanceof Project) && !((Project) plan).isExcluded()) { + return plan; + } else { + return new Project(ImmutableList.of(AllFields.of())).attach(plan); + } + } +} diff --git a/ppl/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/PPLServiceTest.java b/ppl/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/PPLServiceTest.java index 18df192acc..ed8449bd0c 100644 --- a/ppl/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/PPLServiceTest.java +++ b/ppl/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/PPLServiceTest.java @@ -56,6 +56,9 @@ public class PPLServiceTest { @Mock private PhysicalPlan plan; + @Mock + private ExecutionEngine.Schema schema; + /** * Setup the test context. */ @@ -76,7 +79,7 @@ public void setUp() { public void testExecuteShouldPass() { doAnswer(invocation -> { ResponseListener listener = invocation.getArgument(1); - listener.onResponse(new QueryResponse(Collections.emptyList())); + listener.onResponse(new QueryResponse(schema, Collections.emptyList())); return null; }).when(executionEngine).execute(any(), any()); @@ -108,4 +111,19 @@ public void onFailure(Exception e) { } }); } + + @Test + public void test() { + pplService.execute(new PPLQueryRequest("search", null), new ResponseListener() { + @Override + public void onResponse(QueryResponse pplQueryResponse) { + Assert.fail(); + } + + @Override + public void onFailure(Exception e) { + + } + }); + } } \ No newline at end of file diff --git a/ppl/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/utils/UnresolvedPlanHelperTest.java b/ppl/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/utils/UnresolvedPlanHelperTest.java new file mode 100644 index 0000000000..3e4d5297b4 --- /dev/null +++ b/ppl/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/utils/UnresolvedPlanHelperTest.java @@ -0,0 +1,68 @@ +/* + * + * Copyright 2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + * + */ + +package com.amazon.opendistroforelasticsearch.sql.ppl.utils; + +import static org.junit.Assert.assertThat; +import static org.mockito.Mockito.when; + +import com.amazon.opendistroforelasticsearch.sql.ast.expression.AllFields; +import com.amazon.opendistroforelasticsearch.sql.ast.expression.UnresolvedExpression; +import com.amazon.opendistroforelasticsearch.sql.ast.tree.Project; +import com.amazon.opendistroforelasticsearch.sql.ast.tree.Rename; +import com.amazon.opendistroforelasticsearch.sql.ast.tree.UnresolvedPlan; +import java.util.Arrays; +import junit.framework.TestCase; +import org.hamcrest.Matchers; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mockito; +import org.mockito.junit.MockitoJUnitRunner; + +@RunWith(MockitoJUnitRunner.class) +public class UnresolvedPlanHelperTest extends TestCase { + + @Test + public void addProjectForRenameOperator() { + Rename rename = Mockito.mock(Rename.class); + + UnresolvedPlan plan = UnresolvedPlanHelper.addSelectAll(rename); + assertTrue(plan instanceof Project); + } + + @Test + public void addProjectForProjectExcludeOperator() { + Project project = Mockito.mock(Project.class); + when(project.isExcluded()).thenReturn(true); + + UnresolvedPlan plan = UnresolvedPlanHelper.addSelectAll(project); + assertTrue(plan instanceof Project); + assertThat(((Project) plan).getProjectList(), Matchers.contains(AllFields.of())); + } + + @Test + 
public void dontAddProjectForProjectOperator() { + Project project = Mockito.mock(Project.class); + UnresolvedExpression expression = Mockito.mock(UnresolvedExpression.class); + when(project.isExcluded()).thenReturn(false); + when(project.getProjectList()).thenReturn(Arrays.asList(expression)); + + UnresolvedPlan plan = UnresolvedPlanHelper.addSelectAll(project); + assertTrue(plan instanceof Project); + assertThat(((Project) plan).getProjectList(), Matchers.contains(expression)); + } +} \ No newline at end of file diff --git a/protocol/src/main/java/com/amazon/opendistroforelasticsearch/sql/protocol/response/QueryResult.java b/protocol/src/main/java/com/amazon/opendistroforelasticsearch/sql/protocol/response/QueryResult.java index 389b259861..cc8b4d73bd 100644 --- a/protocol/src/main/java/com/amazon/opendistroforelasticsearch/sql/protocol/response/QueryResult.java +++ b/protocol/src/main/java/com/amazon/opendistroforelasticsearch/sql/protocol/response/QueryResult.java @@ -18,8 +18,8 @@ import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValue; import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils; +import com.amazon.opendistroforelasticsearch.sql.executor.ExecutionEngine; import java.util.Collection; -import java.util.Collections; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.Map; @@ -31,6 +31,7 @@ */ @RequiredArgsConstructor public class QueryResult implements Iterable { + private final ExecutionEngine.Schema schema; /** * Results which are collection of expression. 
@@ -52,13 +53,10 @@ public int size() { * @return mapping from column names to its expression type */ public Map columnNameTypes() { - if (exprValues.isEmpty()) { - return Collections.emptyMap(); - } - - // TODO: Need other way to extract header than inferring from data implicitly - Map tupleValue = getFirstTupleValue(); - return populateColumnNameAndTypes(tupleValue); + Map colNameTypes = new LinkedHashMap<>(); + schema.getColumns().forEach(column -> colNameTypes.put(column.getName(), + column.getExprType().typeName().toLowerCase())); + return colNameTypes; } @Override @@ -71,29 +69,10 @@ public Iterator iterator() { .iterator(); } - private Map getFirstTupleValue() { - // Assume expression is always tuple on first level - // and columns (keys) of all tuple values are exactly same - ExprValue firstValue = exprValues.iterator().next(); - return ExprValueUtils.getTupleValue(firstValue); - } - - private Map populateColumnNameAndTypes(Map tupleValue) { - // Use linked hashmap to maintain original order in tuple expression - Map colNameTypes = new LinkedHashMap<>(); - tupleValue.forEach((name, expr) -> colNameTypes.put(name, getTypeString(expr))); - return colNameTypes; - } - private Object[] convertExprValuesToValues(Collection exprValues) { return exprValues .stream() .map(ExprValue::value) .toArray(Object[]::new); } - - private String getTypeString(ExprValue exprValue) { - return exprValue.type().typeName().toLowerCase(); - } - } diff --git a/protocol/src/test/java/com/amazon/opendistroforelasticsearch/sql/protocol/response/QueryResultTest.java b/protocol/src/test/java/com/amazon/opendistroforelasticsearch/sql/protocol/response/QueryResultTest.java index 52cf905d9a..785b8949af 100644 --- a/protocol/src/test/java/com/amazon/opendistroforelasticsearch/sql/protocol/response/QueryResultTest.java +++ b/protocol/src/test/java/com/amazon/opendistroforelasticsearch/sql/protocol/response/QueryResultTest.java @@ -17,34 +17,45 @@ package 
com.amazon.opendistroforelasticsearch.sql.protocol.response; import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.tupleValue; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.INTEGER; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.STRING; import static org.junit.jupiter.api.Assertions.assertArrayEquals; import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; +import com.amazon.opendistroforelasticsearch.sql.executor.ExecutionEngine; +import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import java.util.Arrays; import java.util.Collections; -import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; class QueryResultTest { + private ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( + new ExecutionEngine.Schema.Column("name", "name", STRING), + new ExecutionEngine.Schema.Column("age", "age", INTEGER))); + + @Test void size() { - QueryResult response = new QueryResult(Arrays.asList( - tupleValue(ImmutableMap.of("name", "John", "age", 20)), - tupleValue(ImmutableMap.of("name", "Allen", "age", 30)), - tupleValue(ImmutableMap.of("name", "Smith", "age", 40)) - )); + QueryResult response = new QueryResult( + schema, + Arrays.asList( + tupleValue(ImmutableMap.of("name", "John", "age", 20)), + tupleValue(ImmutableMap.of("name", "Allen", "age", 30)), + tupleValue(ImmutableMap.of("name", "Smith", "age", 40)) + )); assertEquals(3, response.size()); } @Test void columnNameTypes() { - QueryResult response = new QueryResult(Collections.singletonList( - tupleValue(ImmutableMap.of("name", "John", "age", 20)) - )); + QueryResult response = new QueryResult( + schema, + Collections.singletonList( + tupleValue(ImmutableMap.of("name", "John", "age", 20)) + )); assertEquals( ImmutableMap.of("name", 
"string", "age", "integer"), @@ -54,17 +65,23 @@ void columnNameTypes() { @Test void columnNameTypesFromEmptyExprValues() { - QueryResult response = new QueryResult(Collections.emptyList()); - assertTrue(response.columnNameTypes().isEmpty()); + QueryResult response = new QueryResult( + schema, + Collections.emptyList()); + assertEquals( + ImmutableMap.of("name", "string", "age", "integer"), + response.columnNameTypes() + ); } - @Disabled("Need to figure out column headers in other way than inferring from data implicitly") @Test void columnNameTypesFromExprValuesWithMissing() { - QueryResult response = new QueryResult(Arrays.asList( - tupleValue(ImmutableMap.of("name", "John")), - tupleValue(ImmutableMap.of("name", "John", "age", 20)) - )); + QueryResult response = new QueryResult( + schema, + Arrays.asList( + tupleValue(ImmutableMap.of("name", "John")), + tupleValue(ImmutableMap.of("name", "John", "age", 20)) + )); assertEquals( ImmutableMap.of("name", "string", "age", "integer"), @@ -74,10 +91,12 @@ void columnNameTypesFromExprValuesWithMissing() { @Test void iterate() { - QueryResult response = new QueryResult(Arrays.asList( - tupleValue(ImmutableMap.of("name", "John", "age", 20)), - tupleValue(ImmutableMap.of("name", "Allen", "age", 30)) - )); + QueryResult response = new QueryResult( + schema, + Arrays.asList( + tupleValue(ImmutableMap.of("name", "John", "age", 20)), + tupleValue(ImmutableMap.of("name", "Allen", "age", 30)) + )); int i = 0; for (Object[] objects : response) { diff --git a/protocol/src/test/java/com/amazon/opendistroforelasticsearch/sql/protocol/response/format/SimpleJsonResponseFormatterTest.java b/protocol/src/test/java/com/amazon/opendistroforelasticsearch/sql/protocol/response/format/SimpleJsonResponseFormatterTest.java index bd9dc9a817..4090e0addd 100644 --- a/protocol/src/test/java/com/amazon/opendistroforelasticsearch/sql/protocol/response/format/SimpleJsonResponseFormatterTest.java +++ 
b/protocol/src/test/java/com/amazon/opendistroforelasticsearch/sql/protocol/response/format/SimpleJsonResponseFormatterTest.java @@ -16,23 +16,34 @@ package com.amazon.opendistroforelasticsearch.sql.protocol.response.format; +import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.LITERAL_MISSING; +import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.stringValue; import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.tupleValue; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.INTEGER; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.STRING; import static com.amazon.opendistroforelasticsearch.sql.protocol.response.format.JsonResponseFormatter.Style.COMPACT; import static com.amazon.opendistroforelasticsearch.sql.protocol.response.format.JsonResponseFormatter.Style.PRETTY; import static org.junit.jupiter.api.Assertions.assertEquals; +import com.amazon.opendistroforelasticsearch.sql.data.model.ExprTupleValue; +import com.amazon.opendistroforelasticsearch.sql.executor.ExecutionEngine; import com.amazon.opendistroforelasticsearch.sql.protocol.response.QueryResult; +import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import java.util.Arrays; -import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; class SimpleJsonResponseFormatterTest { + private final ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( + new ExecutionEngine.Schema.Column("firstname", "name", STRING), + new ExecutionEngine.Schema.Column("age", "age", INTEGER))); + @Test void formatResponse() { QueryResult response = new QueryResult( + schema, Arrays.asList( tupleValue(ImmutableMap.of("firstname", "John", "age", 20)), tupleValue(ImmutableMap.of("firstname", "Smith", "age", 30)))); @@ -48,6 +59,7 @@ void formatResponse() { void formatResponsePretty() { QueryResult response = new 
QueryResult( + schema, Arrays.asList( tupleValue(ImmutableMap.of("firstname", "John", "age", 20)), tupleValue(ImmutableMap.of("firstname", "Smith", "age", 30)))); @@ -80,19 +92,20 @@ void formatResponsePretty() { formatter.format(response)); } - @Disabled("Need to figure out column headers in other way than inferring from data implicitly") @Test void formatResponseWithMissingValue() { QueryResult response = new QueryResult( + schema, Arrays.asList( - tupleValue(ImmutableMap.of("firstname", "John")), + ExprTupleValue.fromExprValueMap( + ImmutableMap.of("firstname", stringValue("John"), "age", LITERAL_MISSING)), tupleValue(ImmutableMap.of("firstname", "Smith", "age", 30)))); SimpleJsonResponseFormatter formatter = new SimpleJsonResponseFormatter(COMPACT); assertEquals( "{\"schema\":[{\"name\":\"firstname\",\"type\":\"string\"}," + "{\"name\":\"age\",\"type\":\"integer\"}],\"total\":2," - + "\"datarows\":[{\"row\":[\"John\",null]},{\"row\":[\"Smith\",30]}],\"size\":2}", + + "\"datarows\":[[\"John\",null],[\"Smith\",30]],\"size\":2}", formatter.format(response)); } diff --git a/sql-cli/src/odfe_sql_cli/__init__.py b/sql-cli/src/odfe_sql_cli/__init__.py index bef8a15b09..64e220b617 100644 --- a/sql-cli/src/odfe_sql_cli/__init__.py +++ b/sql-cli/src/odfe_sql_cli/__init__.py @@ -12,4 +12,4 @@ express or implied. See the License for the specific language governing permissions and limitations under the License. 
""" -__version__ = "1.9.0.1" +__version__ = "1.10.0.0" diff --git a/sql-jdbc/build.gradle b/sql-jdbc/build.gradle index 0bf3cb26f0..9710fd6a10 100644 --- a/sql-jdbc/build.gradle +++ b/sql-jdbc/build.gradle @@ -32,7 +32,7 @@ plugins { group 'com.amazon.opendistroforelasticsearch.client' // keep version in sync with version in Driver source -version '1.9.0.1' +version '1.10.0.0' boolean snapshot = "true".equals(System.getProperty("build.snapshot", "true")); if (snapshot) { diff --git a/sql-jdbc/src/main/java/com/amazon/opendistroforelasticsearch/jdbc/internal/Version.java b/sql-jdbc/src/main/java/com/amazon/opendistroforelasticsearch/jdbc/internal/Version.java index 0f74a5ed8d..43b1a374f3 100644 --- a/sql-jdbc/src/main/java/com/amazon/opendistroforelasticsearch/jdbc/internal/Version.java +++ b/sql-jdbc/src/main/java/com/amazon/opendistroforelasticsearch/jdbc/internal/Version.java @@ -19,7 +19,7 @@ public enum Version { // keep this in sync with the gradle version - Current(1, 9, 0, 1); + Current(1, 10, 0, 0); private int major; private int minor; diff --git a/sql-jdbc/src/main/java/com/amazon/opendistroforelasticsearch/jdbc/types/ElasticsearchType.java b/sql-jdbc/src/main/java/com/amazon/opendistroforelasticsearch/jdbc/types/ElasticsearchType.java index eebaa1914e..6f754ca094 100644 --- a/sql-jdbc/src/main/java/com/amazon/opendistroforelasticsearch/jdbc/types/ElasticsearchType.java +++ b/sql-jdbc/src/main/java/com/amazon/opendistroforelasticsearch/jdbc/types/ElasticsearchType.java @@ -66,6 +66,7 @@ public enum ElasticsearchType { SCALED_FLOAT(JDBCType.DOUBLE, Double.class, 15, 25, true), KEYWORD(JDBCType.VARCHAR, String.class, 256, 0, false), TEXT(JDBCType.VARCHAR, String.class, Integer.MAX_VALUE, 0, false), + STRING(JDBCType.VARCHAR, String.class, Integer.MAX_VALUE, 0, false), IP(JDBCType.VARCHAR, String.class, 15, 0, false), NESTED(JDBCType.STRUCT, null, 0, 0, false), OBJECT(JDBCType.STRUCT, null, 0, 0, false), diff --git a/sql-odbc/.gitignore b/sql-odbc/.gitignore 
index ecaefb1c61..6f408e7c21 100644 --- a/sql-odbc/.gitignore +++ b/sql-odbc/.gitignore @@ -54,7 +54,10 @@ CMakeCache.txt CPackConfig.cmake CPackSourceConfig.cmake CTestTestfile.cmake +/build/ +/sdk-build32/ /sdk-build64/ +/cmake-build32/ /cmake-build64/ /src/PowerBIConnector/bin/Debug/ /src/PowerBIConnector/obj/ diff --git a/sql-odbc/BUILD_INSTRUCTIONS.md b/sql-odbc/BUILD_INSTRUCTIONS.md deleted file mode 100644 index 36c5202b36..0000000000 --- a/sql-odbc/BUILD_INSTRUCTIONS.md +++ /dev/null @@ -1,360 +0,0 @@ -# Elasticsearch ODBC Driver Build Instructions - -The ElasticsearchODBC driver can be build on Windows and Mac. - -## Setting up Dependencies - -The driver [source code](https://github.com/opendistro-for-elasticsearch/sql/tree/master/sql-odbc) must be downloaded onto the system to build it. - -### Windows Dependencies - -Windows requires the following dependencies - -* [cmake](https://cmake.org/install/) -* [Visual Studio 2019](https://visualstudio.microsoft.com/vs/) (Other versions may work, but only 2019 has been tested) - -### Mac Dependencies - -Homebrew must be installed to manage packages, to install homebrew see the [homebrew homepage](https://brew.sh/). -Using homebrew, install the following packages using the command provided: ->brew install [package] -> ->* curl ->* cmake ->* libiodbc - -## Building the Driver - -Before building the driver, the build files for the system must be generated, this is done using cmake. - -### Providing AWS Credentials - -As project uses AWS services for AWS SIGV4 authentication, you must [provide AWS credentials](https://docs.aws.amazon.com/sdk-for-cpp/v1/developer-guide/credentials.html). - -### Setting up AWS SDK - -#### Windows -* Open Developer PowerShell for VS. -* Run aws_sdk_cpp_setup.ps1 script from the project's root directory. - -#### Mac -* Run aws_sdk_cpp_setup.sh script from the project's root directory. 
- -### Generating the Build Files - -Open the project's root directory in a command line interface of your choice. Execute ->**cmake ./src -D CMAKE_INSTALL_PREFIX=\/AWSSDK/** - -**Note:** It is desirable to not run cmake directly in the 'src' directory, because it will generate build files inline with code. - -### General CMake Options - -**BUILD_WITH_TESTS** - -(Defaults to ON) If disabled, all tests and and test dependencies will be excluded from build which will optimize the installer package size. This option can set with the command line (using `-D`). - -### Building with Windows - -Building the driver on Windows is done using **Visual Studio**. ->Open **global_make_list.sln** with **Visual Studio 2019**. -> ->* Set the **Solution Configuration** to **Release** ->* Set the **Solution Platform** to **x64** -> ->**Build the solution** by right clicking it in the **Solution Explorer** and selecting **Build Solution** - -### Building with Mac - -Building the driver on Mac is done using make. Using the CLI, enter: ->**make** - -## Output Files - -Building the driver will yield the driver, tests, and a library files (Windows only). - -### Output Location on Windows - -Compiling on Windows will output the tests and the driver to **bin64/Release** and the driver library file to **lib64/Release** directory. There are also some additional test infrastructure files which output to the **bin64/Release** directory and the **lib64/Release** directory. - -The driver can be consumed by linking to it using the library file (elasticodbc.lib in lib64/Release). BI tools can consume the driver by specifying the location of the dll (elasticodbc.dll in bin64/Release) in the [DSN](#setting-up-a-dsn). - -### Output Location on Mac - -Compiling on Mac will output the tests to **bin64** and the driver to **lib64**. There are also some additional test infrastructure files which output to the **lib64** directory. 
- -## Packaging installer - -Build the driver with `BUILD_WITH_TESTS` option disabled. - -#### Windows - -Open the project's build directory in Developer PowerShell for VS. -> msbuild .\PACKAGE.vcxproj -p:Configuration=Release - -Installer named as `Open Distro for Elasticsearch SQL ODBC Driver--Windows.msi` will be generated in the build directory. - -#### Mac - -Run below command from the project's build directory. ->cpack . - -Installer named as `Open Distro for Elasticsearch SQL ODBC Driver--Darwin.pkg` will be generated in the build directory. - -## Running Tests - -Tests can be **executed directly**, or by using the **Test Runner**. - -**NOTES:** - -* A test DSN named `test_dsn` must be set up in order for certain tests in ITODBCConnection to pass. To configure the DSN, see the instructions, below. -* Datasets must be loaded into Elasticsearch using [kibana](https://www.elastic.co/guide/en/kibana/current/connect-to-elasticsearch.html). See the section on loading datasets below. - -### Windows Test DSN Setup - -1. Open `src/IntegrationTests/ITODBCConnection/test_dsn.reg`. - * This contains the registry entries needed for setting up `test_dsn`. -2. Do one of the following: - * As an Administrator, run a command prompt or Powershell and run `reg import <.reg-file>` to add the entries to your registry. - * Manually add the entries to your registry using Registry Editor. - -### Mac Test DSN Setup - -1. Open `src/IntegrationTests/ITODBCConnection/test_odbc.ini` and `src/IntegrationTests/ITODBCConnection/test_odbcinst.ini` - * These contain the minimal configuration necessary for setting up `test_dsn`. -2. Do one of the following: - * Add the following lines to your .bash_profile to point the driver to these files. - * `export ODBCINI=/src/IntegrationTests/ITODBCConnection/test_odbc.ini` - * `export ODBCINSTINI=/src/IntegrationTests/ITODBCConnection/test_odbcinst.ini` - * Manually add the entries to your existing `odbc.ini` and `odbcinst.ini` entries. 
(normally found at `~/.odbc.ini` and `~/.odbcinst.ini`) - -### Loading Test Datasets - -Loading a dataset requires an [elasticsearch](https://opendistro.github.io/for-elasticsearch-docs/docs/install/) service running with [kibana](https://opendistro.github.io/for-elasticsearch-docs/docs/kibana/). If either of these are missing, please refer to the documentation on how to set them up. - -Note, if you wish to work with SSL/TLS, you need to configure Elasticsearch and Kibana to support it. See Working With SSL/TLS below. - -First load the sample datasets provided by kibana. - -1. Select home (top left corner) -2. Select 'Load a data set and a Kibana dashboard' -3. Select 'Add data' under 'Sample flight data' -4. Select 'Add data' under 'Sample eCommerce orders' -5. Select 'Add data' under 'Sample web logs' - -Then load custom data sets using the kibana console. -Select the wrench on the left control panel. Enter the following commands into the console and hit the play button after each one. 
- -```json -PUT /kibana_sample_data_types - { - "mappings": { - "properties": { - "type_boolean" : { "type": "boolean"}, - "type_byte" : { "type": "byte"}, - "type_short" : { "type": "short"}, - "type_integer" : { "type": "integer"}, - "type_long" : { "type": "long"}, - "type_half_float" : { "type": "half_float"}, - "type_float" : { "type": "float"}, - "type_double" : { "type": "double"}, - "type_scaled_float" : { "type": "scaled_float", "scaling_factor": 100 }, - "type_keyword" : { "type": "keyword"}, - "type_text" : { "type": "text"}, - "type_date" : { "type": "date"}, - "type_object" : { "type": "object"}, - "type_nested" : { "type": "nested"} - } - } - } -``` - -```json -POST /kibana_sample_data_types/_doc -{ - "type_boolean": true, - "type_byte" : -120, - "type_short" : -2000, - "type_integer" :-350000000, - "type_long" : -8010000000, - "type_half_float" : -2.115, - "type_float" : -3.1512, - "type_double" : -5335.2215, - "type_scaled_float" : -100.1, - "type_keyword" : "goodbye", - "type_text" : "planet", - "type_date" : "2016-02-21T12:23:52.803Z", - "type_object" : { "foo" : "bar" }, - "type_nested" : {"foo":"bar"} -} -``` - -```json -POST /kibana_sample_data_types/_doc -{ - "type_boolean": false, - "type_byte" : 100, - "type_short" : 1000, - "type_integer" : 250000000, - "type_long" : 8000000000, - "type_half_float" : 1.115, - "type_float" : 2.1512, - "type_double" : 25235.2215, - "type_scaled_float" : 100, - "type_keyword" : "hello", - "type_text" : "world", - "type_date" : "2018-07-22T12:23:52.803Z", - "type_object" : { "foo" : "bar" }, - "type_nested" : {"foo":"bar"} -} -``` - -### Working With SSL/TLS - -To disable SSL/TLS in the tests, the main CMakeLists.txt file must be edited. This can be found in the project 'src' directory. In the 'General compiler definitions' in the CMakeLists.txt file, USE_SSL is set. Remove this from the add_compile_definitions function to stop SSL/TLS from being used in the tests. 
- -To enable SSL/TLS on Elasticsearch, you must edit the Elasticsearch.yml file, found in the config directory of Elasticsearch. An example Elasticsearch yml file can be found in the dev folder of this project. The certificates specified MUST be in the config directory of the Elasticsearch instance. For more information, please refer to the [Elasticsearch security settings documentation](https://www.elastic.co/guide/en/elasticsearch/reference/current/security-settings.html). - -If you plan to use Kibana, as suggested for this project, you must also edit the Kibana settings. Notice, when specifying a certificate for Kibana, you do not need to place it in the Kibana config directory, but instead must provide the absolute path to it. An example Kibana.yml file can be found in the dev folder of this project. For more information, please refer to the [Kibana settings documentation](https://www.elastic.co/guide/en/kibana/current/settings.html). - -### Running Tests directly on Windows - -Tests can be executed directly using **Visual Studio** by setting the desired test as a **Start up Project** - ->* **Right click** the desired test project in the **Solution Explorer** ->* Select **Set as Startup Project** ->* Run the test by selecting **Local Windows Debugger** in the toolbar at the top of the application - -For more information, see the [Visual Studio Console Application documentation](https://docs.microsoft.com/en-us/cpp/build/vscpp-step-2-build?view=vs-2019). - -### Running Tests directly on Mac - -Tests can be executed using a command line interface. From the project root directory, enter: -> **bin64/** - -To execute a test. - -### Running Tests using the Test Runner - -The **Test Runner** requires [python](https://wiki.python.org/moin/BeginnersGuide/Download) to be installed on the system. Running the **Test Runner** will execute all the tests and compile a report with the results. 
The report indicates the execution status of all tests along with the execution time. To find error details of any failed test, hover over the test. - -#### Running Tests using the Test Runner on Windows - -Open the project's root directory in a command line interface of your choice. Execute ->**.\run_test_runner.bat** - -The **Test Runner** has been tried and tested with [Python3.8](https://www.python.org/downloads/release/python-380/) on **Windows systems**. Other versions of Python may work, but are untested. - -#### Running Tests using the Test Runner on Mac - -Open the project's root directory in a command line interface of your choice. Execute ->**./run_test_runner.sh** - -The **Test Runner** has been tried and tested with [Python3.7.6](https://www.python.org/downloads/release/python-376/) on **Mac systems**. Other versions of Python may work, but are untested. - -### Running Tests with Coverage (Mac only) - -(using a CMake script provided by George Cave (StableCoder) under the Apache 2.0 license, found [here](https://github.com/StableCoder/cmake-scripts/blob/master/code-coverage.cmake)) - -> **NOTE**: Before building with coverage, make sure the following directory is in your PATH environment variable: -> `/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin` - -To build the tests with code coverage enabled, set the `CODE_COVERAGE` variable to `ON` when preparing your CMake build. -```bash -cmake ... -DBUILD_WITH_TESTS=ON -DCODE_COVERAGE=ON -``` - -To get coverage for the driver library, you must use the `ccov-all` target, which runs all test suites and components with coverage. -```bash -make ccov-all -``` - -This will generate an HTML report at `/ccov/all-merged/index.html`, which can be opened in a web browser to view a summary of the overall code coverage, as well as line-by-line coverage for individual files. 
- -For more information interpreting this report, see https://clang.llvm.org/docs/SourceBasedCodeCoverage.html#interpreting-reports. - -## Setting up a DSN - -A **D**ata **S**ouce **N**ame is used to store driver information in the system. By storing the information in the system, the information does not need to be specified each time the driver connects. - -### Windows - -> To setup DSN, add following keys in the Registry -> - >* **HKEY_LOCAL_MACHINE/SOFTWARE/ ODBC/ODBC.INI** : Contains a key for each Data Source Name (DSN) - >* **HKEY_LOCAL_MACHINE/SOFTWARE/ ODBC/ODBC.INI/ODBC Data Sources** : Lists the data sources - >* **HKEY_LOCAL_MACHINE/SOFTWARE/ODBC/ODBCINST.INI** : Define each driver's name and setup location - >* **HKEY_LOCAL_MACHINE/SOFTWARE/ODBC/ODBCINST.INI/ODBC Drivers** : Lists the installed drivers. -> ->These keys can be added manually in the Registry Editor (Start > Run > Regedit) one by one. Alternatively, keys can be added together as follows: -> ->1. Modify the appropriate values for these keys in `src/IntegrationTests/ITODBCConnection/test_dsn.reg` ->2. Double click on the `test_dsn.reg` file. ->3. Click `Yes` on the confirmation window to add keys in the registry. - -### Mac - -**iODBC Administrator** can be used to setup a **DSN** on Mac. - -> 1. Open **iODBC Administrator** - > * **iODBC Administrator** is installed with **iODBC Driver Manager** and can be found by searching the **Spotlight** (or found in **/Applications**) -> 2. Go to the **ODBC Drivers** tab -> 3. Click **Add a Driver** - > * **Description of the Driver**: The driver name used for the **ODBC connections** (ex. *ElasticsearchODBC*) - > * **Driver File Name**: The path to the **driver file** (*< Project Directory >/lib64/libelasticodbc.dylib*) - > * **Setup File Name**: The path to the **driver file** (*< Project Directory >/lib64/libelasticodbc.dylib*) - > * Set as a **User** driver - > * Select **OK** to save the options -> 4. Go to the **User DSN** tab -> 5. 
Select **Add** - > * Choose the driver that was added in **Step 3** - > * **Data Source Name (DSN)**: The name of the DSN used to store connection options (ex. *ElasticsearchODBC*) - > * **Comment**: Not required - > * Add the following **key-value pairs** using the **'+'** button - > * **Host** | **localhost** // Or a different server endpoint - > * **Port** | **9200** // Or whatever your endpoints port is - > * **Username** | **admin** // Or whatever your endpoints username is - > * **Password** | **admin** // Or whatever your endpoints password is - > * Select **OK** to **save options** -> 6. Select **OK** to exit the **Administrator** - -If “General installer error” is encountered when saving the ODBC Driver, see Troubleshooting, below. - -## Working with Tableau - -[Tableau Desktop](https://www.tableau.com/products/desktop) must be installed on the target machine. - - 1. Open **Tableau Desktop** - 2. Select **More…** - 3. Select **Other Databases (ODBC)** - 4. In the **DSN drop-down**, select the *Elasticsearch DSN* you set up in the previous set of steps - 5. The options you added will *automatically* be filled into the **Connection Attributes** - 6. Select **Sign In** - 7. After a few seconds, Tableau will connect to your Elasticsearch server - -## Troubleshooting - -### iODBC Administrator: “General installer error” when saving new ODBC Driver - -Try the following: - -1. Create the folder ~/Library/ODBC, then try again -2. Create two files in ~/Library/ODBC, then open iODBC Administrator and verify the contents of **odbcinst.ini** and **odbc.ini** align with the format below. 
- * **odbcinst.ini** (will be found in **ODBC Drivers**) - >[ODBC Drivers] - \ = Installed - > - >[\] - Driver = \/lib64/libelasticodbc.dylib - Setup = \/lib64/libelasticodbc.dylib - - * **odbc.ini** (will be found in **User DSNs**) - >[ODBC Data Sources] - \ = \ - > - >[\] - Driver = \/lib64/libelasticodbc.dylib - Description = - Host = localhost - Port = 9200 - Username = admin - Password = admin diff --git a/sql-odbc/README.md b/sql-odbc/README.md index 86fc8353b7..1b7a3b6c55 100644 --- a/sql-odbc/README.md +++ b/sql-odbc/README.md @@ -14,6 +14,11 @@ The driver is compatible with ODBC 3.51. | Windows | Windows 10 | 32-bit, 64-bit | | MacOS | Catalina 10.15.4, Mojave 10.14.6 | 64-bit | +## Connectors + +* **Power BI Desktop**: [`OdfeSqlOdbcPBIConnector.mez`](./src/PowerBIConnector/bin/Release/OdfeSqlOdbcPBIConnector.mez) +* **Tableau**: [`odfe_sql_odbc.taco`](./src/TableauConnector/odfe_sql_odbc/odfe_sql_odbc.taco) + ## Installing the Driver You can use the installers generated as part of the most recent release. @@ -60,7 +65,7 @@ To setup a connection, the driver uses an ODBC connection string. Connection str ### Building -Please refer to the [build instructions](./BUILD_INSTRUCTIONS.md) for detailed build instructions on your platform. +Please refer to the [build instructions](./docs/dev/BUILD_INSTRUCTIONS.md) for detailed build instructions on your platform. If your PC is already setup to build the library, you can simply invoke cmake using > cmake ./src diff --git a/sql-odbc/aws_sdk_cpp_setup.ps1 b/sql-odbc/aws_sdk_cpp_setup.ps1 deleted file mode 100644 index ecc4aa6b65..0000000000 --- a/sql-odbc/aws_sdk_cpp_setup.ps1 +++ /dev/null @@ -1,30 +0,0 @@ -# -# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). -# You may not use this file except in compliance with the License. 
-# A copy of the License is located at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# or in the "license" file accompanying this file. This file is distributed -# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either -# express or implied. See the License for the specific language governing -# permissions and limitations under the License. -# - -git clone "https://github.com/aws/aws-sdk-cpp.git" - -$prefix_path = (pwd).path - -mkdir sdk-build - -cd sdk-build - -cmake ..\\aws-sdk-cpp\\ -D CMAKE_INSTALL_PREFIX=$prefix_path\AWSSDK\ -D CMAKE_BUILD_TYPE=Release -D BUILD_ONLY="core" -D CUSTOM_MEMORY_MANAGEMENT="OFF" -D ENABLE_RTTI="OFF" -D ENABLE_TESTING="OFF" - -msbuild ALL_BUILD.vcxproj /p:Configuration=Release - -msbuild INSTALL.vcxproj /p:Configuration=Release - -cd .. \ No newline at end of file diff --git a/sql-odbc/build_win_debug32.ps1 b/sql-odbc/build_win_debug32.ps1 index add9a03dfb..7e23ada173 100644 --- a/sql-odbc/build_win_debug32.ps1 +++ b/sql-odbc/build_win_debug32.ps1 @@ -1,27 +1,2 @@ -# Build AWS SDK -$BITNESS=32 - -# Compare Bitness for 32 -# $ARCH="Win32" - -mkdir sdk-build${BITNESS} -cd sdk-build${BITNESS} - -git clone "https://github.com/aws/aws-sdk-cpp.git" - -$prefix_path = (pwd).path -cmake .\aws-sdk-cpp -A Win32 -D CMAKE_INSTALL_PREFIX=${prefix_path}\AWSSDK\ -D CMAKE_BUILD_TYPE=Debug -D BUILD_ONLY="core" -D CUSTOM_MEMORY_MANAGEMENT="OFF" -D ENABLE_RTTI="OFF" -D ENABLE_TESTING="OFF" - -msbuild ALL_BUILD.vcxproj /m /p:Configuration=Debug -msbuild INSTALL.vcxproj /m /p:Configuration=Debug - -cd .. 
- -# # Configure Project -cmake -S src -B cmake-build${BITNESS} -A Win32 -D CMAKE_INSTALL_PREFIX=sdk-build${BITNESS}\AWSSDK\ -D BUILD_WITH_TESTS=ON - -# # Build Project -cmake --build .\cmake-build${BITNESS} --config Debug --parallel 4 - -cp .\sdk-build32\bin\Debug\* .\bin32\Debug -cp .\cmake-build32\bin\Debug\* .\bin32\Debug +$WORKING_DIR = (Get-Location).Path +.\scripts\build_windows.ps1 $WORKING_DIR Debug 32 diff --git a/sql-odbc/build_win_debug64.ps1 b/sql-odbc/build_win_debug64.ps1 index 998dffedca..ea7084bada 100644 --- a/sql-odbc/build_win_debug64.ps1 +++ b/sql-odbc/build_win_debug64.ps1 @@ -1,27 +1,2 @@ -# Build AWS SDK -$BITNESS = 64 - -# Compare Bitness for 32 -# $ARCH="x64" - -mkdir sdk-build${BITNESS} -cd sdk-build${BITNESS} - -git clone "https://github.com/aws/aws-sdk-cpp.git" - -$prefix_path = (pwd).path -cmake .\aws-sdk-cpp -A x64 -D CMAKE_INSTALL_PREFIX=${prefix_path}\AWSSDK\ -D CMAKE_BUILD_TYPE=Debug -D BUILD_ONLY="core" -D CUSTOM_MEMORY_MANAGEMENT="OFF" -D ENABLE_RTTI="OFF" -D ENABLE_TESTING="OFF" - -msbuild ALL_BUILD.vcxproj /m /p:Configuration=Debug -msbuild INSTALL.vcxproj /m /p:Configuration=Debug - -cd .. 
- -# # Configure Project -cmake -S src -B cmake-build${BITNESS} -A x64 -D CMAKE_INSTALL_PREFIX=sdk-build${BITNESS}\AWSSDK\ -D BUILD_WITH_TESTS=ON - -# # Build Project -cmake --build .\cmake-build${BITNESS} --config Debug --parallel 4 - -cp .\sdk-build64\bin\Debug\* .\bin64\Debug -cp .\cmake-build64\bin\Debug\* .\bin64\Debug +$WORKING_DIR = (Get-Location).Path +.\scripts\build_windows.ps1 $WORKING_DIR Debug 64 diff --git a/sql-odbc/build_win_release32.ps1 b/sql-odbc/build_win_release32.ps1 index b693778c94..4bcf4bd48e 100644 --- a/sql-odbc/build_win_release32.ps1 +++ b/sql-odbc/build_win_release32.ps1 @@ -1,27 +1,2 @@ -# Build AWS SDK -$BITNESS=32 - -# Compare Bitness for 32 -# $ARCH="Win32" - -mkdir sdk-build${BITNESS} -cd sdk-build${BITNESS} - -git clone "https://github.com/aws/aws-sdk-cpp.git" - -$prefix_path = (pwd).path -cmake .\aws-sdk-cpp -A Win32 -D CMAKE_INSTALL_PREFIX=${prefix_path}\AWSSDK\ -D CMAKE_BUILD_TYPE=Release -D BUILD_ONLY="core" -D CUSTOM_MEMORY_MANAGEMENT="OFF" -D ENABLE_RTTI="OFF" -D ENABLE_TESTING="OFF" - -msbuild ALL_BUILD.vcxproj /m /p:Configuration=Release -msbuild INSTALL.vcxproj /m /p:Configuration=Release - -cd .. 
- -# # Configure Project -cmake -S src -B cmake-build${BITNESS} -A Win32 -D CMAKE_INSTALL_PREFIX=sdk-build${BITNESS}\AWSSDK\ -D BUILD_WITH_TESTS=ON - -# # Build Project -cmake --build .\cmake-build${BITNESS} --config Release --parallel 4 - -cp .\sdk-build32\bin\Release\* .\bin32\Release -cp .\cmake-build32\bin\Release\* .\bin32\Release +$WORKING_DIR = (Get-Location).Path +.\scripts\build_windows.ps1 $WORKING_DIR Release 32 diff --git a/sql-odbc/build_win_release64.ps1 b/sql-odbc/build_win_release64.ps1 index 15a917219f..82b1199b33 100644 --- a/sql-odbc/build_win_release64.ps1 +++ b/sql-odbc/build_win_release64.ps1 @@ -1,27 +1,2 @@ -# Build AWS SDK -$BITNESS = 64 - -# Compare Bitness for 32 -# $ARCH="x64" - -mkdir sdk-build64 -cd .\sdk-build64 - -git clone "https://github.com/aws/aws-sdk-cpp.git" - -$prefix_path = (pwd).path -cmake .\aws-sdk-cpp -A x64 -D CMAKE_INSTALL_PREFIX=${prefix_path}\AWSSDK\ -D CMAKE_BUILD_TYPE=Release -D BUILD_ONLY="core" -D CUSTOM_MEMORY_MANAGEMENT="OFF" -D ENABLE_RTTI="OFF" -D ENABLE_TESTING="OFF" - -msbuild ALL_BUILD.vcxproj /m /p:Configuration=Release -msbuild INSTALL.vcxproj /m /p:Configuration=Release - -cd .. 
- -# # Configure Project -cmake -S src -B cmake-build64 -A x64 -D CMAKE_INSTALL_PREFIX=sdk-build64\AWSSDK\ -D BUILD_WITH_TESTS=ON - -# # Build Project -cmake --build .\cmake-build64 --config Release --parallel 4 - -cp .\sdk-build64\bin\Release\* .\bin64\Release -cp .\cmake-build64\bin\Release\* .\bin64\Release +$WORKING_DIR = (Get-Location).Path +.\scripts\build_windows.ps1 $WORKING_DIR Release 64 diff --git a/sql-odbc/docs/dev/BUILD_INSTRUCTIONS.md b/sql-odbc/docs/dev/BUILD_INSTRUCTIONS.md new file mode 100644 index 0000000000..f7d60cc949 --- /dev/null +++ b/sql-odbc/docs/dev/BUILD_INSTRUCTIONS.md @@ -0,0 +1,118 @@ +# ODFE SQL ODBC Driver Build Instructions + +## Windows + +### Dependencies + +* [cmake](https://cmake.org/install/) +* [Visual Studio 2019](https://visualstudio.microsoft.com/vs/) (Other versions may work, but only 2019 has been tested) +* [ODBC Driver source code](https://github.com/opendistro-for-elasticsearch/sql/tree/master/sql-odbc) + +### Build + +#### with Visual Studio + +Run `./build_win_.ps1` to generate a VS2019 project for building/testing the driver. (the build may fail, but should still generate a `.sln` file) + +The solution can be found at `\build\odbc\build\global_make_list.sln`. + +#### with Developer Powershell + +Use `./build_win_.ps1` to build the driver from a Developer Powershell prompt. + +> A shortcut is installed on your system with Visual Studio (search for **"Developer Powershell for VS 2019"**) + +> Programs launched with this prompt (ex: VS Code) also have access to the Developer shell. 
+ +### Build Output + +``` +build +└-- + └-- odbc + └-- bin + └-- + └-- build + └-- lib + └-- aws-sdk + └-- build + └-- install +``` + +* Driver DLL: `.\build\\odbc\bin\\odfesqlodbc.dll` +* Test binaries folder: `.\build\\odbc\bin\` + +### Packaging + +From a Developer Powershell, run: +``` +msbuild .\build\Release\odbc\PACKAGE.vcxproj -p:Configuration=Release +``` + +An installer named as `Open Distro for Elasticsearch SQL ODBC Driver--Windows--bit.msi` will be generated in the build directory. + + +## Mac +(TODO: upgrade build scripts & documentation for Mac) + +### Dependencies + +Homebrew must be installed to manage packages, to install homebrew see the [homebrew homepage](https://brew.sh/). +Using homebrew, install the following packages using the command provided: +>brew install [package] +> +>* curl +>* cmake +>* libiodbc + +### Building the Driver + +From a Bash shell: + +`./build_mac_.sh` + +### Output Location on Mac + +Compiling on Mac will output the tests to **bin64** and the driver to **lib64**. There are also some additional test infrastructure files which output to the **lib64** directory. + +### Packaging + +Run below command from the project's build directory. +>cpack . + +Installer named as `Open Distro for Elasticsearch SQL ODBC Driver--Darwin.pkg` will be generated in the build directory. + +## General Build Info + +### ODBC Driver CMake Options + +**BUILD_WITH_TESTS** + +(Defaults to ON) If disabled, all tests and test dependencies will be excluded from build which will optimize the installer package size. This option can be set with the command line (using `-D`). + +### Working With SSL/TLS + +To disable SSL/TLS in the tests, the main CMakeLists.txt file must be edited. This can be found in the project 'src' directory. In the 'General compiler definitions' in the CMakeLists.txt file, USE_SSL is set. Remove this from the add_compile_definitions function to stop SSL/TLS from being used in the tests. 
+ +To enable SSL/TLS on Elasticsearch, you must edit the Elasticsearch.yml file, found in the config directory of Elasticsearch. An example Elasticsearch yml file can be found in the dev folder of this project. The certificates specified MUST be in the config directory of the Elasticsearch instance. For more information, please refer to the [Elasticsearch security settings documentation](https://www.elastic.co/guide/en/elasticsearch/reference/current/security-settings.html). + +If you plan to use Kibana, as suggested for this project, you must also edit the Kibana settings. Notice, when specifying a certificate for Kibana, you do not need to place it in the Kibana config directory, but instead must provide the absolute path to it. An example Kibana.yml file can be found in the dev folder of this project. For more information, please refer to the [Kibana settings documentation](https://www.elastic.co/guide/en/kibana/current/settings.html). + +### Setting up a DSN + +A **D**ata **S**ource **N**ame is used to store driver information in the system. By storing the information in the system, the information does not need to be specified each time the driver connects. + +#### Windows + +> To setup DSN, add following keys in the Registry +> + >* **HKEY_LOCAL_MACHINE/SOFTWARE/ODBC/ODBC.INI** : Contains a key for each Data Source Name (DSN) + >* **HKEY_LOCAL_MACHINE/SOFTWARE/ODBC/ODBC.INI/ODBC Data Sources** : Lists the data sources + >* **HKEY_LOCAL_MACHINE/SOFTWARE/ODBC/ODBCINST.INI** : Define each driver's name and setup location + >* **HKEY_LOCAL_MACHINE/SOFTWARE/ODBC/ODBCINST.INI/ODBC Drivers** : Lists the installed drivers. +> +>These keys can be added manually in the Registry Editor (Start > Run > Regedit) one by one. Alternatively, keys can be added together as follows: +> +>1. Modify the appropriate values for these keys in `src/IntegrationTests/ITODBCConnection/test_dsn.reg` +>2. Double click on the `test_dsn.reg` file. +>3. 
Click `Yes` on the confirmation window to add keys in the registry. diff --git a/sql-odbc/docs/dev/datasets/kibana_sample_data_types.md b/sql-odbc/docs/dev/datasets/kibana_sample_data_types.md new file mode 100644 index 0000000000..5f2e0fc185 --- /dev/null +++ b/sql-odbc/docs/dev/datasets/kibana_sample_data_types.md @@ -0,0 +1,72 @@ +# kibana_sample_data_types + +## Mapping + +```json +PUT /kibana_sample_data_types +{ + "mappings": { + "properties": { + "type_boolean" : { "type": "boolean"}, + "type_byte" : { "type": "byte"}, + "type_short" : { "type": "short"}, + "type_integer" : { "type": "integer"}, + "type_long" : { "type": "long"}, + "type_half_float" : { "type": "half_float"}, + "type_float" : { "type": "float"}, + "type_double" : { "type": "double"}, + "type_scaled_float" : { + "type": "scaled_float", + "scaling_factor": 100 + }, + "type_keyword" : { "type": "keyword"}, + "type_text" : { "type": "text"}, + "type_date" : { "type": "date"}, + "type_object" : { "type": "object"}, + "type_nested" : { "type": "nested"} + } + } +} +``` + +## Data + +```json +POST /kibana_sample_data_types/_doc +{ + "type_boolean": true, + "type_byte" : -120, + "type_short" : -2000, + "type_integer" :-350000000, + "type_long" : -8010000000, + "type_half_float" : -2.115, + "type_float" : -3.1512, + "type_double" : -5335.2215, + "type_scaled_float" : -100.1, + "type_keyword" : "goodbye", + "type_text" : "planet", + "type_date" : "2016-02-21T12:23:52.803Z", + "type_object" : { "foo" : "bar" }, + "type_nested" : {"foo":"bar"} +} +``` + +```json +POST /kibana_sample_data_types/_doc +{ + "type_boolean": false, + "type_byte" : 100, + "type_short" : 1000, + "type_integer" : 250000000, + "type_long" : 8000000000, + "type_half_float" : 1.115, + "type_float" : 2.1512, + "type_double" : 25235.2215, + "type_scaled_float" : 100, + "type_keyword" : "hello", + "type_text" : "world", + "type_date" : "2018-07-22T12:23:52.803Z", + "type_object" : { "foo" : "bar" }, + "type_nested" : {"foo":"bar"} +} 
+``` \ No newline at end of file diff --git a/sql-odbc/docs/dev/run_tests.md b/sql-odbc/docs/dev/run_tests.md new file mode 100644 index 0000000000..903e5f8f0a --- /dev/null +++ b/sql-odbc/docs/dev/run_tests.md @@ -0,0 +1,106 @@ +# ODFE SQL ODBC Driver Testing + +## Preparation + +* Latest version of [Open Distro for Elasticsearch](https://opendistro.github.io/for-elasticsearch-docs/docs/install/) + +### Loading Test Datasets + +Loading a dataset requires an [elasticsearch](https://opendistro.github.io/for-elasticsearch-docs/docs/install/) service running with [kibana](https://opendistro.github.io/for-elasticsearch-docs/docs/kibana/). If either of these are missing, please refer to the documentation on how to set them up. + +Note, if you wish to work with SSL/TLS, you need to configure Elasticsearch and Kibana to support it. See Working With SSL/TLS below. + +First load the sample datasets provided by kibana. + +1. Select home (top left corner) +2. Select 'Load a data set and a Kibana dashboard' +3. Select 'Add data' under 'Sample flight data' +4. Select 'Add data' under 'Sample eCommerce orders' +5. Select 'Add data' under 'Sample web logs' + +Then load the following custom data sets using the kibana console. +Select the wrench on the left control panel. Enter the following commands into the console and hit the play button after each one. + +* [kibana_sample_data_types](./datasets/kibana_sample_data_types.md) + +## Running Tests + +Tests can be **executed directly**, or by using the **Test Runner**. + +**NOTES:** + +* A test DSN named `test_dsn` must be set up in order for certain tests in ITODBCConnection to pass. To configure the DSN, see the instructions, below. +* Datasets must be loaded into Elasticsearch using [kibana](https://www.elastic.co/guide/en/kibana/current/connect-to-elasticsearch.html). See the section on loading datasets below. + +### Windows Test DSN Setup + +1. Open `src/IntegrationTests/ITODBCConnection/test_dsn.reg`. 
+ * This contains the registry entries needed for setting up `test_dsn`. +2. Do one of the following: + * As an Administrator, run a command prompt or Powershell and run `reg import <.reg-file>` to add the entries to your registry. + * Manually add the entries to your registry using Registry Editor. + +### Mac Test DSN Setup + +1. Open `src/IntegrationTests/ITODBCConnection/test_odbc.ini` and `src/IntegrationTests/ITODBCConnection/test_odbcinst.ini` + * These contain the minimal configuration necessary for setting up `test_dsn`. +2. Do one of the following: + * Add the following lines to your .bash_profile to point the driver to these files. + * `export ODBCINI=/src/IntegrationTests/ITODBCConnection/test_odbc.ini` + * `export ODBCINSTINI=/src/IntegrationTests/ITODBCConnection/test_odbcinst.ini` + * Manually add the entries to your existing `odbc.ini` and `odbcinst.ini` entries. (normally found at `~/.odbc.ini` and `~/.odbcinst.ini`) + +### Running Tests directly on Windows + +Tests can be executed directly using **Visual Studio** by setting the desired test as a **Start up Project** + +>* **Right click** the desired test project in the **Solution Explorer** +>* Select **Set as Startup Project** +>* Run the test by selecting **Local Windows Debugger** in the toolbar at the top of the application + +For more information, see the [Visual Studio Console Application documentation](https://docs.microsoft.com/en-us/cpp/build/vscpp-step-2-build?view=vs-2019). + +### Running Tests directly on Mac + +Tests can be executed using a command line interface. From the project root directory, enter: +> **bin64/** + +### Running Tests using the Test Runner + +The **Test Runner** requires [python](https://wiki.python.org/moin/BeginnersGuide/Download) to be installed on the system. Running the **Test Runner** will execute all the tests and compile a report with the results. The report indicates the execution status of all tests along with the execution time. 
To find error details of any failed test, hover over the test. + +#### Running Tests using the Test Runner on Windows + +Open the project's root directory in a command line interface of your choice. Execute +>**.\run_test_runner.bat** + +The **Test Runner** has been tried and tested with [Python3.8](https://www.python.org/downloads/release/python-380/) on **Windows systems**. Other versions of Python may work, but are untested. + +#### Running Tests using the Test Runner on Mac + +Open the project's root directory in a command line interface of your choice. Execute +>**./run_test_runner.sh** + +The **Test Runner** has been tried and tested with [Python3.7.6](https://www.python.org/downloads/release/python-376/) on **Mac systems**. Other versions of Python may work, but are untested. + +### Run with Coverage (Mac only) + +(using a CMake script provided by George Cave (StableCoder) under the Apache 2.0 license, found [here](https://github.com/StableCoder/cmake-scripts/blob/master/code-coverage.cmake)) + +> **NOTE**: Before building with coverage, make sure the following directory is in your PATH environment variable: +> `/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin` + +To build the tests with code coverage enabled, set the `CODE_COVERAGE` variable to `ON` when preparing your CMake build. +```bash +cmake ... -DBUILD_WITH_TESTS=ON -DCODE_COVERAGE=ON +``` + +To get coverage for the driver library, you must use the `ccov-all` target, which runs all test suites and components with coverage. +```bash +make ccov-all +``` + +This will generate an HTML report at `/ccov/all-merged/index.html`, which can be opened in a web browser to view a summary of the overall code coverage, as well as line-by-line coverage for individual files. + +For more information interpreting this report, see https://clang.llvm.org/docs/SourceBasedCodeCoverage.html#interpreting-reports. 
+ diff --git a/sql-odbc/docs/test/img/microsoft_query_add_tables.png b/sql-odbc/docs/test/img/microsoft_query_add_tables.png new file mode 100644 index 0000000000..6bc517f038 Binary files /dev/null and b/sql-odbc/docs/test/img/microsoft_query_add_tables.png differ diff --git a/sql-odbc/docs/test/img/microsoft_query_table_options.png b/sql-odbc/docs/test/img/microsoft_query_table_options.png new file mode 100644 index 0000000000..cd673ac584 Binary files /dev/null and b/sql-odbc/docs/test/img/microsoft_query_table_options.png differ diff --git a/sql-odbc/docs/test/img/query_wizard_choose_coulms.png b/sql-odbc/docs/test/img/query_wizard_choose_coulms.png new file mode 100644 index 0000000000..073b7b7f00 Binary files /dev/null and b/sql-odbc/docs/test/img/query_wizard_choose_coulms.png differ diff --git a/sql-odbc/docs/test/img/query_wizard_error_popup.png b/sql-odbc/docs/test/img/query_wizard_error_popup.png new file mode 100644 index 0000000000..2595bbdcfd Binary files /dev/null and b/sql-odbc/docs/test/img/query_wizard_error_popup.png differ diff --git a/sql-odbc/docs/test/img/query_wizard_table_options.png b/sql-odbc/docs/test/img/query_wizard_table_options.png new file mode 100644 index 0000000000..30a3aa3586 Binary files /dev/null and b/sql-odbc/docs/test/img/query_wizard_table_options.png differ diff --git a/sql-odbc/docs/test/power_bi_manual_test_plan.xlsx b/sql-odbc/docs/test/power_bi_manual_test_plan.xlsx new file mode 100644 index 0000000000..a1948253cb Binary files /dev/null and b/sql-odbc/docs/test/power_bi_manual_test_plan.xlsx differ diff --git a/sql-odbc/docs/user/img/pbi_connection_string_options.png b/sql-odbc/docs/user/img/pbi_connection_string_options.png index 57b763a856..6cc19c61f1 100644 Binary files a/sql-odbc/docs/user/img/pbi_connection_string_options.png and b/sql-odbc/docs/user/img/pbi_connection_string_options.png differ diff --git a/sql-odbc/docs/user/img/pbi_disable_parallel_loading_tables.png 
b/sql-odbc/docs/user/img/pbi_disable_parallel_loading_tables.png new file mode 100644 index 0000000000..4e12022e9e Binary files /dev/null and b/sql-odbc/docs/user/img/pbi_disable_parallel_loading_tables.png differ diff --git a/sql-odbc/docs/user/img/pbi_error_conn.png b/sql-odbc/docs/user/img/pbi_error_conn.png index 87f908a397..261999ab80 100644 Binary files a/sql-odbc/docs/user/img/pbi_error_conn.png and b/sql-odbc/docs/user/img/pbi_error_conn.png differ diff --git a/sql-odbc/docs/user/img/pbi_gateway_connector_path.png b/sql-odbc/docs/user/img/pbi_gateway_connector_path.png new file mode 100644 index 0000000000..2a391372ad Binary files /dev/null and b/sql-odbc/docs/user/img/pbi_gateway_connector_path.png differ diff --git a/sql-odbc/docs/user/img/pbi_gateway_status.png b/sql-odbc/docs/user/img/pbi_gateway_status.png new file mode 100644 index 0000000000..c7f4272ad6 Binary files /dev/null and b/sql-odbc/docs/user/img/pbi_gateway_status.png differ diff --git a/sql-odbc/docs/user/img/pbi_publish_report.png b/sql-odbc/docs/user/img/pbi_publish_report.png new file mode 100644 index 0000000000..a04e66aeb1 Binary files /dev/null and b/sql-odbc/docs/user/img/pbi_publish_report.png differ diff --git a/sql-odbc/docs/user/img/pbi_publish_status.png b/sql-odbc/docs/user/img/pbi_publish_status.png new file mode 100644 index 0000000000..8978c94860 Binary files /dev/null and b/sql-odbc/docs/user/img/pbi_publish_status.png differ diff --git a/sql-odbc/docs/user/img/pbi_select_workspace.png b/sql-odbc/docs/user/img/pbi_select_workspace.png new file mode 100644 index 0000000000..8d89fe3073 Binary files /dev/null and b/sql-odbc/docs/user/img/pbi_select_workspace.png differ diff --git a/sql-odbc/docs/user/img/pbi_service_cluster_setting.png b/sql-odbc/docs/user/img/pbi_service_cluster_setting.png new file mode 100644 index 0000000000..8c6f46f5b8 Binary files /dev/null and b/sql-odbc/docs/user/img/pbi_service_cluster_setting.png differ diff --git 
a/sql-odbc/docs/user/img/pbi_service_data_source.png b/sql-odbc/docs/user/img/pbi_service_data_source.png new file mode 100644 index 0000000000..dbee689ec0 Binary files /dev/null and b/sql-odbc/docs/user/img/pbi_service_data_source.png differ diff --git a/sql-odbc/docs/user/img/pbi_service_data_source_success.png b/sql-odbc/docs/user/img/pbi_service_data_source_success.png new file mode 100644 index 0000000000..aa33429a96 Binary files /dev/null and b/sql-odbc/docs/user/img/pbi_service_data_source_success.png differ diff --git a/sql-odbc/docs/user/img/pbi_service_dataset_gateway.png b/sql-odbc/docs/user/img/pbi_service_dataset_gateway.png new file mode 100644 index 0000000000..ce739a5fd9 Binary files /dev/null and b/sql-odbc/docs/user/img/pbi_service_dataset_gateway.png differ diff --git a/sql-odbc/docs/user/img/pbi_service_dataset_refresh.png b/sql-odbc/docs/user/img/pbi_service_dataset_refresh.png new file mode 100644 index 0000000000..4170494d8a Binary files /dev/null and b/sql-odbc/docs/user/img/pbi_service_dataset_refresh.png differ diff --git a/sql-odbc/docs/user/img/pbi_service_dataset_refresh_history.png b/sql-odbc/docs/user/img/pbi_service_dataset_refresh_history.png new file mode 100644 index 0000000000..3810003117 Binary files /dev/null and b/sql-odbc/docs/user/img/pbi_service_dataset_refresh_history.png differ diff --git a/sql-odbc/docs/user/img/pbi_service_edit_reoprt.png b/sql-odbc/docs/user/img/pbi_service_edit_reoprt.png new file mode 100644 index 0000000000..0e8aff6a63 Binary files /dev/null and b/sql-odbc/docs/user/img/pbi_service_edit_reoprt.png differ diff --git a/sql-odbc/docs/user/img/pbi_service_modified_report.png b/sql-odbc/docs/user/img/pbi_service_modified_report.png new file mode 100644 index 0000000000..098485fba7 Binary files /dev/null and b/sql-odbc/docs/user/img/pbi_service_modified_report.png differ diff --git a/sql-odbc/docs/user/img/pbi_service_setting.png b/sql-odbc/docs/user/img/pbi_service_setting.png new file mode 100644 index 
0000000000..574aba671d Binary files /dev/null and b/sql-odbc/docs/user/img/pbi_service_setting.png differ diff --git a/sql-odbc/docs/user/img/pbi_simple_graph.png b/sql-odbc/docs/user/img/pbi_simple_graph.png new file mode 100644 index 0000000000..9174de438b Binary files /dev/null and b/sql-odbc/docs/user/img/pbi_simple_graph.png differ diff --git a/sql-odbc/docs/user/mac_configure_dsn.md b/sql-odbc/docs/user/mac_configure_dsn.md index 303ad23640..c4dd6b3faa 100644 --- a/sql-odbc/docs/user/mac_configure_dsn.md +++ b/sql-odbc/docs/user/mac_configure_dsn.md @@ -36,4 +36,36 @@ This is not required if you are using the Tableau Connector, but will help with

    -

    \ No newline at end of file +

    + + +## Troubleshooting + +### iODBC Administrator: “General installer error” when saving new ODBC Driver + +Try the following: + +1. Create the folder `~/Library/ODBC`, then try again +2. Create two files in `~/Library/ODBC`, then open iODBC Administrator and verify the contents of **odbcinst.ini** and **odbc.ini** align with the format below. + * **odbcinst.ini** (will be found in **ODBC Drivers**) + ```ini + [ODBC Drivers] + = Installed + + [] + Driver = /lib64/libodfesqlodbc.dylib + Setup = /lib64/libodfesqlodbc.dylib + ``` + * **odbc.ini** (will be found in **User DSNs**) + ```ini + [ODBC Data Sources] + = + + [] + Driver = /lib64/libodfesqlodbc.dylib + Description = + Host = localhost + Port = 9200 + Username = admin + Password = admin + ``` \ No newline at end of file diff --git a/sql-odbc/docs/user/power_bi_service_support.md b/sql-odbc/docs/user/power_bi_service_support.md new file mode 100644 index 0000000000..46edbf01bd --- /dev/null +++ b/sql-odbc/docs/user/power_bi_service_support.md @@ -0,0 +1,82 @@ +# Connecting Open Distro For ElasticSearch to Microsoft Power BI Service + +## Setup +* Download and Install [On-premises data gateway](https://docs.microsoft.com/en-us/data-integration/gateway/service-gateway-install) +* Change the path for custom data connector folder in On-premises data gateway so that the gateway can find the custom connector. +> NOTE: Ensure the gateway service account (**PBIEgwService**) has permissions to access the custom connector folder. Alternatively, you can copy connector file to `C:\Windows\ServiceProfiles\PBIEgwService\Documents\Power BI Desktop\Custom Connectors\`. + + + +* Verify the status of data gateway is ready to be used. + + + +* Login to Power BI Service. +* Click on **Setting** > **Manage Gateway**. + + + +* Select **Allow user's custom data connectors to refresh through this gateway cluster(preview)**. Click on **Apply**. + + + +* Click on **Add data sources to use the gateway**. 
+* Select Data Source Type as **Open Distro For Elasticsearch**. +* Enter Data Source Name and Server values. +* Select required **Authentication Method**. Select **Anonymous** for auth **NONE**. +For **AWS_SIGV4**, select **Key** and set aws access credentials for user **PBIEgwService** at path `C:\Windows\ServiceProfiles\PBIEgwService\.aws\` + +* Select Encryption mode for connection. +* Click on **Add**. + + + +* You will get a **Connection Successful** message. + + + + +## Publish Report + +* Follow [instructions](./power_bi_support.md) to create graph using Open Disto For Elasticsearch Data connector. +* Click on **Publish** to publish the report on Power BI service. + + + +* Select destination and click on **Select**. + + + +* You will get a success message when report is published. + + + +* Click on **Open '%report name%' in Power BI** to open published report in Power BI service. + +## Modify report using Power BI Service + +* Click on **Edit report** to modfify report. + + + +* Use **Filters**,**Visualizations** and **Fields** to modify report. + + + +## Dataset Scheduled Refresh + +* Click on **Settings** > **Datasets**. +* Select required Gateway and click on **Apply**. + + + +* Turn on Keep your data up to date option. +* Select refresh frequency and timezone. +* Add email for failure notifications if required. +* Click on **Apply**. + + + +* You can also check history by clicking on **Refresh history**. + + \ No newline at end of file diff --git a/sql-odbc/docs/user/power_bi_support.md b/sql-odbc/docs/user/power_bi_support.md index 3e5a6f2c4f..e155561518 100644 --- a/sql-odbc/docs/user/power_bi_support.md +++ b/sql-odbc/docs/user/power_bi_support.md @@ -1,12 +1,11 @@ # Connecting Open Distro For ElasticSearch to Microsoft Power BI Desktop -**NOTE**: **The connector is under development. All connection options are not available yet. 
There could be issues while loading data** - ## Prerequisites * Microsoft Power BI Desktop * [Open Distro for Elasticsearch](https://opendistro.github.io/for-elasticsearch-docs/docs/install/) * [Open Distro for Elasticsearch SQL ODBC driver](https://opendistro.github.io/for-elasticsearch-docs/docs/sql/odbc/) * [OdfeSqlOdbcPBIConnector.mez](../../src/PowerBIConnector/bin/Release/) +* Optional: [odfesqlodbc_import.pbids](../../src/PowerBIConnector/PBIDSExamples) to help with repeated connections to the same server ## Setup * Copy `OdfeSqlOdbcPBIConnector.mez` file in the `\Documents\Power BI Desktop\Custom Connectors\` folder. This will let Power BI access custom connector. @@ -19,7 +18,14 @@ ## Load Data +> **NOTE**: Currently only import mode is supported. Direct query support will be added soon. + * Open Power BI Desktop. + +* Disable parallel loading of tables. Click on **Files** > **Options and settings** > **Options** > **CURRENT FILE** > **Data Load** > Deselect **Enable parallel loading of tables** and click **OK**. + + + * Click on **Home** > **Get Data** > **More** > **Other**. Select **Open Distro For Elasticsearch (Beta)**. Click on **Connect**. @@ -28,7 +34,7 @@ -* Enter host and port values. Click on **OK**. +* Enter server value. Click on **OK**. @@ -42,6 +48,42 @@ * Click on **Load**. +* Select required columns for creating graph. + + + +## Using .PBIDS Files + +More info: https://docs.microsoft.com/en-us/power-bi/connect-data/desktop-data-sources#using-pbids-files-to-get-data + +Example PBIDS file for Open Distro for Elasticsearch: (available here: [odfesqlodbc_import.pbids](../../src/PowerBIConnector/PBIDSExamples/odfesqlodbc_import.pbids)) +```json +{ + "version": "0.1", + "connections": [ + { + "details": { + "protocol": "odfesqlodbc", + "address": { + "server": "localhost:9200" + } + }, + "mode": "Import" + } + ] +} +``` + +The only part you should change is the `server` attribute, to point to the location of your ODFE server. 
+* For AWS connections, this will be the full path of your ODFE instance (ex: `https://aws-odfe-instance.us-west-1.com`). +* Otherwise, this will be the `host:port` combination for your instance (ex: `localhost:9200`). + +Save this as a `.pbids` file. Double-click on it to open up your connection in Power BI Desktop. +It will take you straight to the **Navigator** window for selecting the tables from the ODFE server. +* If this is the first time you are connecting to this instance, you will be prompted for your credentials. + + + ## Troubleshooting * If you get an following error, please install [Open Distro For Elasticsearch SQL ODBC Driver](https://opendistro.github.io/for-elasticsearch-docs/docs/sql/odbc/). diff --git a/sql-odbc/run_test_runner.bat b/sql-odbc/run_test_runner.bat index 3f08b46ddd..a413b3ab81 100644 --- a/sql-odbc/run_test_runner.bat +++ b/sql-odbc/run_test_runner.bat @@ -15,7 +15,7 @@ set PROJECT_DIR=%CD% set TEST_RUNNER_DIR=%PROJECT_DIR%\src\TestRunner -set WORKING_DIR=%PROJECT_DIR%\bin64\Release +set WORKING_DIR=%PROJECT_DIR%\build\Debug64\odbc\bin\Debug cd %WORKING_DIR% diff --git a/sql-odbc/scripts/build_aws-sdk-cpp.ps1 b/sql-odbc/scripts/build_aws-sdk-cpp.ps1 new file mode 100644 index 0000000000..f3835e77a7 --- /dev/null +++ b/sql-odbc/scripts/build_aws-sdk-cpp.ps1 @@ -0,0 +1,34 @@ +$CONFIGURATION = $args[0] +$WIN_ARCH = $args[1] +$SRC_DIR = $args[2] +$BUILD_DIR = $args[3] +$INSTALL_DIR = $args[4] + +Write-Host $args + +# Clone the AWS SDK CPP repo +# $SDK_VER = "1.7.29" +# -b "$SDK_VER" ` +git clone ` + --single-branch ` + "https://github.com/aws/aws-sdk-cpp.git" ` + $SRC_DIR + +# Make and move to build directory +New-Item -Path $BUILD_DIR -ItemType Directory -Force | Out-Null +Set-Location $BUILD_DIR + +# Configure and build +cmake $SRC_DIR ` + -A $WIN_ARCH ` + -D CMAKE_INSTALL_PREFIX=$INSTALL_DIR ` + -D CMAKE_BUILD_TYPE=$CONFIGURATION ` + -D BUILD_ONLY="core" ` + -D ENABLE_UNITY_BUILD="ON" ` + -D CUSTOM_MEMORY_MANAGEMENT="OFF" ` + -D 
ENABLE_RTTI="OFF" ` + -D ENABLE_TESTING="OFF" + +# Build AWS SDK and install to $INSTALL_DIR +msbuild ALL_BUILD.vcxproj /m /p:Configuration=$CONFIGURATION +msbuild INSTALL.vcxproj /m /p:Configuration=$CONFIGURATION diff --git a/sql-odbc/scripts/build_driver.ps1 b/sql-odbc/scripts/build_driver.ps1 new file mode 100644 index 0000000000..85d9de5731 --- /dev/null +++ b/sql-odbc/scripts/build_driver.ps1 @@ -0,0 +1,15 @@ +$CONFIGURATION = $args[0] +$WIN_ARCH = $args[1] +$SRC_DIR = $args[2] +$BUILD_DIR = $args[3] +$INSTALL_DIR = $args[4] + +cmake -S $SRC_DIR ` + -B $BUILD_DIR ` + -A $WIN_ARCH ` + -D CMAKE_BUILD_TYPE=$CONFIGURATION ` + -D CMAKE_INSTALL_PREFIX=$INSTALL_DIR ` + -D BUILD_WITH_TESTS=ON + +# # Build Project +cmake --build $BUILD_DIR --config $CONFIGURATION --parallel 4 diff --git a/sql-odbc/scripts/build_installer.ps1 b/sql-odbc/scripts/build_installer.ps1 new file mode 100644 index 0000000000..71b0f54e1b --- /dev/null +++ b/sql-odbc/scripts/build_installer.ps1 @@ -0,0 +1,17 @@ +$CONFIGURATION = $args[0] +$WIN_ARCH = $args[1] +$SRC_DIR = $args[2] +$BUILD_DIR = $args[3] +$INSTALL_DIR = $args[4] + +Write-Host $args + +cmake -S $SRC_DIR ` + -B $BUILD_DIR ` + -A $WIN_ARCH ` + -D CMAKE_BUILD_TYPE=$CONFIGURATION ` + -D CMAKE_INSTALL_PREFIX=$INSTALL_DIR ` + -D BUILD_WITH_TESTS=OFF + +# # Build Project +msbuild $BUILD_DIR\PACKAGE.vcxproj -m -p:Configuration=Release diff --git a/sql-odbc/scripts/build_windows.ps1 b/sql-odbc/scripts/build_windows.ps1 new file mode 100644 index 0000000000..41ef150424 --- /dev/null +++ b/sql-odbc/scripts/build_windows.ps1 @@ -0,0 +1,48 @@ +# Build AWS SDK +$CURRENT_DIR = Get-Location +$WORKING_DIR = $args[0] +$CONFIGURATION = $args[1] +$BITNESS = $args[2] +if ($BITNESS -eq "64") { + $WIN_ARCH = "x64" +} +else { + $WIN_ARCH = "Win32" +} + +# Create build directory; remove if exists +$BUILD_DIR = "${WORKING_DIR}\build" +# $BUILD_DIR = "${WORKING_DIR}\build\${CONFIGURATION}${BITNESS}" +New-Item -Path $BUILD_DIR -ItemType Directory -Force | 
Out-Null + +# Build AWS SDK CPP +$SDK_SOURCE_DIR = "${WORKING_DIR}\src\aws-sdk-cpp" +$SDK_BUILD_DIR = "${BUILD_DIR}\aws-sdk\build" +$SDK_INSTALL_DIR = "${BUILD_DIR}\aws-sdk\install" + +.\scripts\build_aws-sdk-cpp.ps1 ` + $CONFIGURATION $WIN_ARCH ` + $SDK_SOURCE_DIR $SDK_BUILD_DIR $SDK_INSTALL_DIR +Set-Location $CURRENT_DIR + +# Build driver +$DRIVER_SOURCE_DIR = "${WORKING_DIR}\src" +$DRIVER_BUILD_DIR = "${BUILD_DIR}\odbc\cmake" + +.\scripts\build_driver.ps1 ` + $CONFIGURATION $WIN_ARCH ` + $DRIVER_SOURCE_DIR $DRIVER_BUILD_DIR $SDK_INSTALL_DIR +Set-Location $CURRENT_DIR + +# Move driver dependencies to bin directory for testing +$DRIVER_BIN_DIR = "$DRIVER_BUILD_DIR\..\bin\$CONFIGURATION" +New-Item -Path $DRIVER_BIN_DIR -ItemType Directory -Force | Out-Null + +Copy-Item $SDK_BUILD_DIR\bin\$CONFIGURATION\* $DRIVER_BIN_DIR +Copy-Item $DRIVER_BUILD_DIR\bin\$CONFIGURATION\* $DRIVER_BIN_DIR +if ($BITNESS -eq "32") { + # Strip bitness from 32bit VLD DLL dir name + $BITNESS = $null + $WIN_ARCH = "x86" +} +Copy-Item .\libraries\VisualLeakDetector\bin$BITNESS\vld_$WIN_ARCH.dll $DRIVER_BIN_DIR diff --git a/sql-odbc/scripts/prepare_ci_output.ps1 b/sql-odbc/scripts/prepare_ci_output.ps1 new file mode 100644 index 0000000000..27e0728a6b --- /dev/null +++ b/sql-odbc/scripts/prepare_ci_output.ps1 @@ -0,0 +1,20 @@ +$ODBC_BIN_PATH = $args[0] +$ODBC_LIB_PATH = $args[1] +$ODBC_BUILD_PATH = $args[2] + +Write-Host $args + +# Create staging directories for CI artifacts +$CI_OUTPUT_PATH = ".\ci-output" +New-Item -Path $CI_OUTPUT_PATH -ItemType Directory -Force | Out-Null +New-Item -Path $CI_OUTPUT_PATH\build -ItemType Directory -Force | Out-Null +New-Item -Path $CI_OUTPUT_PATH\installer -ItemType Directory -Force | Out-Null + +# Copy CI artifacts to respective directories +Copy-Item $ODBC_BIN_PATH\*.dll $CI_OUTPUT_PATH\build +Copy-Item $ODBC_BIN_PATH\*.exe $CI_OUTPUT_PATH\build +Copy-Item $ODBC_LIB_PATH\*.lib $CI_OUTPUT_PATH\build +Copy-Item $ODBC_BUILD_PATH\*.msi 
$CI_OUTPUT_PATH\installer +# mkdir $CI_OUTPUT_PATH\test +# Copy-Item $ODBC_BIN_PATH\*.log $CI_OUTPUT_PATH\test +# Copy-Item $ODBC_BIN_PATH\*.html $CI_OUTPUT_PATH\test \ No newline at end of file diff --git a/sql-odbc/src/CMakeLists.txt b/sql-odbc/src/CMakeLists.txt index b8b663f016..cde619bcd3 100644 --- a/sql-odbc/src/CMakeLists.txt +++ b/sql-odbc/src/CMakeLists.txt @@ -25,6 +25,20 @@ project(global_make_list) include("${CMAKE_CURRENT_SOURCE_DIR}/modules/code-coverage.cmake") add_code_coverage_all_targets(EXCLUDE libraries aws-cpp-sdk googletest IntegrationTests) +if(CMAKE_SIZEOF_VOID_P EQUAL 8) + set(BITNESS 64) + set(BITNESS_NAME "x64") + set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/../build/odbc/lib") + set(CMAKE_LIBRARY_OUTPUT_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/../build/odbc/lib") + set(CMAKE_RUNTIME_OUTPUT_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/../build/odbc/bin") +elseif(CMAKE_SIZEOF_VOID_P EQUAL 4) + set(BITNESS 32) + set(BITNESS_NAME "x86") + set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/../build/odbc/lib") + set(CMAKE_LIBRARY_OUTPUT_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/../build/odbc/lib") + set(CMAKE_RUNTIME_OUTPUT_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/../build/odbc/bin") +endif() + # This wasn't being picked up on mac, causes some symbol errors if(APPLE) set(CMAKE_CXX_STANDARD 20) @@ -47,20 +61,6 @@ else() add_compile_options(-Wall -Wextra -pedantic -Werror) endif() -if(CMAKE_SIZEOF_VOID_P EQUAL 8) - set(BITNESS 64) - set(BITNESS_NAME "x64") - set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/../lib64") - set(CMAKE_LIBRARY_OUTPUT_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/../lib64") - set(CMAKE_RUNTIME_OUTPUT_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/../bin64") -elseif(CMAKE_SIZEOF_VOID_P EQUAL 4) - set(BITNESS 32) - set(BITNESS_NAME "x86") - set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/../lib32") - set(CMAKE_LIBRARY_OUTPUT_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/../lib32") - 
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/../bin32") -endif() - if(NOT WIN32) # Unix builds require autoconf option(AUTOCONF_ENABLE "Enable autoconf" ON) diff --git a/sql-odbc/src/IntegrationTests/ITODBCCatalog/test_odbc_catalog.cpp b/sql-odbc/src/IntegrationTests/ITODBCCatalog/test_odbc_catalog.cpp index 8924bdb119..fc928c15fa 100644 --- a/sql-odbc/src/IntegrationTests/ITODBCCatalog/test_odbc_catalog.cpp +++ b/sql-odbc/src/IntegrationTests/ITODBCCatalog/test_odbc_catalog.cpp @@ -491,13 +491,13 @@ const std::vector< sample_data_getTypeInfo_struct > sample_data_all_types_info{ SQL_TINYINT, 0, 10, 0}, {"short", SQL_SMALLINT, 5, "", "", "", 2, 0, 3, 0, 0, 0, "", 0, 0, SQL_SMALLINT, 0, 10, 0}, - {"keyword", SQL_WVARCHAR, 256, "\"", "\"", "", 2, 1, 3, 1, 0, 0, "", 0, 0, + {"keyword", SQL_WVARCHAR, 256, "\'", "\'", "", 2, 1, 3, 1, 0, 0, "", 0, 0, SQL_WVARCHAR, 0, 10, 0}, - {"text", SQL_WVARCHAR, 2147483647, "\"", "\"", "", 2, 1, 3, 1, 0, 0, "", 0, + {"text", SQL_WVARCHAR, 2147483647, "\'", "\'", "", 2, 1, 3, 1, 0, 0, "", 0, 0, SQL_WVARCHAR, 0, 10, 0}, - {"nested", SQL_WVARCHAR, 0, "\"", "\"", "", 2, 0, 3, 1, 0, 0, "", 0, 0, + {"nested", SQL_WVARCHAR, 0, "\'", "\'", "", 2, 0, 3, 1, 0, 0, "", 0, 0, SQL_WVARCHAR, 0, 10, 0}, - {"object", SQL_WVARCHAR, 0, "\"", "\"", "", 2, 0, 3, 1, 0, 0, "", 0, 0, + {"object", SQL_WVARCHAR, 0, "\'", "\'", "", 2, 0, 3, 1, 0, 0, "", 0, 0, SQL_WVARCHAR, 0, 10, 0}, {"integer", SQL_INTEGER, 10, "", "", "", 2, 0, 3, 0, 0, 0, "", 0, 0, SQL_INTEGER, 0, 10, 0}, @@ -516,13 +516,13 @@ const std::vector< sample_data_getTypeInfo_struct > sample_data_all_types_info{ const std::vector< sample_data_getTypeInfo_struct > sample_data_single_type_multiple_row{ - {"keyword", SQL_WVARCHAR, 256, "\"", "\"", "", 2, 1, 3, 1, 0, 0, "", 0, + {"keyword", SQL_WVARCHAR, 256, "\'", "\'", "", 2, 1, 3, 1, 0, 0, "", 0, 0, SQL_WVARCHAR, 0, 10, 0}, - {"text", SQL_WVARCHAR, 2147483647, "\"", "\"", "", 2, 1, 3, 1, 0, 0, "", + {"text", SQL_WVARCHAR, 
2147483647, "\'", "\'", "", 2, 1, 3, 1, 0, 0, "", 0, 0, SQL_WVARCHAR, 0, 10, 0}, - {"nested", SQL_WVARCHAR, 0, "\"", "\"", "", 2, 0, 3, 1, 0, 0, "", 0, 0, + {"nested", SQL_WVARCHAR, 0, "\'", "\'", "", 2, 0, 3, 1, 0, 0, "", 0, 0, SQL_WVARCHAR, 0, 10, 0}, - {"object", SQL_WVARCHAR, 0, "\"", "\"", "", 2, 0, 3, 1, 0, 0, "", 0, 0, + {"object", SQL_WVARCHAR, 0, "\'", "\'", "", 2, 0, 3, 1, 0, 0, "", 0, 0, SQL_WVARCHAR, 0, 10, 0}}; const std::vector< sample_data_getTypeInfo_struct > diff --git a/sql-odbc/src/IntegrationTests/ITODBCInfo/test_odbc_info.cpp b/sql-odbc/src/IntegrationTests/ITODBCInfo/test_odbc_info.cpp index 4f2e43b8cf..0e8808e4d9 100644 --- a/sql-odbc/src/IntegrationTests/ITODBCInfo/test_odbc_info.cpp +++ b/sql-odbc/src/IntegrationTests/ITODBCInfo/test_odbc_info.cpp @@ -180,7 +180,7 @@ TEST_SQL_GET_INFO_VERSION_GE(SQLDBMSVer, SQL_DBMS_VER, L"7.1.1"); TEST_SQL_GET_INFO_STRING(SQLColumnAlias, SQL_COLUMN_ALIAS, L"Y"); TEST_SQL_GET_INFO_UINT16(SQLGroupBy, SQL_GROUP_BY, - SQL_GB_GROUP_BY_EQUALS_SELECT); + SQL_GB_GROUP_BY_CONTAINS_SELECT); TEST_SQL_GET_INFO_STRING(SQLIdentifierQuoteChar, SQL_IDENTIFIER_QUOTE_CHAR, L"`"); TEST_SQL_GET_INFO_UINT_MASK(SQLOJCapabilities, SQL_OJ_CAPABILITIES, diff --git a/sql-odbc/src/PowerBIConnector/OdfeSqlOdbcPBIConnector.pq b/sql-odbc/src/PowerBIConnector/OdfeSqlOdbcPBIConnector.pq index f1af5f74b6..e5659bf21f 100644 --- a/sql-odbc/src/PowerBIConnector/OdfeSqlOdbcPBIConnector.pq +++ b/sql-odbc/src/PowerBIConnector/OdfeSqlOdbcPBIConnector.pq @@ -12,22 +12,17 @@ shared OdfeSqlOdbcPBIConnector.Contents = Value.ReplaceType(OdfeSqlOdbcPBIConnec // Wrapper function to provide additional UI customization. 
OdfeSqlOdbcPBIConnectorType = type function ( - Host as (type text meta [ - Documentation.FieldCaption = "Host", + Server as (type text meta [ + Documentation.FieldCaption = "Server", Documentation.FieldDescription = "The hostname of the Open Distro For Elasticsearch server.", - Documentation.SampleValues = { "localhost" } - ]), - optional Port as (type number meta [ - Documentation.FieldCaption = "Port", - Documentation.FieldDescription = "The port of the Open Distro For Elasticsearch server is running on.", - Documentation.SampleValues = { 9200 } + Documentation.SampleValues = { "localhost:9200" } ]) ) as table meta [ Documentation.Name = "Open Distro For Elasticsearch" ]; -OdfeSqlOdbcPBIConnectorImpl = (Host as text, optional Port as number) as table => +OdfeSqlOdbcPBIConnectorImpl = (Server as text) as table => let Credential = Extension.CurrentCredential(), AuthenticationMode = Credential[AuthenticationKind], @@ -61,23 +56,16 @@ OdfeSqlOdbcPBIConnectorImpl = (Host as text, optional Port as number) as table = UseSSL = 0 ], - // Set host & port in connection string. - // Do not include port in connection string for aws server connection. - Server = - if Port <> null then - [ - Host = Host, - Port = Port - ] - else - [ - Host = Host - ], - ConnectionString = [ - Driver = "ODFE SQL ODBC Driver" + Driver = "ODFE SQL ODBC Driver", + Host = Server ], + SQLGetInfo = Diagnostics.LogValue("SQLGetInfo_Options", [ + SQL_AGGREGATE_FUNCTIONS = ODBC[SQL_AF][All], + SQL_SQL_CONFORMANCE = ODBC[SQL_SC][SQL_SC_SQL92_INTERMEDIATE] + ]), + SQLGetTypeInfo = (types) => if (EnableTraceOutput <> true) then types else let @@ -102,28 +90,23 @@ OdfeSqlOdbcPBIConnectorImpl = (Host as text, optional Port as number) as table = Value.ReplaceType(toTable, Value.Type(source)) else source, - - // Add support for `LIMIT` and `OFFSET` clauses (rather than `TOP`) - AstVisitor = [ - // format is "LIMIT [,]" - ex. LIMIT 2,10 or LIMIT 10 - LimitClause = (skip, take) => - if (take = null) then - ... 
- else - let - skip = - if (skip = null or skip = 0) then - "" - else - Number.ToText(skip) & "," - in - [ - Text = Text.Format("LIMIT #{0}#{1}", { skip, take }), - Location = "AfterQuerySpecification" - ] - ], + + SQLGetFunctions = Diagnostics.LogValue("SQLGetFunctions_Options", [ + SQL_API_SQLBINDPARAMETER = false + ]), + + SqlCapabilities = Diagnostics.LogValue("SqlCapabilities_Options", [ + SupportsTop = false, + LimitClauseKind = LimitClauseKind.LimitOffset, + Sql92Conformance = ODBC[SQL_SC][SQL_SC_SQL92_FULL], + SupportsNumericLiterals = true, + SupportsStringLiterals = true, + SupportsOdbcDateLiterals = true, + SupportsOdbcTimeLiterals = true, + SupportsOdbcTimestampLiterals = true + ]), - OdbcDatasource = Odbc.DataSource(ConnectionString & Server & CredentialConnectionString & EncryptedConnectionString, [ + OdbcDatasource = Odbc.DataSource(ConnectionString & CredentialConnectionString & EncryptedConnectionString, [ // Do not view the tables grouped by their schema names. HierarchicalNavigation = false, // Prevents execution of native SQL statements. Extensions should set this to true. 
@@ -136,9 +119,11 @@ OdfeSqlOdbcPBIConnectorImpl = (Host as text, optional Port as number) as table = ClientConnectionPooling = true, // These values should be set by previous steps - AstVisitor = AstVisitor, SQLColumns = SQLColumns, SQLGetTypeInfo = SQLGetTypeInfo, + SQLGetInfo = SQLGetInfo, + SQLGetFunctions = SQLGetFunctions, + SqlCapabilities = SqlCapabilities, OnError = OnOdbcError, @@ -152,7 +137,7 @@ OdfeSqlOdbcPBIConnectorImpl = (Host as text, optional Port as number) as table = OnOdbcError = (errorRecord as record) => let ErrorMessage = errorRecord[Message], - ConnectionHostPort = errorRecord[Detail][DataSourcePath], + ConnectionServer = errorRecord[Detail][DataSourcePath], IsDriverNotInstalled = Text.Contains(ErrorMessage, "doesn't correspond to an installed ODBC driver"), @@ -166,7 +151,7 @@ OnOdbcError = (errorRecord as record) => if IsDriverNotInstalled then error Error.Record("", "The Open Distro For Elasticsearch SQL ODBC driver is not installed. Please install the driver") else if IsHostUnreachable then - error Error.Record("", "Couldn't reach server. Please double-check the host, port and auth.") + error Error.Record("", "Couldn't reach server. Please double-check the server and auth. 
[" & ConnectionServer & "]") else error errorRecord; @@ -176,10 +161,9 @@ OdfeSqlOdbcPBIConnector = [ TestConnection = (dataSourcePath) => let json = Json.Document(dataSourcePath), - Host = json[Host], - Port = json[Port] + Server = json[Server] in - { "OdfeSqlOdbcPBIConnector.Contents", Host, Port }, + { "OdfeSqlOdbcPBIConnector.Contents", Server }, // Authentication modes Authentication = [ @@ -195,6 +179,19 @@ OdfeSqlOdbcPBIConnector = [ ] ], + // PBIDS Handler + DSRHandlers = [ + #"odfesqlodbc" = [ + GetDSR = (server, schema, object, optional options) => [ protocol = "odfesqlodbc", address = [ server = server ] ], + GetFormula = (dsr, optional options) => () => + let + db = OdfeSqlOdbcPBIConnector.Contents(dsr[address][server]) + in + db, + GetFriendlyName = (dsr) => "ODFE SQL ODBC" + ] + ], + // Enable Encryption SupportsEncryption = true, @@ -207,7 +204,10 @@ OdfeSqlOdbcPBIConnector.Publish = [ Category = "Other", ButtonText = { Extension.LoadString("ButtonTitle"), Extension.LoadString("ButtonHelp") }, LearnMoreUrl = "https://opendistro.github.io/for-elasticsearch/", - SupportsDirectQuery = true, + + // Disabling direct query due to limited SQL query support + SupportsDirectQuery = false, + SourceImage = OdfeSqlOdbcPBIConnector.Icons, SourceTypeImage = OdfeSqlOdbcPBIConnector.Icons ]; diff --git a/sql-odbc/src/PowerBIConnector/PBIDSExamples/odfesqlodbc_import.pbids b/sql-odbc/src/PowerBIConnector/PBIDSExamples/odfesqlodbc_import.pbids new file mode 100644 index 0000000000..f567a76409 --- /dev/null +++ b/sql-odbc/src/PowerBIConnector/PBIDSExamples/odfesqlodbc_import.pbids @@ -0,0 +1,14 @@ +{ + "version": "0.1", + "connections": [ + { + "details": { + "protocol": "odfesqlodbc", + "address": { + "server": "localhost:9200" + } + }, + "mode": "Import" + } + ] +} \ No newline at end of file diff --git a/sql-odbc/src/PowerBIConnector/bin/Release/OdfeSqlOdbcPBIConnector.mez b/sql-odbc/src/PowerBIConnector/bin/Release/OdfeSqlOdbcPBIConnector.mez index 
dc5ae9d8ab..8354aeb7d4 100644 Binary files a/sql-odbc/src/PowerBIConnector/bin/Release/OdfeSqlOdbcPBIConnector.mez and b/sql-odbc/src/PowerBIConnector/bin/Release/OdfeSqlOdbcPBIConnector.mez differ diff --git a/sql-odbc/src/installer/CMakeLists.txt b/sql-odbc/src/installer/CMakeLists.txt index 712653058c..0b47894b64 100644 --- a/sql-odbc/src/installer/CMakeLists.txt +++ b/sql-odbc/src/installer/CMakeLists.txt @@ -109,10 +109,11 @@ install(FILES "${CMAKE_CURRENT_SOURCE_DIR}/Resources/odfe_sql_odbc.tdc" DESTINAT # Install AWS dependencies if(WIN32) - install(FILES "${PROJECT_ROOT}/sdk-build${BITNESS}/bin/Release/aws-c-common.dll" DESTINATION bin COMPONENT "Driver") - install(FILES "${PROJECT_ROOT}/sdk-build${BITNESS}/bin/Release/aws-c-event-stream.dll" DESTINATION bin COMPONENT "Driver") - install(FILES "${PROJECT_ROOT}/sdk-build${BITNESS}/bin/Release/aws-checksums.dll" DESTINATION bin COMPONENT "Driver") - install(FILES "${PROJECT_ROOT}/sdk-build${BITNESS}/bin/Release/aws-cpp-sdk-core.dll" DESTINATION bin COMPONENT "Driver") + set(AWS_SDK_BIN_DIR "${PROJECT_ROOT}/build/aws-sdk/install/bin") + install(FILES "${AWS_SDK_BIN_DIR}/aws-c-common.dll" DESTINATION bin COMPONENT "Driver") + install(FILES "${AWS_SDK_BIN_DIR}/aws-c-event-stream.dll" DESTINATION bin COMPONENT "Driver") + install(FILES "${AWS_SDK_BIN_DIR}/aws-checksums.dll" DESTINATION bin COMPONENT "Driver") + install(FILES "${AWS_SDK_BIN_DIR}/aws-cpp-sdk-core.dll" DESTINATION bin COMPONENT "Driver") endif() include(CPack) diff --git a/sql-odbc/src/odfesqlodbc/es_communication.cpp b/sql-odbc/src/odfesqlodbc/es_communication.cpp index b18af24f33..8f895c6b09 100644 --- a/sql-odbc/src/odfesqlodbc/es_communication.cpp +++ b/sql-odbc/src/odfesqlodbc/es_communication.cpp @@ -173,6 +173,23 @@ std::shared_ptr< ErrorDetails > ESCommunication::ParseErrorResponse( } } +void ESCommunication::SetErrorDetails(std::string reason, std::string message, + ConnErrorType error_type) { + // Prepare document and validate schema + 
auto error_details = std::make_shared< ErrorDetails >(); + error_details->reason = reason; + error_details->details = message; + error_details->source_type = "Dummy type"; + error_details->type = error_type; + m_error_details = error_details; +} + +void ESCommunication::SetErrorDetails(ErrorDetails details) { + // Prepare document and validate schema + auto error_details = std::make_shared< ErrorDetails >(details); + m_error_details = error_details; +} + void ESCommunication::GetJsonSchema(ESResult& es_result) { // Prepare document and validate schema try { @@ -215,10 +232,15 @@ ESCommunication::~ESCommunication() { std::string ESCommunication::GetErrorMessage() { // TODO #35 - Check if they expect NULL or "" when there is no error. - m_error_details->details = std::regex_replace(m_error_details->details, - std::regex("\\n"), "\\\\n"); - return ERROR_MSG_PREFIX + m_error_details->reason + ": " - + m_error_details->details; + if (m_error_details) { + m_error_details->details = std::regex_replace( + m_error_details->details, std::regex("\\n"), "\\\\n"); + return ERROR_MSG_PREFIX + m_error_details->reason + ": " + + m_error_details->details; + } else { + return ERROR_MSG_PREFIX + + "No error details available; check the driver logs."; + } } ConnErrorType ESCommunication::GetErrorType() { @@ -243,9 +265,11 @@ bool ESCommunication::ConnectDBStart() { LogMsg(ES_ALL, "Starting DB connection."); m_status = ConnStatusType::CONNECTION_BAD; if (!m_valid_connection_options) { - m_error_type = ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE; + // TODO: get error message from CheckConnectionOptions m_error_message = "Invalid connection options, unable to connect to DB."; + SetErrorDetails("Invalid connection options", m_error_message, + ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE); LogMsg(ES_ERROR, m_error_message.c_str()); DropDBConnection(); return false; @@ -253,8 +277,9 @@ bool ESCommunication::ConnectDBStart() { m_status = ConnStatusType::CONNECTION_NEEDED; if 
(!EstablishConnection()) { - m_error_type = ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE; m_error_message = "Failed to establish connection to DB."; + SetErrorDetails("Connection error", m_error_message, + ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE); LogMsg(ES_ERROR, m_error_message.c_str()); DropDBConnection(); return false; @@ -287,18 +312,21 @@ bool ESCommunication::CheckConnectionOptions() { if (m_rt_opts.auth.auth_type == AUTHTYPE_BASIC) { if (m_rt_opts.auth.username.empty() || m_rt_opts.auth.password.empty()) { - m_error_type = ConnErrorType::CONN_ERROR_INVALID_AUTH; m_error_message = AUTHTYPE_BASIC " authentication requires a username and password."; + SetErrorDetails("Auth error", m_error_message, + ConnErrorType::CONN_ERROR_INVALID_AUTH); } } else { - m_error_type = ConnErrorType::CONN_ERROR_INVALID_AUTH; m_error_message = "Unknown authentication type: '" + m_rt_opts.auth.auth_type + "'"; + SetErrorDetails("Auth error", m_error_message, + ConnErrorType::CONN_ERROR_INVALID_AUTH); } } else if (m_rt_opts.conn.server == "") { - m_error_type = ConnErrorType::CONN_ERROR_UNABLE_TO_ESTABLISH; m_error_message = "Host connection option was not specified."; + SetErrorDetails("Connection error", m_error_message, + ConnErrorType::CONN_ERROR_UNABLE_TO_ESTABLISH); } if (m_error_message != "") { @@ -402,17 +430,18 @@ bool ESCommunication::IsSQLPluginInstalled(const std::string& plugin_response) { if (!plugin_name.compare(OPENDISTRO_SQL_PLUGIN_NAME)) { std::string sql_plugin_version = it.at("version").as_string(); - LogMsg(ES_ERROR, std::string("Found SQL plugin version '" - + sql_plugin_version + "'.") - .c_str()); + LogMsg(ES_INFO, std::string("Found SQL plugin version '" + + sql_plugin_version + "'.") + .c_str()); return true; } } else { - m_error_type = ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE; m_error_message = "Could not find all necessary fields in the plugin " "response object. 
" "(\"component\", \"version\")"; + SetErrorDetails("Connection error", m_error_message, + ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE); throw std::runtime_error(m_error_message.c_str()); } } @@ -420,18 +449,23 @@ bool ESCommunication::IsSQLPluginInstalled(const std::string& plugin_response) { m_error_type = ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE; m_error_message = "Error parsing endpoint response: " + std::string(e.what()); + SetErrorDetails("Connection error", m_error_message, + ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE); } catch (const rabbit::parse_error& e) { - m_error_type = ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE; m_error_message = "Error parsing endpoint response: " + std::string(e.what()); + SetErrorDetails("Connection error", m_error_message, + ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE); } catch (const std::exception& e) { - m_error_type = ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE; m_error_message = "Error parsing endpoint response: " + std::string(e.what()); + SetErrorDetails("Connection error", m_error_message, + ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE); } catch (...) { - m_error_type = ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE; m_error_message = "Unknown exception thrown when parsing plugin endpoint response."; + SetErrorDetails("Connection error", m_error_message, + ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE); } LogMsg(ES_ERROR, m_error_message.c_str()); @@ -452,30 +486,35 @@ bool ESCommunication::EstablishConnection() { IssueRequest(PLUGIN_ENDPOINT_FORMAT_JSON, Aws::Http::HttpMethod::HTTP_GET, "", "", ""); if (response == nullptr) { - m_error_type = ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE; m_error_message = "The SQL plugin must be installed in order to use this driver. 
" "Received NULL response."; + SetErrorDetails("HTTP client error", m_error_message, + ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE); } else { AwsHttpResponseToString(response, m_response_str); if (response->GetResponseCode() != Aws::Http::HttpResponseCode::OK) { - m_error_type = ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE; - m_error_message = - "The SQL plugin must be installed in order to use this driver."; - if (response->HasClientError()) + if (response->HasClientError()) { m_error_message += " Client error: '" + response->GetClientErrorMessage() + "'."; - if (!m_response_str.empty()) + SetErrorDetails("HTTP client error", m_error_message, + ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE); + } + if (!m_response_str.empty()) { m_error_message += " Response error: '" + m_response_str + "'."; + SetErrorDetails("Connection error", m_error_message, + ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE); + } } else { if (IsSQLPluginInstalled(m_response_str)) { return true; } else { - m_error_type = ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE; m_error_message = "The SQL plugin must be installed in order to use this " "driver. Response body: '" + m_response_str + "'"; + SetErrorDetails("Connection error", m_error_message, + ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE); } } } @@ -505,10 +544,11 @@ std::vector< std::string > ESCommunication::GetColumnsWithSelectQuery( // Validate response if (response == nullptr) { - m_error_type = ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE; m_error_message = "Failed to receive response from query. 
" "Received NULL response."; + SetErrorDetails("HTTP client error", m_error_message, + ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE); LogMsg(ES_ERROR, m_error_message.c_str()); return list_of_column; } @@ -531,6 +571,8 @@ std::vector< std::string > ESCommunication::GetColumnsWithSelectQuery( m_error_message += " Response error: '" + result->result_json + "'."; } + SetErrorDetails("Connection error", m_error_message, + ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE); LogMsg(ES_ERROR, m_error_message.c_str()); return list_of_column; } @@ -550,13 +592,15 @@ std::vector< std::string > ESCommunication::GetColumnsWithSelectQuery( int ESCommunication::ExecDirect(const char* query, const char* fetch_size_) { m_error_details.reset(); if (!query) { - m_error_type = ConnErrorType::CONN_ERROR_INVALID_NULL_PTR; m_error_message = "Query is NULL"; + SetErrorDetails("Execution error", m_error_message, + ConnErrorType::CONN_ERROR_INVALID_NULL_PTR); LogMsg(ES_ERROR, m_error_message.c_str()); return -1; } else if (!m_http_client) { - m_error_type = ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE; m_error_message = "Unable to connect. Please try connecting again."; + SetErrorDetails("Execution error", m_error_message, + ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE); LogMsg(ES_ERROR, m_error_message.c_str()); return -1; } @@ -574,10 +618,11 @@ int ESCommunication::ExecDirect(const char* query, const char* fetch_size_) { // Validate response if (response == nullptr) { - m_error_type = ConnErrorType::CONN_ERROR_QUERY_SYNTAX; m_error_message = "Failed to receive response from query. 
" "Received NULL response."; + SetErrorDetails("Execution error", m_error_message, + ConnErrorType::CONN_ERROR_QUERY_SYNTAX); LogMsg(ES_ERROR, m_error_message.c_str()); return -1; } @@ -609,12 +654,13 @@ int ESCommunication::ExecDirect(const char* query, const char* fetch_size_) { try { ConstructESResult(*result); } catch (std::runtime_error& e) { - m_error_type = ConnErrorType::CONN_ERROR_QUERY_SYNTAX; m_error_message = "Received runtime exception: " + std::string(e.what()); if (!result->result_json.empty()) { m_error_message += " Result body: " + result->result_json; } + SetErrorDetails("Execution error", m_error_message, + ConnErrorType::CONN_ERROR_QUERY_SYNTAX); LogMsg(ES_ERROR, m_error_message.c_str()); return -1; } @@ -649,10 +695,11 @@ void ESCommunication::SendCursorQueries(std::string cursor) { SQL_ENDPOINT_FORMAT_JDBC, Aws::Http::HttpMethod::HTTP_POST, ctype, "", "", cursor); if (response == nullptr) { - m_error_type = ConnErrorType::CONN_ERROR_QUERY_SYNTAX; m_error_message = "Failed to receive response from cursor. " "Received NULL response."; + SetErrorDetails("Cursor error", m_error_message, + ConnErrorType::CONN_ERROR_QUERY_SYNTAX); LogMsg(ES_ERROR, m_error_message.c_str()); return; } @@ -678,9 +725,10 @@ void ESCommunication::SendCursorQueries(std::string cursor) { result.release(); } } catch (std::runtime_error& e) { - m_error_type = ConnErrorType::CONN_ERROR_QUERY_SYNTAX; m_error_message = "Received runtime exception: " + std::string(e.what()); + SetErrorDetails("Cursor error", m_error_message, + ConnErrorType::CONN_ERROR_QUERY_SYNTAX); LogMsg(ES_ERROR, m_error_message.c_str()); } @@ -696,10 +744,11 @@ void ESCommunication::SendCloseCursorRequest(const std::string& cursor) { IssueRequest(SQL_ENDPOINT_CLOSE_CURSOR, Aws::Http::HttpMethod::HTTP_POST, ctype, "", "", cursor); if (response == nullptr) { - m_error_type = ConnErrorType::CONN_ERROR_QUERY_SYNTAX; m_error_message = - "Failed to receive response from cursor. 
" + "Failed to receive response from cursor close request. " "Received NULL response."; + SetErrorDetails("Cursor error", m_error_message, + ConnErrorType::CONN_ERROR_QUERY_SYNTAX); LogMsg(ES_ERROR, m_error_message.c_str()); } } @@ -782,10 +831,11 @@ std::string ESCommunication::GetServerVersion() { std::shared_ptr< Aws::Http::HttpResponse > response = IssueRequest("", Aws::Http::HttpMethod::HTTP_GET, "", "", ""); if (response == nullptr) { - m_error_type = ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE; m_error_message = - "Failed to receive response from query. " + "Failed to receive response from server version query. " "Received NULL response."; + SetErrorDetails("Connection error", m_error_message, + ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE); LogMsg(ES_ERROR, m_error_message.c_str()); return ""; } @@ -801,19 +851,22 @@ std::string ESCommunication::GetServerVersion() { } } catch (const rabbit::type_mismatch& e) { - m_error_type = ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE; m_error_message = "Error parsing main endpoint response: " + std::string(e.what()); + SetErrorDetails("Connection error", m_error_message, + ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE); LogMsg(ES_ERROR, m_error_message.c_str()); } catch (const rabbit::parse_error& e) { - m_error_type = ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE; m_error_message = "Error parsing main endpoint response: " + std::string(e.what()); + SetErrorDetails("Connection error", m_error_message, + ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE); LogMsg(ES_ERROR, m_error_message.c_str()); } catch (const std::exception& e) { - m_error_type = ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE; m_error_message = "Error parsing main endpoint response: " + std::string(e.what()); + SetErrorDetails("Connection error", m_error_message, + ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE); LogMsg(ES_ERROR, m_error_message.c_str()); } catch (...) 
{ LogMsg(ES_ERROR, @@ -834,10 +887,11 @@ std::string ESCommunication::GetClusterName() { std::shared_ptr< Aws::Http::HttpResponse > response = IssueRequest("", Aws::Http::HttpMethod::HTTP_GET, "", "", ""); if (response == nullptr) { - m_error_type = ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE; m_error_message = - "Failed to receive response from query. " + "Failed to receive response from cluster name query. " "Received NULL response."; + SetErrorDetails("Connection error", m_error_message, + ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE); LogMsg(ES_ERROR, m_error_message.c_str()); return ""; } @@ -853,19 +907,22 @@ std::string ESCommunication::GetClusterName() { } } catch (const rabbit::type_mismatch& e) { - m_error_type = ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE; m_error_message = "Error parsing main endpoint response: " + std::string(e.what()); + SetErrorDetails("Connection error", m_error_message, + ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE); LogMsg(ES_ERROR, m_error_message.c_str()); } catch (const rabbit::parse_error& e) { - m_error_type = ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE; m_error_message = "Error parsing main endpoint response: " + std::string(e.what()); + SetErrorDetails("Connection error", m_error_message, + ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE); LogMsg(ES_ERROR, m_error_message.c_str()); } catch (const std::exception& e) { - m_error_type = ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE; m_error_message = "Error parsing main endpoint response: " + std::string(e.what()); + SetErrorDetails("Connection error", m_error_message, + ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE); LogMsg(ES_ERROR, m_error_message.c_str()); } catch (...) 
{ LogMsg(ES_ERROR, diff --git a/sql-odbc/src/odfesqlodbc/es_communication.h b/sql-odbc/src/odfesqlodbc/es_communication.h index ca8f623199..f910d0aec7 100644 --- a/sql-odbc/src/odfesqlodbc/es_communication.h +++ b/sql-odbc/src/odfesqlodbc/es_communication.h @@ -87,6 +87,9 @@ class ESCommunication { void GetJsonSchema(ESResult& es_result); void PrepareCursorResult(ESResult& es_result); std::shared_ptr< ErrorDetails > ParseErrorResponse(ESResult& es_result); + void SetErrorDetails(std::string reason, std::string message, + ConnErrorType error_type); + void SetErrorDetails(ErrorDetails details); // TODO #35 - Go through and add error messages on exit conditions std::string m_error_message; diff --git a/sql-odbc/src/odfesqlodbc/es_connection.cpp b/sql-odbc/src/odfesqlodbc/es_connection.cpp index 5699fb0a10..fb13d3131f 100644 --- a/sql-odbc/src/odfesqlodbc/es_connection.cpp +++ b/sql-odbc/src/odfesqlodbc/es_connection.cpp @@ -125,15 +125,11 @@ int LIBES_connect(ConnectionClass *self) { std::string msg = GetErrorMsg(esconn); char error_message_out[ERROR_BUFF_SIZE] = ""; if (!msg.empty()) - SPRINTF_FIXED( - error_message_out, - "elasticsearch connection status was not CONNECTION_OK: %s", - msg.c_str()); + SPRINTF_FIXED(error_message_out, "Connection error: %s", + msg.c_str()); else STRCPY_FIXED(error_message_out, - "elasticsearch connection status was not " - "CONNECTION_OK. No error message " - "available."); + "Connection error: No message available."); CC_set_error(self, CONN_OPENDB_ERROR, error_message_out, "LIBES_connect"); ESDisconnect(esconn); @@ -151,8 +147,8 @@ int LIBES_connect(ConnectionClass *self) { return 1; } -// TODO #36 - When we fix encoding, we should look into returning a code here. This -// is called in connection.c and the return code isn't checked +// TODO #36 - When we fix encoding, we should look into returning a code here. 
+// This is called in connection.c and the return code isn't checked void CC_set_locale_encoding(ConnectionClass *self, const char *encoding) { if (self == NULL) return; diff --git a/sql-odbc/src/odfesqlodbc/es_types.c b/sql-odbc/src/odfesqlodbc/es_types.c index 316bfcaf0c..d6da15cac9 100644 --- a/sql-odbc/src/odfesqlodbc/es_types.c +++ b/sql-odbc/src/odfesqlodbc/es_types.c @@ -1344,7 +1344,7 @@ const char *estype_literal_prefix(const ConnectionClass *conn, OID type) { case ES_TYPE_TEXT: case ES_TYPE_NESTED: case ES_TYPE_OBJECT: - return "\""; + return "\'"; default: return ""; } @@ -1357,7 +1357,7 @@ const char *estype_literal_suffix(const ConnectionClass *conn, OID type) { case ES_TYPE_TEXT: case ES_TYPE_NESTED: case ES_TYPE_OBJECT: - return "\""; + return "\'"; default: return ""; } diff --git a/sql-odbc/src/odfesqlodbc/es_types.h b/sql-odbc/src/odfesqlodbc/es_types.h index dcb73398c2..ab133ac9c8 100644 --- a/sql-odbc/src/odfesqlodbc/es_types.h +++ b/sql-odbc/src/odfesqlodbc/es_types.h @@ -299,6 +299,12 @@ typedef struct ErrorDetails { std::string details; std::string source_type; ConnErrorType type; + ErrorDetails() { + reason = ""; + details = ""; + source_type = ""; + type = ConnErrorType::CONN_ERROR_SUCCESS; + } } ErrorDetails; #define INVALID_OID 0 diff --git a/sql-odbc/src/odfesqlodbc/info.c b/sql-odbc/src/odfesqlodbc/info.c index a8925d4087..45e1bad0e3 100644 --- a/sql-odbc/src/odfesqlodbc/info.c +++ b/sql-odbc/src/odfesqlodbc/info.c @@ -245,7 +245,7 @@ RETCODE SQL_API ESAPI_GetInfo(HDBC hdbc, SQLUSMALLINT fInfoType, case SQL_GROUP_BY: /* ODBC 2.0 */ len = 2; - value = SQL_GB_GROUP_BY_EQUALS_SELECT; + value = SQL_GB_GROUP_BY_CONTAINS_SELECT; break; case SQL_IDENTIFIER_CASE: /* ODBC 1.0 */ diff --git a/sql-workbench/.cypress/integration/ui.spec.js b/sql-workbench/.cypress/integration/ui.spec.js new file mode 100644 index 0000000000..d9c07979f6 --- /dev/null +++ b/sql-workbench/.cypress/integration/ui.spec.js @@ -0,0 +1,118 @@ +/* + * Copyright 2020 
Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + */ + +/// + +import { edit } from "brace"; +import { delay, testQueries, verifyDownloadData, files } from "../utils/constants"; + +describe('Test UI buttons', () => { + beforeEach(() => { + cy.visit('app/opendistro-sql-workbench'); + }); + + it('Test Run button and field search', () => { + cy.get('textarea.ace_text-input').eq(0).focus().type('{enter}select * from accounts where balance > 49500;', { force: true }); + cy.wait(delay); + cy.get('.euiButton__text').contains('Run').click(); + cy.wait(delay); + cy.get('.euiTab__content').contains('accounts').click(); + + cy.get('input.euiFieldSearch').type('marissa'); + cy.get('span.euiTableCellContent__text').eq(15).should((account_number) => { + expect(account_number).to.contain('803'); + }); + }); + + it('Test Translate button', () => { + cy.get('textarea.ace_text-input').eq(0).focus().type('{selectall}{backspace}', { force: true }); + cy.wait(delay); + cy.get('textarea.ace_text-input').eq(0).focus().type('{selectall}{backspace}select log(balance) from accounts where abs(age) > 20;', { force: true }); + cy.wait(delay); + cy.get('.euiButton__text').contains('Translate').click(); + cy.wait(delay); + + // Note: Translation retrived this way will get cut off, so doing a substring check + cy.get('.ace_content').eq(1).then((translate_editor) => { + const editor = edit(translate_editor[0]); + 
expect(editor.getValue()).to.have.string("Math.abs(doc['age'].value);abs_1 > 20"); + }); + }); + + it('Test Clear button', () => { + cy.get('.euiButton__text').contains('Clear').click(); + cy.wait(delay); + + cy.get('.ace_content').eq(0).then((sql_query_editor) => { + const editor = edit(sql_query_editor[0]); + expect(editor.getValue()).to.equal(''); + }); + }); +}); + +describe('Test and verify downloads', () => { + verifyDownloadData.map(({ title, url, file }) => { + it(title, () => { + cy.request({ + method: 'POST', + form: true, + url: url, + headers: { + 'content-type': 'application/json;charset=UTF-8', + 'kbn-version': '7.8.0', + }, + body: { + 'query': 'select * from accounts where balance > 49500' + } + }).then(response => { + expect(response.body.resp).to.have.string(files[file]); + }); + }); + }); +}); + +describe('Test table display', () => { + beforeEach(() => { + cy.visit('app/opendistro-sql-workbench'); + cy.get('textarea.ace_text-input').eq(0).focus().type('{selectall}{backspace}', { force: true }); + cy.wait(delay); + }); + + testQueries.map(({ title, query, cell_idx, expected_string }) => { + it(title, () => { + cy.get('textarea.ace_text-input').eq(0).focus().type(`{selectall}{backspace}${query}`, { force: true }); + cy.wait(delay); + cy.get('.euiButton__text').contains('Run').click(); + cy.wait(delay); + + cy.get('span.euiTableCellContent__text').eq(cell_idx).should((cell) => { + expect(cell).to.contain(expected_string); + }); + }); + }); + + it('Test nested fields display', () => { + cy.get('textarea.ace_text-input').eq(0).focus().type(`{selectall}{backspace}select * from employee_nested;`, { force: true }); + cy.wait(delay); + cy.get('.euiButton__text').contains('Run').click(); + cy.wait(delay); + + cy.get('span.euiTableCellContent__text').eq(21).click(); + cy.wait(delay); + cy.get('span.euiTableCellContent__text').eq(27).should((cell) => { + expect(cell).to.contain('2018-06-23'); + }); + }); +}); \ No newline at end of file diff --git 
a/sql-workbench/.cypress/plugins/index.js b/sql-workbench/.cypress/plugins/index.js new file mode 100644 index 0000000000..d1af2d618d --- /dev/null +++ b/sql-workbench/.cypress/plugins/index.js @@ -0,0 +1,37 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + */ + +/// + +// *********************************************************** +// This example plugins/index.js can be used to load plugins +// +// You can change the location of this file or turn off loading +// the plugins file with the 'pluginsFile' configuration option. +// +// You can read more here: +// https://on.cypress.io/plugins-guide +// *********************************************************** + +// This function is called when a project is opened or re-opened (e.g. due to +// the project's config changing) + +/** + * @type {Cypress.PluginConfig} + */ +module.exports = (on, config) => { + // `on` is used to hook into various events Cypress emits + // `config` is the resolved Cypress config +}; diff --git a/sql-workbench/.cypress/support/commands.js b/sql-workbench/.cypress/support/commands.js new file mode 100644 index 0000000000..3d72c48ae9 --- /dev/null +++ b/sql-workbench/.cypress/support/commands.js @@ -0,0 +1,40 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. 
+ * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + */ + +// *********************************************** +// This example commands.js shows you how to +// create various custom commands and overwrite +// existing commands. +// +// For more comprehensive examples of custom +// commands please read more here: +// https://on.cypress.io/custom-commands +// *********************************************** +// +// +// -- This is a parent command -- +// Cypress.Commands.add("login", (email, password) => { ... }) +// +// +// -- This is a child command -- +// Cypress.Commands.add("drag", { prevSubject: 'element'}, (subject, options) => { ... }) +// +// +// -- This is a dual command -- +// Cypress.Commands.add("dismiss", { prevSubject: 'optional'}, (subject, options) => { ... }) +// +// +// -- This will overwrite an existing command -- +// Cypress.Commands.overwrite("visit", (originalFn, url, options) => { ... }) diff --git a/sql-workbench/.cypress/support/index.js b/sql-workbench/.cypress/support/index.js new file mode 100644 index 0000000000..9d8000212d --- /dev/null +++ b/sql-workbench/.cypress/support/index.js @@ -0,0 +1,35 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. 
See the License for the specific language governing + * permissions and limitations under the License. + */ + +// *********************************************************** +// This example support/index.js is processed and +// loaded automatically before your test files. +// +// This is a great place to put global configuration and +// behavior that modifies Cypress. +// +// You can change the location of this file or turn off +// automatically serving support files with the +// 'supportFile' configuration option. +// +// You can read more here: +// https://on.cypress.io/configuration +// *********************************************************** + +// Import commands.js using ES2015 syntax: +import './commands'; + +// Alternatively you can use CommonJS syntax: +// require('./commands') diff --git a/sql-workbench/.cypress/utils/constants.js b/sql-workbench/.cypress/utils/constants.js new file mode 100644 index 0000000000..14d8fac685 --- /dev/null +++ b/sql-workbench/.cypress/utils/constants.js @@ -0,0 +1,97 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ */ + +export const delay = 300; + +export const verifyDownloadData = [ + { + title: 'Download and verify JSON', + url: 'api/sql_console/queryjson', + file: 'JSONFile' + }, + { + title: 'Download and verify JDBC', + url: 'api/sql_console/queryjdbc', + file: 'JDBCFile' + }, + { + title: 'Download and verify CSV', + url: 'api/sql_console/querycsv', + file: 'CSVFile' + }, + { + title: 'Download and verify Text', + url: 'api/sql_console/querytext', + file: 'TextFile' + }, +]; + +export const testQueries = [ + { + title: 'Test GROUP BY', + query: 'select count(*) from accounts group by gender;', + cell_idx: 5, + expected_string: '507' + }, + { + title: 'Test GROUP BY with aliases and scalar function', + query: 'SELECT ABS(age) AS a FROM accounts GROUP BY ABS(age);', + cell_idx: 17, + expected_string: '35.0' + }, + { + title: 'Test GROUP BY and HAVING', + query: 'SELECT age, MAX(balance) FROM accounts GROUP BY age HAVING MIN(balance) > 3000;', + cell_idx: 15, + expected_string: '49339' + }, + { + title: 'Test ORDER BY', + query: 'SELECT account_number FROM accounts ORDER BY account_number DESC;', + cell_idx: 5, + expected_string: '999' + }, + { + title: 'Test JOIN', + query: 'select a.account_number, a.firstname, a.lastname, e.id, e.name from accounts a join employee_nested e order by a.account_number;', + cell_idx: 45, + expected_string: 'Amber' + }, +]; + +export const files = { + JSONFile: + `"hits":[{"_index":"accounts","_type":"_doc","_id":"842","_score":0,"_source":{"account_number":842,"balance":49587,"firstname":"Meagan","lastname":"Buckner","age":23,"gender":"F","address":"833 Bushwick Court","employer":"Biospan","email":"meaganbuckner@biospan.com","city":"Craig","state":"TX"}},{"_index":"accounts","_type":"_doc","_id":"854","_score":0,"_source":{"account_number":854,"balance":49795,"firstname":"Jimenez","lastname":"Barry","age":25,"gender":"F","address":"603 Cooper 
Street","employer":"Verton","email":"jimenezbarry@verton.com","city":"Moscow","state":"AL"}},{"_index":"accounts","_type":"_doc","_id":"97","_score":0,"_source":{"account_number":97,"balance":49671,"firstname":"Karen","lastname":"Trujillo","age":40,"gender":"F","address":"512 Cumberland Walk","employer":"Tsunamia","email":"karentrujillo@tsunamia.com","city":"Fredericktown","state":"MO"}},{"_index":"accounts","_type":"_doc","_id":"168","_score":0,"_source":{"account_number":168,"balance":49568,"firstname":"Carissa","lastname":"Simon","age":20,"gender":"M","address":"975 Flatbush Avenue","employer":"Zillacom","email":"carissasimon@zillacom.com","city":"Neibert","state":"IL"}},{"_index":"accounts","_type":"_doc","_id":"240","_score":0,"_source":{"account_number":240,"balance":49741,"firstname":"Oconnor","lastname":"Clay","age":35,"gender":"F","address":"659 Highland Boulevard","employer":"Franscene","email":"oconnorclay@franscene.com","city":"Kilbourne","state":"NH"}},{"_index":"accounts","_type":"_doc","_id":"803","_score":0,"_source":{"account_number":803,"balance":49567,"firstname":"Marissa","lastname":"Spears","age":25,"gender":"M","address":"963 Highland Avenue","employer":"Centregy","email":"marissaspears@centregy.com","city":"Bloomington","state":"MS"}},{"_index":"accounts","_type":"_doc","_id":"248","_score":0,"_source":{"account_number":248,"balance":49989,"firstname":"West","lastname":"England","age":36,"gender":"M","address":"717 Hendrickson Place","employer":"Obliq","email":"westengland@obliq.com","city":"Maury","state":"WA"}}]`, + JDBCFile: + 
`{"schema":[{"name":"account_number","type":"long"},{"name":"firstname","type":"text"},{"name":"gender","type":"text"},{"name":"city","type":"text"},{"name":"balance","type":"long"},{"name":"employer","type":"text"},{"name":"state","type":"text"},{"name":"email","type":"text"},{"name":"address","type":"text"},{"name":"lastname","type":"text"},{"name":"age","type":"long"}],"total":7,"datarows":[[842,"Meagan","F","Craig",49587,"Biospan","TX","meaganbuckner@biospan.com","833 Bushwick Court","Buckner",23],[854,"Jimenez","F","Moscow",49795,"Verton","AL","jimenezbarry@verton.com","603 Cooper Street","Barry",25],[97,"Karen","F","Fredericktown",49671,"Tsunamia","MO","karentrujillo@tsunamia.com","512 Cumberland Walk","Trujillo",40],[168,"Carissa","M","Neibert",49568,"Zillacom","IL","carissasimon@zillacom.com","975 Flatbush Avenue","Simon",20],[240,"Oconnor","F","Kilbourne",49741,"Franscene","NH","oconnorclay@franscene.com","659 Highland Boulevard","Clay",35],[803,"Marissa","M","Bloomington",49567,"Centregy","MS","marissaspears@centregy.com","963 Highland Avenue","Spears",25],[248,"West","M","Maury",49989,"Obliq","WA","westengland@obliq.com","717 Hendrickson Place","England",36]],"size":7,"status":200}`, + CSVFile: + `account_number,firstname,address,balance,gender,city,employer,state,age,email,lastname +842,Meagan,833 Bushwick Court,49587,F,Craig,Biospan,TX,23,meaganbuckner@biospan.com,Buckner +854,Jimenez,603 Cooper Street,49795,F,Moscow,Verton,AL,25,jimenezbarry@verton.com,Barry +97,Karen,512 Cumberland Walk,49671,F,Fredericktown,Tsunamia,MO,40,karentrujillo@tsunamia.com,Trujillo +168,Carissa,975 Flatbush Avenue,49568,M,Neibert,Zillacom,IL,20,carissasimon@zillacom.com,Simon +240,Oconnor,659 Highland Boulevard,49741,F,Kilbourne,Franscene,NH,35,oconnorclay@franscene.com,Clay +803,Marissa,963 Highland Avenue,49567,M,Bloomington,Centregy,MS,25,marissaspears@centregy.com,Spears +248,West,717 Hendrickson Place,49989,M,Maury,Obliq,WA,36,westengland@obliq.com,England`, + 
TextFile: + `842|Meagan|F|Craig|49587|Biospan|TX|meaganbuckner@biospan.com|833 Bushwick Court|Buckner|23 +854|Jimenez|F|Moscow|49795|Verton|AL|jimenezbarry@verton.com|603 Cooper Street|Barry|25 +97|Karen|F|Fredericktown|49671|Tsunamia|MO|karentrujillo@tsunamia.com|512 Cumberland Walk|Trujillo|40 +168|Carissa|M|Neibert|49568|Zillacom|IL|carissasimon@zillacom.com|975 Flatbush Avenue|Simon|20 +240|Oconnor|F|Kilbourne|49741|Franscene|NH|oconnorclay@franscene.com|659 Highland Boulevard|Clay|35 +803|Marissa|M|Bloomington|49567|Centregy|MS|marissaspears@centregy.com|963 Highland Avenue|Spears|25 +248|West|M|Maury|49989|Obliq|WA|westengland@obliq.com|717 Hendrickson Place|England|36 +`, +}; \ No newline at end of file diff --git a/sql-workbench/.gitignore b/sql-workbench/.gitignore new file mode 100644 index 0000000000..8e32e03be9 --- /dev/null +++ b/sql-workbench/.gitignore @@ -0,0 +1,4 @@ +node_modules/ +/build/ +.cypress/screenshots +.cypress/videos diff --git a/sql-workbench/cypress.json b/sql-workbench/cypress.json new file mode 100644 index 0000000000..e5441904ad --- /dev/null +++ b/sql-workbench/cypress.json @@ -0,0 +1,13 @@ +{ + "baseUrl": "http://localhost:5601", + "video": true, + "fixturesFolder": ".cypress/fixtures", + "integrationFolder": ".cypress/integration", + "pluginsFile": ".cypress/plugins/index.js", + "screenshotsFolder": ".cypress/screenshots", + "supportFile": ".cypress/support/index.js", + "videosFolder": ".cypress/videos", + "requestTimeout": 60000, + "responseTimeout": 60000, + "defaultCommandTimeout": 60000 +} diff --git a/sql-workbench/package.json b/sql-workbench/package.json index 1b3578b135..5c06f06d41 100644 --- a/sql-workbench/package.json +++ b/sql-workbench/package.json @@ -1,12 +1,12 @@ { "name": "opendistro-sql-workbench", - "version": "1.9.0.2", + "version": "1.10.0.0", "description": "SQL Workbench", "main": "index.js", "license": "Apache-2.0", "homepage": 
"https://github.com/opendistro-for-elasticsearch/sql/tree/master/sql-workbench", "kibana": { - "version": "7.8.0", + "version": "7.9.0", "templateVersion": "6.3.3" }, "repository": { @@ -27,8 +27,7 @@ "brace": "0.11.1", "lodash": "^4.17.19", "react-dom": "^16.3.0", - "react-double-scrollbar": "^0.0.15", - "node": "^14.0.0" + "react-double-scrollbar": "^0.0.15" }, "devDependencies": { "@babel/plugin-proposal-class-properties": "^7.7.4", @@ -57,6 +56,7 @@ "@types/react-dom": "^16.0.5", "@types/react-router-dom": "^5.1.5", "babel-eslint": "^10.1.0", + "cypress": "^4.9.0", "eslint": "^6.8.0", "eslint-plugin-babel": "^5.2.0", "eslint-plugin-import": "^2.14.0", diff --git a/sql-workbench/yarn.lock b/sql-workbench/yarn.lock index f9e478d8b7..5b1eac86e7 100644 --- a/sql-workbench/yarn.lock +++ b/sql-workbench/yarn.lock @@ -9,28 +9,28 @@ dependencies: "@babel/highlight" "^7.10.4" -"@babel/compat-data@^7.10.4": - version "7.10.5" - resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.10.5.tgz#d38425e67ea96b1480a3f50404d1bf85676301a6" - integrity sha512-mPVoWNzIpYJHbWje0if7Ck36bpbtTvIxOi9+6WSK9wjGEXearAqlwBoTQvVjsAY2VIwgcs8V940geY3okzRCEw== +"@babel/compat-data@^7.10.4", "@babel/compat-data@^7.11.0": + version "7.11.0" + resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.11.0.tgz#e9f73efe09af1355b723a7f39b11bad637d7c99c" + integrity sha512-TPSvJfv73ng0pfnEOh17bYMPQbI95+nGWc71Ss4vZdRBHTDqmM9Z8ZV4rYz8Ks7sfzc95n30k6ODIq5UGnXcYQ== dependencies: browserslist "^4.12.0" invariant "^2.2.4" semver "^5.5.0" -"@babel/core@^7.1.0", "@babel/core@^7.7.5", "@babel/core@^7.9.0": - version "7.10.5" - resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.10.5.tgz#1f15e2cca8ad9a1d78a38ddba612f5e7cdbbd330" - integrity sha512-O34LQooYVDXPl7QWCdW9p4NR+QlzOr7xShPPJz8GsuCU3/8ua/wqTr7gmnxXv+WBESiGU/G5s16i6tUvHkNb+w== +"@babel/core@^7.1.0", "@babel/core@^7.10.2", "@babel/core@^7.7.5": + version "7.11.1" + resolved 
"https://registry.yarnpkg.com/@babel/core/-/core-7.11.1.tgz#2c55b604e73a40dc21b0e52650b11c65cf276643" + integrity sha512-XqF7F6FWQdKGGWAzGELL+aCO1p+lRY5Tj5/tbT3St1G8NaH70jhhDIKknIZaDans0OQBG5wRAldROLHSt44BgQ== dependencies: "@babel/code-frame" "^7.10.4" - "@babel/generator" "^7.10.5" - "@babel/helper-module-transforms" "^7.10.5" + "@babel/generator" "^7.11.0" + "@babel/helper-module-transforms" "^7.11.0" "@babel/helpers" "^7.10.4" - "@babel/parser" "^7.10.5" + "@babel/parser" "^7.11.1" "@babel/template" "^7.10.4" - "@babel/traverse" "^7.10.5" - "@babel/types" "^7.10.5" + "@babel/traverse" "^7.11.0" + "@babel/types" "^7.11.0" convert-source-map "^1.7.0" debug "^4.1.0" gensync "^1.0.0-beta.1" @@ -40,12 +40,12 @@ semver "^5.4.1" source-map "^0.5.0" -"@babel/generator@^7.10.5": - version "7.10.5" - resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.10.5.tgz#1b903554bc8c583ee8d25f1e8969732e6b829a69" - integrity sha512-3vXxr3FEW7E7lJZiWQ3bM4+v/Vyr9C+hpolQ8BGFr9Y8Ri2tFLWTixmwKBafDujO1WVah4fhZBeU1bieKdghig== +"@babel/generator@^7.11.0": + version "7.11.0" + resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.11.0.tgz#4b90c78d8c12825024568cbe83ee6c9af193585c" + integrity sha512-fEm3Uzw7Mc9Xi//qU20cBKatTfs2aOtKqmvy/Vm7RkJEGFQ4xc9myCfbXxqK//ZS8MR/ciOHw6meGASJuKmDfQ== dependencies: - "@babel/types" "^7.10.5" + "@babel/types" "^7.11.0" jsesc "^2.5.1" source-map "^0.5.0" @@ -154,11 +154,11 @@ "@babel/types" "^7.10.4" "@babel/helper-member-expression-to-functions@^7.10.4", "@babel/helper-member-expression-to-functions@^7.10.5": - version "7.10.5" - resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.10.5.tgz#172f56e7a63e78112f3a04055f24365af702e7ee" - integrity sha512-HiqJpYD5+WopCXIAbQDG0zye5XYVvcO9w/DHp5GsaGkRUaamLj2bEtu6i8rnGGprAhHM3qidCMgp71HF4endhA== + version "7.11.0" + resolved 
"https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.11.0.tgz#ae69c83d84ee82f4b42f96e2a09410935a8f26df" + integrity sha512-JbFlKHFntRV5qKw3YC0CvQnDZ4XMwgzzBbld7Ly4Mj4cbFy3KywcR8NtNctRToMWJOVvLINJv525Gd6wwVEx/Q== dependencies: - "@babel/types" "^7.10.5" + "@babel/types" "^7.11.0" "@babel/helper-module-imports@^7.10.4": version "7.10.4" @@ -167,17 +167,17 @@ dependencies: "@babel/types" "^7.10.4" -"@babel/helper-module-transforms@^7.10.4", "@babel/helper-module-transforms@^7.10.5": - version "7.10.5" - resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.10.5.tgz#120c271c0b3353673fcdfd8c053db3c544a260d6" - integrity sha512-4P+CWMJ6/j1W915ITJaUkadLObmCRRSC234uctJfn/vHrsLNxsR8dwlcXv9ZhJWzl77awf+mWXSZEKt5t0OnlA== +"@babel/helper-module-transforms@^7.10.4", "@babel/helper-module-transforms@^7.10.5", "@babel/helper-module-transforms@^7.11.0": + version "7.11.0" + resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.11.0.tgz#b16f250229e47211abdd84b34b64737c2ab2d359" + integrity sha512-02EVu8COMuTRO1TAzdMtpBPbe6aQ1w/8fePD2YgQmxZU4gpNWaL9gK3Jp7dxlkUlUCJOTaSeA+Hrm1BRQwqIhg== dependencies: "@babel/helper-module-imports" "^7.10.4" "@babel/helper-replace-supers" "^7.10.4" "@babel/helper-simple-access" "^7.10.4" - "@babel/helper-split-export-declaration" "^7.10.4" + "@babel/helper-split-export-declaration" "^7.11.0" "@babel/template" "^7.10.4" - "@babel/types" "^7.10.5" + "@babel/types" "^7.11.0" lodash "^4.17.19" "@babel/helper-optimise-call-expression@^7.10.4": @@ -187,7 +187,7 @@ dependencies: "@babel/types" "^7.10.4" -"@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.8.0": +"@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.8.0", "@babel/helper-plugin-utils@^7.8.3": version "7.10.4" resolved 
"https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.10.4.tgz#2f75a831269d4f677de49986dff59927533cf375" integrity sha512-O4KCvQA6lLiMU9l2eawBPMf1xPP8xPfB3iEQw150hOVTqj/rfXz0ThTb4HEzqQfs2Bmo5Ay8BzxfzVtBrr9dVg== @@ -228,12 +228,19 @@ "@babel/template" "^7.10.4" "@babel/types" "^7.10.4" -"@babel/helper-split-export-declaration@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.10.4.tgz#2c70576eaa3b5609b24cb99db2888cc3fc4251d1" - integrity sha512-pySBTeoUff56fL5CBU2hWm9TesA4r/rOkI9DyJLvvgz09MB9YtfIYe3iBriVaYNaPe+Alua0vBIOVOLs2buWhg== +"@babel/helper-skip-transparent-expression-wrappers@^7.11.0": + version "7.11.0" + resolved "https://registry.yarnpkg.com/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.11.0.tgz#eec162f112c2f58d3af0af125e3bb57665146729" + integrity sha512-0XIdiQln4Elglgjbwo9wuJpL/K7AGCY26kmEt0+pRP0TAj4jjyNq1MjoRvikrTVqKcx4Gysxt4cXvVFXP/JO2Q== dependencies: - "@babel/types" "^7.10.4" + "@babel/types" "^7.11.0" + +"@babel/helper-split-export-declaration@^7.10.4", "@babel/helper-split-export-declaration@^7.11.0": + version "7.11.0" + resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.11.0.tgz#f8a491244acf6a676158ac42072911ba83ad099f" + integrity sha512-74Vejvp6mHkGE+m+k5vHY93FX2cAtrw1zXrZXRlG4l410Nm9PxfEiVTn1PjDPV5SnmieiueY4AFg2xqhNFuuZg== + dependencies: + "@babel/types" "^7.11.0" "@babel/helper-validator-identifier@^7.10.4": version "7.10.4" @@ -268,10 +275,10 @@ chalk "^2.0.0" js-tokens "^4.0.0" -"@babel/parser@^7.1.0", "@babel/parser@^7.10.4", "@babel/parser@^7.10.5", "@babel/parser@^7.7.0": - version "7.10.5" - resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.10.5.tgz#e7c6bf5a7deff957cec9f04b551e2762909d826b" - integrity 
sha512-wfryxy4bE1UivvQKSQDU4/X6dr+i8bctjUjj8Zyt3DQy7NtPizJXT8M52nqpNKL+nq2PW8lxk4ZqLj0fD4B4hQ== +"@babel/parser@^7.1.0", "@babel/parser@^7.10.4", "@babel/parser@^7.11.0", "@babel/parser@^7.11.1", "@babel/parser@^7.7.0": + version "7.11.3" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.11.3.tgz#9e1eae46738bcd08e23e867bab43e7b95299a8f9" + integrity sha512-REo8xv7+sDxkKvoxEywIdsNFiZLybwdI7hcT5uEPyQrSMB4YQ973BfC9OOrD/81MaIjh6UxdulIQXkjmiH3PcA== "@babel/plugin-proposal-async-generator-functions@^7.10.4": version "7.10.5" @@ -298,6 +305,14 @@ "@babel/helper-plugin-utils" "^7.10.4" "@babel/plugin-syntax-dynamic-import" "^7.8.0" +"@babel/plugin-proposal-export-namespace-from@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-export-namespace-from/-/plugin-proposal-export-namespace-from-7.10.4.tgz#570d883b91031637b3e2958eea3c438e62c05f54" + integrity sha512-aNdf0LY6/3WXkhh0Fdb6Zk9j1NMD8ovj3F6r0+3j837Pn1S1PdNtcwJ5EG9WkVPNHPxyJDaxMaAOVq4eki0qbg== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + "@babel/plugin-syntax-export-namespace-from" "^7.8.3" + "@babel/plugin-proposal-json-strings@^7.10.4": version "7.10.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.10.4.tgz#593e59c63528160233bd321b1aebe0820c2341db" @@ -306,6 +321,14 @@ "@babel/helper-plugin-utils" "^7.10.4" "@babel/plugin-syntax-json-strings" "^7.8.0" +"@babel/plugin-proposal-logical-assignment-operators@^7.11.0": + version "7.11.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-logical-assignment-operators/-/plugin-proposal-logical-assignment-operators-7.11.0.tgz#9f80e482c03083c87125dee10026b58527ea20c8" + integrity sha512-/f8p4z+Auz0Uaf+i8Ekf1iM7wUNLcViFUGiPxKeXvxTSl63B875YPiVdUDdem7hREcI0E0kSpEhS8tF5RphK7Q== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" + 
"@babel/plugin-proposal-nullish-coalescing-operator@^7.10.4": version "7.10.4" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-nullish-coalescing-operator/-/plugin-proposal-nullish-coalescing-operator-7.10.4.tgz#02a7e961fc32e6d5b2db0649e01bf80ddee7e04a" @@ -322,10 +345,10 @@ "@babel/helper-plugin-utils" "^7.10.4" "@babel/plugin-syntax-numeric-separator" "^7.10.4" -"@babel/plugin-proposal-object-rest-spread@^7.10.4", "@babel/plugin-proposal-object-rest-spread@^7.7.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.10.4.tgz#50129ac216b9a6a55b3853fdd923e74bf553a4c0" - integrity sha512-6vh4SqRuLLarjgeOf4EaROJAHjvu9Gl+/346PbDH9yWbJyfnJ/ah3jmYKYtswEyCoWZiidvVHjHshd4WgjB9BA== +"@babel/plugin-proposal-object-rest-spread@^7.11.0", "@babel/plugin-proposal-object-rest-spread@^7.7.4": + version "7.11.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.11.0.tgz#bd81f95a1f746760ea43b6c2d3d62b11790ad0af" + integrity sha512-wzch41N4yztwoRw0ak+37wxwJM2oiIiy6huGCoqkvSTA9acYWcPfn9Y4aJqmFFJ70KTJUu29f3DQ43uJ9HXzEA== dependencies: "@babel/helper-plugin-utils" "^7.10.4" "@babel/plugin-syntax-object-rest-spread" "^7.8.0" @@ -339,12 +362,13 @@ "@babel/helper-plugin-utils" "^7.10.4" "@babel/plugin-syntax-optional-catch-binding" "^7.8.0" -"@babel/plugin-proposal-optional-chaining@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.10.4.tgz#750f1255e930a1f82d8cdde45031f81a0d0adff7" - integrity sha512-ZIhQIEeavTgouyMSdZRap4VPPHqJJ3NEs2cuHs5p0erH+iz6khB0qfgU8g7UuJkG88+fBMy23ZiU+nuHvekJeQ== +"@babel/plugin-proposal-optional-chaining@^7.11.0": + version "7.11.0" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.11.0.tgz#de5866d0646f6afdaab8a566382fe3a221755076" + integrity sha512-v9fZIu3Y8562RRwhm1BbMRxtqZNFmFA2EG+pT2diuU8PT3H6T/KXoZ54KgYisfOFZHV6PfvAiBIZ9Rcz+/JCxA== dependencies: "@babel/helper-plugin-utils" "^7.10.4" + "@babel/helper-skip-transparent-expression-wrappers" "^7.11.0" "@babel/plugin-syntax-optional-chaining" "^7.8.0" "@babel/plugin-proposal-private-methods@^7.10.4": @@ -391,6 +415,13 @@ dependencies: "@babel/helper-plugin-utils" "^7.8.0" +"@babel/plugin-syntax-export-namespace-from@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-export-namespace-from/-/plugin-syntax-export-namespace-from-7.8.3.tgz#028964a9ba80dbc094c915c487ad7c4e7a66465a" + integrity sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q== + dependencies: + "@babel/helper-plugin-utils" "^7.8.3" + "@babel/plugin-syntax-import-meta@^7.8.3": version "7.10.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz#ee601348c370fa334d2207be158777496521fd51" @@ -412,7 +443,7 @@ dependencies: "@babel/helper-plugin-utils" "^7.10.4" -"@babel/plugin-syntax-logical-assignment-operators@^7.8.3": +"@babel/plugin-syntax-logical-assignment-operators@^7.10.4", "@babel/plugin-syntax-logical-assignment-operators@^7.8.3": version "7.10.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz#ca91ef46303530448b906652bac2e9fe9941f699" integrity sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig== @@ -492,9 +523,9 @@ "@babel/helper-plugin-utils" "^7.10.4" "@babel/plugin-transform-block-scoping@^7.10.4": - version "7.10.5" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.10.5.tgz#b81b8aafefbfe68f0f65f7ef397b9ece68a6037d" - integrity sha512-6Ycw3hjpQti0qssQcA6AMSFDHeNJ++R6dIMnpRqUjFeBBTmTDPa8zgF90OVfTvAo11mXZTlVUViY1g8ffrURLg== + version "7.11.1" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.11.1.tgz#5b7efe98852bef8d652c0b28144cd93a9e4b5215" + integrity sha512-00dYeDE0EVEHuuM+26+0w/SCL0BH2Qy7LwHuI4Hi4MH5gkC8/AqMN5uWFJIsoXZrAphiMm1iXzBw6L2T+eA0ew== dependencies: "@babel/helper-plugin-utils" "^7.10.4" @@ -717,9 +748,9 @@ "@babel/helper-plugin-utils" "^7.10.4" "@babel/plugin-transform-runtime@^7.8.3": - version "7.10.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.10.5.tgz#3b39b7b24830e0c2d8ff7a4489fe5cf99fbace86" - integrity sha512-tV4V/FjElJ9lQtyjr5xD2IFFbgY46r7EeVu5a8CpEKT5laheHKSlFeHjpkPppW3PqzGLAuv5k2qZX5LgVZIX5w== + version "7.11.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.11.0.tgz#e27f78eb36f19448636e05c33c90fd9ad9b8bccf" + integrity sha512-LFEsP+t3wkYBlis8w6/kmnd6Kb1dxTd+wGJ8MlxTGzQo//ehtqlVL4S9DNUa53+dtPSQobN2CXx4d81FqC58cw== dependencies: "@babel/helper-module-imports" "^7.10.4" "@babel/helper-plugin-utils" "^7.10.4" @@ -733,12 +764,13 @@ dependencies: "@babel/helper-plugin-utils" "^7.10.4" -"@babel/plugin-transform-spread@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-spread/-/plugin-transform-spread-7.10.4.tgz#4e2c85ea0d6abaee1b24dcfbbae426fe8d674cff" - integrity sha512-1e/51G/Ni+7uH5gktbWv+eCED9pP8ZpRhZB3jOaI3mmzfvJTWHkuyYTv0Z5PYtyM+Tr2Ccr9kUdQxn60fI5WuQ== +"@babel/plugin-transform-spread@^7.11.0": + version "7.11.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-spread/-/plugin-transform-spread-7.11.0.tgz#fa84d300f5e4f57752fe41a6d1b3c554f13f17cc" + integrity 
sha512-UwQYGOqIdQJe4aWNyS7noqAnN2VbaczPLiEtln+zPowRNlD+79w3oi2TWfYe0eZgd+gjZCbsydN7lzWysDt+gw== dependencies: "@babel/helper-plugin-utils" "^7.10.4" + "@babel/helper-skip-transparent-expression-wrappers" "^7.11.0" "@babel/plugin-transform-sticky-regex@^7.10.4": version "7.10.4" @@ -764,9 +796,9 @@ "@babel/helper-plugin-utils" "^7.10.4" "@babel/plugin-transform-typescript@^7.10.4": - version "7.10.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.10.5.tgz#edf353944e979f40d8ff9fe4e9975d0a465037c5" - integrity sha512-YCyYsFrrRMZ3qR7wRwtSSJovPG5vGyG4ZdcSAivGwTfoasMp3VOB/AKhohu3dFtmB4cCDcsndCSxGtrdliCsZQ== + version "7.11.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.11.0.tgz#2b4879676af37342ebb278216dd090ac67f13abb" + integrity sha512-edJsNzTtvb3MaXQwj8403B7mZoGu9ElDJQZOKjGUnvilquxBA3IQoEIOvkX/1O8xfAsnHS/oQhe2w/IXrr+w0w== dependencies: "@babel/helper-create-class-features-plugin" "^7.10.5" "@babel/helper-plugin-utils" "^7.10.4" @@ -788,29 +820,33 @@ "@babel/helper-plugin-utils" "^7.10.4" "@babel/preset-env@^7.7.6": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.10.4.tgz#fbf57f9a803afd97f4f32e4f798bb62e4b2bef5f" - integrity sha512-tcmuQ6vupfMZPrLrc38d0sF2OjLT3/bZ0dry5HchNCQbrokoQi4reXqclvkkAT5b+gWc23meVWpve5P/7+w/zw== + version "7.11.0" + resolved "https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.11.0.tgz#860ee38f2ce17ad60480c2021ba9689393efb796" + integrity sha512-2u1/k7rG/gTh02dylX2kL3S0IJNF+J6bfDSp4DI2Ma8QN6Y9x9pmAax59fsCk6QUQG0yqH47yJWA+u1I1LccAg== dependencies: - "@babel/compat-data" "^7.10.4" + "@babel/compat-data" "^7.11.0" "@babel/helper-compilation-targets" "^7.10.4" "@babel/helper-module-imports" "^7.10.4" "@babel/helper-plugin-utils" "^7.10.4" "@babel/plugin-proposal-async-generator-functions" "^7.10.4" "@babel/plugin-proposal-class-properties" "^7.10.4" 
"@babel/plugin-proposal-dynamic-import" "^7.10.4" + "@babel/plugin-proposal-export-namespace-from" "^7.10.4" "@babel/plugin-proposal-json-strings" "^7.10.4" + "@babel/plugin-proposal-logical-assignment-operators" "^7.11.0" "@babel/plugin-proposal-nullish-coalescing-operator" "^7.10.4" "@babel/plugin-proposal-numeric-separator" "^7.10.4" - "@babel/plugin-proposal-object-rest-spread" "^7.10.4" + "@babel/plugin-proposal-object-rest-spread" "^7.11.0" "@babel/plugin-proposal-optional-catch-binding" "^7.10.4" - "@babel/plugin-proposal-optional-chaining" "^7.10.4" + "@babel/plugin-proposal-optional-chaining" "^7.11.0" "@babel/plugin-proposal-private-methods" "^7.10.4" "@babel/plugin-proposal-unicode-property-regex" "^7.10.4" "@babel/plugin-syntax-async-generators" "^7.8.0" "@babel/plugin-syntax-class-properties" "^7.10.4" "@babel/plugin-syntax-dynamic-import" "^7.8.0" + "@babel/plugin-syntax-export-namespace-from" "^7.8.3" "@babel/plugin-syntax-json-strings" "^7.8.0" + "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.0" "@babel/plugin-syntax-numeric-separator" "^7.10.4" "@babel/plugin-syntax-object-rest-spread" "^7.8.0" @@ -843,14 +879,14 @@ "@babel/plugin-transform-regenerator" "^7.10.4" "@babel/plugin-transform-reserved-words" "^7.10.4" "@babel/plugin-transform-shorthand-properties" "^7.10.4" - "@babel/plugin-transform-spread" "^7.10.4" + "@babel/plugin-transform-spread" "^7.11.0" "@babel/plugin-transform-sticky-regex" "^7.10.4" "@babel/plugin-transform-template-literals" "^7.10.4" "@babel/plugin-transform-typeof-symbol" "^7.10.4" "@babel/plugin-transform-unicode-escapes" "^7.10.4" "@babel/plugin-transform-unicode-regex" "^7.10.4" "@babel/preset-modules" "^0.1.3" - "@babel/types" "^7.10.4" + "@babel/types" "^7.11.0" browserslist "^4.12.0" core-js-compat "^3.6.2" invariant "^2.2.2" @@ -890,17 +926,17 @@ "@babel/plugin-transform-typescript" "^7.10.4" "@babel/runtime-corejs3@^7.10.2": - version "7.10.5" 
- resolved "https://registry.yarnpkg.com/@babel/runtime-corejs3/-/runtime-corejs3-7.10.5.tgz#a57fe6c13045ca33768a2aa527ead795146febe1" - integrity sha512-RMafpmrNB5E/bwdSphLr8a8++9TosnyJp98RZzI6VOx2R2CCMpsXXXRvmI700O9oEKpXdZat6oEK68/F0zjd4A== + version "7.11.2" + resolved "https://registry.yarnpkg.com/@babel/runtime-corejs3/-/runtime-corejs3-7.11.2.tgz#02c3029743150188edeb66541195f54600278419" + integrity sha512-qh5IR+8VgFz83VBa6OkaET6uN/mJOhHONuy3m1sgF0CV6mXdPSEBdA7e1eUbVvyNtANjMbg22JUv71BaDXLY6A== dependencies: core-js-pure "^3.0.0" regenerator-runtime "^0.13.4" -"@babel/runtime@^7.0.0", "@babel/runtime@^7.10.2", "@babel/runtime@^7.10.3", "@babel/runtime@^7.5.5", "@babel/runtime@^7.8.4", "@babel/runtime@^7.8.7", "@babel/runtime@^7.9.2": - version "7.10.5" - resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.10.5.tgz#303d8bd440ecd5a491eae6117fd3367698674c5c" - integrity sha512-otddXKhdNn7d0ptoFRHtMLa8LqDxLYwTjB4nYgM1yy5N6gU/MUf8zqyyLltCH3yAVitBzmwK4us+DD0l/MauAg== +"@babel/runtime@^7.0.0", "@babel/runtime@^7.10.2", "@babel/runtime@^7.10.3", "@babel/runtime@^7.5.5", "@babel/runtime@^7.7.2", "@babel/runtime@^7.8.4", "@babel/runtime@^7.8.7", "@babel/runtime@^7.9.2": + version "7.11.2" + resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.11.2.tgz#f549c13c754cc40b87644b9fa9f09a6a95fe0736" + integrity sha512-TeWkU52so0mPtDcaCTxNBI/IHiz0pZgr8VEFqXFtZWpYD08ZB6FaSwVAS8MKRQAP3bYKiVjwysOJgMFY28o6Tw== dependencies: regenerator-runtime "^0.13.4" @@ -913,25 +949,25 @@ "@babel/parser" "^7.10.4" "@babel/types" "^7.10.4" -"@babel/traverse@^7.1.0", "@babel/traverse@^7.10.4", "@babel/traverse@^7.10.5", "@babel/traverse@^7.7.0": - version "7.10.5" - resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.10.5.tgz#77ce464f5b258be265af618d8fddf0536f20b564" - integrity sha512-yc/fyv2gUjPqzTz0WHeRJH2pv7jA9kA7mBX2tXl/x5iOE81uaVPuGPtaYk7wmkx4b67mQ7NqI8rmT2pF47KYKQ== +"@babel/traverse@^7.1.0", "@babel/traverse@^7.10.4", "@babel/traverse@^7.11.0", 
"@babel/traverse@^7.7.0": + version "7.11.0" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.11.0.tgz#9b996ce1b98f53f7c3e4175115605d56ed07dd24" + integrity sha512-ZB2V+LskoWKNpMq6E5UUCrjtDUh5IOTAyIl0dTjIEoXum/iKWkoIEKIRDnUucO6f+2FzNkE0oD4RLKoPIufDtg== dependencies: "@babel/code-frame" "^7.10.4" - "@babel/generator" "^7.10.5" + "@babel/generator" "^7.11.0" "@babel/helper-function-name" "^7.10.4" - "@babel/helper-split-export-declaration" "^7.10.4" - "@babel/parser" "^7.10.5" - "@babel/types" "^7.10.5" + "@babel/helper-split-export-declaration" "^7.11.0" + "@babel/parser" "^7.11.0" + "@babel/types" "^7.11.0" debug "^4.1.0" globals "^11.1.0" lodash "^4.17.19" -"@babel/types@^7.0.0", "@babel/types@^7.10.4", "@babel/types@^7.10.5", "@babel/types@^7.3.0", "@babel/types@^7.3.3", "@babel/types@^7.4.4", "@babel/types@^7.7.0": - version "7.10.5" - resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.10.5.tgz#d88ae7e2fde86bfbfe851d4d81afa70a997b5d15" - integrity sha512-ixV66KWfCI6GKoA/2H9v6bQdbfXEwwpOdQ8cRvb4F+eyvhlaHxWFMQB4+3d9QFJXZsiiiqVrewNV0DFEQpyT4Q== +"@babel/types@^7.0.0", "@babel/types@^7.10.4", "@babel/types@^7.10.5", "@babel/types@^7.11.0", "@babel/types@^7.3.0", "@babel/types@^7.3.3", "@babel/types@^7.4.4", "@babel/types@^7.7.0": + version "7.11.0" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.11.0.tgz#2ae6bf1ba9ae8c3c43824e5861269871b206e90d" + integrity sha512-O53yME4ZZI0jO1EVGtF1ePGl0LHirG4P1ibcD80XyzZcKhcMFeCXmh4Xb1ifGBIV233Qg12x4rBfQgA+tmOukA== dependencies: "@babel/helper-validator-identifier" "^7.10.4" lodash "^4.17.19" @@ -950,10 +986,54 @@ exec-sh "^0.3.2" minimist "^1.2.0" +"@cypress/listr-verbose-renderer@^0.4.1": + version "0.4.1" + resolved "https://registry.yarnpkg.com/@cypress/listr-verbose-renderer/-/listr-verbose-renderer-0.4.1.tgz#a77492f4b11dcc7c446a34b3e28721afd33c642a" + integrity sha1-p3SS9LEdzHxEajSz4ochr9M8ZCo= + dependencies: + chalk "^1.1.3" + cli-cursor "^1.0.2" + date-fns "^1.27.2" + 
figures "^1.7.0" + +"@cypress/request@^2.88.5": + version "2.88.5" + resolved "https://registry.yarnpkg.com/@cypress/request/-/request-2.88.5.tgz#8d7ecd17b53a849cfd5ab06d5abe7d84976375d7" + integrity sha512-TzEC1XMi1hJkywWpRfD2clreTa/Z+lOrXDCxxBTBPEcY5azdPi56A6Xw+O4tWJnaJH3iIE7G5aDXZC6JgRZLcA== + dependencies: + aws-sign2 "~0.7.0" + aws4 "^1.8.0" + caseless "~0.12.0" + combined-stream "~1.0.6" + extend "~3.0.2" + forever-agent "~0.6.1" + form-data "~2.3.2" + har-validator "~5.1.3" + http-signature "~1.2.0" + is-typedarray "~1.0.0" + isstream "~0.1.2" + json-stringify-safe "~5.0.1" + mime-types "~2.1.19" + oauth-sign "~0.9.0" + performance-now "^2.1.0" + qs "~6.5.2" + safe-buffer "^5.1.2" + tough-cookie "~2.5.0" + tunnel-agent "^0.6.0" + uuid "^3.3.2" + +"@cypress/xvfb@^1.2.4": + version "1.2.4" + resolved "https://registry.yarnpkg.com/@cypress/xvfb/-/xvfb-1.2.4.tgz#2daf42e8275b39f4aa53c14214e557bd14e7748a" + integrity sha512-skbBzPggOVYCbnGgV+0dmBdW/s77ZkAOXIC1knS8NagwDjBrNC1LuXtQJeiN6l+m7lzmHtaoUw/ctJKdqkG57Q== + dependencies: + debug "^3.1.0" + lodash.once "^4.1.1" + "@elastic/elasticsearch@^7.2.0": - version "7.8.0" - resolved "https://registry.yarnpkg.com/@elastic/elasticsearch/-/elasticsearch-7.8.0.tgz#3f9ee54fe8ef79874ebd231db03825fa500a7111" - integrity sha512-rUOTNN1At0KoN0Fcjd6+J7efghuURnoMTB/od9EMK6Mcdebi6N3z5ulShTsKRn6OanS9Eq3l/OmheQY1Y+WLcg== + version "7.9.0" + resolved "https://registry.yarnpkg.com/@elastic/elasticsearch/-/elasticsearch-7.9.0.tgz#098f8adbe45cba1864ea5edc59638ea983904fe5" + integrity sha512-iSLQvQafspN03YayzccShkKgJeRsUbncbtIhIL2SeiH01xwdnOZcp0nCvSNaMsH28A3YQ4ogTs9K8eXe42UaUA== dependencies: debug "^4.1.1" decompress-response "^4.2.0" @@ -1247,6 +1327,13 @@ "@nodelib/fs.scandir" "2.1.3" fastq "^1.6.0" +"@samverschueren/stream-to-observable@^0.3.0": + version "0.3.1" + resolved "https://registry.yarnpkg.com/@samverschueren/stream-to-observable/-/stream-to-observable-0.3.1.tgz#a21117b19ee9be70c379ec1877537ef2e1c63301" + integrity 
sha512-c/qwwcHyafOQuVQJj0IlBjf5yYgBI7YPJ77k4fOJYesb41jio65eaJODRUmfYKhTOFBrIZ66kgvGPlNbjuoRdQ== + dependencies: + any-observable "^0.3.0" + "@sinonjs/commons@^1.7.0": version "1.8.1" resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-1.8.1.tgz#e7df00f98a203324f6dc7cc606cad9d4a8ab2217" @@ -1255,20 +1342,20 @@ type-detect "4.0.8" "@testing-library/dom@^7.17.1": - version "7.21.1" - resolved "https://registry.yarnpkg.com/@testing-library/dom/-/dom-7.21.1.tgz#c59f50fddc33db34547a7860f969dbede110623d" - integrity sha512-BVFZeCtZ4cbFqOr/T8rS8q8tfK998SZeC0VcBUGBp3uEr2NVjPaImnzHPJWUx3A+JQqT01aG60SZ7kuyuZCZUQ== + version "7.22.2" + resolved "https://registry.yarnpkg.com/@testing-library/dom/-/dom-7.22.2.tgz#6deaa828500993cc94bdd62875c251b5b5b70d69" + integrity sha512-taxURh+4Lwr//uC1Eghat95aMnTlI4G4ETosnZK0wliwHWdutLDVKIvHXAOYdXGdzrBAy1wNhSGmNBbZ72ml4g== dependencies: "@babel/runtime" "^7.10.3" "@types/aria-query" "^4.2.0" aria-query "^4.2.2" - dom-accessibility-api "^0.4.6" + dom-accessibility-api "^0.5.0" pretty-format "^25.5.0" "@testing-library/jest-dom@^5.5.0": - version "5.11.1" - resolved "https://registry.yarnpkg.com/@testing-library/jest-dom/-/jest-dom-5.11.1.tgz#b9541d7625cec9e5feb647f49a96c43f7c055cdd" - integrity sha512-NHOHjDwyBoqM7mXjNLieSp/6vJ17DILzhNTw7+RarluaBkyWRzWgFj+d6xnd1adMBlwfQSeR2FWGTxHXCxeMSA== + version "5.11.3" + resolved "https://registry.yarnpkg.com/@testing-library/jest-dom/-/jest-dom-5.11.3.tgz#3802cb244e9ab50559a20344698a2d41f9bf11ec" + integrity sha512-vP8ABJt4+YIzu9UItbpJ6nM5zN3g9/tpLcp2DJiXyfX9gnwgcmLsa42+YiohNGEtSUTsseb6xB9HAwlgk8WdaQ== dependencies: "@babel/runtime" "^7.9.2" "@types/testing-library__jest-dom" "^5.9.1" @@ -1282,9 +1369,9 @@ redent "^3.0.0" "@testing-library/react@^10.0.3": - version "10.4.7" - resolved "https://registry.yarnpkg.com/@testing-library/react/-/react-10.4.7.tgz#fc14847fb70a5e93576b8f7f0d1490ead02a9061" - integrity 
sha512-hUYbum3X2f1ZKusKfPaooKNYqE/GtPiQ+D2HJaJ4pkxeNJQFVUEvAvEh9+3QuLdBeTWkDMNY5NSijc5+pGdM4Q== + version "10.4.8" + resolved "https://registry.yarnpkg.com/@testing-library/react/-/react-10.4.8.tgz#5eb730291b8fd81cdb2d8877770d060b044ae4a4" + integrity sha512-clgpFR6QHiRRcdhFfAKDhH8UXpNASyfkkANhtCsCVBnai+O+mK1rGtMES+Apc7ql5Wyxu7j8dcLiC4pV5VblHA== dependencies: "@babel/runtime" "^7.10.3" "@testing-library/dom" "^7.17.1" @@ -1402,9 +1489,9 @@ "@types/shot" "*" "@types/history@*": - version "4.7.6" - resolved "https://registry.yarnpkg.com/@types/history/-/history-4.7.6.tgz#ed8fc802c45b8e8f54419c2d054e55c9ea344356" - integrity sha512-GRTZLeLJ8ia00ZH8mxMO8t0aC9M1N9bN461Z2eaRurJo6Fpa+utgCwLzI4jQHcrdzuzp5WPN9jRwpsCQ1VhJ5w== + version "4.7.7" + resolved "https://registry.yarnpkg.com/@types/history/-/history-4.7.7.tgz#613957d900fab9ff84c8dfb24fa3eef0c2a40896" + integrity sha512-2xtoL22/3Mv6a70i4+4RB7VgbDDORoWwjcqeNysojZA0R7NK17RbY5Gof/2QiFfJgX+KkWghbwJ+d/2SB8Ndzg== "@types/iron@*": version "5.0.1" @@ -1464,9 +1551,9 @@ integrity sha1-7ihweulOEdK4J7y+UnC86n8+ce4= "@types/lodash@^4.14.116", "@types/lodash@^4.14.150": - version "4.14.157" - resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.14.157.tgz#fdac1c52448861dfde1a2e1515dbc46e54926dc8" - integrity sha512-Ft5BNFmv2pHDgxV5JDsndOWTRJ+56zte0ZpYLowp03tW+K+t8u8YMOzAnpuqPgzX6WO1XpDIUm7u04M8vdDiVQ== + version "4.14.159" + resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.14.159.tgz#61089719dc6fdd9c5cb46efc827f2571d1517065" + integrity sha512-gF7A72f7WQN33DpqOWw9geApQPh4M3PxluMtaHxWHXEGSN12/WbcEk/eNSqWNQcQhF66VSZ06vCF94CrHwXJDg== "@types/mime-db@*": version "1.43.0" @@ -1491,9 +1578,9 @@ integrity sha512-e9wgeY6gaY21on3ve0xAjgBVjGDWq/xUteK0ujsE53bUoxycMkqfnkUgMt6ffZtykZ5X12Mg3T7Pw4TRCObDKg== "@types/node@^13.13.4": - version "13.13.14" - resolved "https://registry.yarnpkg.com/@types/node/-/node-13.13.14.tgz#20cd7d2a98f0c3b08d379f4ea9e6b315d2019529" - integrity 
sha512-Az3QsOt1U/K1pbCQ0TXGELTuTkPLOiFIQf3ILzbOyo0FqgV9SxRnxbxM5QlAveERZMHpZY+7u3Jz2tKyl+yg6g== + version "13.13.15" + resolved "https://registry.yarnpkg.com/@types/node/-/node-13.13.15.tgz#fe1cc3aa465a3ea6858b793fd380b66c39919766" + integrity sha512-kwbcs0jySLxzLsa2nWUAGOd/s21WU1jebrEdtzhsj1D4Yps1EOuyI1Qcu+FD56dL7NRNIJtDDjcqIG22NwkgLw== "@types/normalize-package-data@^2.4.0": version "2.4.0" @@ -1540,9 +1627,9 @@ "@types/react" "*" "@types/react-input-autosize@^2.0.2": - version "2.0.2" - resolved "https://registry.yarnpkg.com/@types/react-input-autosize/-/react-input-autosize-2.0.2.tgz#6ccdfb100c21b6096c1a04c3c3fac196b0ce61c1" - integrity sha512-QzewaD5kog7c6w5e3dretb+50oM8RDdDvVumQKCtPjI6VHyR8lA/HxCiTrv5l9Vgbi4NCitYuix/NorOevlrng== + version "2.2.0" + resolved "https://registry.yarnpkg.com/@types/react-input-autosize/-/react-input-autosize-2.2.0.tgz#d62b07567088e547500f4693ae25dce0639c1b4e" + integrity sha512-8NO64XLmdRKUHeteXnweVnXuuSQr5HMSa4vRyNBUKOeZlimvgHPMtRchFHVHO9k7VpDoufCFYMJ6XHJ44qMTBQ== dependencies: "@types/react" "*" @@ -1588,12 +1675,12 @@ csstype "^2.2.0" "@types/react@^16.3.14": - version "16.9.43" - resolved "https://registry.yarnpkg.com/@types/react/-/react-16.9.43.tgz#c287f23f6189666ee3bebc2eb8d0f84bcb6cdb6b" - integrity sha512-PxshAFcnJqIWYpJbLPriClH53Z2WlJcVZE+NP2etUtWQs2s7yIMj3/LDKZT/5CHJ/F62iyjVCDu2H3jHEXIxSg== + version "16.9.46" + resolved "https://registry.yarnpkg.com/@types/react/-/react-16.9.46.tgz#f0326cd7adceda74148baa9bff6e918632f5069e" + integrity sha512-dbHzO3aAq1lB3jRQuNpuZ/mnu+CdD3H0WVaaBQA8LTT3S33xhVBUj232T8M3tAhSWJs/D/UqORYUlJNl/8VQZg== dependencies: "@types/prop-types" "*" - csstype "^2.2.0" + csstype "^3.0.2" "@types/shot@*": version "4.0.0" @@ -1602,15 +1689,25 @@ dependencies: "@types/node" "*" +"@types/sinonjs__fake-timers@^6.0.1": + version "6.0.1" + resolved "https://registry.yarnpkg.com/@types/sinonjs__fake-timers/-/sinonjs__fake-timers-6.0.1.tgz#681df970358c82836b42f989188d133e218c458e" + integrity 
sha512-yYezQwGWty8ziyYLdZjwxyMb0CZR49h8JALHGrxjQHWlqGgc8kLdHEgWrgL0uZ29DMvEVBDnHU2Wg36zKSIUtA== + +"@types/sizzle@^2.3.2": + version "2.3.2" + resolved "https://registry.yarnpkg.com/@types/sizzle/-/sizzle-2.3.2.tgz#a811b8c18e2babab7d542b3365887ae2e4d9de47" + integrity sha512-7EJYyKTL7tFR8+gDbB6Wwz/arpGa0Mywk1TJbNzKzHtzbwVmY4HR9WqS5VV7dsBUKQmPNr192jHr/VpBluj/hg== + "@types/stack-utils@^1.0.1": version "1.0.1" resolved "https://registry.yarnpkg.com/@types/stack-utils/-/stack-utils-1.0.1.tgz#0a851d3bd96498fa25c33ab7278ed3bd65f06c3e" integrity sha512-l42BggppR6zLmpfU6fq9HEa2oGPEI8yrSPL3GITjfRInppYFahObbIQOQK3UGxEnyQpltZLaPe75046NOZQikw== "@types/testing-library__jest-dom@^5.9.1": - version "5.9.1" - resolved "https://registry.yarnpkg.com/@types/testing-library__jest-dom/-/testing-library__jest-dom-5.9.1.tgz#aba5ee062b7880f69c212ef769389f30752806e5" - integrity sha512-yYn5EKHO3MPEMSOrcAb1dLWY+68CG29LiXKsWmmpVHqoP5+ZRiAVLyUHvPNrO2dABDdUGZvavMsaGpWNjM6N2g== + version "5.9.2" + resolved "https://registry.yarnpkg.com/@types/testing-library__jest-dom/-/testing-library__jest-dom-5.9.2.tgz#59e4771a1cf87d51e89a5cc8195cd3b647cba322" + integrity sha512-K7nUSpH/5i8i0NagTJ+uFUDRueDlnMNhJtMjMwTGPPSqyImbWC/hgKPDCKt6Phu2iMJg2kWqlax+Ucj2DKMwpA== dependencies: "@types/jest" "*" @@ -1620,9 +1717,9 @@ integrity sha512-FA/BWv8t8ZWJ+gEOnLLd8ygxH/2UFbAvgEonyfN6yWGLKc7zVjbpl2Y4CTjid9h2RfgPP6SEt6uHwEOply00yw== "@types/yargs@^13.0.0": - version "13.0.9" - resolved "https://registry.yarnpkg.com/@types/yargs/-/yargs-13.0.9.tgz#44028e974343c7afcf3960f1a2b1099c39a7b5e1" - integrity sha512-xrvhZ4DZewMDhoH1utLtOAwYQy60eYFoXeje30TzM3VOvQlBwQaEpKFq5m34k1wOw2AKIi2pwtiAjdmhvlBUzg== + version "13.0.10" + resolved "https://registry.yarnpkg.com/@types/yargs/-/yargs-13.0.10.tgz#e77bf3fc73c781d48c2eb541f87c453e321e5f4b" + integrity sha512-MU10TSgzNABgdzKvQVW1nuuT+sgBMWeXNc3XOs5YXV5SDAK+PPja2eUuBNB9iqElu03xyEDqlnGw0jgl4nbqGQ== dependencies: "@types/yargs-parser" "*" @@ -1812,9 +1909,9 @@ integrity 
sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ== abab@^2.0.0: - version "2.0.3" - resolved "https://registry.yarnpkg.com/abab/-/abab-2.0.3.tgz#623e2075e02eb2d3f2475e49f99c91846467907a" - integrity sha512-tsFzPpcttalNjFBCFMqsKYQcWxxen1pgJR56by//QwvJc4/OUS3kPOOttx2tSIfjsylB0pYu7f5D3K1RCxUnUg== + version "2.0.4" + resolved "https://registry.yarnpkg.com/abab/-/abab-2.0.4.tgz#6dfa57b417ca06d21b2478f0e638302f99c2405c" + integrity sha512-Eu9ELJWCz/c1e9gTiCY+FceWxcqzjYEbqMgtndnuSqZSUCOL73TWNK2mHfIj4Cw2E/ongOp+JISVNCmovt2KYQ== abbrev@1: version "1.1.1" @@ -1845,9 +1942,9 @@ acorn@^6.0.1, acorn@^6.4.1: integrity sha512-ZVA9k326Nwrj3Cj9jlh3wGFutC2ZornPNARZwsNYqQYgN0EsV2d53w5RN/co65Ohn4sUAUtb1rSUAOD6XN9idA== acorn@^7.1.0, acorn@^7.1.1: - version "7.3.1" - resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.3.1.tgz#85010754db53c3fbaf3b9ea3e083aa5c5d147ffd" - integrity sha512-tLc0wSnatxAQHVHUapaHdz72pi9KUyHjq5KyHjGg9Y8Ifdc79pTh2XvI6I1/chZbnM7QtNKzh66ooDogPZSleA== + version "7.4.0" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.0.tgz#e1ad486e6c54501634c6c397c5c121daa383607c" + integrity sha512-+G7P8jJmCHr+S+cLfQxygbWhXy+8YTVGzAkpEbcLo2mLoL7tij/VG41QSHACSf5QgYRhMZYHuNc6drJaO0Da+w== aggregate-error@^3.0.0: version "3.0.1" @@ -1863,14 +1960,14 @@ ajv-errors@^1.0.0: integrity sha512-DCRfO/4nQ+89p/RK43i8Ezd41EqdGIU4ld7nGF8OQ14oc/we5rEntLCUa7+jrn3nn83BosfwZA0wb4pon2o8iQ== ajv-keywords@^3.1.0, ajv-keywords@^3.4.1: - version "3.5.1" - resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-3.5.1.tgz#b83ca89c5d42d69031f424cad49aada0236c6957" - integrity sha512-KWcq3xN8fDjSB+IMoh2VaXVhRI0BBGxoYp3rx7Pkb6z0cFjYR9Q9l4yZqqals0/zsioCmocC5H6UvsGD4MoIBA== + version "3.5.2" + resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-3.5.2.tgz#31f29da5ab6e00d1c2d329acf7b5929614d5014d" + integrity sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ== -ajv@^6.1.0, ajv@^6.10.0, 
ajv@^6.10.2, ajv@^6.5.5: - version "6.12.3" - resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.3.tgz#18c5af38a111ddeb4f2697bd78d68abc1cabd706" - integrity sha512-4K0cK3L1hsqk9xIb2z9vs/XU+PGJZ9PNpJRDS9YLzmNdX6jmVPfamLvTJr0aDAusnHyCHO6MjzlkAsgtqp9teA== +ajv@^6.1.0, ajv@^6.10.0, ajv@^6.10.2, ajv@^6.12.3: + version "6.12.4" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.4.tgz#0614facc4522127fa713445c6bfd3ebd376e2234" + integrity sha512-eienB2c9qVQs2KWexhkrdMLVDoIQCz5KSeLxwg9Lzk4DOfBtIK9PQwwufcsn1jjGuf9WZmqPMbGxOzfcuphJCQ== dependencies: fast-deep-equal "^3.1.1" fast-json-stable-stringify "^2.0.0" @@ -1899,6 +1996,11 @@ ansi-escapes@^1.1.0: resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-1.4.0.tgz#d3a8a83b319aa67793662b13e761c7911422306e" integrity sha1-06ioOzGapneTZisT52HHkRQiMG4= +ansi-escapes@^3.0.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-3.2.0.tgz#8780b98ff9dbf5638152d1f1fe5c1d7b4442976b" + integrity sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ== + ansi-escapes@^4.2.1, ansi-escapes@^4.3.0: version "4.3.1" resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-4.3.1.tgz#a5c47cc43181f1f38ffd7076837700d395522a61" @@ -1951,6 +2053,11 @@ ansi-wrap@^0.1.0: resolved "https://registry.yarnpkg.com/ansi-wrap/-/ansi-wrap-0.1.0.tgz#a82250ddb0015e9a27ca82e82ea603bbfa45efaf" integrity sha1-qCJQ3bABXponyoLoLqYDu/pF768= +any-observable@^0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/any-observable/-/any-observable-0.3.0.tgz#af933475e5806a67d0d7df090dd5e8bef65d119b" + integrity sha512-/FQM1EDkTsf63Ub2C6O7GuYFDsSXUwsaZDurV0np41ocwq0jthUAYCmhBX9f+KwlaCgIuWyr/4WlUQUBfKfZog== + anymatch@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-2.0.0.tgz#bcb24b4f37934d9aa7ac17b4adaf89e7c76ef2eb" @@ -1979,6 +2086,11 @@ aproba@^1.0.3, aproba@^1.1.1: resolved 
"https://registry.yarnpkg.com/aproba/-/aproba-1.2.0.tgz#6802e6264efd18c790a1b0d517f0f2627bf2c94a" integrity sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw== +arch@^2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/arch/-/arch-2.1.2.tgz#0c52bbe7344bb4fa260c443d2cbad9c00ff2f0bf" + integrity sha512-NTBIIbAfkJeIletyABbVtdPgeKfDafR+1mZV/AyyfC1UkVkp9iUjV+wwmqtUgphHYajbI86jejBJp5e+jkGTiQ== + are-we-there-yet@~1.1.2: version "1.1.5" resolved "https://registry.yarnpkg.com/are-we-there-yet/-/are-we-there-yet-1.1.5.tgz#4b35c2944f062a8bfcda66410760350fe9ddfc21" @@ -2100,14 +2212,15 @@ asap@~2.0.3: resolved "https://registry.yarnpkg.com/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46" integrity sha1-5QNHYR1+aQlDIIu9r+vLwvuGbUY= -asn1.js@^4.0.0: - version "4.10.1" - resolved "https://registry.yarnpkg.com/asn1.js/-/asn1.js-4.10.1.tgz#b9c2bf5805f1e64aadeed6df3a2bfafb5a73f5a0" - integrity sha512-p32cOF5q0Zqs9uBiONKYLm6BClCoBCM5O9JfeUSlnQLBTxYdTK+pW+nXflm8UkKd2UYlEbYz5qEi0JuZR9ckSw== +asn1.js@^5.2.0: + version "5.4.1" + resolved "https://registry.yarnpkg.com/asn1.js/-/asn1.js-5.4.1.tgz#11a980b84ebb91781ce35b0fdc2ee294e3783f07" + integrity sha512-+I//4cYPccV8LdmBLiX8CYvf9Sp3vQsrqu2QNXRcrbiWvcx/UdlFiqUJJzxRQxgsZmvhXhn4cSKeSmoFjVdupA== dependencies: bn.js "^4.0.0" inherits "^2.0.1" minimalistic-assert "^1.0.0" + safer-buffer "^2.1.0" asn1@~0.2.3: version "0.2.4" @@ -2159,6 +2272,11 @@ async-foreach@^0.1.3: resolved "https://registry.yarnpkg.com/async-foreach/-/async-foreach-0.1.3.tgz#36121f845c0578172de419a97dbeb1d16ec34542" integrity sha1-NhIfhFwFeBct5Bmpfb6x0W7DRUI= +async@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/async/-/async-3.2.0.tgz#b3a2685c5ebb641d3de02d161002c60fc9f85720" + integrity sha512-TR2mEZFVOj2pLStYxLht7TyfuRzaydfpxr3k9RpHIzMgw7A64dzsdqCxH1WJyQdoe8T10nDXd9wnEigmiuHIZw== + asynckit@^0.4.0: version "0.4.0" resolved 
"https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" @@ -2175,9 +2293,9 @@ aws-sign2@~0.7.0: integrity sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg= aws4@^1.8.0: - version "1.10.0" - resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.10.0.tgz#a17b3a8ea811060e74d47d306122400ad4497ae2" - integrity sha512-3YDiu347mtVtjpyV3u5kVqQLP242c06zwDOgpeRnybmXlYYsLbtTrUBUm8i8srONt+FWobl5aibnU1030PeeuA== + version "1.10.1" + resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.10.1.tgz#e1e82e4f3e999e2cfd61b161280d16a111f86428" + integrity sha512-zg7Hz2k5lI8kb7U32998pRRFin7zJlkfezGJjUc2heaD4Pw2wObakCDVzkKztTm/Ln7eiVvYsjqak0Ed4LkMDA== axe-core@^3.5.4: version "3.5.5" @@ -2267,14 +2385,6 @@ babel-preset-jest@^25.5.0: babel-plugin-jest-hoist "^25.5.0" babel-preset-current-node-syntax "^0.1.2" -babel-runtime@^6.26.0: - version "6.26.0" - resolved "https://registry.yarnpkg.com/babel-runtime/-/babel-runtime-6.26.0.tgz#965c7058668e82b55d7bfe04ff2337bc8b5647fe" - integrity sha1-llxwWGaOgrVde/4E/yM3vItWR/4= - dependencies: - core-js "^2.4.0" - regenerator-runtime "^0.11.0" - balanced-match@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767" @@ -2334,7 +2444,7 @@ block-stream@*: dependencies: inherits "~2.0.0" -bluebird@^3.5.5: +bluebird@^3.5.5, bluebird@^3.7.2: version "3.7.2" resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.7.2.tgz#9f229c15be272454ffa973ace0dbee79a1b0c36f" integrity sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg== @@ -2345,9 +2455,9 @@ bn.js@^4.0.0, bn.js@^4.1.0, bn.js@^4.4.0: integrity sha512-E6QoYqCKZfgatHTdHzs1RRKP7ip4vvm+EyRUeE2RF0NblwVvb0p6jSVeNTOFxPn26QXN2o6SMfNxKp6kU8zQaw== bn.js@^5.1.1: - version "5.1.2" - resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-5.1.2.tgz#c9686902d3c9a27729f43ab10f9d79c2004da7b0" - integrity 
sha512-40rZaf3bUNKTVYu9sIeeEGOg7g14Yvnj9kH7b50EiwX0Q7A6umbvfI5tvHaOERH0XigqKkfLkFQxzb4e6CIXnA== + version "5.1.3" + resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-5.1.3.tgz#beca005408f642ebebea80b042b4d18d2ac0ee6b" + integrity sha512-GkTiFpjFtUzU9CbMeJ5iazkCzGL3jrhzerzZIuqLABjbwRaFt33I9tUdSNryIptM+RxDet6OKm2WnLXzW51KsQ== brace-expansion@^1.1.7: version "1.1.11" @@ -2442,15 +2552,15 @@ browserify-rsa@^4.0.0, browserify-rsa@^4.0.1: randombytes "^2.0.1" browserify-sign@^4.0.0: - version "4.2.0" - resolved "https://registry.yarnpkg.com/browserify-sign/-/browserify-sign-4.2.0.tgz#545d0b1b07e6b2c99211082bf1b12cce7a0b0e11" - integrity sha512-hEZC1KEeYuoHRqhGhTy6gWrpJA3ZDjFWv0DE61643ZnOXAKJb3u7yWcrU0mMc9SwAqK1n7myPGndkp0dFG7NFA== + version "4.2.1" + resolved "https://registry.yarnpkg.com/browserify-sign/-/browserify-sign-4.2.1.tgz#eaf4add46dd54be3bb3b36c0cf15abbeba7956c3" + integrity sha512-/vrA5fguVAKKAVTNJjgSm1tRQDHUU6DbwO9IROu/0WAzC8PKhucDSh18J0RMvVeHAn5puMd+QHC2erPRNf8lmg== dependencies: bn.js "^5.1.1" browserify-rsa "^4.0.1" create-hash "^1.2.0" create-hmac "^1.1.7" - elliptic "^6.5.2" + elliptic "^6.5.3" inherits "^2.0.4" parse-asn1 "^5.1.5" readable-stream "^3.6.0" @@ -2464,14 +2574,14 @@ browserify-zlib@^0.2.0: pako "~1.0.5" browserslist@^4.12.0, browserslist@^4.8.5: - version "4.13.0" - resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.13.0.tgz#42556cba011e1b0a2775b611cba6a8eca18e940d" - integrity sha512-MINatJ5ZNrLnQ6blGvePd/QOz9Xtu+Ne+x29iQSCHfkU5BugKVJwZKn/iiL8UbpIpa3JhviKjz+XxMo0m2caFQ== + version "4.14.0" + resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.14.0.tgz#2908951abfe4ec98737b72f34c3bcedc8d43b000" + integrity sha512-pUsXKAF2lVwhmtpeA3LJrZ76jXuusrNyhduuQs7CDFf9foT4Y38aQOserd2lMe5DSSrjf3fx34oHwryuvxAUgQ== dependencies: - caniuse-lite "^1.0.30001093" - electron-to-chromium "^1.3.488" - escalade "^3.0.1" - node-releases "^1.1.58" + caniuse-lite "^1.0.30001111" + electron-to-chromium "^1.3.523" + escalade 
"^3.0.2" + node-releases "^1.1.60" bs-logger@0.x: version "0.2.6" @@ -2562,6 +2672,11 @@ cache-base@^1.0.1: union-value "^1.0.0" unset-value "^1.0.0" +cachedir@^2.3.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/cachedir/-/cachedir-2.3.0.tgz#0c75892a052198f0b21c7c1804d8331edfcae0e8" + integrity sha512-A+Fezp4zxnit6FanDmv9EqXNAi3vt9DWp51/71UEhXukb7QUuvtv9344h91dyAxuTLoSYJFU299qzR3tzwPAhw== + call-me-maybe@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/call-me-maybe/-/call-me-maybe-1.0.1.tgz#26d208ea89e37b5cbde60250a15f031c16a4d66b" @@ -2590,10 +2705,10 @@ camelcase@^5.0.0, camelcase@^5.3.1: resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== -caniuse-lite@^1.0.30001093: - version "1.0.30001104" - resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001104.tgz#4e3d5b3b1dd3c3529f10cb7f519c62ba3e579f5d" - integrity sha512-pkpCg7dmI/a7WcqM2yfdOiT4Xx5tzyoHAXWsX5/HxZ3TemwDZs0QXdqbE0UPLPVy/7BeK7693YfzfRYfu1YVpg== +caniuse-lite@^1.0.30001111: + version "1.0.30001116" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001116.tgz#f3a3dea347f9294a3bdc4292309039cc84117fb8" + integrity sha512-f2lcYnmAI5Mst9+g0nkMIznFGsArRmZ0qU+dnq8l91hymdc2J3SFbiPhOJEeDqC1vtE8nc1qNQyklzB8veJefQ== capture-exit@^2.0.0: version "2.0.0" @@ -2607,7 +2722,7 @@ caseless@~0.12.0: resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc" integrity sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw= -chalk@^1.0.0, chalk@^1.1.1: +chalk@^1.0.0, chalk@^1.1.1, chalk@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/chalk/-/chalk-1.1.3.tgz#a8115c55e4a702fe4d150abd3872822a7e09fc98" integrity sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg= @@ -2618,7 +2733,7 @@ chalk@^1.0.0, chalk@^1.1.1: strip-ansi "^3.0.0" supports-color "^2.0.0" 
-chalk@^2.0.0, chalk@^2.0.1, chalk@^2.1.0, chalk@^2.3.0: +chalk@^2.0.0, chalk@^2.0.1, chalk@^2.1.0, chalk@^2.3.0, chalk@^2.4.1, chalk@^2.4.2: version "2.4.2" resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== @@ -2648,6 +2763,11 @@ chardet@^0.7.0: resolved "https://registry.yarnpkg.com/chardet/-/chardet-0.7.0.tgz#90094849f0937f2eedc2425d0d28a9e5f0cbad9e" integrity sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA== +check-more-types@^2.24.0: + version "2.24.0" + resolved "https://registry.yarnpkg.com/check-more-types/-/check-more-types-2.24.0.tgz#1420ffb10fd444dcfc79b43891bbfffd32a84600" + integrity sha1-FCD/sQ/URNz8ebQ4kbv//TKoRgA= + chokidar@^2.1.8: version "2.1.8" resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-2.1.8.tgz#804b3a7b6a99358c3c5c61e71d8728f041cff917" @@ -2667,10 +2787,10 @@ chokidar@^2.1.8: optionalDependencies: fsevents "^1.2.7" -chokidar@^3.4.0: - version "3.4.1" - resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.4.1.tgz#e905bdecf10eaa0a0b1db0c664481cc4cbc22ba1" - integrity sha512-TQTJyr2stihpC4Sya9hs2Xh+O2wf+igjL36Y75xx2WdHuiICcn/XJza46Jwt0eT5hVpQOzo3FpY3cj3RVYLX0g== +chokidar@^3.4.1: + version "3.4.2" + resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.4.2.tgz#38dc8e658dec3809741eb3ef7bb0a47fe424232d" + integrity sha512-IZHaDeBeI+sZJRX7lGcXsdzgvZqKv6sECqsbErJA4mHWfpRrD8B97kSFN4cQz6nGBGiuFia1MKR4d6c1o8Cv7A== dependencies: anymatch "~3.1.1" braces "~3.0.2" @@ -2734,13 +2854,20 @@ clean-stack@^2.0.0: resolved "https://registry.yarnpkg.com/clean-stack/-/clean-stack-2.2.0.tgz#ee8472dbb129e727b31e8a10a427dee9dfe4008b" integrity sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A== -cli-cursor@^1.0.1: +cli-cursor@^1.0.1, cli-cursor@^1.0.2: version "1.0.2" resolved 
"https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-1.0.2.tgz#64da3f7d56a54412e59794bd62dc35295e8f2987" integrity sha1-ZNo/fValRBLll5S9Ytw1KV6PKYc= dependencies: restore-cursor "^1.0.1" +cli-cursor@^2.0.0, cli-cursor@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-2.1.0.tgz#b35dac376479facc3e94747d41d0d0f5238ffcb5" + integrity sha1-s12sN2R5+sw+lHR9QdDQ9SOP/LU= + dependencies: + restore-cursor "^2.0.0" + cli-cursor@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-3.1.0.tgz#264305a7ae490d1d03bf0c9ba7c925d1753af307" @@ -2748,6 +2875,16 @@ cli-cursor@^3.1.0: dependencies: restore-cursor "^3.1.0" +cli-table3@~0.5.1: + version "0.5.1" + resolved "https://registry.yarnpkg.com/cli-table3/-/cli-table3-0.5.1.tgz#0252372d94dfc40dbd8df06005f48f31f656f202" + integrity sha512-7Qg2Jrep1S/+Q3EceiZtQcDPWxhAvBw+ERf1162v4sikJrvojMHFqXt8QIVha8UlH9rgU0BeWPytZ9/TzYqlUw== + dependencies: + object-assign "^4.1.0" + string-width "^2.1.1" + optionalDependencies: + colors "^1.1.2" + cli-truncate@2.1.0, cli-truncate@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/cli-truncate/-/cli-truncate-2.1.0.tgz#c39e28bf05edcde5be3b98992a22deed5a2b93c7" @@ -2756,6 +2893,14 @@ cli-truncate@2.1.0, cli-truncate@^2.1.0: slice-ansi "^3.0.0" string-width "^4.2.0" +cli-truncate@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/cli-truncate/-/cli-truncate-0.2.1.tgz#9f15cfbb0705005369216c626ac7d05ab90dd574" + integrity sha1-nxXPuwcFAFNpIWxiasfQWrkN1XQ= + dependencies: + slice-ansi "0.0.4" + string-width "^1.0.1" + cli-width@^2.0.0: version "2.2.1" resolved "https://registry.yarnpkg.com/cli-width/-/cli-width-2.2.1.tgz#b0433d0b4e9c847ef18868a4ef16fd5fc8271c48" @@ -2808,7 +2953,7 @@ cloneable-readable@^1.0.0: process-nextick-args "^2.0.0" readable-stream "^2.3.5" -clsx@^1.0.1: +clsx@^1.0.4: version "1.1.1" resolved 
"https://registry.yarnpkg.com/clsx/-/clsx-1.1.1.tgz#98b3134f9abbdf23b2663491ace13c5c03a73188" integrity sha512-6/bPho624p3S2pMyvP5kKBPXnI3ufHLObBFCfgx+LkeR5lg2XYy2hqZqUf45ypD8COn2bhgGJSUE+l5dhNBieA== @@ -2860,6 +3005,11 @@ color-name@~1.1.4: resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== +colors@^1.1.2: + version "1.4.0" + resolved "https://registry.yarnpkg.com/colors/-/colors-1.4.0.tgz#c50491479d4c1bdaed2c9ced32cf7c7dc2360f78" + integrity sha512-a+UqTh4kgZg/SlGvfbzDHpgRu7AAQOmmqRHJnxhRZICKFUT91brVhNNt58CMWU9PsBbv3PDCZUHbVxuDiH2mtA== + combined-stream@^1.0.6, combined-stream@~1.0.6: version "1.0.8" resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" @@ -2872,11 +3022,21 @@ commander@^2.12.1, commander@^2.20.0, commander@^2.9.0: resolved "https://registry.yarnpkg.com/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33" integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ== +commander@^4.1.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/commander/-/commander-4.1.1.tgz#9fd602bd936294e9e9ef46a3f4d6964044b18068" + integrity sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA== + commander@^5.1.0: version "5.1.0" resolved "https://registry.yarnpkg.com/commander/-/commander-5.1.0.tgz#46abbd1652f8e059bddaef99bbdcb2ad9cf179ae" integrity sha512-P0CysNDQ7rtVw4QIQtm+MRxV66vKFSvlsQvGYXZWR3qFU0jlMKHZZZgw8e+8DSah4UDKMqnknRDQz+xuQXQ/Zg== +common-tags@^1.8.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/common-tags/-/common-tags-1.8.0.tgz#8e3153e542d4a39e9b10554434afaaf98956a937" + integrity sha512-6P6g0uetGpW/sdyUy/iQQCbFF0kWVMSIVSyYz7Zgjcgh8mgw8PQzDNZeyZ5DQ2gM7LBoZPHmnjz8rUthkBG5tw== + 
commondir@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/commondir/-/commondir-1.0.1.tgz#ddd800da0c66127393cca5950ea968a3aaf1253b" @@ -2897,7 +3057,7 @@ concat-map@0.0.1: resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= -concat-stream@^1.4.7, concat-stream@^1.5.0: +concat-stream@^1.4.7, concat-stream@^1.5.0, concat-stream@^1.6.2: version "1.6.2" resolved "https://registry.yarnpkg.com/concat-stream/-/concat-stream-1.6.2.tgz#904bdf194cd3122fc675c77fc4ac3d4ff0fd1a34" integrity sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw== @@ -2969,11 +3129,6 @@ core-js@^1.0.0: resolved "https://registry.yarnpkg.com/core-js/-/core-js-1.2.7.tgz#652294c14651db28fa93bd2d5ff2983a4f08c636" integrity sha1-ZSKUwUZR2yj6k70tX/KYOk8IxjY= -core-js@^2.4.0: - version "2.6.11" - resolved "https://registry.yarnpkg.com/core-js/-/core-js-2.6.11.tgz#38831469f9922bded8ee21c9dc46985e0399308c" - integrity sha512-5wjnpaT/3dV+XB4borEsnAYQchn00XSgTAWKDkEqv+K8KevjbzmofK6hfJ9TZIlpj2N0xQpazy7PiRQiWHqzWg== - core-util-is@1.0.2, core-util-is@~1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" @@ -2991,12 +3146,12 @@ cosmiconfig@^6.0.0: yaml "^1.7.2" create-ecdh@^4.0.0: - version "4.0.3" - resolved "https://registry.yarnpkg.com/create-ecdh/-/create-ecdh-4.0.3.tgz#c9111b6f33045c4697f144787f9254cdc77c45ff" - integrity sha512-GbEHQPMOswGpKXM9kCWVrremUcBmjteUaQ01T9rkKCPDXfUHX0IoP9LpHYo2NPFampa4e+/pFDc3jQdxrxQLaw== + version "4.0.4" + resolved "https://registry.yarnpkg.com/create-ecdh/-/create-ecdh-4.0.4.tgz#d6e7f4bffa66736085a0762fd3a632684dabcc4e" + integrity sha512-mf+TCx8wWc9VpuxfP2ht0iSISLZnt0JgWlrOKZiNqyUZWnjIaCIVNQArMHnCZKfEYRg6IM7A+NeJoN8gf/Ws0A== dependencies: bn.js "^4.1.0" - elliptic "^6.0.0" + elliptic "^6.5.3" create-hash@^1.1.0, 
create-hash@^1.1.2, create-hash@^1.2.0: version "1.2.0" @@ -3111,10 +3266,15 @@ cssstyle@^2.0.0: dependencies: cssom "~0.3.6" -csstype@^2.2.0, csstype@^2.6.7: - version "2.6.11" - resolved "https://registry.yarnpkg.com/csstype/-/csstype-2.6.11.tgz#452f4d024149ecf260a852b025e36562a253ffc5" - integrity sha512-l8YyEC9NBkSm783PFTvh0FmJy7s5pFKrDp49ZL7zBGX3fWkO+N4EEyan1qqp8cwPLDcD0OSdyY6hAMoxp34JFw== +csstype@^2.2.0: + version "2.6.13" + resolved "https://registry.yarnpkg.com/csstype/-/csstype-2.6.13.tgz#a6893015b90e84dd6e85d0e3b442a1e84f2dbe0f" + integrity sha512-ul26pfSQTZW8dcOnD2iiJssfXw0gdNVX9IJDH/X3K5DGPfj+fUYe3kB+swUY6BF3oZDxaID3AJt+9/ojSAE05A== + +csstype@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/csstype/-/csstype-3.0.2.tgz#ee5ff8f208c8cd613b389f7b222c9801ca62b3f7" + integrity sha512-ofovWglpqoqbfLNOTBNZLSbMuGrblAf1efvvArGKOZMBrIoJeu5UsAipQolkijtyQx5MtAzT/J9IHj/CEY1mJw== currently-unhandled@^0.4.1: version "0.4.1" @@ -3128,6 +3288,49 @@ cyclist@^1.0.1: resolved "https://registry.yarnpkg.com/cyclist/-/cyclist-1.0.1.tgz#596e9698fd0c80e12038c2b82d6eb1b35b6224d9" integrity sha1-WW6WmP0MgOEgOMK4LW6xs1tiJNk= +cypress@^4.9.0: + version "4.12.1" + resolved "https://registry.yarnpkg.com/cypress/-/cypress-4.12.1.tgz#0ead1b9f4c0917d69d8b57f996b6e01fe693b6ec" + integrity sha512-9SGIPEmqU8vuRA6xst2CMTYd9sCFCxKSzrHt0wr+w2iAQMCIIsXsQ5Gplns1sT6LDbZcmLv6uehabAOl3fhc9Q== + dependencies: + "@cypress/listr-verbose-renderer" "^0.4.1" + "@cypress/request" "^2.88.5" + "@cypress/xvfb" "^1.2.4" + "@types/sinonjs__fake-timers" "^6.0.1" + "@types/sizzle" "^2.3.2" + arch "^2.1.2" + bluebird "^3.7.2" + cachedir "^2.3.0" + chalk "^2.4.2" + check-more-types "^2.24.0" + cli-table3 "~0.5.1" + commander "^4.1.1" + common-tags "^1.8.0" + debug "^4.1.1" + eventemitter2 "^6.4.2" + execa "^1.0.0" + executable "^4.1.1" + extract-zip "^1.7.0" + fs-extra "^8.1.0" + getos "^3.2.1" + is-ci "^2.0.0" + is-installed-globally "^0.3.2" + lazy-ass "^1.6.0" + listr "^0.14.3" + lodash 
"^4.17.19" + log-symbols "^3.0.0" + minimist "^1.2.5" + moment "^2.27.0" + ospath "^1.2.2" + pretty-bytes "^5.3.0" + ramda "~0.26.1" + request-progress "^3.0.0" + supports-color "^7.1.0" + tmp "~0.1.0" + untildify "^4.0.0" + url "^0.11.0" + yauzl "^2.10.0" + damerau-levenshtein@^1.0.6: version "1.0.6" resolved "https://registry.yarnpkg.com/damerau-levenshtein/-/damerau-levenshtein-1.0.6.tgz#143c1641cb3d85c60c32329e26899adea8701791" @@ -3149,6 +3352,11 @@ data-urls@^1.1.0: whatwg-mimetype "^2.2.0" whatwg-url "^7.0.0" +date-fns@^1.27.2: + version "1.30.1" + resolved "https://registry.yarnpkg.com/date-fns/-/date-fns-1.30.1.tgz#2e71bf0b119153dbb4cc4e88d9ea5acfb50dc05c" + integrity sha512-hBSVCvSmWC+QypYObzwGOd9wqdDpOt+0wl0KbU+R+uuZBS1jN8VsD1ss3irQDknRj5NvxiTF6oj/nDRnN/UQNw== + debug@^2.2.0, debug@^2.3.3, debug@^2.6.8, debug@^2.6.9: version "2.6.9" resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" @@ -3156,6 +3364,13 @@ debug@^2.2.0, debug@^2.3.3, debug@^2.6.8, debug@^2.6.9: dependencies: ms "2.0.0" +debug@^3.1.0: + version "3.2.6" + resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.6.tgz#e83d17de16d8a7efb7717edbe5fb10135eee629b" + integrity sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ== + dependencies: + ms "^2.1.1" + debug@^4.0.1, debug@^4.1.0, debug@^4.1.1: version "4.1.1" resolved "https://registry.yarnpkg.com/debug/-/debug-4.1.1.tgz#3b72260255109c6b589cee050f1d516139664791" @@ -3327,18 +3542,18 @@ doctrine@^3.0.0: dependencies: esutils "^2.0.2" -dom-accessibility-api@^0.4.6: - version "0.4.6" - resolved "https://registry.yarnpkg.com/dom-accessibility-api/-/dom-accessibility-api-0.4.6.tgz#f3f2af68aee01b1c862f37918d41841bb1aaf92a" - integrity sha512-qxFVFR/ymtfamEQT/AsYLe048sitxFCoCHiM+vuOdR3fE94i3so2SCFJxyz/RxV69PZ+9FgToYWOd7eqJqcbYw== +dom-accessibility-api@^0.5.0: + version "0.5.0" + resolved 
"https://registry.yarnpkg.com/dom-accessibility-api/-/dom-accessibility-api-0.5.0.tgz#fddffd04e178796e241436c3f21be2f89c91afac" + integrity sha512-eCVf9n4Ni5UQAFc2+fqfMPHdtiX7DA0rLakXgNBZfXNJzEbNo3MQIYd+zdYpFBqAaGYVrkd8leNSLGPrG4ODmA== -dom-helpers@^5.0.0: - version "5.1.4" - resolved "https://registry.yarnpkg.com/dom-helpers/-/dom-helpers-5.1.4.tgz#4609680ab5c79a45f2531441f1949b79d6587f4b" - integrity sha512-TjMyeVUvNEnOnhzs6uAn9Ya47GmMo3qq7m+Lr/3ON0Rs5kHvb8I+SQYjLUSYn7qhEm0QjW0yrBkvz9yOrwwz1A== +dom-helpers@^5.1.3: + version "5.2.0" + resolved "https://registry.yarnpkg.com/dom-helpers/-/dom-helpers-5.2.0.tgz#57fd054c5f8f34c52a3eeffdb7e7e93cd357d95b" + integrity sha512-Ru5o9+V8CpunKnz5LGgWXkmrH/20cGKwcHwS4m73zIvs54CN9epEmT/HLqFJW3kXpakAFkEdzgy1hzlJe3E4OQ== dependencies: "@babel/runtime" "^7.8.7" - csstype "^2.6.7" + csstype "^3.0.2" domain-browser@^1.1.1: version "1.2.0" @@ -3370,12 +3585,17 @@ ecc-jsbn@~0.1.1: jsbn "~0.1.0" safer-buffer "^2.1.0" -electron-to-chromium@^1.3.488: - version "1.3.501" - resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.501.tgz#faa17a2cb0105ee30d5e1ca87eae7d8e85dd3175" - integrity sha512-tyzuKaV2POw2mtqBBzQGNBojMZzH0MRu8bT8T/50x+hWeucyG/9pkgAATy+PcM2ySNM9+8eG2VllY9c6j4i+bg== +electron-to-chromium@^1.3.523: + version "1.3.539" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.539.tgz#9952fb0bf3fb4295282e7df35f6e7a2a8b89d3fd" + integrity sha512-rM0LWDIstdqfaRUADZetNrL6+zd/0NBmavbMEhBXgc2u/CC1d1GaDyN5hho29fFvBiOVFwrSWZkzmNcZnCEDog== -elliptic@^6.0.0, elliptic@^6.5.2: +elegant-spinner@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/elegant-spinner/-/elegant-spinner-1.0.1.tgz#db043521c95d7e303fd8f345bedc3349cfb0729e" + integrity sha1-2wQ1IcldfjA/2PNFvtwzSc+wcp4= + +elliptic@^6.5.3: version "6.5.3" resolved "https://registry.yarnpkg.com/elliptic/-/elliptic-6.5.3.tgz#cb59eb2efdaf73a0bd78ccd7015a62ad6e0f93d6" integrity 
sha512-IMqzv5wNQf+E6aHeIqATs0tOLeOTwj1QKbRcS3jBbYkl5oLAserA8yJTT7/VyHUYG91PRmPyeQDObKLPpeS4dw== @@ -3422,7 +3642,7 @@ end-of-stream@^1.0.0, end-of-stream@^1.1.0: dependencies: once "^1.4.0" -enhanced-resolve@^4.0.0, enhanced-resolve@^4.1.0: +enhanced-resolve@^4.0.0, enhanced-resolve@^4.3.0: version "4.3.0" resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-4.3.0.tgz#3b806f3bfafc1ec7de69551ef93cca46c1704126" integrity sha512-3e87LvavsdxyoCfGusJnrZ5G8SLPOFeHSNpZI/ATL9a5leXo2k0w6MKnbqhdBad9qTobSfB20Ld7UmgoNbAZkQ== @@ -3487,7 +3707,7 @@ es-to-primitive@^1.2.1: is-date-object "^1.0.1" is-symbol "^1.0.2" -escalade@^3.0.1: +escalade@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.0.2.tgz#6a580d70edb87880f22b4c91d0d56078df6962c4" integrity sha512-gPYAU37hYCUhW5euPeR+Y74F7BL+IBsV93j5cvGriSaD1aG6MGsqsV1yamRdrWrb2j3aiZvb0X+UBOWpx3JWtQ== @@ -3576,9 +3796,9 @@ eslint-plugin-import@^2.14.0: tsconfig-paths "^3.9.0" eslint-plugin-jest@^23.8.2: - version "23.18.0" - resolved "https://registry.yarnpkg.com/eslint-plugin-jest/-/eslint-plugin-jest-23.18.0.tgz#4813eacb181820ed13c5505f400956d176b25af8" - integrity sha512-wLPM/Rm1SGhxrFQ2TKM/BYsYPhn7ch6ZEK92S2o/vGkAAnDXM0I4nTIo745RIX+VlCRMFgBuJEax6XfTHMdeKg== + version "23.20.0" + resolved "https://registry.yarnpkg.com/eslint-plugin-jest/-/eslint-plugin-jest-23.20.0.tgz#e1d69c75f639e99d836642453c4e75ed22da4099" + integrity sha512-+6BGQt85OREevBDWCvhqj1yYA4+BFK4XnRZSGJionuEYmcglMZYLNNBBemwzbqUAckURaHdJSBcjHPyrtypZOw== dependencies: "@typescript-eslint/experimental-utils" "^2.5.0" @@ -3626,9 +3846,9 @@ eslint-plugin-prettier@^2.2.0: jest-docblock "^21.0.0" eslint-plugin-react@^7.11.1: - version "7.20.3" - resolved "https://registry.yarnpkg.com/eslint-plugin-react/-/eslint-plugin-react-7.20.3.tgz#0590525e7eb83890ce71f73c2cf836284ad8c2f1" - integrity sha512-txbo090buDeyV0ugF3YMWrzLIUqpYTsWSDZV9xLSmExE1P/Kmgg9++PD931r+KEWS66O1c9R4srLVVHmeHpoAg== + version "7.20.6" + resolved 
"https://registry.yarnpkg.com/eslint-plugin-react/-/eslint-plugin-react-7.20.6.tgz#4d7845311a93c463493ccfa0a19c9c5d0fd69f60" + integrity sha512-kidMTE5HAEBSLu23CUDvj8dc3LdBU0ri1scwHBZjI41oDv4tjsWZKU7MQccFzH1QYPYhsnTF2ovh7JlcIcmxgg== dependencies: array-includes "^3.1.1" array.prototype.flatmap "^1.2.3" @@ -3752,19 +3972,24 @@ estraverse@^4.1.0, estraverse@^4.1.1, estraverse@^4.2.0: integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== estraverse@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.1.0.tgz#374309d39fd935ae500e7b92e8a6b4c720e59642" - integrity sha512-FyohXK+R0vE+y1nHLoBM7ZTyqRpqAlhdZHCWIWEviFLiGB8b04H6bQs8G+XTthacvT8VuwvteiP7RJSxMs8UEw== + version "5.2.0" + resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.2.0.tgz#307df42547e6cc7324d3cf03c155d5cdb8c53880" + integrity sha512-BxbNGGNm0RyRYvUdHpIwv9IWzeM9XClbOxwoATuFdOE7ZE6wHL+HQ5T8hoPM+zHvmKzzsEqhgy0GrQ5X13afiQ== esutils@^2.0.2: version "2.0.3" resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== +eventemitter2@^6.4.2: + version "6.4.3" + resolved "https://registry.yarnpkg.com/eventemitter2/-/eventemitter2-6.4.3.tgz#35c563619b13f3681e7eb05cbdaf50f56ba58820" + integrity sha512-t0A2msp6BzOf+QAcI6z9XMktLj52OjGQg+8SJH6v5+3uxNpWYRR3wQmfA+6xtMU9kOC59qk9licus5dYcrYkMQ== + events@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/events/-/events-3.1.0.tgz#84279af1b34cb75aa88bf5ff291f6d0bd9b31a59" - integrity sha512-Rv+u8MLHNOdMjTAFeT3nCjHn2aGlx435FP/sDHNaRhDEMwyI/aB22Kj2qIN8R0cw3z28psEQLYwxVKLsKrMgWg== + version "3.2.0" + resolved "https://registry.yarnpkg.com/events/-/events-3.2.0.tgz#93b87c18f8efcd4202a461aec4dfc0556b639379" + integrity 
sha512-/46HWwbfCX2xTawVfkKLGxMifJYQBWMwY1mjywRtb4c9x8l5NP3KoJtnIOiL1hfdRkIuYhETxQlo62IF8tcnlg== evp_bytestokey@^1.0.0, evp_bytestokey@^1.0.3: version "1.0.3" @@ -3823,6 +4048,13 @@ execa@^4.0.1, execa@^4.0.2: signal-exit "^3.0.2" strip-final-newline "^2.0.0" +executable@^4.1.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/executable/-/executable-4.1.1.tgz#41532bff361d3e57af4d763b70582db18f5d133c" + integrity sha512-8iA79xD3uAch729dUG8xaaBBFGaEa0wdD2VkYLFHwlqosEj/jT66AzcreRDSgV7ehnNLBW2WR5jIXwGKjVdTLg== + dependencies: + pify "^2.2.0" + exit-hook@^1.0.0: version "1.1.1" resolved "https://registry.yarnpkg.com/exit-hook/-/exit-hook-1.1.1.tgz#f05ca233b48c05d54fff07765df8507e95c02ff8" @@ -3910,6 +4142,16 @@ extglob@^2.0.4: snapdragon "^0.8.1" to-regex "^3.0.1" +extract-zip@^1.7.0: + version "1.7.0" + resolved "https://registry.yarnpkg.com/extract-zip/-/extract-zip-1.7.0.tgz#556cc3ae9df7f452c493a0cfb51cc30277940927" + integrity sha512-xoh5G1W/PB0/27lXgMQyIhP5DSY/LhoCsOyZgb+6iMmRtCwVBo55uKaMoEYrDCKQhWvqEip5ZPKAc6eFNyf/MA== + dependencies: + concat-stream "^1.6.2" + debug "^2.6.9" + mkdirp "^0.5.4" + yauzl "^2.10.0" + extsprintf@1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.3.0.tgz#96918440e3041a7a414f8c52e3c574eb3c3e1e05" @@ -3991,12 +4233,19 @@ fbjs@^0.8.16: setimmediate "^1.0.5" ua-parser-js "^0.7.18" +fd-slicer@~1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/fd-slicer/-/fd-slicer-1.1.0.tgz#25c7c89cb1f9077f8891bbe61d8f390eae256f1e" + integrity sha1-JcfInLH5B3+IkbvmHY85Dq4lbx4= + dependencies: + pend "~1.2.0" + figgy-pudding@^3.5.1: version "3.5.2" resolved "https://registry.yarnpkg.com/figgy-pudding/-/figgy-pudding-3.5.2.tgz#b4eee8148abb01dcf1d1ac34367d59e12fa61d6e" integrity sha512-0btnI/H8f2pavGMN8w40mlSKOfTK2SVJmBfBeVIj3kNw0swwgzyRq0d5TJVOwodFmtvpPeWPN/MCcfuWF0Ezbw== -figures@^1.3.5: +figures@^1.3.5, figures@^1.7.0: version "1.7.0" resolved 
"https://registry.yarnpkg.com/figures/-/figures-1.7.0.tgz#cbe1e3affcf1cd44b80cadfed28dc793a9701d2e" integrity sha1-y+Hjr/zxzUS4DK3+0o3Hk6lwHS4= @@ -4004,6 +4253,13 @@ figures@^1.3.5: escape-string-regexp "^1.0.5" object-assign "^4.1.0" +figures@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/figures/-/figures-2.0.0.tgz#3ab1a2d2a62c8bfb431a0c94cb797a2fce27c962" + integrity sha1-OrGi0qYsi/tDGgyUy3l6L84nyWI= + dependencies: + escape-string-regexp "^1.0.5" + figures@^3.0.0, figures@^3.2.0: version "3.2.0" resolved "https://registry.yarnpkg.com/figures/-/figures-3.2.0.tgz#625c18bd293c604dc4a8ddb2febf0c88341746af" @@ -4166,6 +4422,15 @@ from2@^2.1.0: inherits "^2.0.1" readable-stream "^2.0.0" +fs-extra@^8.1.0: + version "8.1.0" + resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-8.1.0.tgz#49d43c45a88cd9677668cb7be1b46efdb8d2e1c0" + integrity sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g== + dependencies: + graceful-fs "^4.2.0" + jsonfile "^4.0.0" + universalify "^0.1.0" + fs-mkdirp-stream@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/fs-mkdirp-stream/-/fs-mkdirp-stream-1.0.0.tgz#0b7815fc3201c6a69e14db98ce098c16935259eb" @@ -4276,9 +4541,9 @@ get-stream@^4.0.0: pump "^3.0.0" get-stream@^5.0.0, get-stream@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-5.1.0.tgz#01203cdc92597f9b909067c3e656cc1f4d3c4dc9" - integrity sha512-EXr1FOzrzTfGeL0gQdeFEvOMm2mzMOglyiOXSTpPC+iAjAKftbr3jpCMWynogwYnM+eSj9sHGc6wjIcDvYiygw== + version "5.2.0" + resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-5.2.0.tgz#4966a1795ee5ace65e706c4b7beb71257d6e22d3" + integrity sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA== dependencies: pump "^3.0.0" @@ -4287,6 +4552,13 @@ get-value@^2.0.3, get-value@^2.0.6: resolved 
"https://registry.yarnpkg.com/get-value/-/get-value-2.0.6.tgz#dc15ca1c672387ca76bd37ac0a395ba2042a2c28" integrity sha1-3BXKHGcjh8p2vTesCjlbogQqLCg= +getos@^3.2.1: + version "3.2.1" + resolved "https://registry.yarnpkg.com/getos/-/getos-3.2.1.tgz#0134d1f4e00eb46144c5a9c0ac4dc087cbb27dc5" + integrity sha512-U56CfOK17OKgTVqozZjUKNdkfEv6jk5WISBJ8SHoagjE6L69zOwl3Z+O8myjY9MEW3i2HPWQBt/LTbCgcC973Q== + dependencies: + async "^3.2.0" + getpass@^0.1.1: version "0.1.7" resolved "https://registry.yarnpkg.com/getpass/-/getpass-0.1.7.tgz#5eff8e3e684d569ae4cb2b1282604e8ba62149fa" @@ -4350,6 +4622,13 @@ glob@^7.0.0, glob@^7.0.3, glob@^7.1.1, glob@^7.1.2, glob@^7.1.3, glob@^7.1.4, gl once "^1.3.0" path-is-absolute "^1.0.0" +global-dirs@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/global-dirs/-/global-dirs-2.0.1.tgz#acdf3bb6685bcd55cb35e8a052266569e9469201" + integrity sha512-5HqUqdhkEovj2Of/ms3IeS/EekcO54ytHRLV4PEY2rhRwrHXLQjeVEES0Lhka0xwNDtGYn58wyC4s5+MHsOO6A== + dependencies: + ini "^1.3.5" + globals@^11.1.0: version "11.12.0" resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" @@ -4398,7 +4677,7 @@ globule@^1.0.0: lodash "~4.17.10" minimatch "~3.0.2" -graceful-fs@^4.0.0, graceful-fs@^4.1.11, graceful-fs@^4.1.15, graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.2, graceful-fs@^4.2.4: +graceful-fs@^4.0.0, graceful-fs@^4.1.11, graceful-fs@^4.1.15, graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.2, graceful-fs@^4.2.4: version "4.2.4" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.4.tgz#2256bde14d3632958c465ebc96dc467ca07a29fb" integrity sha512-WjKPNJF79dtJAVniUlGGWHYGz2jWxT6VhN/4m1NdkbZ2nOsEF+cI1Edgql5zCRhs/VsQYRvrXctxktVXZUkixw== @@ -4440,11 +4719,11 @@ har-schema@^2.0.0: integrity sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI= har-validator@~5.1.3: - version "5.1.3" - resolved 
"https://registry.yarnpkg.com/har-validator/-/har-validator-5.1.3.tgz#1ef89ebd3e4996557675eed9893110dc350fa080" - integrity sha512-sNvOCzEQNr/qrvJgc3UG/kD4QtlHycrzwS+6mfTrrSq97BvaYcPZZI1ZSqGSPR73Cxn4LKTD4PttRwfU7jWq5g== + version "5.1.5" + resolved "https://registry.yarnpkg.com/har-validator/-/har-validator-5.1.5.tgz#1f0803b9f8cb20c0fa13822df1ecddb36bde1efd" + integrity sha512-nmT2T0lljbxdQZfspsno9hgrG3Uir6Ks5afism62poxqBM6sDnMEuPmzTq8XN0OEwqKLLdh1jQI3qyE66Nzb3w== dependencies: - ajv "^6.5.5" + ajv "^6.12.3" har-schema "^2.0.0" has-ansi@^2.0.0: @@ -4530,9 +4809,9 @@ hash.js@^1.0.0, hash.js@^1.0.3: minimalistic-assert "^1.0.1" highlight.js@^9.12.0: - version "9.18.1" - resolved "https://registry.yarnpkg.com/highlight.js/-/highlight.js-9.18.1.tgz#ed21aa001fe6252bb10a3d76d47573c6539fe13c" - integrity sha512-OrVKYz70LHsnCgmbXctv/bfuvntIKDz177h0Co37DQ5jamGZLVmoCVMtjMtNZY3X9DrCcKfklHPNeA0uPZhSJg== + version "9.18.3" + resolved "https://registry.yarnpkg.com/highlight.js/-/highlight.js-9.18.3.tgz#a1a0a2028d5e3149e2380f8a865ee8516703d634" + integrity sha512-zBZAmhSupHIl5sITeMqIJnYCDfAEc3Gdkqj65wC1lpI468MMQeeQkhcIAvk+RylAkxrCcI9xy9piHiXeQ1BdzQ== hmac-drbg@^1.0.0: version "1.0.1" @@ -4681,6 +4960,11 @@ indent-string@^2.1.0: dependencies: repeating "^2.0.0" +indent-string@^3.0.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-3.2.0.tgz#4a5fd6d27cc332f37e5419a504dbb837105c9289" + integrity sha1-Sl/W0nzDMvN+VBmlBNu4NxBckok= + indent-string@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-4.0.0.tgz#624f8f4497d619b2d9768531d58f4122854d7251" @@ -4714,6 +4998,11 @@ inherits@2.0.3: resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4= +ini@^1.3.5: + version "1.3.5" + resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.5.tgz#eee25f56db1c9ec6085e0c22778083f596abf927" + integrity 
sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw== + inquirer@^1.2.2: version "1.2.3" resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-1.2.3.tgz#4dec6f32f37ef7bb0b2ed3f1d1a5c3f545074918" @@ -4735,9 +5024,9 @@ inquirer@^1.2.2: through "^2.3.6" inquirer@^7.0.0: - version "7.3.2" - resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-7.3.2.tgz#25245d2e32dc9f33dbe26eeaada231daa66e9c7c" - integrity sha512-DF4osh1FM6l0RJc5YWYhSDB6TawiBRlbV9Cox8MWlidU218Tb7fm3lQTULyUJDfJ0tjbzl0W4q651mrCCEM55w== + version "7.3.3" + resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-7.3.3.tgz#04d176b2af04afc157a83fd7c100e98ee0aad003" + integrity sha512-JG3eIAj5V9CwcGvuOmoo6LB9kbAYT8HXffUl6memuszlwDC/qvFAJw49XJ5NROSFNPxp3iQg1GqkFhaY/CR0IA== dependencies: ansi-escapes "^4.2.1" chalk "^4.1.0" @@ -4745,7 +5034,7 @@ inquirer@^7.0.0: cli-width "^3.0.0" external-editor "^3.0.3" figures "^3.0.0" - lodash "^4.17.16" + lodash "^4.17.19" mute-stream "0.0.8" run-async "^2.4.0" rxjs "^6.6.0" @@ -4899,9 +5188,9 @@ is-descriptor@^1.0.0, is-descriptor@^1.0.2: kind-of "^6.0.2" is-docker@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/is-docker/-/is-docker-2.0.0.tgz#2cb0df0e75e2d064fe1864c37cdeacb7b2dcf25b" - integrity sha512-pJEdRugimx4fBMra5z2/5iRdZ63OhYV0vr0Dwm5+xtW4D1FvRkB8hamMIhnWfyJeDdyr/aa7BDyNbtG38VxgoQ== + version "2.1.1" + resolved "https://registry.yarnpkg.com/is-docker/-/is-docker-2.1.1.tgz#4125a88e44e450d384e09047ede71adc2d144156" + integrity sha512-ZOoqiXfEwtGknTiuDEy8pN2CfE3TxMHprvNer1mXiqwkOT77Rw3YVrUQ52EqAOU3QAWDQ+bQdx7HJzrv7LS2Hw== is-extendable@^0.1.0, is-extendable@^0.1.1: version "0.1.1" @@ -4961,6 +5250,14 @@ is-glob@^4.0.0, is-glob@^4.0.1, is-glob@~4.0.1: dependencies: is-extglob "^2.1.1" +is-installed-globally@^0.3.2: + version "0.3.2" + resolved "https://registry.yarnpkg.com/is-installed-globally/-/is-installed-globally-0.3.2.tgz#fd3efa79ee670d1187233182d5b0a1dd00313141" + integrity 
sha512-wZ8x1js7Ia0kecP/CHM/3ABkAmujX7WPvQk6uu3Fly/Mk44pySulQpnHG46OMjHGXApINnV4QhY3SWnECO2z5g== + dependencies: + global-dirs "^2.0.1" + is-path-inside "^3.0.1" + is-negated-glob@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-negated-glob/-/is-negated-glob-1.0.0.tgz#6910bca5da8c95e784b5751b976cf5a10fee36d2" @@ -4983,6 +5280,13 @@ is-obj@^1.0.1: resolved "https://registry.yarnpkg.com/is-obj/-/is-obj-1.0.1.tgz#3e4729ac1f5fde025cd7d83a896dab9f4f67db0f" integrity sha1-PkcprB9f3gJc19g6iW2rn09n2w8= +is-observable@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/is-observable/-/is-observable-1.1.0.tgz#b3e986c8f44de950867cab5403f5a3465005975e" + integrity sha512-NqCa4Sa2d+u7BWc6CukaObG3Fh+CU9bvixbpcXYhy2VvYS7vVGIdAgnIS5Ks3A/cqk4rebLJ9s8zBstT2aKnIA== + dependencies: + symbol-observable "^1.1.0" + is-path-cwd@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/is-path-cwd/-/is-path-cwd-2.2.0.tgz#67d43b82664a7b5191fd9119127eb300048a9fdb" @@ -5000,10 +5304,15 @@ is-plain-object@^2.0.3, is-plain-object@^2.0.4: dependencies: isobject "^3.0.1" +is-promise@^2.1.0: + version "2.2.2" + resolved "https://registry.yarnpkg.com/is-promise/-/is-promise-2.2.2.tgz#39ab959ccbf9a774cf079f7b40c7a26f763135f1" + integrity sha512-+lP4/6lKUBfQjZ2pdxThZvLUAafmZb8OAxFb8XXtiQmS35INgr85hdOGoEs124ez1FCnZJt6jau/T+alh58QFQ== + is-regex@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.1.0.tgz#ece38e389e490df0dc21caea2bd596f987f767ff" - integrity sha512-iI97M8KTWID2la5uYXlkbSDQIg4F6o1sYboZKKTDpnDQMLtUL86zxhgDet3Q2SriaYsyGqZ6Mn2SjbRKeLHdqw== + version "1.1.1" + resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.1.1.tgz#c6f98aacc546f6cec5468a07b7b153ab564a57b9" + integrity sha512-1+QkEcxiLlB7VEyFtyBg94e08OAsvq7FUBgApTq/w2ymCLyKJgDPsybBENVtA7XCQEgEXxKPonG+mvYRxh/LIg== dependencies: has-symbols "^1.0.1" @@ -5546,9 +5855,9 @@ jest@^25.5.0: jest-cli "^25.5.4" js-base64@^2.1.8: - version "2.6.3" - resolved 
"https://registry.yarnpkg.com/js-base64/-/js-base64-2.6.3.tgz#7afdb9b57aa7717e15d370b66e8f36a9cb835dc3" - integrity sha512-fiUvdfCaAXoQTHdKMgTvg6IkecXDcVz6V5rlftUTclF9IKBjMizvSdQaCl/z/6TApDeby5NL+axYou3i0mu1Pg== + version "2.6.4" + resolved "https://registry.yarnpkg.com/js-base64/-/js-base64-2.6.4.tgz#f4e686c5de1ea1f867dbcad3d46d969428df98c4" + integrity sha512-pZe//GGmwJndub7ZghVHz7vjb2LgC1m8B07Au3eYqeqv9emhESByMXxaEgkUkEqJe87oBbSniGYoQNIBklc7IQ== "js-tokens@^3.0.0 || ^4.0.0", js-tokens@^4.0.0: version "4.0.0" @@ -5649,6 +5958,13 @@ json5@^1.0.1: dependencies: minimist "^1.2.0" +jsonfile@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-4.0.0.tgz#8771aae0799b64076b76640fca058f9c10e33ecb" + integrity sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss= + optionalDependencies: + graceful-fs "^4.1.6" + jsprim@^1.2.2: version "1.4.1" resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.1.tgz#313e66bc1e5cc06e438bc1b7499c2e5c56acb6a2" @@ -5713,6 +6029,11 @@ language-tags@^1.0.5: dependencies: language-subtag-registry "~0.3.2" +lazy-ass@^1.6.0: + version "1.6.0" + resolved "https://registry.yarnpkg.com/lazy-ass/-/lazy-ass-1.6.0.tgz#7999655e8646c17f089fdd187d150d3324d54513" + integrity sha1-eZllXoZGwX8In90YfRUNMyTVRRM= + lazystream@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/lazystream/-/lazystream-1.0.0.tgz#f6995fe0f820392f61396be89462407bb77168e4" @@ -5773,20 +6094,64 @@ lint-staged@^10.2.0: string-argv "0.3.1" stringify-object "^3.3.0" +listr-silent-renderer@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/listr-silent-renderer/-/listr-silent-renderer-1.1.1.tgz#924b5a3757153770bf1a8e3fbf74b8bbf3f9242e" + integrity sha1-kktaN1cVN3C/Go4/v3S4u/P5JC4= + +listr-update-renderer@^0.5.0: + version "0.5.0" + resolved "https://registry.yarnpkg.com/listr-update-renderer/-/listr-update-renderer-0.5.0.tgz#4ea8368548a7b8aecb7e06d8c95cb45ae2ede6a2" + integrity 
sha512-tKRsZpKz8GSGqoI/+caPmfrypiaq+OQCbd+CovEC24uk1h952lVj5sC7SqyFUm+OaJ5HN/a1YLt5cit2FMNsFA== + dependencies: + chalk "^1.1.3" + cli-truncate "^0.2.1" + elegant-spinner "^1.0.1" + figures "^1.7.0" + indent-string "^3.0.0" + log-symbols "^1.0.2" + log-update "^2.3.0" + strip-ansi "^3.0.1" + +listr-verbose-renderer@^0.5.0: + version "0.5.0" + resolved "https://registry.yarnpkg.com/listr-verbose-renderer/-/listr-verbose-renderer-0.5.0.tgz#f1132167535ea4c1261102b9f28dac7cba1e03db" + integrity sha512-04PDPqSlsqIOaaaGZ+41vq5FejI9auqTInicFRndCBgE3bXG8D6W1I+mWhk+1nqbHmyhla/6BUrd5OSiHwKRXw== + dependencies: + chalk "^2.4.1" + cli-cursor "^2.1.0" + date-fns "^1.27.2" + figures "^2.0.0" + listr2@^2.1.0: - version "2.2.1" - resolved "https://registry.yarnpkg.com/listr2/-/listr2-2.2.1.tgz#3a0abf78a7a9d9fb4121a541b524cb52e8dcbbba" - integrity sha512-WhuhT7xpVi2otpY/OzJJ8DQhf6da8MjGiEhMdA9oQquwtsSfzZt+YKlasUBer717Uocd0oPmbPeiTD7MvGzctw== + version "2.6.0" + resolved "https://registry.yarnpkg.com/listr2/-/listr2-2.6.0.tgz#788a3d202978a1b8582062952cbc49272c8e206a" + integrity sha512-nwmqTJYQQ+AsKb4fCXH/6/UmLCEDL1jkRAdSn9M6cEUzoRGrs33YD/3N86gAZQnGZ6hxV18XSdlBcJ1GTmetJA== dependencies: - chalk "^4.0.0" + chalk "^4.1.0" cli-truncate "^2.1.0" figures "^3.2.0" indent-string "^4.0.0" log-update "^4.0.0" p-map "^4.0.0" - rxjs "^6.5.5" + rxjs "^6.6.2" through "^2.3.8" +listr@^0.14.3: + version "0.14.3" + resolved "https://registry.yarnpkg.com/listr/-/listr-0.14.3.tgz#2fea909604e434be464c50bddba0d496928fa586" + integrity sha512-RmAl7su35BFd/xoMamRjpIE4j3v+L28o8CT5YhAXQJm1fD+1l9ngXY8JAQRJ+tFK2i5njvi0iRUKV09vPwA0iA== + dependencies: + "@samverschueren/stream-to-observable" "^0.3.0" + is-observable "^1.1.0" + is-promise "^2.1.0" + is-stream "^1.1.0" + listr-silent-renderer "^1.1.1" + listr-update-renderer "^0.5.0" + listr-verbose-renderer "^0.5.0" + p-map "^2.0.0" + rxjs "^6.3.3" + load-json-file@^1.0.0: version "1.1.0" resolved 
"https://registry.yarnpkg.com/load-json-file/-/load-json-file-1.1.0.tgz#956905708d58b4bab4c2261b04f59f31c99374c0" @@ -5860,20 +6225,34 @@ lodash.memoize@4.x: resolved "https://registry.yarnpkg.com/lodash.memoize/-/lodash.memoize-4.1.2.tgz#bcc6c49a42a2840ed997f323eada5ecd182e0bfe" integrity sha1-vMbEmkKihA7Zl/Mj6tpezRguC/4= +lodash.once@^4.1.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/lodash.once/-/lodash.once-4.1.1.tgz#0dd3971213c7c56df880977d504c88fb471a97ac" + integrity sha1-DdOXEhPHxW34gJd9UEyI+0cal6w= + lodash.sortby@^4.7.0: version "4.7.0" resolved "https://registry.yarnpkg.com/lodash.sortby/-/lodash.sortby-4.7.0.tgz#edd14c824e2cc9c1e0b0a1b42bb5210516a42438" integrity sha1-7dFMgk4sycHgsKG0K7UhBRakJDg= -lodash@^4.0.0, lodash@^4.0.1, lodash@^4.17.11, lodash@^4.17.14, lodash@^4.17.15, lodash@^4.17.16, lodash@^4.17.19, lodash@^4.17.4, lodash@^4.3.0, lodash@~4.17.10: - version "4.17.19" - resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.19.tgz#e48ddedbe30b3321783c5b4301fbd353bc1e4a4b" - integrity sha512-JNvd8XER9GQX0v2qJgsaN/mzFCNA5BRe/j8JN9d+tWyGLSodKQHKFicdwNYzWwI3wjRnaKPsGj1XkBjx/F96DQ== +lodash@^4.0.0, lodash@^4.0.1, lodash@^4.17.11, lodash@^4.17.14, lodash@^4.17.15, lodash@^4.17.19, lodash@^4.17.4, lodash@^4.3.0, lodash@~4.17.10: + version "4.17.20" + resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.20.tgz#b44a9b6297bcb698f1c51a3545a2b3b368d59c52" + integrity sha512-PlhdFcillOINfeV7Ni6oF1TAEayyZBoZ8bcshTHqOYJYlrqzRK5hagpagky5o4HfCzzd1TRkXPMFq6cKk9rGmA== + +log-symbols@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-1.0.2.tgz#376ff7b58ea3086a0f09facc74617eca501e1a18" + integrity sha1-N2/3tY6jCGoPCfrMdGF+ylAeGhg= + dependencies: + chalk "^1.0.0" -"lodash@npm:@elastic/lodash@3.10.1-kibana4": - version "3.10.1-kibana4" - resolved "https://registry.yarnpkg.com/@elastic/lodash/-/lodash-3.10.1-kibana4.tgz#d491228fd659b4a1b0dfa08ba9c67a4979b9746d" - integrity 
sha512-geQqXd9ZedRCL+kq5cpeahYWYaYRV0BMXhCwzq4DpnGCVs430FTMS3Wcot3XChZZhCvkwHm15bpNjB312vPxaA== +log-symbols@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-3.0.0.tgz#f3a08516a5dea893336a7dee14d18a1cfdab77c4" + integrity sha512-dSkNGuI7iG3mfvDzUuYZyvk5dD9ocYCYzNU6CYDE6+Xqd+gwme6Z00NS3dUh8mq/73HaEtT7m6W+yUPtU6BZnQ== + dependencies: + chalk "^2.4.2" log-symbols@^4.0.0: version "4.0.0" @@ -5882,6 +6261,15 @@ log-symbols@^4.0.0: dependencies: chalk "^4.0.0" +log-update@^2.3.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/log-update/-/log-update-2.3.0.tgz#88328fd7d1ce7938b29283746f0b1bc126b24708" + integrity sha1-iDKP19HOeTiykoN0bwsbwSayRwg= + dependencies: + ansi-escapes "^3.0.0" + cli-cursor "^2.0.0" + wrap-ansi "^3.0.1" + log-update@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/log-update/-/log-update-4.0.0.tgz#589ecd352471f2a1c0c570287543a64dfd20e0a1" @@ -5899,7 +6287,7 @@ lolex@^5.0.0: dependencies: "@sinonjs/commons" "^1.7.0" -loose-envify@^1.0.0, loose-envify@^1.1.0, loose-envify@^1.3.0, loose-envify@^1.3.1, loose-envify@^1.4.0: +loose-envify@^1.0.0, loose-envify@^1.1.0, loose-envify@^1.3.1, loose-envify@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf" integrity sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q== @@ -6086,6 +6474,11 @@ mime-types@^2.1.12, mime-types@~2.1.19: dependencies: mime-db "1.44.0" +mimic-fn@^1.0.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-1.2.0.tgz#820c86a39334640e99516928bd03fca88057d022" + integrity sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ== + mimic-fn@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" @@ -6147,13 +6540,18 @@ mixin-deep@^1.2.0: for-in 
"^1.0.2" is-extendable "^1.0.1" -mkdirp@0.x, "mkdirp@>=0.5 0", mkdirp@^0.5.0, mkdirp@^0.5.1, mkdirp@^0.5.3: +mkdirp@0.x, "mkdirp@>=0.5 0", mkdirp@^0.5.0, mkdirp@^0.5.1, mkdirp@^0.5.3, mkdirp@^0.5.4: version "0.5.5" resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.5.tgz#d91cefd62d1436ca0f41620e251288d420099def" integrity sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ== dependencies: minimist "^1.2.5" +moment@^2.27.0: + version "2.27.0" + resolved "https://registry.yarnpkg.com/moment/-/moment-2.27.0.tgz#8bff4e3e26a236220dfe3e36de756b6ebaa0105d" + integrity sha512-al0MUK7cpIcglMv3YF13qSgdAIqxHTO7brRtaz3DlSULbqfazqkc5kEjNrLDOM7fsjshoFIihnU8snrP7zUvhQ== + move-concurrently@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/move-concurrently/-/move-concurrently-1.0.1.tgz#be2c005fda32e0b29af1f05d7c4b33214c701f92" @@ -6228,11 +6626,6 @@ nice-try@^1.0.4: resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366" integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ== -node-bin-setup@^1.0.0: - version "1.0.6" - resolved "https://registry.yarnpkg.com/node-bin-setup/-/node-bin-setup-1.0.6.tgz#4b5c9bb937ece702d7069b36ca78af4684677528" - integrity sha512-uPIxXNis1CRbv1DwqAxkgBk5NFV3s7cMN/Gf556jSw6jBvV7ca4F9lRL/8cALcZecRibeqU+5dFYqFFmzv5a0Q== - node-fetch@^1.0.1: version "1.7.3" resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-1.7.3.tgz#980f6f72d85211a5347c6b2bc18c5b84c3eb47ef" @@ -6309,10 +6702,10 @@ node-notifier@^6.0.0: shellwords "^0.1.1" which "^1.3.1" -node-releases@^1.1.58: - version "1.1.59" - resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-1.1.59.tgz#4d648330641cec704bff10f8e4fe28e453ab8e8e" - integrity sha512-H3JrdUczbdiwxN5FuJPyCHnGHIFqQ0wWxo+9j1kAXAzqNMAHlo+4I/sYYxpyK0irQ73HgdiyzD32oqQDcU2Osw== +node-releases@^1.1.60: + version "1.1.60" + resolved 
"https://registry.yarnpkg.com/node-releases/-/node-releases-1.1.60.tgz#6948bdfce8286f0b5d0e5a88e8384e954dfe7084" + integrity sha512-gsO4vjEdQaTusZAEebUWp2a5d7dF5DYoIpDG7WySnk7BuZDW+GPpHXoXXuYawRBr/9t5q54tirPz79kFIWg4dA== node-sass@^4.13.1: version "4.14.1" @@ -6337,13 +6730,6 @@ node-sass@^4.13.1: stdout-stream "^1.4.0" "true-case-path" "^1.0.2" -node@^14.0.0: - version "14.5.0" - resolved "https://registry.yarnpkg.com/node/-/node-14.5.0.tgz#9b41f8e6790225a6ffae0ff3e355cad55c745c65" - integrity sha512-WxjuzzP5rI3yM/TUdajF4FzKUorUYIOtcNlsMh9a/JX5YsCBS/I2PdEe03F80Nnyfd9qZTjKnTV9XuQuuOYc6Q== - dependencies: - node-bin-setup "^1.0.0" - "nopt@2 || 3": version "3.0.6" resolved "https://registry.yarnpkg.com/nopt/-/nopt-3.0.6.tgz#c6465dbf08abcd4db359317f79ac68a646b28ff9" @@ -6513,10 +6899,17 @@ onetime@^1.0.0: resolved "https://registry.yarnpkg.com/onetime/-/onetime-1.1.0.tgz#a1f7838f8314c516f05ecefcbc4ccfe04b4ed789" integrity sha1-ofeDj4MUxRbwXs78vEzP4EtO14k= +onetime@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/onetime/-/onetime-2.0.1.tgz#067428230fd67443b2794b22bba528b6867962d4" + integrity sha1-BnQoIw/WdEOyeUsiu6UotoZ5YtQ= + dependencies: + mimic-fn "^1.0.0" + onetime@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/onetime/-/onetime-5.1.0.tgz#fff0f3c91617fe62bb50189636e99ac8a6df7be5" - integrity sha512-5NcSkPHhwTVFIQN+TUqXoS5+dlElHXdpAWu9I0HP20YOtIi+aZ0Ct82jdlILDxjLEAWwvm+qj1m6aEtsDVmm6Q== + version "5.1.2" + resolved "https://registry.yarnpkg.com/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e" + integrity sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg== dependencies: mimic-fn "^2.1.0" @@ -6572,6 +6965,11 @@ osenv@0: os-homedir "^1.0.0" os-tmpdir "^1.0.0" +ospath@^1.2.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/ospath/-/ospath-1.2.2.tgz#1276639774a3f8ef2572f7fe4280e0ea4550c07b" + integrity sha1-EnZjl3Sj+O8lcvf+QoDg6kVQwHs= + 
p-each-series@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/p-each-series/-/p-each-series-2.1.0.tgz#961c8dd3f195ea96c747e636b262b800a6b1af48" @@ -6622,6 +7020,11 @@ p-locate@^4.1.0: dependencies: p-limit "^2.2.0" +p-map@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/p-map/-/p-map-2.1.0.tgz#310928feef9c9ecc65b68b17693018a665cea175" + integrity sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw== + p-map@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/p-map/-/p-map-3.0.0.tgz#d704d9af8a2ba684e2600d9a215983d4141a979d" @@ -6668,13 +7071,12 @@ parent-module@^1.0.0: callsites "^3.0.0" parse-asn1@^5.0.0, parse-asn1@^5.1.5: - version "5.1.5" - resolved "https://registry.yarnpkg.com/parse-asn1/-/parse-asn1-5.1.5.tgz#003271343da58dc94cace494faef3d2147ecea0e" - integrity sha512-jkMYn1dcJqF6d5CpU689bq7w/b5ALS9ROVSpQDPrZsqqesUJii9qutvoT5ltGedNXMO2e16YUWIghG9KxaViTQ== + version "5.1.6" + resolved "https://registry.yarnpkg.com/parse-asn1/-/parse-asn1-5.1.6.tgz#385080a3ec13cb62a62d39409cb3e88844cdaed4" + integrity sha512-RnZRo1EPU6JBnra2vGHj0yhp6ebyjBZpmUCLHWiFhxlzvBCCpAuZ7elsBp1PVAbQN0/04VD/19rfzlBSwLstMw== dependencies: - asn1.js "^4.0.0" + asn1.js "^5.2.0" browserify-aes "^1.0.0" - create-hash "^1.1.0" evp_bytestokey "^1.0.0" pbkdf2 "^3.0.3" safe-buffer "^5.1.1" @@ -6687,9 +7089,9 @@ parse-json@^2.2.0: error-ex "^1.2.0" parse-json@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-5.0.0.tgz#73e5114c986d143efa3712d4ea24db9a4266f60f" - integrity sha512-OOY5b7PAEFV0E2Fir1KOkxchnZNCdowAJgQ5NuxjpBKTRP3pQhwkrkxqQjeoKJ+fO7bCpmIZaogI4eZGDMEGOw== + version "5.0.1" + resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-5.0.1.tgz#7cfe35c1ccd641bce3981467e6c2ece61b3b3878" + integrity sha512-ztoZ4/DYeXQq4E21v169sC8qWINGpcosGv9XhTDvg9/hWvx/zrFkc9BiWxR58OJLHGk28j5BL0SDLeV2WmFZlQ== dependencies: "@babel/code-frame" "^7.0.0" error-ex "^1.3.1" @@ -6792,6 
+7194,11 @@ pbkdf2@^3.0.3: safe-buffer "^5.0.1" sha.js "^2.4.8" +pend@~1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/pend/-/pend-1.2.0.tgz#7a57eb550a6783f9115331fcf4663d5c8e007a50" + integrity sha1-elfrVQpng/kRUzH89GY9XI4AelA= + performance-now@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" @@ -6802,7 +7209,7 @@ picomatch@^2.0.4, picomatch@^2.0.5, picomatch@^2.2.1: resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.2.2.tgz#21f333e9b6b8eaff02468f5146ea406d345f4dad" integrity sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg== -pify@^2.0.0: +pify@^2.0.0, pify@^2.2.0: version "2.3.0" resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" integrity sha1-7RQaasBDqEnqWISY59yosVMw6Qw= @@ -6894,6 +7301,11 @@ prettier@^2.0.5: resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.0.5.tgz#d6d56282455243f2f92cc1716692c08aa31522d4" integrity sha512-7PtVymN48hGcO4fGjybyBSIWDsLU4H4XlvOHfq91pz9kkGlonzwTfYkaIEwiRg/dAJF9YlbsduBAgtYLi+8cFg== +pretty-bytes@^5.3.0: + version "5.3.0" + resolved "https://registry.yarnpkg.com/pretty-bytes/-/pretty-bytes-5.3.0.tgz#f2849e27db79fb4d6cfe24764fc4134f165989f2" + integrity sha512-hjGrh+P926p4R4WbaB6OckyRtO0F0/lQBiT+0gnxjV+5kjPBrfVBFCsCLbMqVQeydvIoouYTCmmEURiH3R1Bdg== + pretty-format@^24.9.0: version "24.9.0" resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-24.9.0.tgz#12fac31b37019a4eea3c11aa9a959eb7628aa7c9" @@ -7055,9 +7467,14 @@ raf-schd@^4.0.2: integrity sha512-VhlMZmGy6A6hrkJWHLNTGl5gtgMUm+xfGza6wbwnE914yeQ5Ybm18vgM734RZhMgfw4tacUrWseGZlpUrrakEQ== ramda@^0.27.0: - version "0.27.0" - resolved "https://registry.yarnpkg.com/ramda/-/ramda-0.27.0.tgz#915dc29865c0800bf3f69b8fd6c279898b59de43" - integrity sha512-pVzZdDpWwWqEVVLshWUHjNwuVP7SfcmPraYuqocJp1yo2U1R7P+5QAfDhdItkuoGqIBnBYrtPp7rEPqDn9HlZA== 
+ version "0.27.1" + resolved "https://registry.yarnpkg.com/ramda/-/ramda-0.27.1.tgz#66fc2df3ef873874ffc2da6aa8984658abacf5c9" + integrity sha512-PgIdVpn5y5Yns8vqb8FzBUEYn98V3xcPgawAkkgj0YJ0qDsnHCiNmZYfOGMgOvoB0eWFLpYbhxUR3mxfDIMvpw== + +ramda@~0.26.1: + version "0.26.1" + resolved "https://registry.yarnpkg.com/ramda/-/ramda-0.26.1.tgz#8d41351eb8111c55353617fc3bbffad8e4d35d06" + integrity sha512-hLWjpy7EnsDBb0p+Z3B7rPi3GDeRG5ZtiI33kJhTt+ORCd38AbAIjB/9zRIUoeTbE/AVX5ZkU7m6bznsvrf8eQ== randombytes@^2.0.0, randombytes@^2.0.1, randombytes@^2.0.5, randombytes@^2.1.0: version "2.1.0" @@ -7186,9 +7603,9 @@ react-lifecycles-compat@^3.0.4: integrity sha512-fBASbA6LnOU9dOU2eW7aQ8xmYBSXUIWr+UmF9b1efZBazGNO+rcXT/icdKnYm2pTwcRylVUYwW7H1PHfLekVzA== react-redux@^7.1.1: - version "7.2.0" - resolved "https://registry.yarnpkg.com/react-redux/-/react-redux-7.2.0.tgz#f970f62192b3981642fec46fd0db18a074fe879d" - integrity sha512-EvCAZYGfOLqwV7gh849xy9/pt55rJXPwmYvI4lilPM5rUT/1NxuuN59ipdBksRVSvz0KInbPnp4IfoXJXCqiDA== + version "7.2.1" + resolved "https://registry.yarnpkg.com/react-redux/-/react-redux-7.2.1.tgz#8dedf784901014db2feca1ab633864dee68ad985" + integrity sha512-T+VfD/bvgGTUA74iW9d2i5THrDQWbweXP0AVNI8tNd1Rk5ch1rnMiJkDD67ejw7YBKM4+REvcvqRuWJb7BLuEg== dependencies: "@babel/runtime" "^7.5.5" hoist-non-react-statics "^3.3.0" @@ -7197,15 +7614,15 @@ react-redux@^7.1.1: react-is "^16.9.0" react-virtualized@^9.21.2: - version "9.21.2" - resolved "https://registry.yarnpkg.com/react-virtualized/-/react-virtualized-9.21.2.tgz#02e6df65c1e020c8dbf574ec4ce971652afca84e" - integrity sha512-oX7I7KYiUM7lVXQzmhtF4Xg/4UA5duSA+/ZcAvdWlTLFCoFYq1SbauJT5gZK9cZS/wdYR6TPGpX/dqzvTqQeBA== - dependencies: - babel-runtime "^6.26.0" - clsx "^1.0.1" - dom-helpers "^5.0.0" - loose-envify "^1.3.0" - prop-types "^15.6.0" + version "9.22.2" + resolved "https://registry.yarnpkg.com/react-virtualized/-/react-virtualized-9.22.2.tgz#217a870bad91e5438f46f01a009e1d8ce1060a5a" + integrity 
sha512-5j4h4FhxTdOpBKtePSs1yk6LDNT4oGtUwjT7Nkh61Z8vv3fTG/XeOf8J4li1AYaexOwTXnw0HFVxsV0GBUqwRw== + dependencies: + "@babel/runtime" "^7.7.2" + clsx "^1.0.4" + dom-helpers "^5.1.3" + loose-envify "^1.4.0" + prop-types "^15.7.2" react-lifecycles-compat "^3.0.4" react@^16.12.0: @@ -7356,15 +7773,10 @@ regenerate@^1.4.0: resolved "https://registry.yarnpkg.com/regenerate/-/regenerate-1.4.1.tgz#cad92ad8e6b591773485fbe05a485caf4f457e6f" integrity sha512-j2+C8+NtXQgEKWk49MMP5P/u2GhnahTtVkRIHr5R5lVRlbKvmQ+oS+A5aLKWp2ma5VkT8sh6v+v4hbH0YHR66A== -regenerator-runtime@^0.11.0: - version "0.11.1" - resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.11.1.tgz#be05ad7f9bf7d22e056f9726cee5017fbf19e2e9" - integrity sha512-MguG95oij0fC3QV3URf4V2SDYGJhJnJGqvIIgdECeODCT98wSWDAJ94SSuVpYQUoTcGUIL6L4yNB7j1DFFHSBg== - regenerator-runtime@^0.13.4: - version "0.13.5" - resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.5.tgz#d878a1d094b4306d10b9096484b33ebd55e26697" - integrity sha512-ZS5w8CpKFinUzOwW3c83oPeVXoNsrLsaCoLtJvAClH135j/R77RuymhiSErhm2lKcwSCIpmvIWSbDkIfAqKQlA== + version "0.13.7" + resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.7.tgz#cac2dacc8a1ea675feaabaeb8ae833898ae46f55" + integrity sha512-a54FxoJDIr27pgf7IgeQGxmqUNYrcV338lf/6gH456HZ/PhX+5BcwHXG9ajESmwe6WRO0tAzRUrRmNONWgkrew== regenerator-transform@^0.14.2: version "0.14.5" @@ -7462,19 +7874,26 @@ replace-ext@^1.0.0: resolved "https://registry.yarnpkg.com/replace-ext/-/replace-ext-1.0.1.tgz#2d6d996d04a15855d967443631dd5f77825b016a" integrity sha512-yD5BHCe7quCgBph4rMQ+0KkIRKwWCrHDOX1p1Gp6HwjPM5kVoCdKGNhN7ydqqsX6lJEnQDKZ/tFMiEdQ1dvPEw== -request-promise-core@1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/request-promise-core/-/request-promise-core-1.1.3.tgz#e9a3c081b51380dfea677336061fea879a829ee9" - integrity sha512-QIs2+ArIGQVp5ZYbWD5ZLCY29D5CfWizP8eWnm8FoGD1TX61veauETVQbrV60662V0oFBkrDOuaBI8XgtuyYAQ== 
+request-progress@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/request-progress/-/request-progress-3.0.0.tgz#4ca754081c7fec63f505e4faa825aa06cd669dbe" + integrity sha1-TKdUCBx/7GP1BeT6qCWqBs1mnb4= dependencies: - lodash "^4.17.15" + throttleit "^1.0.0" + +request-promise-core@1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/request-promise-core/-/request-promise-core-1.1.4.tgz#3eedd4223208d419867b78ce815167d10593a22f" + integrity sha512-TTbAfBBRdWD7aNNOoVOBH4pN/KigV6LyapYNNlAPA8JwbovRti1E88m3sYAwsLi5ryhPKsE9APwnjFTgdUjTpw== + dependencies: + lodash "^4.17.19" request-promise-native@^1.0.7: - version "1.0.8" - resolved "https://registry.yarnpkg.com/request-promise-native/-/request-promise-native-1.0.8.tgz#a455b960b826e44e2bf8999af64dff2bfe58cb36" - integrity sha512-dapwLGqkHtwL5AEbfenuzjTYg35Jd6KPytsC2/TLkVMz8rm+tNt72MGUWT1RP/aYawMpN6HqbNGBQaRcBtjQMQ== + version "1.0.9" + resolved "https://registry.yarnpkg.com/request-promise-native/-/request-promise-native-1.0.9.tgz#e407120526a5efdc9a39b28a5679bf47b9d9dc28" + integrity sha512-wcW+sIUiWnKgNY0dqCpOZkUbF/I+YPi+f09JZIDa39Ec+q82CpSYniDp+ISgTTbKmnpJWASeJBPZmoxH84wt3g== dependencies: - request-promise-core "1.1.3" + request-promise-core "1.1.4" stealthy-require "^1.1.1" tough-cookie "^2.3.3" @@ -7568,6 +7987,14 @@ restore-cursor@^1.0.1: exit-hook "^1.0.0" onetime "^1.0.0" +restore-cursor@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-2.0.0.tgz#9f7ee287f82fd326d4fd162923d62129eee0dfaf" + integrity sha1-n37ih/gv0ybU/RYpI9YhKe7g368= + dependencies: + onetime "^2.0.0" + signal-exit "^3.0.2" + restore-cursor@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-3.1.0.tgz#39f67c54b3a7a58cea5236d95cf0034239631f7e" @@ -7642,10 +8069,10 @@ rx@^4.1.0: resolved "https://registry.yarnpkg.com/rx/-/rx-4.1.0.tgz#a5f13ff79ef3b740fe30aa803fb09f98805d4782" integrity sha1-pfE/957zt0D+MKqAP7CfmIBdR4I= 
-rxjs@^6.5.5, rxjs@^6.6.0: - version "6.6.0" - resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-6.6.0.tgz#af2901eedf02e3a83ffa7f886240ff9018bbec84" - integrity sha512-3HMA8z/Oz61DUHe+SdOiQyzIf4tOx5oQHmMir7IZEu6TMqCLHT4LRcmNaUS0NwOz8VLvmmBduMsoaUvMaIiqzg== +rxjs@^6.3.3, rxjs@^6.6.0, rxjs@^6.6.2: + version "6.6.2" + resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-6.6.2.tgz#8096a7ac03f2cc4fe5860ef6e572810d9e01c0d2" + integrity sha512-BHdBMVoWC2sL26w//BCu3YzKT4s2jip/WhwsGEDmeKYBhKDZeYezVUnHatYB7L85v5xs0BAQmg6BEYJEKxBabg== dependencies: tslib "^1.9.0" @@ -7768,10 +8195,10 @@ semver@~5.3.0: resolved "https://registry.yarnpkg.com/semver/-/semver-5.3.0.tgz#9b2ce5d3de02d17c6012ad326aa6b4d0cf54f94f" integrity sha1-myzl094C0XxgEq0yaqa00M9U+U8= -serialize-javascript@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-3.1.0.tgz#8bf3a9170712664ef2561b44b691eafe399214ea" - integrity sha512-JIJT1DGiWmIKhzRsG91aS6Ze4sFUrYbltlkg2onR5OrnNM02Kl/hnY/T4FN2omvyeBbQmMJv+K4cPOpGzOTFBg== +serialize-javascript@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-4.0.0.tgz#b525e1238489a5ecfc42afacc3fe99e666f4b1aa" + integrity sha512-GaNA54380uFefWghODBWEGisLZFj00nS5ACs6yHa9nLqlLpVLO8ChDGeKRjZnV4Nh4n0Qi7nhYZD/9fCPzEqkw== dependencies: randombytes "^2.1.0" @@ -7860,6 +8287,11 @@ slash@^3.0.0: resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634" integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q== +slice-ansi@0.0.4: + version "0.0.4" + resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-0.0.4.tgz#edbf8903f66f7ce2f8eafd6ceed65e264c831b35" + integrity sha1-7b+JA/ZvfOL46v1s7tZeJkyDGzU= + slice-ansi@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-2.1.0.tgz#cacd7693461a637a5788d92a7dd4fba068e81636" @@ -8123,7 +8555,7 @@ 
string-width@^1.0.1: is-fullwidth-code-point "^1.0.0" strip-ansi "^3.0.0" -"string-width@^1.0.2 || 2": +"string-width@^1.0.2 || 2", string-width@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e" integrity sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw== @@ -8301,7 +8733,7 @@ supports-hyperlinks@^2.0.0: has-flag "^4.0.0" supports-color "^7.0.0" -symbol-observable@^1.2.0: +symbol-observable@^1.1.0, symbol-observable@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/symbol-observable/-/symbol-observable-1.2.0.tgz#c22688aed4eab3cdc2dfeacbb561660560a00804" integrity sha512-e900nM8RRtGhlV36KGEU9k65K3mPb1WV70OdjfxlG2EAuM1noi/E/BaW/uMhL7bPEssK8QV57vN3esixjUvcXQ== @@ -8359,15 +8791,15 @@ terminal-link@^2.0.0: supports-hyperlinks "^2.0.0" terser-webpack-plugin@^1.4.3: - version "1.4.4" - resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-1.4.4.tgz#2c63544347324baafa9a56baaddf1634c8abfc2f" - integrity sha512-U4mACBHIegmfoEe5fdongHESNJWqsGU+W0S/9+BmYGVQDw1+c2Ow05TpMhxjPK1sRb7cuYq1BPl1e5YHJMTCqA== + version "1.4.5" + resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-1.4.5.tgz#a217aefaea330e734ffacb6120ec1fa312d6040b" + integrity sha512-04Rfe496lN8EYruwi6oPQkG0vo8C+HT49X687FZnpPF0qMAIHONI6HEXYPKDOE8e5HjXTyKfqRd/agHtH0kOtw== dependencies: cacache "^12.0.2" find-cache-dir "^2.1.0" is-wsl "^1.1.0" schema-utils "^1.0.0" - serialize-javascript "^3.1.0" + serialize-javascript "^4.0.0" source-map "^0.6.1" terser "^4.1.2" webpack-sources "^1.4.0" @@ -8401,6 +8833,11 @@ throat@^5.0.0: resolved "https://registry.yarnpkg.com/throat/-/throat-5.0.0.tgz#c5199235803aad18754a667d659b5e72ce16764b" integrity sha512-fcwX4mndzpLQKBS1DVYhGAcYaYt7vsHNIvQV+WXMvnow5cgjPphq5CaayLaGsjRdSCKZFNGt7/GYAuXaNOiYCA== +throttleit@^1.0.0: + version "1.0.0" + resolved 
"https://registry.yarnpkg.com/throttleit/-/throttleit-1.0.0.tgz#9e785836daf46743145a5984b6268d828528ac6c" + integrity sha1-nnhYNtr0Z0MUWlmEtiaNgoUorGw= + through2-filter@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/through2-filter/-/through2-filter-3.0.0.tgz#700e786df2367c2c88cd8aa5be4cf9c1e7831254" @@ -8469,6 +8906,13 @@ tmp@^0.0.33: dependencies: os-tmpdir "~1.0.2" +tmp@~0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.1.0.tgz#ee434a4e22543082e294ba6201dcc6eafefa2877" + integrity sha512-J7Z2K08jbGcdA1kkQpJSqLF6T0tdQqpR2pnSUXsIchbPdTI9v3e85cLW0d6WDhwuAleOV71j2xWs8qMPfK7nKw== + dependencies: + rimraf "^2.6.3" + tmpl@1.0.x: version "1.0.4" resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.4.tgz#23640dd7b42d00433911140820e5cf440e521dd1" @@ -8615,7 +9059,7 @@ tsconfig-paths@^3.9.0: minimist "^1.2.0" strip-bom "^3.0.0" -tslib@^1.10.0, tslib@^1.7.1, tslib@^1.8.1, tslib@^1.9.0: +tslib@^1.13.0, tslib@^1.7.1, tslib@^1.8.1, tslib@^1.9.0: version "1.13.0" resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.13.0.tgz#c881e13cc7015894ed914862d276436fa9a47043" integrity sha512-i/6DQjL8Xf3be4K/E6Wgpekn5Qasl1usyw++dAA35Ue5orEn65VIxOA+YvNNl9HV3qv70T7CNwjODHZrLwvd1Q== @@ -8635,9 +9079,9 @@ tslint-plugin-prettier@^2.0.1: tslib "^1.7.1" tslint@^6.1.2: - version "6.1.2" - resolved "https://registry.yarnpkg.com/tslint/-/tslint-6.1.2.tgz#2433c248512cc5a7b2ab88ad44a6b1b34c6911cf" - integrity sha512-UyNrLdK3E0fQG/xWNqAFAC5ugtFyPO4JJR1KyyfQAyzR8W0fTRrC91A8Wej4BntFzcvETdCSDa/4PnNYJQLYiA== + version "6.1.3" + resolved "https://registry.yarnpkg.com/tslint/-/tslint-6.1.3.tgz#5c23b2eccc32487d5523bd3a470e9aa31789d904" + integrity sha512-IbR4nkT96EQOvKE2PW/djGz8iGNeJ4rF2mBfiYaR/nvUWYKJhLwimoJKgjIFEIDibBtOevj7BqCRL4oHeWWUCg== dependencies: "@babel/code-frame" "^7.0.0" builtin-modules "^1.1.1" @@ -8650,7 +9094,7 @@ tslint@^6.1.2: mkdirp "^0.5.3" resolve "^1.3.2" semver "^5.3.0" - tslib "^1.10.0" + tslib "^1.13.0" tsutils "^2.29.0" 
tsutils@^2.29.0: @@ -8793,6 +9237,11 @@ unique-stream@^2.0.2: json-stable-stringify-without-jsonify "^1.0.1" through2-filter "^3.0.0" +universalify@^0.1.0: + version "0.1.2" + resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.1.2.tgz#b646f69be3942dabcecc9d6639c80dc105efaa66" + integrity sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg== + unset-value@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/unset-value/-/unset-value-1.0.0.tgz#8376873f7d2335179ffb1e6fc3a8ed0dfc8ab559" @@ -8801,6 +9250,11 @@ unset-value@^1.0.0: has-value "^0.3.1" isobject "^3.0.0" +untildify@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/untildify/-/untildify-4.0.0.tgz#2bc947b953652487e4600949fb091e3ae8cd919b" + integrity sha512-KK8xQ1mkzZeg9inewmFVDNkg3l5LUhoq9kN6iWYB/CC9YMG8HA+c1Q8HwDe6dEX7kErrEVNVBO3fWsVq5iDgtw== + upath@^1.1.1: version "1.2.0" resolved "https://registry.yarnpkg.com/upath/-/upath-1.2.0.tgz#8f66dbcd55a883acdae4408af8b035a5044c1894" @@ -8991,15 +9445,15 @@ watchpack-chokidar2@^2.0.0: dependencies: chokidar "^2.1.8" -watchpack@^1.6.1: - version "1.7.2" - resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-1.7.2.tgz#c02e4d4d49913c3e7e122c3325365af9d331e9aa" - integrity sha512-ymVbbQP40MFTp+cNMvpyBpBtygHnPzPkHqoIwRRj/0B8KhqQwV8LaKjtbaxF2lK4vl8zN9wCxS46IFCU5K4W0g== +watchpack@^1.7.4: + version "1.7.4" + resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-1.7.4.tgz#6e9da53b3c80bb2d6508188f5b200410866cd30b" + integrity sha512-aWAgTW4MoSJzZPAicljkO1hsi1oKj/RRq/OJQh2PKI2UKL04c2Bs+MBOB+BBABHTXJpf9mCwHN7ANCvYsvY2sg== dependencies: graceful-fs "^4.1.2" neo-async "^2.5.0" optionalDependencies: - chokidar "^3.4.0" + chokidar "^3.4.1" watchpack-chokidar2 "^2.0.0" webidl-conversions@^4.0.2: @@ -9016,9 +9470,9 @@ webpack-sources@^1.4.0, webpack-sources@^1.4.1: source-map "~0.6.1" webpack@^4.41.5: - version "4.43.0" - resolved 
"https://registry.yarnpkg.com/webpack/-/webpack-4.43.0.tgz#c48547b11d563224c561dad1172c8aa0b8a678e6" - integrity sha512-GW1LjnPipFW2Y78OOab8NJlCflB7EFskMih2AHdvjbpKMeDJqEgSx24cXXXiPS65+WSwVyxtDsJH6jGX2czy+g== + version "4.44.1" + resolved "https://registry.yarnpkg.com/webpack/-/webpack-4.44.1.tgz#17e69fff9f321b8f117d1fda714edfc0b939cc21" + integrity sha512-4UOGAohv/VGUNQJstzEywwNxqX417FnjZgZJpJQegddzPmTvph37eBIRbRTfdySXzVtJXLJfbMN3mMYhM6GdmQ== dependencies: "@webassemblyjs/ast" "1.9.0" "@webassemblyjs/helper-module-context" "1.9.0" @@ -9028,7 +9482,7 @@ webpack@^4.41.5: ajv "^6.10.2" ajv-keywords "^3.4.1" chrome-trace-event "^1.0.2" - enhanced-resolve "^4.1.0" + enhanced-resolve "^4.3.0" eslint-scope "^4.0.3" json-parse-better-errors "^1.0.2" loader-runner "^2.4.0" @@ -9041,7 +9495,7 @@ webpack@^4.41.5: schema-utils "^1.0.0" tapable "^1.1.3" terser-webpack-plugin "^1.4.3" - watchpack "^1.6.1" + watchpack "^1.7.4" webpack-sources "^1.4.1" whatwg-encoding@^1.0.1, whatwg-encoding@^1.0.5: @@ -9052,9 +9506,9 @@ whatwg-encoding@^1.0.1, whatwg-encoding@^1.0.5: iconv-lite "0.4.24" whatwg-fetch@>=0.10.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/whatwg-fetch/-/whatwg-fetch-3.2.0.tgz#8e134f701f0a4ab5fda82626f113e2b647fd16dc" - integrity sha512-SdGPoQMMnzVYThUbSrEvqTlkvC1Ux27NehaJ/GUHBfNrh5Mjg+1/uRyFMwVnxO2MrikMWvWAqUGgQOfVU4hT7w== + version "3.4.0" + resolved "https://registry.yarnpkg.com/whatwg-fetch/-/whatwg-fetch-3.4.0.tgz#e11de14f4878f773fbebcde8871b2c0699af8b30" + integrity sha512-rsum2ulz2iuZH08mJkT0Yi6JnKhwdw4oeyMjokgxd+mmqYSd9cPpOQf01TIWgjxG/U4+QR+AwKq6lSbXVxkyoQ== whatwg-mimetype@^2.2.0, whatwg-mimetype@^2.3.0: version "2.3.0" @@ -9113,6 +9567,14 @@ worker-farm@^1.7.0: dependencies: errno "~0.1.7" +wrap-ansi@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-3.0.1.tgz#288a04d87eda5c286e060dfe8f135ce8d007f8ba" + integrity sha1-KIoE2H7aXChuBg3+jxNc6NAH+Lo= + dependencies: + string-width "^2.1.1" + strip-ansi 
"^4.0.0" + wrap-ansi@^5.1.0: version "5.1.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-5.1.0.tgz#1fd1f67235d5b6d0fee781056001bfb694c03b09" @@ -9242,6 +9704,14 @@ yargs@^15.3.1: y18n "^4.0.0" yargs-parser "^18.1.2" +yauzl@^2.10.0: + version "2.10.0" + resolved "https://registry.yarnpkg.com/yauzl/-/yauzl-2.10.0.tgz#c7eb17c93e112cb1086fa6d8e51fb0667b79a5f9" + integrity sha1-x+sXyT4RLLEIb6bY5R+wZnt5pfk= + dependencies: + buffer-crc32 "~0.2.3" + fd-slicer "~1.1.0" + yazl@^2.5.1: version "2.5.1" resolved "https://registry.yarnpkg.com/yazl/-/yazl-2.5.1.tgz#a3d65d3dd659a5b0937850e8609f22fffa2b5c35" diff --git a/sql/src/main/antlr/OpenDistroSQLIdentifierParser.g4 b/sql/src/main/antlr/OpenDistroSQLIdentifierParser.g4 index f29895e522..131722c4e4 100644 --- a/sql/src/main/antlr/OpenDistroSQLIdentifierParser.g4 +++ b/sql/src/main/antlr/OpenDistroSQLIdentifierParser.g4 @@ -34,6 +34,14 @@ tableName : qualifiedName ; +columnName + : qualifiedName + ; + +alias + : ident + ; + qualifiedName : ident (DOT ident)* ; diff --git a/sql/src/main/antlr/OpenDistroSQLLexer.g4 b/sql/src/main/antlr/OpenDistroSQLLexer.g4 index 66e3d1bade..0806eba699 100644 --- a/sql/src/main/antlr/OpenDistroSQLLexer.g4 +++ b/sql/src/main/antlr/OpenDistroSQLLexer.g4 @@ -75,6 +75,7 @@ LIMIT: 'LIMIT'; LONG: 'LONG'; MATCH: 'MATCH'; NATURAL: 'NATURAL'; +MISSING_LITERAL: 'MISSING'; NOT: 'NOT'; NULL_LITERAL: 'NULL'; ON: 'ON'; @@ -319,7 +320,7 @@ BACKTICK_QUOTE_ID: BQUOTA_STRING; // Fragments for Literal primitives fragment EXPONENT_NUM_PART: 'E' [-+]? DEC_DIGIT+; -fragment ID_LITERAL: [*A-Z]+?[*A-Z_\-0-9]*; +fragment ID_LITERAL: [@*A-Z]+?[*A-Z_\-0-9]*; fragment DQUOTA_STRING: '"' ( '\\'. | '""' | ~('"'| '\\') )* '"'; fragment SQUOTA_STRING: '\'' ('\\'. | '\'\'' | ~('\'' | '\\'))* '\''; fragment BQUOTA_STRING: '`' ( '\\'. 
| '``' | ~('`'|'\\'))* '`'; diff --git a/sql/src/main/antlr/OpenDistroSQLParser.g4 b/sql/src/main/antlr/OpenDistroSQLParser.g4 index 49422c061c..3bc728ad03 100644 --- a/sql/src/main/antlr/OpenDistroSQLParser.g4 +++ b/sql/src/main/antlr/OpenDistroSQLParser.g4 @@ -68,17 +68,21 @@ selectClause ; selectElements - : (star=STAR | selectElement) (',' selectElement)* + : (star=STAR | selectElement) (COMMA selectElement)* ; selectElement - : expression #selectExpressionElement + : expression (AS? alias)? ; fromClause : FROM tableName + (whereClause)? ; +whereClause + : WHERE expression + ; // Literals @@ -88,8 +92,8 @@ constant | sign? realLiteral #signedReal | booleanLiteral #boolean | datetimeLiteral #datetime + | nullLiteral #null // Doesn't support the following types for now - //| nullLiteral #null //| BIT_STRING //| NOT? nullLiteral=(NULL_LITERAL | NULL_SPEC_LITERAL) //| LEFT_BRACE dateType=(D | T | TS | DATE | TIME | TIMESTAMP) stringLiteral RIGHT_BRACE @@ -142,15 +146,22 @@ timestampLiteral // Simplified approach for expression expression - : predicate #predicateExpression + : NOT expression #notExpression + | left=expression AND right=expression #andExpression + | left=expression OR right=expression #orExpression + | predicate #predicateExpression ; predicate : expressionAtom #expressionAtomPredicate + | left=predicate comparisonOperator right=predicate #binaryComparisonPredicate + | predicate IS nullNotnull #isNullPredicate + | left=predicate NOT? LIKE right=predicate #likePredicate ; expressionAtom : constant #constantExpressionAtom + | columnName #fullColumnNameExpressionAtom | functionCall #functionCallExpressionAtom | LR_BRACKET expression RR_BRACKET #nestedExpressionAtom | left=expressionAtom mathOperator right=expressionAtom #mathExpressionAtom @@ -160,6 +171,15 @@ mathOperator : PLUS | MINUS | STAR | DIVIDE | MODULE ; +comparisonOperator + : '=' | '>' | '<' | '<' '=' | '>' '=' + | '<' '>' | '!' '=' + ; + +nullNotnull + : NOT? 
NULL_LITERAL + ; + functionCall : scalarFunctionName LR_BRACKET functionArgs? RR_BRACKET #scalarFunctionCall ; diff --git a/sql/src/main/java/com/amazon/opendistroforelasticsearch/sql/sql/SQLService.java b/sql/src/main/java/com/amazon/opendistroforelasticsearch/sql/sql/SQLService.java index 599d02bddb..387da42a79 100644 --- a/sql/src/main/java/com/amazon/opendistroforelasticsearch/sql/sql/SQLService.java +++ b/sql/src/main/java/com/amazon/opendistroforelasticsearch/sql/sql/SQLService.java @@ -76,15 +76,13 @@ public void execute(SQLQueryRequest request, ResponseListener lis } /** - * Given AST, run the remaining steps to execute it. - * @param ast AST + * Given physical plan, execute it and listen on response. + * @param plan physical plan * @param listener callback listener */ - public void execute(UnresolvedPlan ast, ResponseListener listener) { + public void execute(PhysicalPlan plan, ResponseListener listener) { try { - executionEngine.execute( - plan( - analyze(ast)), listener); + executionEngine.execute(plan, listener); } catch (Exception e) { listener.onFailure(e); } @@ -95,7 +93,7 @@ public void execute(UnresolvedPlan ast, ResponseListener listener */ public UnresolvedPlan parse(String query) { ParseTree cst = parser.parse(query); - return cst.accept(new AstBuilder()); + return cst.accept(new AstBuilder(query)); } /** diff --git a/sql/src/main/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstBuilder.java b/sql/src/main/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstBuilder.java index 0774bf769f..b404062261 100644 --- a/sql/src/main/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstBuilder.java +++ b/sql/src/main/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstBuilder.java @@ -18,42 +18,56 @@ import static com.amazon.opendistroforelasticsearch.sql.sql.antlr.parser.OpenDistroSQLParser.FromClauseContext; import static 
com.amazon.opendistroforelasticsearch.sql.sql.antlr.parser.OpenDistroSQLParser.SelectClauseContext; +import static com.amazon.opendistroforelasticsearch.sql.sql.antlr.parser.OpenDistroSQLParser.SelectElementContext; import static com.amazon.opendistroforelasticsearch.sql.sql.antlr.parser.OpenDistroSQLParser.SimpleSelectContext; +import static com.amazon.opendistroforelasticsearch.sql.sql.antlr.parser.OpenDistroSQLParser.WhereClauseContext; +import com.amazon.opendistroforelasticsearch.sql.ast.expression.Alias; +import com.amazon.opendistroforelasticsearch.sql.ast.expression.AllFields; import com.amazon.opendistroforelasticsearch.sql.ast.expression.UnresolvedExpression; +import com.amazon.opendistroforelasticsearch.sql.ast.tree.Filter; import com.amazon.opendistroforelasticsearch.sql.ast.tree.Project; import com.amazon.opendistroforelasticsearch.sql.ast.tree.Relation; import com.amazon.opendistroforelasticsearch.sql.ast.tree.UnresolvedPlan; import com.amazon.opendistroforelasticsearch.sql.ast.tree.Values; import com.amazon.opendistroforelasticsearch.sql.common.antlr.SyntaxCheckException; +import com.amazon.opendistroforelasticsearch.sql.common.utils.StringUtils; import com.amazon.opendistroforelasticsearch.sql.sql.antlr.parser.OpenDistroSQLParser.QuerySpecificationContext; import com.amazon.opendistroforelasticsearch.sql.sql.antlr.parser.OpenDistroSQLParserBaseVisitor; import com.google.common.collect.ImmutableList; import java.util.Collections; -import java.util.List; -import java.util.Objects; -import java.util.stream.Collectors; +import java.util.Optional; +import lombok.RequiredArgsConstructor; +import org.antlr.v4.runtime.ParserRuleContext; +import org.antlr.v4.runtime.Token; import org.antlr.v4.runtime.tree.ParseTree; /** * Abstract syntax tree (AST) builder. 
*/ +@RequiredArgsConstructor public class AstBuilder extends OpenDistroSQLParserBaseVisitor { - private static final Project SELECT_ALL = null; - private final AstExpressionBuilder expressionBuilder = new AstExpressionBuilder(); + /** + * SQL query to get original token text. This is necessary because token.getText() returns + * text without whitespaces or other characters discarded by lexer. + */ + private final String query; + @Override public UnresolvedPlan visitSimpleSelect(SimpleSelectContext ctx) { QuerySpecificationContext query = ctx.querySpecification(); UnresolvedPlan project = visit(query.selectClause()); if (query.fromClause() == null) { - if (project == SELECT_ALL) { + Optional allFields = + ((Project) project).getProjectList().stream().filter(node -> node instanceof AllFields) + .findFirst(); + if (allFields.isPresent()) { throw new SyntaxCheckException("No FROM clause found for select all"); } - // Attach an Values operator with only a empty row inside so that // Project operator can have a chance to evaluate its expression // though the evaluation doesn't have any dependency on what's in Values. @@ -62,25 +76,32 @@ public UnresolvedPlan visitSimpleSelect(SimpleSelectContext ctx) { } UnresolvedPlan relation = visit(query.fromClause()); - return (project == SELECT_ALL) ? relation : project.attach(relation); + return project.attach(relation); } @Override public UnresolvedPlan visitSelectClause(SelectClauseContext ctx) { + ImmutableList.Builder builder = + new ImmutableList.Builder<>(); if (ctx.selectElements().star != null) { //TODO: project operator should be required? 
- return SELECT_ALL; + builder.add(AllFields.of()); } - - List selectElements = ctx.selectElements().children; - return new Project(selectElements.stream() - .map(this::visitAstExpression) - .filter(Objects::nonNull) - .collect(Collectors.toList())); + ctx.selectElements().selectElement().forEach(field -> builder.add(visitSelectItem(field))); + return new Project(builder.build()); } @Override public UnresolvedPlan visitFromClause(FromClauseContext ctx) { - return new Relation(visitAstExpression(ctx.tableName().qualifiedName())); + UnresolvedPlan result = new Relation(visitAstExpression(ctx.tableName().qualifiedName())); + if (ctx.whereClause() != null) { + result = visit(ctx.whereClause()).attach(result); + } + return result; + } + + @Override + public UnresolvedPlan visitWhereClause(WhereClauseContext ctx) { + return new Filter(visitAstExpression(ctx.expression())); } @Override @@ -92,4 +113,25 @@ private UnresolvedExpression visitAstExpression(ParseTree tree) { return expressionBuilder.visit(tree); } + private UnresolvedExpression visitSelectItem(SelectElementContext ctx) { + String name = StringUtils.unquoteIdentifier(getTextInQuery(ctx.expression())); + UnresolvedExpression expr = visitAstExpression(ctx.expression()); + + if (ctx.alias() == null) { + return new Alias(name, expr); + } else { + String alias = StringUtils.unquoteIdentifier(ctx.alias().getText()); + return new Alias(name, expr, alias); + } + } + + /** + * Get original text in query. 
+ */ + private String getTextInQuery(ParserRuleContext ctx) { + Token start = ctx.getStart(); + Token stop = ctx.getStop(); + return query.substring(start.getStartIndex(), stop.getStopIndex() + 1); + } + } diff --git a/sql/src/main/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstExpressionBuilder.java b/sql/src/main/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstExpressionBuilder.java index b3fab91c47..11018fe9d3 100644 --- a/sql/src/main/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstExpressionBuilder.java +++ b/sql/src/main/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstExpressionBuilder.java @@ -16,27 +16,46 @@ package com.amazon.opendistroforelasticsearch.sql.sql.parser; -import static com.amazon.opendistroforelasticsearch.sql.common.utils.StringUtils.unquoteIdentifier; +import static com.amazon.opendistroforelasticsearch.sql.expression.function.BuiltinFunctionName.IS_NOT_NULL; +import static com.amazon.opendistroforelasticsearch.sql.expression.function.BuiltinFunctionName.IS_NULL; +import static com.amazon.opendistroforelasticsearch.sql.expression.function.BuiltinFunctionName.LIKE; +import static com.amazon.opendistroforelasticsearch.sql.expression.function.BuiltinFunctionName.NOT_LIKE; +import static com.amazon.opendistroforelasticsearch.sql.sql.antlr.parser.OpenDistroSQLParser.BinaryComparisonPredicateContext; import static com.amazon.opendistroforelasticsearch.sql.sql.antlr.parser.OpenDistroSQLParser.BooleanContext; +import static com.amazon.opendistroforelasticsearch.sql.sql.antlr.parser.OpenDistroSQLParser.DateLiteralContext; +import static com.amazon.opendistroforelasticsearch.sql.sql.antlr.parser.OpenDistroSQLParser.IsNullPredicateContext; +import static com.amazon.opendistroforelasticsearch.sql.sql.antlr.parser.OpenDistroSQLParser.LikePredicateContext; import static com.amazon.opendistroforelasticsearch.sql.sql.antlr.parser.OpenDistroSQLParser.MathExpressionAtomContext; +import static 
com.amazon.opendistroforelasticsearch.sql.sql.antlr.parser.OpenDistroSQLParser.NotExpressionContext; +import static com.amazon.opendistroforelasticsearch.sql.sql.antlr.parser.OpenDistroSQLParser.NullLiteralContext; import static com.amazon.opendistroforelasticsearch.sql.sql.antlr.parser.OpenDistroSQLParser.ScalarFunctionCallContext; import static com.amazon.opendistroforelasticsearch.sql.sql.antlr.parser.OpenDistroSQLParser.SignedDecimalContext; import static com.amazon.opendistroforelasticsearch.sql.sql.antlr.parser.OpenDistroSQLParser.SignedRealContext; import static com.amazon.opendistroforelasticsearch.sql.sql.antlr.parser.OpenDistroSQLParser.StringContext; +import static com.amazon.opendistroforelasticsearch.sql.sql.antlr.parser.OpenDistroSQLParser.TimeLiteralContext; +import static com.amazon.opendistroforelasticsearch.sql.sql.antlr.parser.OpenDistroSQLParser.TimestampLiteralContext; import com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL; +import com.amazon.opendistroforelasticsearch.sql.ast.expression.And; import com.amazon.opendistroforelasticsearch.sql.ast.expression.Function; +import com.amazon.opendistroforelasticsearch.sql.ast.expression.Not; +import com.amazon.opendistroforelasticsearch.sql.ast.expression.Or; import com.amazon.opendistroforelasticsearch.sql.ast.expression.QualifiedName; import com.amazon.opendistroforelasticsearch.sql.ast.expression.UnresolvedExpression; -import com.amazon.opendistroforelasticsearch.sql.sql.antlr.parser.OpenDistroSQLParser; +import com.amazon.opendistroforelasticsearch.sql.common.utils.StringUtils; +import com.amazon.opendistroforelasticsearch.sql.sql.antlr.parser.OpenDistroSQLParser.AndExpressionContext; +import com.amazon.opendistroforelasticsearch.sql.sql.antlr.parser.OpenDistroSQLParser.ColumnNameContext; import com.amazon.opendistroforelasticsearch.sql.sql.antlr.parser.OpenDistroSQLParser.IdentContext; import 
com.amazon.opendistroforelasticsearch.sql.sql.antlr.parser.OpenDistroSQLParser.NestedExpressionAtomContext; +import com.amazon.opendistroforelasticsearch.sql.sql.antlr.parser.OpenDistroSQLParser.OrExpressionContext; import com.amazon.opendistroforelasticsearch.sql.sql.antlr.parser.OpenDistroSQLParser.QualifiedNameContext; import com.amazon.opendistroforelasticsearch.sql.sql.antlr.parser.OpenDistroSQLParser.TableNameContext; import com.amazon.opendistroforelasticsearch.sql.sql.antlr.parser.OpenDistroSQLParserBaseVisitor; import java.util.Arrays; +import java.util.Collections; +import java.util.List; import java.util.stream.Collectors; -import org.antlr.v4.runtime.tree.RuleNode; +import org.antlr.v4.runtime.RuleContext; /** * Expression builder to parse text to expression in AST. @@ -45,22 +64,22 @@ public class AstExpressionBuilder extends OpenDistroSQLParserBaseVisitor") ? "!=" : functionName, + Arrays.asList(visit(ctx.left), visit(ctx.right)) + ); + } + + private QualifiedName visitIdentifiers(List identifiers) { + return new QualifiedName( + identifiers.stream() + .map(RuleContext::getText) + .map(StringUtils::unquoteIdentifier) + .collect(Collectors.toList()) + ); } } diff --git a/sql/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/SQLServiceTest.java b/sql/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/SQLServiceTest.java index ce563414ff..2f262a261f 100644 --- a/sql/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/SQLServiceTest.java +++ b/sql/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/SQLServiceTest.java @@ -22,17 +22,15 @@ import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; -import com.amazon.opendistroforelasticsearch.sql.ast.tree.UnresolvedPlan; import com.amazon.opendistroforelasticsearch.sql.common.response.ResponseListener; import 
com.amazon.opendistroforelasticsearch.sql.executor.ExecutionEngine; -import com.amazon.opendistroforelasticsearch.sql.sql.antlr.SQLSyntaxParser; +import com.amazon.opendistroforelasticsearch.sql.planner.physical.PhysicalPlan; import com.amazon.opendistroforelasticsearch.sql.sql.config.SQLServiceConfig; import com.amazon.opendistroforelasticsearch.sql.sql.domain.SQLQueryRequest; -import com.amazon.opendistroforelasticsearch.sql.sql.parser.AstBuilder; import com.amazon.opendistroforelasticsearch.sql.storage.StorageEngine; import java.util.Collections; -import org.antlr.v4.runtime.tree.ParseTree; import org.json.JSONObject; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -54,6 +52,9 @@ class SQLServiceTest { @Mock private ExecutionEngine executionEngine; + @Mock + private ExecutionEngine.Schema schema; + @BeforeEach public void setUp() { context.registerBean(StorageEngine.class, () -> storageEngine); @@ -67,7 +68,7 @@ public void setUp() { public void canExecuteSqlQuery() { doAnswer(invocation -> { ResponseListener listener = invocation.getArgument(1); - listener.onResponse(new QueryResponse(Collections.emptyList())); + listener.onResponse(new QueryResponse(schema, Collections.emptyList())); return null; }).when(executionEngine).execute(any(), any()); @@ -87,17 +88,14 @@ public void onFailure(Exception e) { } @Test - public void canExecuteFromAst() { + public void canExecuteFromPhysicalPlan() { doAnswer(invocation -> { ResponseListener listener = invocation.getArgument(1); - listener.onResponse(new QueryResponse(Collections.emptyList())); + listener.onResponse(new QueryResponse(schema, Collections.emptyList())); return null; }).when(executionEngine).execute(any(), any()); - ParseTree parseTree = new SQLSyntaxParser().parse("SELECT 123"); - UnresolvedPlan ast = parseTree.accept(new AstBuilder()); - - sqlService.execute(ast, + sqlService.execute(mock(PhysicalPlan.class), new ResponseListener() { @Override public void 
onResponse(QueryResponse response) { @@ -129,13 +127,10 @@ public void onFailure(Exception e) { } @Test - public void canCaptureErrorDuringExecutionFromAst() { + public void canCaptureErrorDuringExecutionFromPhysicalPlan() { doThrow(new RuntimeException()).when(executionEngine).execute(any(), any()); - ParseTree parseTree = new SQLSyntaxParser().parse("SELECT 123"); - UnresolvedPlan ast = parseTree.accept(new AstBuilder()); - - sqlService.execute(ast, + sqlService.execute(mock(PhysicalPlan.class), new ResponseListener() { @Override public void onResponse(QueryResponse response) { diff --git a/sql/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/antlr/SQLSyntaxParserTest.java b/sql/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/antlr/SQLSyntaxParserTest.java index 94d668c4ba..0f516de40d 100644 --- a/sql/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/antlr/SQLSyntaxParserTest.java +++ b/sql/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/antlr/SQLSyntaxParserTest.java @@ -36,6 +36,26 @@ public void canParseSelectLiterals() { assertNotNull(parser.parse("SELECT 123, 'hello'")); } + @Test + public void canParseSelectLiteralWithAlias() { + assertNotNull(parser.parse("SELECT (1 + 2) * 3 AS expr")); + } + + @Test + public void canParseSelectFields() { + assertNotNull(parser.parse("SELECT name, age FROM accounts")); + } + + @Test + public void canParseSelectFieldWithAlias() { + assertNotNull(parser.parse("SELECT name AS n, age AS a FROM accounts")); + } + + @Test + public void canParseSelectFieldWithQuotedAlias() { + assertNotNull(parser.parse("SELECT name AS \"n\", age AS `a` FROM accounts")); + } + @Test public void canParseIndexNameWithDate() { assertNotNull(parser.parse("SELECT * FROM logs_2020_01")); @@ -71,10 +91,27 @@ public void canNotParseIndexNameSingleQuoted() { () -> parser.parse("SELECT * FROM 'test'")); } + @Test + public void canParseWhereClause() { + assertNotNull(parser.parse("SELECT name FROM test WHERE age 
= 10")); + } + + @Test + public void canParseSelectClauseWithLogicalOperator() { + assertNotNull(parser.parse( + "SELECT age = 10 AND name = 'John' OR NOT (balance > 1000) FROM test")); + } + + @Test + public void canParseWhereClauseWithLogicalOperator() { + assertNotNull(parser.parse("SELECT name FROM test " + + "WHERE age = 10 AND name = 'John' OR NOT (balance > 1000)")); + } + @Test public void canNotParseInvalidSelect() { assertThrows(SyntaxCheckException.class, - () -> parser.parse("SELECT * FROM test WHERE age = 10")); + () -> parser.parse("SELECT * FROM test WHERE age = 10 GROUP BY name")); } } \ No newline at end of file diff --git a/sql/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstBuilderTest.java b/sql/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstBuilderTest.java index 5f047b22b5..dafb74e9c9 100644 --- a/sql/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstBuilderTest.java +++ b/sql/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstBuilderTest.java @@ -16,10 +16,14 @@ package com.amazon.opendistroforelasticsearch.sql.sql.parser; +import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.alias; import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.booleanLiteral; import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.doubleLiteral; +import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.filter; +import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.function; import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.intLiteral; import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.project; +import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.qualifiedName; import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.relation; import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.stringLiteral; import static 
com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.values; @@ -27,6 +31,7 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertThrows; +import com.amazon.opendistroforelasticsearch.sql.ast.expression.AllFields; import com.amazon.opendistroforelasticsearch.sql.ast.tree.UnresolvedPlan; import com.amazon.opendistroforelasticsearch.sql.common.antlr.SyntaxCheckException; import com.amazon.opendistroforelasticsearch.sql.sql.antlr.SQLSyntaxParser; @@ -40,29 +45,42 @@ class AstBuilderTest { */ private final SQLSyntaxParser parser = new SQLSyntaxParser(); - /** - * AST builder class that being tested. - */ - private final AstBuilder astBuilder = new AstBuilder(); - @Test - public void canBuildSelectLiterals() { + public void can_build_select_literals() { assertEquals( project( values(emptyList()), - intLiteral(123), - stringLiteral("hello"), - booleanLiteral(false), - doubleLiteral(-4.567) + alias("123", intLiteral(123)), + alias("'hello'", stringLiteral("hello")), + alias("false", booleanLiteral(false)), + alias("-4.567", doubleLiteral(-4.567)) ), buildAST("SELECT 123, 'hello', false, -4.567") ); } @Test - public void canBuildSelectAllFromIndex() { + public void can_build_select_function_call_with_alias() { assertEquals( - relation("test"), + project( + relation("test"), + alias( + "ABS(age)", + function("ABS", qualifiedName("age")), + "a" + ) + ), + buildAST("SELECT ABS(age) AS a FROM test") + ); + } + + @Test + public void can_build_select_all_from_index() { + assertEquals( + project( + relation("test"), + AllFields.of() + ), buildAST("SELECT * FROM test") ); @@ -70,16 +88,84 @@ public void canBuildSelectAllFromIndex() { } @Test - public void buildSelectFieldsFromIndex() { // TODO: change to select fields later + public void can_build_select_all_and_fields_from_index() { + assertEquals( + project( + relation("test"), + AllFields.of(), + alias("age", qualifiedName("age")), + alias("age", 
qualifiedName("age"), "a") + ), + buildAST("SELECT *, age, age as a FROM test") + ); + } + + @Test + public void can_build_select_fields_from_index() { + assertEquals( + project( + relation("test"), + alias("age", qualifiedName("age")) + ), + buildAST("SELECT age FROM test") + ); + } + + @Test + public void can_build_select_fields_with_alias() { assertEquals( - project(relation("test"), intLiteral(1)), - buildAST("SELECT 1 FROM test") + project( + relation("test"), + alias("age", qualifiedName("age"), "a") + ), + buildAST("SELECT age AS a FROM test") + ); + } + + @Test + public void can_build_select_fields_with_alias_quoted() { + assertEquals( + project( + relation("test"), + alias( + "name", + qualifiedName("name"), + "first name" + ), + alias( + "(age + 10)", + function("+", qualifiedName("age"), intLiteral(10)), + "Age_Expr" + ) + ), + buildAST("SELECT" + + " name AS \"first name\", " + + " (age + 10) AS `Age_Expr` " + + "FROM test" + ) + ); + } + + @Test + public void can_build_where_clause() { + assertEquals( + project( + filter( + relation("test"), + function( + "=", + qualifiedName("name"), + stringLiteral("John")) + ), + alias("name", qualifiedName("name")) + ), + buildAST("SELECT name FROM test WHERE name = 'John'") ); } private UnresolvedPlan buildAST(String query) { ParseTree parseTree = parser.parse(query); - return parseTree.accept(astBuilder); + return parseTree.accept(new AstBuilder(query)); } } \ No newline at end of file diff --git a/sql/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstExpressionBuilderTest.java b/sql/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstExpressionBuilderTest.java index 5dece1f09f..a6e5d6f798 100644 --- a/sql/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstExpressionBuilderTest.java +++ b/sql/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstExpressionBuilderTest.java @@ -16,11 +16,15 @@ package 
com.amazon.opendistroforelasticsearch.sql.sql.parser; +import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.and; import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.booleanLiteral; import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.dateLiteral; import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.doubleLiteral; import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.function; import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.intLiteral; +import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.not; +import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.nullLiteral; +import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.or; import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.stringLiteral; import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.timeLiteral; import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.timestampLiteral; @@ -142,6 +146,76 @@ public void canBuildDateAndTimeFunctionCall() { ); } + @Test + public void canBuildComparisonExpression() { + assertEquals( + function("!=", intLiteral(1), intLiteral(2)), + buildExprAst("1 != 2") + ); + + assertEquals( + function("!=", intLiteral(1), intLiteral(2)), + buildExprAst("1 <> 2") + ); + } + + @Test + public void canBuildNullTestExpression() { + assertEquals( + function("is null", intLiteral(1)), + buildExprAst("1 is NULL") + ); + + assertEquals( + function("is not null", intLiteral(1)), + buildExprAst("1 IS NOT null") + ); + } + + @Test + public void canBuildNullTestExpressionWithNULLLiteral() { + assertEquals( + function("is null", nullLiteral()), + buildExprAst("NULL is NULL") + ); + + assertEquals( + function("is not null", nullLiteral()), + buildExprAst("NULL IS NOT null") + ); + } + + @Test + public void canBuildLikeExpression() { + assertEquals( + function("like", stringLiteral("str"), 
stringLiteral("st%")), + buildExprAst("'str' like 'st%'") + ); + + assertEquals( + function("not like", stringLiteral("str"), stringLiteral("st%")), + buildExprAst("'str' not like 'st%'") + ); + } + + @Test + public void canBuildLogicalExpression() { + assertEquals( + and(booleanLiteral(true), booleanLiteral(false)), + buildExprAst("true AND false") + ); + + assertEquals( + or(booleanLiteral(true), booleanLiteral(false)), + buildExprAst("true OR false") + ); + + assertEquals( + not(booleanLiteral(false)), + buildExprAst("NOT false") + ); + } + private Node buildExprAst(String expr) { OpenDistroSQLLexer lexer = new OpenDistroSQLLexer(new CaseInsensitiveCharStream(expr)); OpenDistroSQLParser parser = new OpenDistroSQLParser(new CommonTokenStream(lexer)); diff --git a/sql/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstQualifiedNameBuilderTest.java b/sql/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstQualifiedNameBuilderTest.java index e8c1506e7d..7f7d5cf48a 100644 --- a/sql/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstQualifiedNameBuilderTest.java +++ b/sql/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstQualifiedNameBuilderTest.java @@ -42,7 +42,7 @@ public void canBuildRegularIdentifierForSQLStandard() { @Test public void canBuildRegularIdentifierForElasticsearch() { buildFromTableName(".kibana").expectQualifiedName(".kibana"); - //buildFromIdentifier("@timestamp").expectQualifiedName("@timestamp");//TODO: field name + buildFromIdentifier("@timestamp").expectQualifiedName("@timestamp"); buildFromIdentifier("logs-2020-01").expectQualifiedName("logs-2020-01"); buildFromIdentifier("*logs*").expectQualifiedName("*logs*"); }