Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions benchmarks/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ repositories {

dependencies {
implementation project(':core')
implementation project(':opensearch')

// Dependencies required by JMH micro benchmark
api group: 'org.openjdk.jmh', name: 'jmh-core', version: '1.36'
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,59 @@
/*
* Copyright OpenSearch Contributors
* SPDX-License-Identifier: Apache-2.0
*/

package org.opensearch.sql.expression.operator.predicate;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.openjdk.jmh.annotations.Benchmark;
import org.opensearch.sql.opensearch.data.type.OpenSearchDataType;
import org.opensearch.sql.opensearch.request.system.OpenSearchDescribeIndexRequest;

public class MergeArrayAndObjectMapBenchmark {
private static final List<Map<String, OpenSearchDataType>> candidateMaps = prepareListOfMaps(120);

@Benchmark
public void testMerge() {
Map<String, OpenSearchDataType> finalResult = new HashMap<>();
for (Map<String, OpenSearchDataType> map : candidateMaps) {
OpenSearchDescribeIndexRequest.mergeObjectAndArrayInsideMap(finalResult, map);
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

mergeObjectAndArrayInsideMap not exist

}
}

private static Map<String, OpenSearchDataType> prepareMap(int recursive, String prefix) {
Map<String, OpenSearchDataType> map = new HashMap<>();
Map<String, Object> innerMap = prepareRecursiveMap(recursive, prefix);
map.put("name", OpenSearchDataType.of(OpenSearchDataType.MappingType.Object, innerMap));
return map;
}

public static Map<String, Object> prepareRecursiveMap(int recursive, String prefix) {
Map<String, Object> innerMap = new LinkedHashMap<>();
if (recursive == 0) {
innerMap.put("type", "string");
} else {
innerMap.put("type", "object");
innerMap.put(
"properties",
Map.of(
prefix + "_" + String.valueOf(recursive),
Map.of("type", "text"),
"recursive",
prepareRecursiveMap(recursive - 1, prefix)));
}
return innerMap;
}

private static List<Map<String, OpenSearchDataType>> prepareListOfMaps(int listNumber) {
List<Map<String, OpenSearchDataType>> list = new ArrayList<>();
for (int i = 0; i < listNumber; i++) {
list.add(prepareMap(15, "prefix" + i));
}
return list;
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,6 @@
import org.opensearch.sql.expression.ReferenceExpression;
import org.opensearch.sql.expression.window.WindowDefinition;
import org.opensearch.sql.expression.window.frame.BufferPatternRowsWindowFrame;
import org.opensearch.sql.expression.window.frame.CurrentRowWindowFrame;
import org.opensearch.sql.expression.window.frame.WindowFrame;

@Warmup(iterations = 1)
Expand Down Expand Up @@ -62,14 +61,6 @@ public class PatternsWindowFunctionBenchmark {
new BrainLogParser(),
new NamedArgumentExpression("message", new ReferenceExpression("message", STRING)));

@Benchmark
public void testSimplePattern() {
CurrentRowWindowFrame windowFrame =
new CurrentRowWindowFrame(new WindowDefinition(ImmutableList.of(), ImmutableList.of()));

run(windowFrame, DSL.simple_pattern(DSL.ref("message", STRING)));
}

@Benchmark
public void testBrain() {
BufferPatternRowsWindowFrame windowFrame =
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,9 +5,7 @@

package org.opensearch.sql.calcite.standalone;

import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_ACCOUNT;
import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_ALIAS;
import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK;
import static org.opensearch.sql.legacy.TestsConstants.*;
import static org.opensearch.sql.util.MatcherUtils.rows;
import static org.opensearch.sql.util.MatcherUtils.schema;
import static org.opensearch.sql.util.MatcherUtils.verifyDataRows;
Expand Down Expand Up @@ -38,6 +36,8 @@ public void init() throws IOException {

loadIndex(Index.BANK);
loadIndex(Index.DATA_TYPE_ALIAS);
loadIndex(Index.MERGE_TEST_1);
loadIndex(Index.MERGE_TEST_2);
Comment on lines +39 to +40
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

}

@Test
Expand Down Expand Up @@ -566,6 +566,30 @@ public void testMetaFieldAlias() {
}

@Test
public void testFieldsMergedObject() {
JSONObject result =
executeQuery(
String.format(
"source=%s | fields machine.os1, machine.os2, machine_array.os1, "
+ " machine_array.os2, machine_deep.attr1, machine_deep.attr2,"
+ " machine_deep.layer.os1, machine_deep.layer.os2",
TEST_INDEX_MERGE_TEST_WILDCARD));
verifySchema(
result,
schema("machine.os1", "string"),
schema("machine.os2", "string"),
schema("machine_array.os1", "string"),
schema("machine_array.os2", "string"),
schema("machine_deep.attr1", "long"),
schema("machine_deep.attr2", "long"),
schema("machine_deep.layer.os1", "string"),
schema("machine_deep.layer.os2", "string"));
verifyDataRows(
result,
rows("linux", null, "linux", null, 1, null, "os1", null),
rows(null, "linux", null, "linux", null, 2, null, "os2"));
}

public void testNumericLiteral() {
JSONObject result =
executeQuery(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@ public void testCaseWhenWithCast() {
cast(response as int) >= 400 AND cast(response as int) < 500, "Client Error",
cast(response as int) >= 500 AND cast(response as int) < 600, "Server Error"
else concat("Incorrect HTTP status code for", url))
| where status != "Success"
| where status != "Success" | fields host, method, message, bytes, response, url, status
""",
TEST_INDEX_WEBLOGS));
verifySchema(
Expand Down Expand Up @@ -114,7 +114,7 @@ public void testCaseWhenNoElse() {
cast(response as int) >= 300 AND cast(response as int) < 400, "Redirection",
cast(response as int) >= 400 AND cast(response as int) < 500, "Client Error",
cast(response as int) >= 500 AND cast(response as int) < 600, "Server Error")
| where isnull(status) OR status != "Success"
| where isnull(status) OR status != "Success" | fields host, method, message, bytes, response, url, status
""",
TEST_INDEX_WEBLOGS));
verifySchema(
Expand Down Expand Up @@ -156,7 +156,7 @@ response in ('300', '301'), "Redirection",
response in ('400', '403'), "Client Error",
response in ('500', '505'), "Server Error"
else concat("Incorrect HTTP status code for", url))
| where status != "Success"
| where status != "Success" | fields host, method, message, bytes, response, url, status
""",
TEST_INDEX_WEBLOGS));
verifySchema(
Expand Down Expand Up @@ -205,6 +205,7 @@ response in ('300', '301'), false,
response in ('400', '403'), false,
response in ('500', '505'), false
else false)
| fields host, method, message, bytes, response, url
""",
TEST_INDEX_WEBLOGS));
verifySchema(
Expand Down Expand Up @@ -240,6 +241,7 @@ response in ('500', '505'), "500"
else concat("Incorrect HTTP status code for", url))
| fields new_response
]
| fields host, method, message, bytes, response, url
""",
TEST_INDEX_WEBLOGS, TEST_INDEX_WEBLOGS));
verifySchema(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -818,6 +818,16 @@ public enum Index {
"hobbies",
getHobbiesIndexMapping(),
"src/test/resources/hobbies.json"),
MERGE_TEST_1(
TestsConstants.TEST_INDEX_MERGE_TEST_1,
"merge_test1",
getMappingFile("merge_test_1_mapping.json"),
"src/test/resources/merge_test_1.json"),
MERGE_TEST_2(
TestsConstants.TEST_INDEX_MERGE_TEST_2,
"merge_test2",
getMappingFile("merge_test_2_mapping.json"),
"src/test/resources/merge_test_2.json"),
// It's "people" table in Spark PPL ITs, to avoid conflicts, rename to "worker" here
WORKER(
TestsConstants.TEST_INDEX_WORKER,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,9 @@ public class TestsConstants {
public static final String TEST_INDEX_WORKER = TEST_INDEX + "_worker";
public static final String TEST_INDEX_WORK_INFORMATION = TEST_INDEX + "_work_information";
public static final String TEST_INDEX_DUPLICATION_NULLABLE = TEST_INDEX + "_duplication_nullable";
public static final String TEST_INDEX_MERGE_TEST_1 = TEST_INDEX + "_merge_test_1";
public static final String TEST_INDEX_MERGE_TEST_2 = TEST_INDEX + "_merge_test_2";
public static final String TEST_INDEX_MERGE_TEST_WILDCARD = TEST_INDEX + "_merge_test_*";

public static final String DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'";
public static final String TS_DATE_FORMAT = "yyyy-MM-dd HH:mm:ss.SSS";
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,7 @@

package org.opensearch.sql.ppl;

import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_ACCOUNT;
import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK;
import static org.opensearch.sql.legacy.TestsConstants.*;
import static org.opensearch.sql.util.MatcherUtils.columnName;
import static org.opensearch.sql.util.MatcherUtils.columnPattern;
import static org.opensearch.sql.util.MatcherUtils.rows;
Expand All @@ -28,6 +27,8 @@ public void init() throws Exception {
super.init();
loadIndex(Index.ACCOUNT);
loadIndex(Index.BANK);
loadIndex(Index.MERGE_TEST_1);
loadIndex(Index.MERGE_TEST_2);
}

@Test
Expand Down Expand Up @@ -105,4 +106,29 @@ public void testMetadataFieldsWithEvalMetaField() {
"source=%s | eval _id = 1 | fields firstname, _id", TEST_INDEX_ACCOUNT)));
verifyErrorMessageContains(e, "Cannot use metadata field [_id] as the eval field.");
}

@Test
public void testFieldsMergedObject() throws IOException {
  // Select fields that only exist in one of the two wildcard-matched indices; the merged
  // mapping should surface all of them.
  String ppl =
      String.format(
          "source=%s | fields machine.os1, machine.os2, machine_array.os1, "
              + " machine_array.os2, machine_deep.attr1, machine_deep.attr2,"
              + " machine_deep.layer.os1, machine_deep.layer.os2",
          TEST_INDEX_MERGE_TEST_WILDCARD);
  JSONObject response = executeQuery(ppl);
  verifySchema(
      response,
      schema("machine.os1", "string"),
      schema("machine.os2", "string"),
      schema("machine_array.os1", "string"),
      schema("machine_array.os2", "string"),
      schema("machine_deep.attr1", "bigint"),
      schema("machine_deep.attr2", "bigint"),
      schema("machine_deep.layer.os1", "string"),
      schema("machine_deep.layer.os2", "string"));
  // Each source index contributes one row; the other index's fields are null in it.
  verifyDataRows(
      response,
      rows("linux", null, "linux", null, 1, null, "os1", null),
      rows(null, "linux", null, "linux", null, 2, null, "os2"));
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -83,7 +83,8 @@ public void testGeoIpEnrichment() {
JSONObject resultGeoIp =
executeQuery(
String.format(
"search source=%s | eval enrichmentResult = geoip(\\\"%s\\\",%s)",
"search source=%s | eval enrichmentResult = geoip(\\\"%s\\\",%s) | fields name, ip,"
+ " enrichmentResult",
TEST_INDEX_GEOIP, "dummycityindex", "ip"));

verifyColumn(resultGeoIp, columnName("name"), columnName("ip"), columnName("enrichmentResult"));
Expand All @@ -100,7 +101,8 @@ public void testGeoIpEnrichmentWithSingleOption() {
JSONObject resultGeoIp =
executeQuery(
String.format(
"search source=%s | eval enrichmentResult = geoip(\\\"%s\\\",%s,\\\"%s\\\")",
"search source=%s | eval enrichmentResult = geoip(\\\"%s\\\",%s,\\\"%s\\\") |"
+ " fields name, ip, enrichmentResult",
TEST_INDEX_GEOIP, "dummycityindex", "ip", "city"));

verifyColumn(resultGeoIp, columnName("name"), columnName("ip"), columnName("enrichmentResult"));
Expand All @@ -117,7 +119,8 @@ public void testGeoIpEnrichmentWithSpaceSeparatedMultipleOptions() {
JSONObject resultGeoIp =
executeQuery(
String.format(
"search source=%s | eval enrichmentResult = geoip(\\\"%s\\\",%s,\\\"%s\\\")",
"search source=%s | eval enrichmentResult = geoip(\\\"%s\\\",%s,\\\"%s\\\") |"
+ " fields name, ip, enrichmentResult",
TEST_INDEX_GEOIP, "dummycityindex", "ip", "city , country"));

verifyColumn(resultGeoIp, columnName("name"), columnName("ip"), columnName("enrichmentResult"));
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
{
"mappings": {
"properties": {
"machine": {
"properties": {
"os1": {
"type": "text"
},
"ram1": {
"type": "long"
}
}
},
"machine_deep": {
"properties": {
"attr1": {
"type": "long"
},
"layer": {
"properties": {
"os1": {
"type": "text"
}
}
}
}
},
"machine_array": {
"type": "nested",
"properties": {
"os1": {
"type": "text"
},
"ram1": {
"type": "long"
}
}
}
}
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
{
"mappings": {
"properties": {
"machine": {
"properties": {
"os2": {
"type": "text"
},
"ram2": {
"type": "long"
}
}
},
"machine_deep": {
"properties": {
"attr2": {
"type": "long"
},
"layer": {
"properties": {
"os2": {
"type": "text"
}
}
}
}
},
"machine_array": {
"type": "nested",
"properties": {
"os2": {
"type": "text"
},
"ram2": {
"type": "long"
}
}
}
}
}
}
2 changes: 2 additions & 0 deletions integ-test/src/test/resources/merge_test_1.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
{"index":{"_id":"1"}}
{"machine": {"os1": "linux", "ram1": 120}, "machine_array": [{"os1": "linux", "ram1": 120}], "machine_deep": {"attr1": 1, "layer": {"os1": "os1"}}}
2 changes: 2 additions & 0 deletions integ-test/src/test/resources/merge_test_2.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
{"index": {}}
{"machine": {"os2": "linux", "ram2": 120}, "machine_array": [{"os2": "linux", "ram2": 120}],"machine_deep": {"attr2": 2, "layer": {"os2": "os2"}}}
Loading
Loading