[Test] Fix MR unit test failures #35

Merged: 2 commits merged on Dec 1, 2022
@@ -91,7 +91,6 @@ public Resource(Settings settings, boolean read) {
int slash = res.indexOf("/");
boolean typeExists = slash >= 0;

OpenSearchMajorVersion opensearchMajorVersion = settings.getInternalVersionOrThrow();
// Types can no longer be specified at all! Index names only!
if (typeExists) {
throw new OpenSearchHadoopIllegalArgumentException(String.format(
5 changes: 5 additions & 0 deletions mr/src/main/java/org/opensearch/hadoop/rest/RestClient.java
@@ -708,6 +708,11 @@ public ClusterInfo mainInfo() {
throw new OpenSearchHadoopIllegalStateException("Unable to retrieve OpenSearch version.");
}
String versionNumber = versionBody.get("number");
OpenSearchMajorVersion major = OpenSearchMajorVersion.parse(versionNumber);
if (major.before(OpenSearchMajorVersion.V_2_X)) {
throw new OpenSearchHadoopIllegalStateException("Invalid major version [" + major + "]. " +
"Version is lower than minimum required version [" + OpenSearchMajorVersion.V_2_X + "].");
}
return new ClusterInfo(new ClusterName(clusterName, clusterUUID), OpenSearchMajorVersion.parse(versionNumber));
}
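
For context, a brief caller-side sketch (not part of this diff) of the new failure mode: `networkClient` here stands in for any NetworkClient whose root response reports a 1.x version, as mocked in testMainInfoWithClusterTooOld further down.

```java
// Illustrative only: mainInfo() now rejects clusters older than 2.x up front.
RestClient client = new RestClient(new TestSettings(), networkClient);
try {
    ClusterInfo info = client.mainInfo(); // not reached for a 1.x cluster
} catch (OpenSearchHadoopIllegalStateException e) {
    // "Invalid major version [1.0.0]. Version is lower than minimum required version [2.x]."
}
```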

@@ -76,6 +76,12 @@ public boolean onOrBefore(OpenSearchMajorVersion version) {
}

public static OpenSearchMajorVersion parse(String version) {
if (version.startsWith("0.")) {
return new OpenSearchMajorVersion((byte) 0, version);
}
if (version.startsWith("1.")) {
return new OpenSearchMajorVersion((byte) 1, version);
}
if (version.startsWith("2.")) {
return new OpenSearchMajorVersion((byte) 2, version);
}
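
As a quick illustration (not part of this diff), the new 0.x/1.x branches give pre-2.x version strings their own major versions, which is what lets the mainInfo() guard above reject them by comparison; the version literals below are examples only.

```java
// Illustrative only: parse() now maps pre-2.x strings to 0.x/1.x majors.
OpenSearchMajorVersion legacy  = OpenSearchMajorVersion.parse("1.3.6");
OpenSearchMajorVersion current = OpenSearchMajorVersion.parse("2.4.0");

assert legacy.before(OpenSearchMajorVersion.V_2_X);      // rejected by mainInfo()
assert current.onOrAfter(OpenSearchMajorVersion.V_2_X);  // accepted
```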
7 changes: 0 additions & 7 deletions mr/src/test/java/org/opensearch/hadoop/rest/QueryTest.java
@@ -47,13 +47,6 @@ public void setup() {
builder = new SearchRequestBuilder(true);
}

@Test
public void testSimpleQuery() {
cfg.setResourceRead("foo/bar");
Resource typed = new Resource(cfg, true);
assertTrue(builder.resource(typed).toString().contains("foo/bar"));
}

@Test
public void testSimpleQueryTypeless() {
cfg.setInternalVersion(OpenSearchMajorVersion.LATEST);
121 changes: 9 additions & 112 deletions mr/src/test/java/org/opensearch/hadoop/rest/RestClientTest.java
@@ -55,42 +55,6 @@

public class RestClientTest {

@Test
public void testPostDocumentSuccess() throws Exception {
String index = "index/type";
Settings settings = new TestSettings();
settings.setInternalVersion(OpenSearchMajorVersion.V_2_X);
settings.setResourceWrite(index);
Resource writeResource = new Resource(settings, false);
BytesArray document = new BytesArray("{\"field\":\"value\"}");
SimpleRequest request = new SimpleRequest(Request.Method.POST, null, index, null, document);
String response =
"{\n" +
" \"_index\": \"index\",\n" +
" \"_type\": \"type\",\n" +
" \"_id\": \"AbcDefGhiJklMnoPqrS_\",\n" +
" \"_version\": 1,\n" +
" \"result\": \"created\",\n" +
" \"_shards\": {\n" +
" \"total\": 2,\n" +
" \"successful\": 1,\n" +
" \"failed\": 0\n" +
" },\n" +
" \"_seq_no\": 0,\n" +
" \"_primary_term\": 1\n" +
"}";

NetworkClient mock = Mockito.mock(NetworkClient.class);
Mockito.when(mock.execute(Mockito.eq(request), Mockito.eq(true)))
.thenReturn(new SimpleResponse(201, new FastByteArrayInputStream(new BytesArray(response)), "localhost:9200"));

RestClient client = new RestClient(new TestSettings(), mock);

String id = client.postDocument(writeResource, document);

assertEquals("AbcDefGhiJklMnoPqrS_", id);
}

@Test
public void testPostTypelessDocumentSuccess() throws Exception {
String index = "index";
@@ -127,45 +91,6 @@ public void testPostTypelessDocumentSuccess() throws Exception {
assertEquals("AbcDefGhiJklMnoPqrS_", id);
}

@Test(expected = OpenSearchHadoopInvalidRequest.class)
public void testPostDocumentWithTypeFailure() throws Exception {
String index = "index/type";
Settings settings = new TestSettings();
settings.setInternalVersion(OpenSearchMajorVersion.V_3_X);
settings.setResourceWrite(index);
Resource writeResource = new Resource(settings, false);
BytesArray document = new BytesArray("{\"field\":\"value\"}");
SimpleRequest request = new SimpleRequest(Request.Method.POST, null, index, null, document);
String response =
"{\n" +
" \"error\": {\n" +
" \"root_cause\": [\n" +
" {\n" +
" \"type\": \"io_exception\",\n" +
" \"reason\": \"test failure\"\n" +
" }\n" +
" ],\n" +
" \"type\": \"io_exception\",\n" +
" \"reason\": \"test failure\",\n" +
" \"caused_by\": {\n" +
" \"type\": \"io_exception\",\n" +
" \"reason\": \"This test needs to fail\"\n" +
" }\n" +
" },\n" +
" \"status\": 400\n" +
"}";

NetworkClient mock = Mockito.mock(NetworkClient.class);
Mockito.when(mock.execute(Mockito.eq(request), Mockito.eq(true)))
.thenReturn(new SimpleResponse(400, new FastByteArrayInputStream(new BytesArray(response)), "localhost:9200"));

RestClient client = new RestClient(new TestSettings(), mock);

client.postDocument(writeResource, document);

fail("Request should have failed");
}

@Test(expected = OpenSearchHadoopInvalidRequest.class)
public void testPostTypelessDocumentFailure() throws Exception {
String index = "index";
@@ -332,9 +257,9 @@ public void testMainInfo() {
"\"cluster_name\": \"cluster\",\n" +
"\"cluster_uuid\": \"uuid\",\n" +
"\"version\": {\n" +
" \"number\": \"7.14.0\"\n" +
" \"number\": \"2.4.0\"\n" +
"},\n" +
"\"tagline\": \"You Know, for Search\"\n" +
"\"tagline\": \"The OpenSearch Project: https://opensearch.org/\"\n" +
"}";

NetworkClient mock = Mockito.mock(NetworkClient.class);
@@ -357,9 +282,9 @@ public void testMainInfoWithClusterTooOld() {
"\"name\": \"node\",\n" +
"\"cluster_name\": \"cluster\",\n" +
"\"version\": {\n" +
" \"number\": \"2.0.0\"\n" +
" \"number\": \"1.0.0\"\n" +
"},\n" +
"\"tagline\": \"You Know, for Search\"\n" +
"\"tagline\": \"The OpenSearch Project: https://opensearch.org/\"\n" +
"}";

NetworkClient mock = Mockito.mock(NetworkClient.class);
@@ -372,35 +297,7 @@ public void testMainInfoWithClusterTooOld() {
client.mainInfo();
fail("Shouldn't operate on main version that is too old.");
} catch (OpenSearchHadoopIllegalStateException e) {
assertEquals("Invalid major version [2.0.0]. Version is lower than minimum required version [6.x].", e.getMessage());
}
}

@Test
public void testMainInfoWithoutRequiredHeaders() {
String response = "{\n" +
"\"name\": \"node\",\n" +
"\"cluster_name\": \"cluster\",\n" +
"\"cluster_uuid\": \"uuid\",\n" +
"\"version\": {\n" +
" \"number\": \"7.14.0\"\n" +
"},\n" +
"\"tagline\": \"You Know, for Search\"\n" +
"}";

NetworkClient mock = Mockito.mock(NetworkClient.class);
Mockito.when(mock.execute(Mockito.any(SimpleRequest.class), Mockito.eq(true)))
.thenReturn(new SimpleResponse(201, new FastByteArrayInputStream(new BytesArray(response)), "localhost:9200"));

RestClient client = new RestClient(new TestSettings(), mock);

try {
client.mainInfo();
fail("Shouldn't operate on request that does not contain the required headers.");
} catch (OpenSearchHadoopTransportException e) {
assertEquals("Connected, but could not verify server is Elasticsearch. Response missing [X-elastic-product] header. " +
"Please check that you are connecting to an Elasticsearch instance, and that any networking filters are " +
"preserving that header.", e.getMessage());
assertEquals("Invalid major version [1.0.0]. Version is lower than minimum required version [2.x].", e.getMessage());
}
}

@@ -410,9 +307,9 @@ public void testMainInfoWithClusterNotProvidingUUID() {
"\"name\": \"node\",\n" +
"\"cluster_name\": \"cluster\",\n" +
"\"version\": {\n" +
" \"number\": \"6.0.1\"\n" +
" \"number\": \"2.4.0\"\n" +
"},\n" +
"\"tagline\": \"You Know, for Search\"\n" +
"\"tagline\": \"The OpenSearch Project: https://opensearch.org/\"\n" +
"}";

NetworkClient mock = Mockito.mock(NetworkClient.class);
@@ -434,9 +331,9 @@ public void testMainInfoWithClusterProvidingUUID() {
"\"cluster_name\": \"cluster\",\n" +
"\"cluster_uuid\": \"uuid\",\n" +
"\"version\": {\n" +
" \"number\": \"6.7.0\"\n" +
" \"number\": \"2.4.0\"\n" +
"},\n" +
"\"tagline\": \"You Know, for Search\"\n" +
"\"tagline\": \"The OpenSearch Project: https://opensearch.org/\"\n" +
"}";

NetworkClient mock = Mockito.mock(NetworkClient.class);
@@ -101,7 +101,7 @@ public void setUp() throws Exception {
inputEntry = IOUtils.asString(getClass().getResourceAsStream("/org/opensearch/hadoop/rest/bulk-retry-input-template.json"));

testSettings = new TestSettings();
testSettings.setResourceWrite("foo/bar");
testSettings.setResourceWrite("foo");
testSettings.setInternalClusterInfo(esClusterInfo);
testSettings.setProperty(ConfigurationOptions.OPENSEARCH_BATCH_SIZE_ENTRIES, "10");
testSettings.setProperty(ConfigurationOptions.ES_BATCH_WRITE_RETRY_WAIT, "1ms");
@@ -221,15 +221,15 @@ public void testIdPattern() throws Exception {
assumeFalse(isDeleteOP() && jsonInput);
assumeFalse(ConfigurationOptions.ES_OPERATION_UPSERT.equals(operation));
Settings settings = settings();
if (version.onOrAfter(OpenSearchMajorVersion.V_3_X)) {
if (version.onOrAfter(OpenSearchMajorVersion.V_2_X)) {
settings.setResourceWrite("{n}");
} else {
settings.setResourceWrite("foo/{n}");
}

create(settings).write(data).copyTo(ba);
String header;
if (version.onOrAfter(OpenSearchMajorVersion.V_3_X)) {
if (version.onOrAfter(OpenSearchMajorVersion.V_2_X)) {
header = "{\"_index\":\"1\"" + (isUpdateOp() ? ",\"_id\":2" : "") + "}";
} else {
header = "{\"_index\":\"foo\",\"_type\":\"1\"" + (isUpdateOp() ? ",\"_id\":2" : "") + "}";
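
For reference, a small sketch (illustrative only, not taken from the diff) of the two bulk metadata headers this test distinguishes: on 2.x and later the resource is a plain index pattern, while a legacy typed resource still fills _type from the pattern.

```java
// Illustrative only: metadata lines asserted by testIdPattern(); update ops additionally carry "_id":2.
String typelessHeader = "{\"_index\":\"1\"}";                   // 2.x+: index pattern only, no _type
String typedHeader    = "{\"_index\":\"foo\",\"_type\":\"1\"}"; // legacy "foo/{n}": {n} fills _type
```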
@@ -360,7 +360,7 @@ private Settings settings() {
if (version.onOrAfter(OpenSearchMajorVersion.V_3_X)) {
set.setResourceWrite("foo");
} else {
set.setResourceWrite("foo/bar");
set.setResourceWrite("foo");
}
if (isUpdateOp()) {
set.setProperty(ConfigurationOptions.ES_MAPPING_ID, "<2>");
@@ -42,12 +42,6 @@
import static org.junit.Assert.assertTrue;

public class NodeInfoTest {
@Test
public void testV1() throws Exception {
Map<String, NodeInfo> nodeMap = testNodeInfo(getClass().getResourceAsStream("client-nodes-v1.json"));
assertFalse(nodeMap.get("Darkhawk").isIngest());
assertFalse(nodeMap.get("Unseen").isIngest());
}

@Test
public void testV2() throws Exception {
@@ -56,8 +56,8 @@ public class DefaultIndexExtractorTest {
@Test
public void createFieldExtractor() {
Settings settings = new TestSettings();
settings.setResourceWrite("test/{field}");
settings.setInternalVersion(OpenSearchMajorVersion.V_2_X);
settings.setResourceWrite("{field}");
settings.setInternalVersion(OpenSearchMajorVersion.V_3_X);
InitializationUtils.setFieldExtractorIfNotSet(settings, MapFieldExtractor.class, LOG);

IndexExtractor iformat = ObjectUtils.instantiate(settings.getMappingIndexExtractorClassName(), settings);
@@ -69,14 +69,14 @@ public void createFieldExtractor() {
data.put("field", "data");

Object field = iformat.field(data);
assertThat(field.toString(), equalTo("\"_index\":\"test\",\"_type\":\"data\""));
assertThat(field.toString(), equalTo("\"_index\":\"data\""));
}

@Test(expected = OpenSearchHadoopIllegalArgumentException.class)
public void createFieldExtractorNull() {
Settings settings = new TestSettings();
settings.setResourceWrite("test/{field}");
settings.setInternalVersion(OpenSearchMajorVersion.V_2_X);
settings.setInternalVersion(OpenSearchMajorVersion.V_3_X);
InitializationUtils.setFieldExtractorIfNotSet(settings, MapFieldExtractor.class, LOG);

IndexExtractor iformat = ObjectUtils.instantiate(settings.getMappingIndexExtractorClassName(), settings);
@@ -42,23 +42,6 @@

public class JsonFieldExtractorsTest {

@Test
public void indexAndType() {
Settings settings = new TestSettings();
// Types will not be supported in 8.x
settings.setInternalVersion(OpenSearchMajorVersion.V_2_X);
settings.setResourceWrite("test/{field}");
JsonFieldExtractors jsonFieldExtractors = new JsonFieldExtractors(settings);

String data = "{\"field\":\"data\"}";
BytesArray bytes = new BytesArray(data);

jsonFieldExtractors.process(bytes);

assertThat(jsonFieldExtractors.indexAndType().hasPattern(), is(true));
assertThat(jsonFieldExtractors.indexAndType().field(data).toString(), equalTo("\"_index\":\"test\",\"_type\":\"data\""));
}

@Test(expected = OpenSearchHadoopIllegalArgumentException.class)
public void indexAndTypeNull() {
Settings settings = new TestSettings();
@@ -1,6 +1,6 @@
{
"ok" : true,
"cluster_name" : "elasticsearch",
"cluster_name" : "opensearch",
"nodes" : {
"fiR5azTbTDiX59m78yzOTw" : {
"name" : "Darkhawk",
@@ -1,16 +1,13 @@
{
"ok" : true,
"cluster_name" : "elasticsearch",
"cluster_name" : "opensearch",
"nodes" : {
"fiR5azTbTDiX59m78yzOTw" : {
"name" : "Darkhawk",
"transport_address" : "inet[/192.168.1.50:9300]",
"host" : "cerberus",
"version" : "2.3.3",
"attributes" : {
"data" : "false",
"master" : "false"
},
"roles" : [],
"http" : {
"bound_address" : "inet[/0:0:0:0:0:0:0:0:9200]",
"publish_address" : "inet[/192.168.1.50:9200]",
@@ -22,7 +19,11 @@
"name" : "Unseen",
"transport_address" : "inet[/192.168.1.50:9301]",
"host" : "cerberus",
"version" : "2.3.3",
"version" : "3.3.3",
"roles" : [
"master",
"data"
],
"http_address" : "inet[/192.168.1.50:9201]",
"http" : {
"bound_address" : "inet[/0:0:0:0:0:0:0:0%0:9201]",
@@ -1,12 +1,12 @@
{
"ok" : true,
"cluster_name" : "elasticsearch",
"cluster_name" : "opensearch",
"nodes" : {
"fiR5azTbTDiX59m78yzOTw" : {
"name" : "Darkhawk",
"transport_address" : "192.168.1.50:9300",
"host" : "cerberus",
"version" : "5.0.0-alpha5",
"version" : "3.0.0-alpha5",
"roles" : [],
"http" : {
"bound_address" : "0:0:0:0:0:0:0:0:9200",
@@ -19,7 +19,7 @@
"name" : "Unseen",
"transport_address" : "192.168.1.50:9301",
"host" : "cerberus",
"version" : "5.0.0-alpha5",
"version" : "3.0.0-alpha5",
"roles" : [
"master",
"data",