diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/JsonConvertFrom.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/JsonConvertFrom.java
index bbfc2829147..91c06530d4d 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/JsonConvertFrom.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/JsonConvertFrom.java
@@ -18,153 +18,138 @@
 package org.apache.drill.exec.expr.fn.impl.conv;
 
-import io.netty.buffer.DrillBuf;
-
-import javax.inject.Inject;
-
 import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate.FunctionScope;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling;
 import org.apache.drill.exec.expr.annotations.Output;
 import org.apache.drill.exec.expr.annotations.Param;
 import org.apache.drill.exec.expr.annotations.Workspace;
 import org.apache.drill.exec.expr.holders.NullableVarBinaryHolder;
 import org.apache.drill.exec.expr.holders.NullableVarCharHolder;
-import org.apache.drill.exec.expr.holders.VarBinaryHolder;
-import org.apache.drill.exec.expr.holders.VarCharHolder;
+import org.apache.drill.exec.physical.resultSet.ResultSetLoader;
+import org.apache.drill.exec.server.options.OptionManager;
+import org.apache.drill.exec.vector.complex.writer.BaseWriter;
 import org.apache.drill.exec.vector.complex.writer.BaseWriter.ComplexWriter;
 
+import javax.inject.Inject;
+
+@SuppressWarnings("unused")
 public class JsonConvertFrom {
 
-  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(JsonConvertFrom.class);
+  private JsonConvertFrom() {}
 
-  private JsonConvertFrom() {
-  }
+  @FunctionTemplate(name = "convert_fromJSON",
+      scope = FunctionScope.SIMPLE, nulls = NullHandling.INTERNAL)
+  public static class ConvertFromJsonNullableInput implements DrillSimpleFunc {
 
-  @FunctionTemplate(name = "convert_fromJSON", scope = FunctionScope.SIMPLE, isRandom = true)
-  public static class ConvertFromJson implements DrillSimpleFunc {
+    @Param
+    NullableVarBinaryHolder in;
 
-    @Param VarBinaryHolder in;
-    @Inject DrillBuf buffer;
-    @Workspace org.apache.drill.exec.vector.complex.fn.JsonReader jsonReader;
+    @Output // TODO Remove in future work
+    BaseWriter.ComplexWriter writer;
 
-    @Output ComplexWriter writer;
+    @Inject
+    OptionManager options;
+
+    @Inject
+    ResultSetLoader rsLoader;
+
+    @Workspace
+    org.apache.drill.exec.store.easy.json.loader.ClosingStreamIterator streamIter;
+
+    @Workspace
+    org.apache.drill.exec.store.easy.json.loader.JsonLoaderImpl jsonLoader;
 
     @Override
     public void setup() {
-      jsonReader = new org.apache.drill.exec.vector.complex.fn.JsonReader.Builder(buffer)
-          .defaultSchemaPathColumns()
-          .build();
+      streamIter = new org.apache.drill.exec.store.easy.json.loader.ClosingStreamIterator();
+      rsLoader.startBatch();
     }
 
     @Override
     public void eval() {
-      try {
-        jsonReader.setSource(in.start, in.end, in.buffer);
-        jsonReader.write(writer);
-        buffer = jsonReader.getWorkBuf();
-      } catch (Exception e) {
-        throw new org.apache.drill.common.exceptions.DrillRuntimeException("Error while converting from JSON. ", e);
+      // If the input is null or empty, return an empty map
+      if (in.isSet == 0 || in.start == in.end) {
+        return;
       }
-    }
-  }
 
-  @FunctionTemplate(name = "convert_fromJSON", scope = FunctionScope.SIMPLE, isRandom = true)
-  public static class ConvertFromJsonVarchar implements DrillSimpleFunc {
+      java.io.InputStream inputStream = org.apache.drill.exec.vector.complex.fn.DrillBufInputStream.getStream(in.start, in.end, in.buffer);
 
-    @Param VarCharHolder in;
-    @Inject DrillBuf buffer;
-    @Workspace org.apache.drill.exec.vector.complex.fn.JsonReader jsonReader;
+      try {
+        streamIter.setValue(inputStream);
 
-    @Output ComplexWriter writer;
+        if (jsonLoader == null) {
+          jsonLoader = org.apache.drill.exec.expr.fn.impl.conv.JsonConverterUtils.createJsonLoader(rsLoader, options, streamIter);
+        }
 
-    @Override
-    public void setup() {
-      jsonReader = new org.apache.drill.exec.vector.complex.fn.JsonReader.Builder(buffer)
-          .defaultSchemaPathColumns()
-          .build();
-    }
+        org.apache.drill.exec.physical.resultSet.RowSetLoader rowWriter = rsLoader.writer();
+        rowWriter.start();
+        if (jsonLoader.parser().next()) {
+          rowWriter.save();
+        }
 
-    @Override
-    public void eval() {
-      try {
-        jsonReader.setSource(in.start, in.end, in.buffer);
-        jsonReader.write(writer);
-        buffer = jsonReader.getWorkBuf();
       } catch (Exception e) {
-        throw new org.apache.drill.common.exceptions.DrillRuntimeException("Error while converting from JSON. ", e);
+        throw org.apache.drill.common.exceptions.UserException.dataReadError(e)
+          .message("Error while reading JSON. ")
+          .addContext(e.getMessage())
+          .build();
       }
     }
   }
 
-  @FunctionTemplate(name = "convert_fromJSON", scope = FunctionScope.SIMPLE, isRandom = true)
-  public static class ConvertFromJsonNullableInput implements DrillSimpleFunc {
+  @FunctionTemplate(name = "convert_fromJSON",
+      scope = FunctionScope.SIMPLE, nulls = NullHandling.INTERNAL)
+  public static class ConvertFromJsonVarcharInput implements DrillSimpleFunc {
 
-    @Param NullableVarBinaryHolder in;
-    @Inject DrillBuf buffer;
-    @Workspace org.apache.drill.exec.vector.complex.fn.JsonReader jsonReader;
+    @Param
+    NullableVarCharHolder in;
 
-    @Output ComplexWriter writer;
+    @Output // TODO Remove in future work
+    ComplexWriter writer;
 
-    @Override
-    public void setup() {
-      jsonReader = new org.apache.drill.exec.vector.complex.fn.JsonReader.Builder(buffer)
-          .defaultSchemaPathColumns()
-          .build();
-    }
+    @Workspace
+    org.apache.drill.exec.store.easy.json.loader.ClosingStreamIterator streamIter;
 
-    @Override
-    public void eval() {
-      if (in.isSet == 0) {
-        // Return empty map
-        org.apache.drill.exec.vector.complex.writer.BaseWriter.MapWriter mapWriter = writer.rootAsMap();
-        mapWriter.start();
-        mapWriter.end();
-        return;
-      }
+    @Inject
+    OptionManager options;
 
-      try {
-        jsonReader.setSource(in.start, in.end, in.buffer);
-        jsonReader.write(writer);
-        buffer = jsonReader.getWorkBuf();
-      } catch (Exception e) {
-        throw new org.apache.drill.common.exceptions.DrillRuntimeException("Error while converting from JSON. ", e);
-      }
-    }
-  }
+    @Inject
+    ResultSetLoader rsLoader;
 
-  @FunctionTemplate(name = "convert_fromJSON", scope = FunctionScope.SIMPLE, isRandom = true)
-  public static class ConvertFromJsonVarcharNullableInput implements DrillSimpleFunc {
-
-    @Param NullableVarCharHolder in;
-    @Inject DrillBuf buffer;
-    @Workspace org.apache.drill.exec.vector.complex.fn.JsonReader jsonReader;
-
-    @Output ComplexWriter writer;
+    @Workspace
+    org.apache.drill.exec.store.easy.json.loader.JsonLoaderImpl jsonLoader;
 
     @Override
     public void setup() {
-      jsonReader = new org.apache.drill.exec.vector.complex.fn.JsonReader.Builder(buffer)
-          .defaultSchemaPathColumns()
-          .build();
+      streamIter = new org.apache.drill.exec.store.easy.json.loader.ClosingStreamIterator();
+      rsLoader.startBatch();
     }
 
     @Override
     public void eval() {
-      if (in.isSet == 0) {
-        // Return empty map
-        org.apache.drill.exec.vector.complex.writer.BaseWriter.MapWriter mapWriter = writer.rootAsMap();
-        mapWriter.start();
-        mapWriter.end();
+      // If the input is null or empty, return an empty map
+      if (in.isSet == 0 || in.start == in.end) {
         return;
       }
 
+      java.io.InputStream inputStream = org.apache.drill.exec.vector.complex.fn.DrillBufInputStream.getStream(in.start, in.end, in.buffer);
+
       try {
-        jsonReader.setSource(in.start, in.end, in.buffer);
-        jsonReader.write(writer);
-        buffer = jsonReader.getWorkBuf();
+        streamIter.setValue(inputStream);
+        if (jsonLoader == null) {
+          jsonLoader = org.apache.drill.exec.expr.fn.impl.conv.JsonConverterUtils.createJsonLoader(rsLoader, options, streamIter);
+        }
+        org.apache.drill.exec.physical.resultSet.RowSetLoader rowWriter = rsLoader.writer();
+        rowWriter.start();
+        if (jsonLoader.parser().next()) {
+          rowWriter.save();
+        }
       } catch (Exception e) {
-        throw new org.apache.drill.common.exceptions.DrillRuntimeException("Error while converting from JSON. ", e);
+        throw org.apache.drill.common.exceptions.UserException.dataReadError(e)
+          .message("Error while reading JSON. ")
+          .addContext(e.getMessage())
+          .build();
      }
    }
  }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/JsonConverterUtils.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/JsonConverterUtils.java
new file mode 100644
index 00000000000..be14dc1efa3
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/JsonConverterUtils.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.drill.exec.expr.fn.impl.conv;
+
+import org.apache.drill.exec.physical.resultSet.ResultSetLoader;
+import org.apache.drill.exec.server.options.OptionManager;
+import org.apache.drill.exec.store.easy.json.loader.ClosingStreamIterator;
+import org.apache.drill.exec.store.easy.json.loader.JsonLoaderImpl;
+import org.apache.drill.exec.store.easy.json.loader.JsonLoaderImpl.JsonLoaderBuilder;
+
+public class JsonConverterUtils {
+
+  /**
+   * Creates a {@link JsonLoaderImpl} for use in JSON conversion UDFs.
+   * @param rsLoader The {@link ResultSetLoader} used in the UDF
+   * @param options The {@link OptionManager} used in the UDF. This is used to
+   *   extract the global JSON options
+   * @param stream The {@link ClosingStreamIterator} supplying the input JSON data
+   * @return A {@link JsonLoaderImpl} for use in the UDF
+   */
+  public static JsonLoaderImpl createJsonLoader(ResultSetLoader rsLoader,
+                                                OptionManager options,
+                                                ClosingStreamIterator stream) {
+    // Configure the loader from the session's JSON options.
+    JsonLoaderBuilder jsonLoaderBuilder = new JsonLoaderBuilder()
+      .resultSetLoader(rsLoader)
+      .standardOptions(options)
+      .fromStream(() -> stream);
+
+    return (JsonLoaderImpl) jsonLoaderBuilder.build();
+  }
+}
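createJsonLoader() hands the builder a supplier over the ClosingStreamIterator rather than a raw stream, which is what allows the UDFs above to build the loader once and then feed it a fresh stream per value. A hypothetical caller, for illustration only (it assumes rsLoader.startBatch() has already been called and that rsLoader and options come from the surrounding context):

    ClosingStreamIterator streams = new ClosingStreamIterator();
    JsonLoaderImpl loader = JsonConverterUtils.createJsonLoader(rsLoader, options, streams);

    // Per input value: point the iterator at new bytes and read one object.
    streams.setValue(new java.io.ByteArrayInputStream(
        "{\"a\": 1}".getBytes(java.nio.charset.StandardCharsets.UTF_8)));
    rsLoader.writer().start();
    if (loader.parser().next()) {
      rsLoader.writer().save();
    }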
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonConversionUDF.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonConversionUDF.java
new file mode 100644
index 00000000000..3b3620da1b3
--- /dev/null
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonConversionUDF.java
@@ -0,0 +1,92 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.drill.exec.store.json;
+
+import ch.qos.logback.classic.Level;
+import org.apache.drill.common.types.TypeProtos.MinorType;
+import org.apache.drill.exec.ExecConstants;
+import org.apache.drill.exec.physical.impl.project.ProjectRecordBatch;
+import org.apache.drill.exec.physical.impl.validate.IteratorValidatorBatchIterator;
+import org.apache.drill.exec.physical.rowSet.RowSet;
+import org.apache.drill.exec.physical.rowSet.RowSetBuilder;
+import org.apache.drill.exec.record.metadata.SchemaBuilder;
+import org.apache.drill.exec.record.metadata.TupleMetadata;
+import org.apache.drill.exec.store.easy.json.loader.JsonLoaderImpl;
+import org.apache.drill.test.ClusterFixture;
+import org.apache.drill.test.ClusterTest;
+import org.apache.drill.test.LogFixture;
+import org.apache.drill.test.rowSet.RowSetComparison;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+
+public class TestJsonConversionUDF extends ClusterTest {
+
+  protected static LogFixture logFixture;
+  private static final Level CURRENT_LOG_LEVEL = Level.DEBUG;
+
+  @BeforeClass
+  public static void setup() throws Exception {
+    logFixture = LogFixture.builder()
+      .toConsole()
+      .logger(ProjectRecordBatch.class, CURRENT_LOG_LEVEL)
+      .logger(JsonLoaderImpl.class, CURRENT_LOG_LEVEL)
+      .logger(IteratorValidatorBatchIterator.class, CURRENT_LOG_LEVEL)
+      .build();
+
+    startCluster(ClusterFixture.builder(dirTestWatcher));
+  }
+
+  @Test
+  public void testConvertFromJsonVarBinary() throws Exception {
+    client.alterSession(ExecConstants.JSON_READER_NAN_INF_NUMBERS, true);
+    String sql = "SELECT string_binary(convert_toJSON(convert_fromJSON(columns[1]))) as col FROM cp.`jsoninput/nan_test.csv`";
+    RowSet results = client.queryBuilder().sql(sql).rowSet();
+    assertEquals("Query result must contain 1 row", 1, results.rowCount());
+    results.clear();
+  }
+
+  @Test
+  public void testConvertFromJsonVarChar() throws Exception {
+    String sql = "SELECT json_data['foo'] AS foo, json_data['num'] AS num FROM " +
+      "(SELECT convert_FromJSON('{\"foo\":\"bar\", \"num\":10}') as json_data FROM (VALUES(1)))";
+    RowSet results = client.queryBuilder().sql(sql).rowSet();
+
+    TupleMetadata expectedSchema = new SchemaBuilder()
+      .addNullable("foo", MinorType.VARCHAR)
+      .addNullable("num", MinorType.BIGINT)
+      .buildSchema();
+
+    RowSet expected = new RowSetBuilder(client.allocator(), expectedSchema)
+      .addRow("bar", 10L)
+      .build();
+
+    new RowSetComparison(expected).verifyAndClearAll(results);
+  }
+
+  @Test
+  public void testMultipleRows() throws Exception {
+    String sql = "SELECT string_binary(convert_toJSON(`name`)) FROM cp.`jsoninput/multirow.csvh`";
+
+    RowSet results = client.queryBuilder().sql(sql).rowSet();
+    results.print();
+    results.clear();
+  }
+}
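The patch also adds jsoninput/allTypes.csv (further below), but no test in this class reads it yet. A coverage test over it might take the following shape. This is a hypothetical sketch, not part of the patch; the row count is the only assertion that can be made without pinning down the schema the loader infers from the file.

    @Test
    public void testConvertFromJsonAllTypes() throws Exception {
      // allTypes.csv holds a single record whose second column is a JSON
      // object with scalar, map, array, and null members.
      String sql = "SELECT convert_fromJSON(columns[1]) AS col FROM cp.`jsoninput/allTypes.csv`";
      RowSet results = client.queryBuilder().sql(sql).rowSet();
      assertEquals(1, results.rowCount());
      results.clear();
    }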
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonNanInf.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonNanInf.java
index e556ec16ea1..91f5e544684 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonNanInf.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonNanInf.java
@@ -15,33 +15,25 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.drill.exec.store.json;
 
-import static org.apache.drill.test.TestBuilder.mapOf;
 import static org.hamcrest.CoreMatchers.containsString;
-import static org.junit.Assert.assertEquals;
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.junit.Assert.assertNotEquals;
 import static org.junit.Assert.fail;
 
 import java.io.File;
-import java.util.List;
+import java.nio.charset.Charset;
 
 import org.apache.commons.io.FileUtils;
 import org.apache.drill.common.exceptions.UserRemoteException;
-import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.physical.impl.join.JoinTestBase;
-import org.apache.drill.exec.record.RecordBatchLoader;
-import org.apache.drill.exec.record.VectorWrapper;
-import org.apache.drill.exec.rpc.user.QueryDataBatch;
-import org.apache.drill.exec.vector.VarCharVector;
 import org.apache.drill.exec.store.json.TestJsonReader.TestWrapper;
 import org.apache.drill.test.BaseTestQuery;
-import org.junit.Ignore;
 import org.junit.Test;
 
-// TODO: Split or rename: this tests mor than NanInf
 public class TestJsonNanInf extends BaseTestQuery {
 
   public void runBoth(TestWrapper wrapper) throws Exception {
@@ -66,7 +58,7 @@ private void doTestNanInfSelect() throws Exception {
     String json = "{\"nan_col\":NaN, \"inf_col\":Infinity}";
     String query = String.format("select * from dfs.`%s`",table);
     try {
-      FileUtils.writeStringToFile(file, json);
+      FileUtils.writeStringToFile(file, json, Charset.defaultCharset());
       testBuilder()
         .sqlQuery(query)
         .unOrdered()
@@ -79,7 +71,6 @@ private void doTestNanInfSelect() throws Exception {
   }
 
   @Test
-  @Ignore // see DRILL-6018
   public void testExcludePositiveInfinity() throws Exception {
     runBoth(this::doTestExcludePositiveInfinity);
   }
@@ -91,7 +82,7 @@ private void doTestExcludePositiveInfinity() throws Exception {
       "{\"nan_col\":5.0, \"inf_col\":5.0}]";
     String query = String.format("select inf_col from dfs.`%s` where inf_col <> cast('Infinity' as double)",table);
     try {
-      FileUtils.writeStringToFile(file, json);
+      FileUtils.writeStringToFile(file, json, Charset.defaultCharset());
       testBuilder()
         .sqlQuery(query)
         .unOrdered()
@@ -104,7 +95,6 @@ private void doTestExcludePositiveInfinity() throws Exception {
   }
 
   @Test
-  @Ignore // see DRILL-6018
   public void testExcludeNegativeInfinity() throws Exception {
     runBoth(this::doTestExcludeNegativeInfinity);
   }
@@ -116,7 +106,7 @@ private void doTestExcludeNegativeInfinity() throws Exception {
       "{\"nan_col\":5.0, \"inf_col\":5.0}]";
     String query = String.format("select inf_col from dfs.`%s` where inf_col <> cast('-Infinity' as double)",table);
     try {
-      FileUtils.writeStringToFile(file, json);
+      FileUtils.writeStringToFile(file, json, Charset.defaultCharset());
       testBuilder()
         .sqlQuery(query)
         .unOrdered()
@@ -129,7 +119,6 @@ private void doTestExcludeNegativeInfinity() throws Exception {
   }
 
   @Test
-  @Ignore // see DRILL-6018
   public void testIncludePositiveInfinity() throws Exception {
     runBoth(this::doTestIncludePositiveInfinity);
   }
@@ -141,7 +130,7 @@ private void doTestIncludePositiveInfinity() throws Exception {
       "{\"nan_col\":5.0, \"inf_col\":5.0}]";
     String query = String.format("select inf_col from dfs.`%s` where inf_col = cast('Infinity' as double)",table);
     try {
-      FileUtils.writeStringToFile(file, json);
+      FileUtils.writeStringToFile(file, json, Charset.defaultCharset());
       testBuilder()
         .sqlQuery(query)
         .unOrdered()
@@ -166,7 +155,7 @@ private void doTestExcludeNan() throws Exception {
       "{\"nan_col\":5.0, \"inf_col\":5.0}]";
     String query = String.format("select nan_col from dfs.`%s` where cast(nan_col as varchar) <> 'NaN'",table);
     try {
-      FileUtils.writeStringToFile(file, json);
+      FileUtils.writeStringToFile(file, json, Charset.defaultCharset());
       testBuilder()
         .sqlQuery(query)
         .unOrdered()
@@ -190,7 +179,7 @@ private void doTestIncludeNan() throws Exception {
       "{\"nan_col\":5.0, \"inf_col\":5.0}]";
     String query = String.format("select nan_col from dfs.`%s` where cast(nan_col as varchar) = 'NaN'",table);
     try {
-      FileUtils.writeStringToFile(file, json);
+      FileUtils.writeStringToFile(file, json, Charset.defaultCharset());
       testBuilder()
         .sqlQuery(query)
         .unOrdered()
@@ -213,7 +202,7 @@ private void doTestNanInfFailure() throws Exception {
     test("alter session set `%s` = false", ExecConstants.JSON_READER_NAN_INF_NUMBERS);
     String json = "{\"nan_col\":NaN, \"inf_col\":Infinity}";
     try {
-      FileUtils.writeStringToFile(file, json);
+      FileUtils.writeStringToFile(file, json, Charset.defaultCharset());
      test("select * from dfs.`%s`;", table);
      fail();
    } catch (UserRemoteException e) {
@@ -235,13 +224,13 @@ private void doTestCreateTableNanInf() throws Exception {
     String json = "{\"nan_col\":NaN, \"inf_col\":Infinity}";
     String newTable = "ctas_test";
     try {
-      FileUtils.writeStringToFile(file, json);
+      FileUtils.writeStringToFile(file, json, Charset.defaultCharset());
       test("alter session set `store.format`='json'");
       test("create table dfs.`%s` as select * from dfs.`%s`;", newTable, table);
 
       // ensuring that `NaN` and `Infinity` tokens ARE NOT enclosed with double quotes
       File resultFile = new File(new File(file.getParent(), newTable),"0_0_0.json");
-      String resultJson = FileUtils.readFileToString(resultFile);
+      String resultJson = FileUtils.readFileToString(resultFile, Charset.defaultCharset());
       int nanIndex = resultJson.indexOf("NaN");
       assertNotEquals("`NaN` must not be enclosed with \"\" ", '"', resultJson.charAt(nanIndex - 1));
       assertNotEquals("`NaN` must not be enclosed with \"\" ", '"', resultJson.charAt(nanIndex + "NaN".length()));
@@ -254,28 +243,6 @@ private void doTestCreateTableNanInf() throws Exception {
     }
   }
 
-  @Test
-  public void testConvertFromJsonFunction() throws Exception {
-    runBoth(this::doTestConvertFromJsonFunction);
-  }
-
-  private void doTestConvertFromJsonFunction() throws Exception {
-    String table = "nan_test.csv";
-    File file = new File(dirTestWatcher.getRootDir(), table);
-    String csv = "col_0, {\"nan_col\":NaN}";
-    try {
-      FileUtils.writeStringToFile(file, csv);
-      testBuilder()
-        .sqlQuery(String.format("select convert_fromJSON(columns[1]) as col from dfs.`%s`", table))
-        .unOrdered()
-        .baselineColumns("col")
-        .baselineValues(mapOf("nan_col", Double.NaN))
-        .go();
-    } finally {
-      FileUtils.deleteQuietly(file);
-    }
-  }
-
   @Test
   public void testLargeStringBinary() throws Exception {
     runBoth(() -> doTestLargeStringBinary());
@@ -292,39 +259,6 @@ private void doTestLargeStringBinary() throws Exception {
   }
 
   @Test
-  public void testConvertToJsonFunction() throws Exception {
-    runBoth(() -> doTestConvertToJsonFunction());
-  }
-
-  private void doTestConvertToJsonFunction() throws Exception {
-    String table = "nan_test.csv";
-    File file = new File(dirTestWatcher.getRootDir(), table);
-    String csv = "col_0, {\"nan_col\":NaN}";
-    String query = String.format("select string_binary(convert_toJSON(convert_fromJSON(columns[1]))) as col " +
-      "from dfs.`%s` where columns[0]='col_0'", table);
-    try {
-      FileUtils.writeStringToFile(file, csv);
-      List<QueryDataBatch> results = testSqlWithResults(query);
-      RecordBatchLoader batchLoader = new RecordBatchLoader(getAllocator());
-      assertEquals("Query result must contain 1 row", 1, results.size());
-      QueryDataBatch batch = results.get(0);
-
-      batchLoader.load(batch.getHeader().getDef(), batch.getData());
-      VectorWrapper<?> vw = batchLoader.getValueAccessorById(VarCharVector.class, batchLoader.getValueVectorId(SchemaPath.getCompoundPath("col")).getFieldIds());
-      // ensuring that `NaN` token ARE NOT enclosed with double quotes
-      String resultJson = vw.getValueVector().getAccessor().getObject(0).toString();
-      int nanIndex = resultJson.indexOf("NaN");
-      assertNotEquals("`NaN` must not be enclosed with \"\" ", '"', resultJson.charAt(nanIndex - 1));
-      assertNotEquals("`NaN` must not be enclosed with \"\" ", '"', resultJson.charAt(nanIndex + "NaN".length()));
-      batch.release();
-      batchLoader.clear();
-    } finally {
-      FileUtils.deleteQuietly(file);
-    }
-  }
-
-  @Test
-  @Ignore("DRILL-6018")
   public void testNanInfLiterals() throws Exception {
     testBuilder()
       .sqlQuery(" select sin(cast('NaN' as double)) as sin_col, " +
@@ -350,7 +284,7 @@ private void doTestOrderByWithNaN() throws Exception {
     File file = new File(dirTestWatcher.getRootDir(), table_name);
 
     try {
-      FileUtils.writeStringToFile(file, json);
+      FileUtils.writeStringToFile(file, json, Charset.defaultCharset());
       test("alter session set `%s` = true", ExecConstants.JSON_READ_NUMBERS_AS_DOUBLE);
       testBuilder()
         .sqlQuery(query)
@@ -391,7 +325,7 @@ private void doTestNestedLoopJoinWithNaN() throws Exception {
     File file = new File(dirTestWatcher.getRootDir(), table_name);
 
     try {
-      FileUtils.writeStringToFile(file, json);
+      FileUtils.writeStringToFile(file, json, Charset.defaultCharset());
       test("alter session set `%s` = true", ExecConstants.JSON_READ_NUMBERS_AS_DOUBLE);
       testBuilder()
         .sqlQuery(query)
@@ -426,7 +360,7 @@ private void doTestHashJoinWithNaN() throws Exception {
     File file = new File(dirTestWatcher.getRootDir(), table_name);
 
     try {
-      FileUtils.writeStringToFile(file, json);
+      FileUtils.writeStringToFile(file, json, Charset.defaultCharset());
       test("alter session set `%s` = true", ExecConstants.JSON_READ_NUMBERS_AS_DOUBLE);
       testBuilder()
         .sqlQuery(query)
@@ -459,7 +393,7 @@ private void doTestMergeJoinWithNaN() throws Exception {
     File file = new File(dirTestWatcher.getRootDir(), table_name);
 
     try {
-      FileUtils.writeStringToFile(file, json);
+      FileUtils.writeStringToFile(file, json, Charset.defaultCharset());
       test("alter session set `%s` = true", ExecConstants.JSON_READ_NUMBERS_AS_DOUBLE);
       testBuilder()
         .sqlQuery(query)
@@ -475,11 +409,11 @@ private void doTestMergeJoinWithNaN() throws Exception {
     }
   }
 
-  private void enableV2Reader(boolean enable) throws Exception {
+  private void enableV2Reader(boolean enable) {
     alterSession(ExecConstants.ENABLE_V2_JSON_READER_KEY, enable);
   }
 
-  private void resetV2Reader() throws Exception {
+  private void resetV2Reader() {
     resetSessionOption(ExecConstants.ENABLE_V2_JSON_READER_KEY);
   }
 }
diff --git a/exec/java-exec/src/test/resources/jsoninput/allTypes.csv b/exec/java-exec/src/test/resources/jsoninput/allTypes.csv
new file mode 100644
index 00000000000..7f44072bdd6
--- /dev/null
+++ b/exec/java-exec/src/test/resources/jsoninput/allTypes.csv
@@ -0,0 +1,8 @@
+col1,"{
+  bi: 123,
+  fl: 123.4,
+  st: ""foo"",
+  mp: { a: 10, b: ""bar"" },
+  ar: [ 10, 20 ],
+  nu: null
+}"
diff --git a/exec/java-exec/src/test/resources/jsoninput/multirow.csvh b/exec/java-exec/src/test/resources/jsoninput/multirow.csvh
new file mode 100644
index 00000000000..b207f1462e4
--- /dev/null
+++ b/exec/java-exec/src/test/resources/jsoninput/multirow.csvh
@@ -0,0 +1,3 @@
+num, name
+1, "bob"
+4,"steve"
diff --git a/exec/java-exec/src/test/resources/jsoninput/nan_test.csv b/exec/java-exec/src/test/resources/jsoninput/nan_test.csv
new file mode 100644
index 00000000000..aae95c09099
--- /dev/null
+++ b/exec/java-exec/src/test/resources/jsoninput/nan_test.csv
@@ -0,0 +1 @@
+col_0, {"nan_col":NaN}
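nan_test.csv backs testConvertFromJsonVarBinary above: a bare NaN token is not legal JSON and parses only while the session option behind ExecConstants.JSON_READER_NAN_INF_NUMBERS is enabled. That test turns the option on without restoring it, so it can leak into later tests in the same fixture. A tidier pattern, sketched below under the assumption that the test client exposes resetSession() as it does elsewhere in Drill's test framework:

    // Enable non-standard NaN/Infinity literals only for the queries that need them.
    client.alterSession(ExecConstants.JSON_READER_NAN_INF_NUMBERS, true);
    try {
      // ... queries that must parse NaN / Infinity literals ...
    } finally {
      client.resetSession(ExecConstants.JSON_READER_NAN_INF_NUMBERS);
    }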