SQL: SYS COLUMNS returns ODBC specific schema (#35870)
Due to an unresolvable type conflict between the column definitions expected by JDBC and those expected by ODBC, the driver mode is now passed along so that certain commands can adjust their results accordingly (in this case SYS COLUMNS).

Fix #35376
costin authored Nov 26, 2018
1 parent ca9b2b9 commit d291b08
Showing 6 changed files with 138 additions and 32 deletions.
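
In short, the client mode from the request is threaded through the session Configuration so that metadata commands can tailor their output schema per client. Below is a minimal, self-contained sketch of the idea, not the actual Elasticsearch code: the local Mode enum and the clientCompatible helper are illustrative stand-ins for org.elasticsearch.xpack.sql.proto.Mode and the odbcCompatible method added in this commit.

// Sketch only: ODBC expects several SYS COLUMNS metadata fields (DATA_TYPE, DECIMAL_DIGITS,
// NUM_PREC_RADIX, NULLABLE, SQL_DATA_TYPE, SQL_DATETIME_SUB) as SHORT, while JDBC expects
// Integer, so integer metadata values are narrowed to Short when the client is ODBC.
import java.sql.DatabaseMetaData;

public class OdbcCompatSketch {
    enum Mode { PLAIN, JDBC, ODBC } // illustrative stand-in for the SQL proto Mode enum

    static Object clientCompatible(Integer value, boolean isOdbcClient) {
        // For ODBC clients, narrow Integer metadata values to Short; otherwise pass them through.
        if (isOdbcClient && value != null) {
            return Short.valueOf(value.shortValue());
        }
        return value;
    }

    public static void main(String[] args) {
        Mode mode = Mode.ODBC;
        Object nullable = clientCompatible(DatabaseMetaData.columnNullable, mode == Mode.ODBC);
        System.out.println(nullable.getClass().getSimpleName()); // "Short" for ODBC, "Integer" otherwise
    }
}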
Changed file 1 of 6
@@ -11,6 +11,7 @@
import org.elasticsearch.xpack.sql.expression.Attribute;
import org.elasticsearch.xpack.sql.expression.predicate.regex.LikePattern;
import org.elasticsearch.xpack.sql.plan.logical.command.Command;
import org.elasticsearch.xpack.sql.proto.Mode;
import org.elasticsearch.xpack.sql.session.Rows;
import org.elasticsearch.xpack.sql.session.SchemaRowSet;
import org.elasticsearch.xpack.sql.session.SqlSession;
@@ -58,21 +59,29 @@ protected NodeInfo<SysColumns> info() {

@Override
public List<Attribute> output() {
return output(false);
}

private List<Attribute> output(boolean odbcCompatible) {
// https://github.com/elastic/elasticsearch/issues/35376
// ODBC expects some fields as SHORT while JDBC as Integer
// which causes conversion issues and CCE
DataType clientBasedType = odbcCompatible ? SHORT : INTEGER;
return asList(keyword("TABLE_CAT"),
keyword("TABLE_SCHEM"),
keyword("TABLE_NAME"),
keyword("COLUMN_NAME"),
field("DATA_TYPE", INTEGER),
field("DATA_TYPE", clientBasedType),
keyword("TYPE_NAME"),
field("COLUMN_SIZE", INTEGER),
field("BUFFER_LENGTH", INTEGER),
field("DECIMAL_DIGITS", INTEGER),
field("NUM_PREC_RADIX", INTEGER),
field("NULLABLE", INTEGER),
field("DECIMAL_DIGITS", clientBasedType),
field("NUM_PREC_RADIX", clientBasedType),
field("NULLABLE", clientBasedType),
keyword("REMARKS"),
keyword("COLUMN_DEF"),
field("SQL_DATA_TYPE", INTEGER),
field("SQL_DATETIME_SUB", INTEGER),
field("SQL_DATA_TYPE", clientBasedType),
field("SQL_DATETIME_SUB", clientBasedType),
field("CHAR_OCTET_LENGTH", INTEGER),
field("ORDINAL_POSITION", INTEGER),
keyword("IS_NULLABLE"),
@@ -88,11 +97,13 @@ public List<Attribute> output() {

@Override
public void execute(SqlSession session, ActionListener<SchemaRowSet> listener) {
boolean isOdbcClient = session.settings().mode() == Mode.ODBC;
List<Attribute> output = output(isOdbcClient);
String cluster = session.indexResolver().clusterName();

// bail-out early if the catalog is present but differs
if (Strings.hasText(catalog) && !cluster.equals(catalog)) {
listener.onResponse(Rows.empty(output()));
listener.onResponse(Rows.empty(output));
return;
}

@@ -104,15 +115,15 @@ public void execute(SqlSession session, ActionListener<SchemaRowSet> listener) {
session.indexResolver().resolveAsSeparateMappings(idx, regex, ActionListener.wrap(esIndices -> {
List<List<?>> rows = new ArrayList<>();
for (EsIndex esIndex : esIndices) {
fillInRows(cluster, esIndex.name(), esIndex.mapping(), null, rows, columnMatcher);
fillInRows(cluster, esIndex.name(), esIndex.mapping(), null, rows, columnMatcher, isOdbcClient);
}

listener.onResponse(Rows.of(output(), rows));
listener.onResponse(Rows.of(output, rows));
}, listener::onFailure));
}

static void fillInRows(String clusterName, String indexName, Map<String, EsField> mapping, String prefix, List<List<?>> rows,
Pattern columnMatcher) {
Pattern columnMatcher, boolean isOdbcClient) {
int pos = 0;
for (Map.Entry<String, EsField> entry : mapping.entrySet()) {
pos++; // JDBC is 1-based so we start with 1 here
@@ -128,24 +139,24 @@ static void fillInRows(String clusterName, String indexName, Map<String, EsField
null,
indexName,
name,
type.sqlType.getVendorTypeNumber(),
odbcCompatible(type.sqlType.getVendorTypeNumber(), isOdbcClient),
type.esType.toUpperCase(Locale.ROOT),
type.displaySize,
// TODO: is the buffer_length correct?
type.size,
// no DECIMAL support
null,
DataTypes.metaSqlRadix(type),
odbcCompatible(DataTypes.metaSqlRadix(type), isOdbcClient),
// everything is nullable
DatabaseMetaData.columnNullable,
odbcCompatible(DatabaseMetaData.columnNullable, isOdbcClient),
// no remarks
null,
// no column def
null,
// SQL_DATA_TYPE apparently needs to be same as DATA_TYPE except for datetime and interval data types
DataTypes.metaSqlDataType(type),
odbcCompatible(DataTypes.metaSqlDataType(type), isOdbcClient),
// SQL_DATETIME_SUB ?
DataTypes.metaSqlDateTimeSub(type),
odbcCompatible(DataTypes.metaSqlDateTimeSub(type), isOdbcClient),
// char octet length
type.isString() || type == DataType.BINARY ? type.size : null,
// position
@@ -160,11 +171,18 @@ static void fillInRows(String clusterName, String indexName, Map<String, EsField
));
}
if (field.getProperties() != null) {
fillInRows(clusterName, indexName, field.getProperties(), name, rows, columnMatcher);
fillInRows(clusterName, indexName, field.getProperties(), name, rows, columnMatcher, isOdbcClient);
}
}
}

private static Object odbcCompatible(Integer value, boolean isOdbcClient) {
if (isOdbcClient && value != null) {
return Short.valueOf(value.shortValue());
}
return value;
}

@Override
public int hashCode() {
return Objects.hash(catalog, index, pattern, columnPattern);
Changed file 2 of 6
@@ -58,8 +58,8 @@ public static void operation(PlanExecutor planExecutor, SqlQueryRequest request,
// The configuration is always created however when dealing with the next page, only the timeouts are relevant
// the rest having default values (since the query is already created)
Configuration cfg = new Configuration(request.timeZone(), request.fetchSize(), request.requestTimeout(), request.pageTimeout(),
request.filter());
request.filter(), request.mode());

// mode() shouldn't be null
QueryMetric metric = QueryMetric.from(request.mode(), request.clientId());
planExecutor.metrics().total(metric);
Changed file 3 of 6
@@ -40,7 +40,7 @@ protected void doExecute(Task task, SqlTranslateRequest request, ActionListener<

planExecutor.metrics().translate();
Configuration cfg = new Configuration(request.timeZone(), request.fetchSize(),
request.requestTimeout(), request.pageTimeout(), request.filter());
request.requestTimeout(), request.pageTimeout(), request.filter(), request.mode());

planExecutor.searchSource(cfg, request.query(), request.params(), ActionListener.wrap(
searchSourceBuilder -> listener.onResponse(new SqlTranslateResponse(searchSourceBuilder)), listener::onFailure));
Changed file 4 of 6
@@ -8,29 +8,32 @@
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.xpack.sql.proto.Mode;
import org.elasticsearch.xpack.sql.proto.Protocol;

import java.util.TimeZone;

// Typed object holding properties for a given action
public class Configuration {
public static final Configuration DEFAULT = new Configuration(TimeZone.getTimeZone("UTC"),
Protocol.FETCH_SIZE, Protocol.REQUEST_TIMEOUT, Protocol.PAGE_TIMEOUT, null);
Protocol.FETCH_SIZE, Protocol.REQUEST_TIMEOUT, Protocol.PAGE_TIMEOUT, null, Mode.PLAIN);

private TimeZone timeZone;
private int pageSize;
private TimeValue requestTimeout;
private TimeValue pageTimeout;
private final TimeZone timeZone;
private final int pageSize;
private final TimeValue requestTimeout;
private final TimeValue pageTimeout;
private final Mode mode;

@Nullable
private QueryBuilder filter;

public Configuration(TimeZone tz, int pageSize, TimeValue requestTimeout, TimeValue pageTimeout, QueryBuilder filter) {
public Configuration(TimeZone tz, int pageSize, TimeValue requestTimeout, TimeValue pageTimeout, QueryBuilder filter, Mode mode) {
this.timeZone = tz;
this.pageSize = pageSize;
this.requestTimeout = requestTimeout;
this.pageTimeout = pageTimeout;
this.filter = filter;
this.mode = mode == null ? Mode.PLAIN : mode;
}

public TimeZone timeZone() {
@@ -52,4 +55,8 @@ public TimeValue pageTimeout() {
public QueryBuilder filter() {
return filter;
}

public Mode mode() {
return mode;
}
}
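
For reference, a hedged usage example of the widened constructor; the values mirror the DEFAULT instance above and assume the Configuration, Protocol and Mode classes from this change are on the classpath.

// Illustrative usage: a null mode would fall back to Mode.PLAIN in the constructor above.
Configuration odbcCfg = new Configuration(TimeZone.getTimeZone("UTC"),
        Protocol.FETCH_SIZE, Protocol.REQUEST_TIMEOUT, Protocol.PAGE_TIMEOUT,
        null /* no filter */, Mode.ODBC);
assert odbcCfg.mode() == Mode.ODBC;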
Changed file 5 of 6
@@ -16,7 +16,7 @@ public class SysColumnsTests extends ESTestCase {

public void testSysColumns() {
List<List<?>> rows = new ArrayList<>();
SysColumns.fillInRows("test", "index", TypesTests.loadMapping("mapping-multi-field-variation.json", true), null, rows, null);
SysColumns.fillInRows("test", "index", TypesTests.loadMapping("mapping-multi-field-variation.json", true), null, rows, null, false);
assertEquals(16, rows.size());
assertEquals(24, rows.get(0).size());

@@ -58,6 +58,70 @@ public void testSysColumns() {
assertEquals(Integer.MAX_VALUE, bufferLength(row));
}

public void testSysColumnsInOdbcMode() {
List<List<?>> rows = new ArrayList<>();
SysColumns.fillInRows("test", "index", TypesTests.loadMapping("mapping-multi-field-variation.json", true), null, rows, null, true);
assertEquals(16, rows.size());
assertEquals(24, rows.get(0).size());

List<?> row = rows.get(0);
assertEquals("bool", name(row));
assertEquals((short) Types.BOOLEAN, sqlType(row));
assertEquals(null, radix(row));
assertEquals(1, bufferLength(row));

row = rows.get(1);
assertEquals("int", name(row));
assertEquals((short) Types.INTEGER, sqlType(row));
assertEquals(Short.class, radix(row).getClass());
assertEquals(4, bufferLength(row));
assertNull(decimalPrecision(row));
assertEquals(Short.class, nullable(row).getClass());
assertEquals(Short.class, sqlDataType(row).getClass());
assertEquals(Short.class, sqlDataTypeSub(row).getClass());

row = rows.get(2);
assertEquals("text", name(row));
assertEquals((short) Types.VARCHAR, sqlType(row));
assertEquals(null, radix(row));
assertEquals(Integer.MAX_VALUE, bufferLength(row));
assertNull(decimalPrecision(row));
assertEquals(Short.class, nullable(row).getClass());
assertEquals(Short.class, sqlDataType(row).getClass());
assertEquals(Short.class, sqlDataTypeSub(row).getClass());

row = rows.get(4);
assertEquals("date", name(row));
assertEquals((short) Types.TIMESTAMP, sqlType(row));
assertEquals(null, radix(row));
assertEquals(24, precision(row));
assertEquals(8, bufferLength(row));
assertNull(decimalPrecision(row));
assertEquals(Short.class, nullable(row).getClass());
assertEquals(Short.class, sqlDataType(row).getClass());
assertEquals(Short.class, sqlDataTypeSub(row).getClass());

row = rows.get(7);
assertEquals("some.dotted", name(row));
assertEquals((short) Types.STRUCT, sqlType(row));
assertEquals(null, radix(row));
assertEquals(-1, bufferLength(row));
assertNull(decimalPrecision(row));
assertEquals(Short.class, nullable(row).getClass());
assertEquals(Short.class, sqlDataType(row).getClass());
assertEquals(Short.class, sqlDataTypeSub(row).getClass());

row = rows.get(15);
assertEquals("some.ambiguous.normalized", name(row));
assertEquals((short) Types.VARCHAR, sqlType(row));
assertEquals(null, radix(row));
assertEquals(Integer.MAX_VALUE, bufferLength(row));
assertNull(decimalPrecision(row));
assertEquals(Short.class, nullable(row).getClass());
assertEquals(Short.class, sqlDataType(row).getClass());
assertEquals(Short.class, sqlDataTypeSub(row).getClass());
}

private static Object name(List<?> list) {
return list.get(3);
}
@@ -74,7 +138,23 @@ private static Object bufferLength(List<?> list) {
return list.get(7);
}

private static Object decimalPrecision(List<?> list) {
return list.get(8);
}

private static Object radix(List<?> list) {
return list.get(9);
}
}

private static Object nullable(List<?> list) {
return list.get(10);
}

private static Object sqlDataType(List<?> list) {
return list.get(13);
}

private static Object sqlDataTypeSub(List<?> list) {
return list.get(14);
}
}
Changed file 6 of 6
@@ -16,6 +16,7 @@
import org.elasticsearch.xpack.sql.expression.function.FunctionRegistry;
import org.elasticsearch.xpack.sql.parser.SqlParser;
import org.elasticsearch.xpack.sql.plan.logical.command.Command;
import org.elasticsearch.xpack.sql.session.Configuration;
import org.elasticsearch.xpack.sql.session.SqlSession;
import org.elasticsearch.xpack.sql.stats.Metrics;
import org.elasticsearch.xpack.sql.type.DataType;
@@ -53,18 +54,18 @@ private Tuple<Command, SqlSession> sql(String sql) {
return Void.TYPE;
}).when(resolver).resolveAsSeparateMappings(any(), any(), any());

SqlSession session = new SqlSession(null, null, null, resolver, null, null, null, null);
SqlSession session = new SqlSession(Configuration.DEFAULT, null, null, resolver, null, null, null, null);
return new Tuple<>(cmd, session);
}

public void testSysTypes() throws Exception {
Command cmd = sql("SYS TYPES").v1();

List<String> names = asList("BYTE", "LONG", "BINARY", "NULL", "INTEGER", "SHORT", "HALF_FLOAT", "SCALED_FLOAT", "FLOAT", "DOUBLE",
"KEYWORD", "TEXT", "IP", "BOOLEAN", "DATE",
"INTERVAL_YEAR", "INTERVAL_MONTH", "INTERVAL_DAY", "INTERVAL_HOUR", "INTERVAL_MINUTE", "INTERVAL_SECOND",
"INTERVAL_YEAR_TO_MONTH", "INTERVAL_DAY_TO_HOUR", "INTERVAL_DAY_TO_MINUTE", "INTERVAL_DAY_TO_SECOND",
"INTERVAL_HOUR_TO_MINUTE", "INTERVAL_HOUR_TO_SECOND", "INTERVAL_MINUTE_TO_SECOND",
"KEYWORD", "TEXT", "IP", "BOOLEAN", "DATE",
"INTERVAL_YEAR", "INTERVAL_MONTH", "INTERVAL_DAY", "INTERVAL_HOUR", "INTERVAL_MINUTE", "INTERVAL_SECOND",
"INTERVAL_YEAR_TO_MONTH", "INTERVAL_DAY_TO_HOUR", "INTERVAL_DAY_TO_MINUTE", "INTERVAL_DAY_TO_SECOND",
"INTERVAL_HOUR_TO_MINUTE", "INTERVAL_HOUR_TO_SECOND", "INTERVAL_MINUTE_TO_SECOND",
"UNSUPPORTED", "OBJECT", "NESTED");

cmd.execute(null, ActionListener.wrap(r -> {
