diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 480ca074f5..fb3a19cdae 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -65,7 +65,7 @@ repos:
entry: bash -c 'cd go/adbc && golangci-lint run --fix --timeout 5m'
types_or: [go]
- repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
- rev: v2.3.0
+ rev: v2.12.0
hooks:
- id: pretty-format-golang
- id: pretty-format-java
diff --git a/java/.checker-framework/org.apache.arrow.adapter.jdbc.JdbcToArrowUtils.astub b/java/.checker-framework/org.apache.arrow.adapter.jdbc.JdbcToArrowUtils.astub
new file mode 100644
index 0000000000..0ee9d0ebf7
--- /dev/null
+++ b/java/.checker-framework/org.apache.arrow.adapter.jdbc.JdbcToArrowUtils.astub
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.arrow.adapter.jdbc;
+
+import org.checkerframework.checker.nullness.qual.Nullable;
+
+import java.util.Calendar;
+
+import org.apache.arrow.vector.types.pojo.ArrowType;
+
+public class JdbcToArrowUtils {
+ public static ArrowType getArrowTypeFromJdbcType(JdbcFieldInfo fieldInfo, @Nullable Calendar calendar);
+}
diff --git a/java/.checker-framework/org.apache.arrow.util.AutoCloseables.astub b/java/.checker-framework/org.apache.arrow.util.AutoCloseables.astub
new file mode 100644
index 0000000000..f095df937d
--- /dev/null
+++ b/java/.checker-framework/org.apache.arrow.util.AutoCloseables.astub
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.arrow.util;
+
+import org.checkerframework.checker.nullness.qual.Nullable;
+
+public final class AutoCloseables {
+ public static void close(@Nullable AutoCloseable... autoCloseables) throws Exception;
+}
diff --git a/java/.checker-framework/org.apache.arrow.vector.types.pojo.ArrowType.astub b/java/.checker-framework/org.apache.arrow.vector.types.pojo.ArrowType.astub
new file mode 100644
index 0000000000..8827b847af
--- /dev/null
+++ b/java/.checker-framework/org.apache.arrow.vector.types.pojo.ArrowType.astub
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.arrow.vector.types.pojo;
+
+import org.checkerframework.checker.nullness.qual.Nullable;
+
+import org.apache.arrow.vector.types.TimeUnit;
+
+public abstract class ArrowType {
+ public abstract static class PrimitiveType extends ArrowType {
+ }
+
+ public static class Timestamp extends PrimitiveType {
+ public Timestamp(TimeUnit unit, @Nullable String timezone);
+ }
+}
diff --git a/java/.checker-framework/org.junit.jupiter.api.Assumptions.astub b/java/.checker-framework/org.junit.jupiter.api.Assumptions.astub
new file mode 100644
index 0000000000..46ee85fb35
--- /dev/null
+++ b/java/.checker-framework/org.junit.jupiter.api.Assumptions.astub
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.junit.jupiter.api;
+
+import org.checkerframework.dataflow.qual.AssertMethod;
+import org.opentest4j.TestAbortedException;
+
+public class Assumptions {
+ @AssertMethod(value = TestAbortedException.class)
+ public static void assumeTrue(boolean assumption, String message) throws TestAbortedException;
+
+ @AssertMethod(isAssertFalse = true, value = TestAbortedException.class)
+ public static void assumeFalse(boolean assumption, String message) throws TestAbortedException;
+}
diff --git a/java/core/pom.xml b/java/core/pom.xml
index ac499b9a25..f61c686896 100644
--- a/java/core/pom.xml
+++ b/java/core/pom.xml
@@ -33,6 +33,12 @@
arrow-vector
+
+
+ org.checkerframework
+ checker-qual
+
+
org.assertj
diff --git a/java/core/src/main/java/org/apache/arrow/adbc/core/AdbcConnection.java b/java/core/src/main/java/org/apache/arrow/adbc/core/AdbcConnection.java
index c8e897eeee..060c65e816 100644
--- a/java/core/src/main/java/org/apache/arrow/adbc/core/AdbcConnection.java
+++ b/java/core/src/main/java/org/apache/arrow/adbc/core/AdbcConnection.java
@@ -20,6 +20,7 @@
import org.apache.arrow.vector.VectorSchemaRoot;
import org.apache.arrow.vector.ipc.ArrowReader;
import org.apache.arrow.vector.types.pojo.Schema;
+import org.checkerframework.checker.nullness.qual.Nullable;
/**
* A connection to a {@link AdbcDatabase}.
@@ -79,7 +80,7 @@ default ArrowReader readPartition(ByteBuffer descriptor) throws AdbcException {
*
* @param infoCodes The metadata items to fetch.
*/
- ArrowReader getInfo(int[] infoCodes) throws AdbcException;
+ ArrowReader getInfo(int @Nullable [] infoCodes) throws AdbcException;
/**
* Get metadata about the driver/database.
diff --git a/java/core/src/main/java/org/apache/arrow/adbc/core/AdbcDriver.java b/java/core/src/main/java/org/apache/arrow/adbc/core/AdbcDriver.java
index 5e32fd1ed7..797b863586 100644
--- a/java/core/src/main/java/org/apache/arrow/adbc/core/AdbcDriver.java
+++ b/java/core/src/main/java/org/apache/arrow/adbc/core/AdbcDriver.java
@@ -54,6 +54,7 @@ public interface AdbcDriver {
/** ADBC API revision 1.0.0. */
long ADBC_VERSION_1_0_0 = 1_000_000;
+
/** ADBC API revision 1.1.0. */
long ADBC_VERSION_1_1_0 = 1_001_000;
diff --git a/java/core/src/main/java/org/apache/arrow/adbc/core/AdbcException.java b/java/core/src/main/java/org/apache/arrow/adbc/core/AdbcException.java
index dce7570e3d..193bbaa96c 100644
--- a/java/core/src/main/java/org/apache/arrow/adbc/core/AdbcException.java
+++ b/java/core/src/main/java/org/apache/arrow/adbc/core/AdbcException.java
@@ -18,6 +18,7 @@
import java.util.Collection;
import java.util.Collections;
+import org.checkerframework.checker.nullness.qual.Nullable;
/**
* An error in the database or ADBC driver.
@@ -34,20 +35,24 @@
*/
public class AdbcException extends Exception {
private final AdbcStatusCode status;
- private final String sqlState;
+ private final @Nullable String sqlState;
private final int vendorCode;
private Collection details;
public AdbcException(
- String message, Throwable cause, AdbcStatusCode status, String sqlState, int vendorCode) {
+ @Nullable String message,
+ @Nullable Throwable cause,
+ AdbcStatusCode status,
+ @Nullable String sqlState,
+ int vendorCode) {
this(message, cause, status, sqlState, vendorCode, Collections.emptyList());
}
public AdbcException(
- String message,
- Throwable cause,
+ @Nullable String message,
+ @Nullable Throwable cause,
AdbcStatusCode status,
- String sqlState,
+ @Nullable String sqlState,
int vendorCode,
Collection details) {
super(message, cause);
@@ -83,7 +88,7 @@ public AdbcStatusCode getStatus() {
}
/** A SQLSTATE error code, if provided, as defined by the SQL:2003 standard. */
- public String getSqlState() {
+ public @Nullable String getSqlState() {
return sqlState;
}
diff --git a/java/core/src/main/java/org/apache/arrow/adbc/core/ErrorDetail.java b/java/core/src/main/java/org/apache/arrow/adbc/core/ErrorDetail.java
index 13521fb82e..5149b9de77 100644
--- a/java/core/src/main/java/org/apache/arrow/adbc/core/ErrorDetail.java
+++ b/java/core/src/main/java/org/apache/arrow/adbc/core/ErrorDetail.java
@@ -17,6 +17,7 @@
package org.apache.arrow.adbc.core;
import java.util.Objects;
+import org.checkerframework.checker.nullness.qual.Nullable;
/** Additional details (not necessarily human-readable) contained in an {@link AdbcException}. */
public class ErrorDetail {
@@ -37,7 +38,7 @@ public Object getValue() {
}
@Override
- public boolean equals(Object o) {
+ public boolean equals(@Nullable Object o) {
if (this == o) {
return true;
}
diff --git a/java/core/src/main/java/org/apache/arrow/adbc/core/PartitionDescriptor.java b/java/core/src/main/java/org/apache/arrow/adbc/core/PartitionDescriptor.java
index 3f2047801c..5890db9047 100644
--- a/java/core/src/main/java/org/apache/arrow/adbc/core/PartitionDescriptor.java
+++ b/java/core/src/main/java/org/apache/arrow/adbc/core/PartitionDescriptor.java
@@ -18,6 +18,7 @@
import java.nio.ByteBuffer;
import java.util.Objects;
+import org.checkerframework.checker.nullness.qual.Nullable;
/** An opaque descriptor for a part of a potentially distributed or partitioned result set. */
public final class PartitionDescriptor {
@@ -32,7 +33,7 @@ public ByteBuffer getDescriptor() {
}
@Override
- public boolean equals(Object o) {
+ public boolean equals(@Nullable Object o) {
if (this == o) {
return true;
}
diff --git a/java/core/src/main/java/org/apache/arrow/adbc/core/StandardSchemas.java b/java/core/src/main/java/org/apache/arrow/adbc/core/StandardSchemas.java
index c059bb1b57..31f8ddb97b 100644
--- a/java/core/src/main/java/org/apache/arrow/adbc/core/StandardSchemas.java
+++ b/java/core/src/main/java/org/apache/arrow/adbc/core/StandardSchemas.java
@@ -98,30 +98,64 @@ private StandardSchemas() {
public static final List COLUMN_SCHEMA =
Arrays.asList(
- new Field("column_name", FieldType.notNullable(ArrowType.Utf8.INSTANCE), null),
- new Field("ordinal_position", FieldType.nullable(INT32), null),
- new Field("remarks", FieldType.nullable(ArrowType.Utf8.INSTANCE), null),
- new Field("xdbc_data_type", FieldType.nullable(INT16), null),
- new Field("xdbc_type_name", FieldType.nullable(ArrowType.Utf8.INSTANCE), null),
- new Field("xdbc_column_size", FieldType.nullable(INT32), null),
- new Field("xdbc_decimal_digits", FieldType.nullable(INT16), null),
- new Field("xdbc_num_prec_radix", FieldType.nullable(INT16), null),
- new Field("xdbc_nullable", FieldType.nullable(INT16), null),
- new Field("xdbc_column_def", FieldType.nullable(ArrowType.Utf8.INSTANCE), null),
- new Field("xdbc_sql_data_type", FieldType.nullable(INT16), null),
- new Field("xdbc_datetime_sub", FieldType.nullable(INT16), null),
- new Field("xdbc_char_octet_length", FieldType.nullable(INT32), null),
- new Field("xdbc_is_nullable", FieldType.nullable(ArrowType.Utf8.INSTANCE), null),
- new Field("xdbc_scope_catalog", FieldType.nullable(ArrowType.Utf8.INSTANCE), null),
- new Field("xdbc_scope_schema", FieldType.nullable(ArrowType.Utf8.INSTANCE), null),
- new Field("xdbc_scope_table", FieldType.nullable(ArrowType.Utf8.INSTANCE), null),
- new Field("xdbc_is_autoincrement", FieldType.nullable(ArrowType.Bool.INSTANCE), null),
- new Field("xdbc_is_generatedcolumn", FieldType.nullable(ArrowType.Bool.INSTANCE), null));
+ new Field(
+ "column_name",
+ FieldType.notNullable(ArrowType.Utf8.INSTANCE),
+ Collections.emptyList()),
+ new Field("ordinal_position", FieldType.nullable(INT32), Collections.emptyList()),
+ new Field(
+ "remarks", FieldType.nullable(ArrowType.Utf8.INSTANCE), Collections.emptyList()),
+ new Field("xdbc_data_type", FieldType.nullable(INT16), Collections.emptyList()),
+ new Field(
+ "xdbc_type_name",
+ FieldType.nullable(ArrowType.Utf8.INSTANCE),
+ Collections.emptyList()),
+ new Field("xdbc_column_size", FieldType.nullable(INT32), Collections.emptyList()),
+ new Field("xdbc_decimal_digits", FieldType.nullable(INT16), Collections.emptyList()),
+ new Field("xdbc_num_prec_radix", FieldType.nullable(INT16), Collections.emptyList()),
+ new Field("xdbc_nullable", FieldType.nullable(INT16), Collections.emptyList()),
+ new Field(
+ "xdbc_column_def",
+ FieldType.nullable(ArrowType.Utf8.INSTANCE),
+ Collections.emptyList()),
+ new Field("xdbc_sql_data_type", FieldType.nullable(INT16), Collections.emptyList()),
+ new Field("xdbc_datetime_sub", FieldType.nullable(INT16), Collections.emptyList()),
+ new Field("xdbc_char_octet_length", FieldType.nullable(INT32), Collections.emptyList()),
+ new Field(
+ "xdbc_is_nullable",
+ FieldType.nullable(ArrowType.Utf8.INSTANCE),
+ Collections.emptyList()),
+ new Field(
+ "xdbc_scope_catalog",
+ FieldType.nullable(ArrowType.Utf8.INSTANCE),
+ Collections.emptyList()),
+ new Field(
+ "xdbc_scope_schema",
+ FieldType.nullable(ArrowType.Utf8.INSTANCE),
+ Collections.emptyList()),
+ new Field(
+ "xdbc_scope_table",
+ FieldType.nullable(ArrowType.Utf8.INSTANCE),
+ Collections.emptyList()),
+ new Field(
+ "xdbc_is_autoincrement",
+ FieldType.nullable(ArrowType.Bool.INSTANCE),
+ Collections.emptyList()),
+ new Field(
+ "xdbc_is_generatedcolumn",
+ FieldType.nullable(ArrowType.Bool.INSTANCE),
+ Collections.emptyList()));
public static final List TABLE_SCHEMA =
Arrays.asList(
- new Field("table_name", FieldType.notNullable(ArrowType.Utf8.INSTANCE), null),
- new Field("table_type", FieldType.notNullable(ArrowType.Utf8.INSTANCE), null),
+ new Field(
+ "table_name",
+ FieldType.notNullable(ArrowType.Utf8.INSTANCE),
+ Collections.emptyList()),
+ new Field(
+ "table_type",
+ FieldType.notNullable(ArrowType.Utf8.INSTANCE),
+ Collections.emptyList()),
new Field(
"table_columns",
FieldType.nullable(ArrowType.List.INSTANCE),
@@ -136,7 +170,10 @@ private StandardSchemas() {
public static final List DB_SCHEMA_SCHEMA =
Arrays.asList(
- new Field("db_schema_name", FieldType.notNullable(ArrowType.Utf8.INSTANCE), null),
+ new Field(
+ "db_schema_name",
+ FieldType.notNullable(ArrowType.Utf8.INSTANCE),
+ Collections.emptyList()),
new Field(
"db_schema_tables",
FieldType.nullable(ArrowType.List.INSTANCE),
@@ -150,7 +187,10 @@ private StandardSchemas() {
public static final Schema GET_OBJECTS_SCHEMA =
new Schema(
Arrays.asList(
- new Field("catalog_name", FieldType.notNullable(ArrowType.Utf8.INSTANCE), null),
+ new Field(
+ "catalog_name",
+ FieldType.notNullable(ArrowType.Utf8.INSTANCE),
+ Collections.emptyList()),
new Field(
"catalog_db_schemas",
FieldType.nullable(ArrowType.List.INSTANCE),
@@ -180,7 +220,10 @@ private StandardSchemas() {
public static final List STATISTICS_DB_SCHEMA_SCHEMA =
Arrays.asList(
- new Field("db_schema_name", FieldType.notNullable(ArrowType.Utf8.INSTANCE), null),
+ new Field(
+ "db_schema_name",
+ FieldType.notNullable(ArrowType.Utf8.INSTANCE),
+ Collections.emptyList()),
new Field(
"db_schema_statistics",
FieldType.notNullable(ArrowType.List.INSTANCE),
@@ -195,7 +238,10 @@ private StandardSchemas() {
public static final Schema GET_STATISTICS_SCHEMA =
new Schema(
Arrays.asList(
- new Field("catalog_name", FieldType.notNullable(ArrowType.Utf8.INSTANCE), null),
+ new Field(
+ "catalog_name",
+ FieldType.notNullable(ArrowType.Utf8.INSTANCE),
+ Collections.emptyList()),
new Field(
"catalog_db_schemas",
FieldType.notNullable(ArrowType.List.INSTANCE),
diff --git a/java/core/src/main/java/org/apache/arrow/adbc/core/TypedKey.java b/java/core/src/main/java/org/apache/arrow/adbc/core/TypedKey.java
index 21523bb429..1f1dda2f4b 100644
--- a/java/core/src/main/java/org/apache/arrow/adbc/core/TypedKey.java
+++ b/java/core/src/main/java/org/apache/arrow/adbc/core/TypedKey.java
@@ -19,6 +19,8 @@
import java.util.Map;
import java.util.Objects;
+import org.checkerframework.checker.nullness.qual.NonNull;
+import org.checkerframework.checker.nullness.qual.Nullable;
/**
* A typesafe option key.
@@ -45,8 +47,8 @@ public String getKey() {
*
* @throws ClassCastException if the value is of the wrong type.
*/
- public T get(Map options) {
- Object value = options.get(key);
+ public @Nullable T get(Map options) {
+ @Nullable Object value = options.get(key);
if (value == null) {
return null;
}
@@ -59,12 +61,12 @@ public T get(Map options) {
* @param options The options.
* @param value The option value.
*/
- public void set(Map options, T value) {
+ public void set(Map options, @NonNull T value) {
options.put(key, value);
}
@Override
- public boolean equals(Object o) {
+ public boolean equals(@Nullable Object o) {
if (this == o) {
return true;
}
diff --git a/java/driver-manager/pom.xml b/java/driver-manager/pom.xml
index 8a083a58a8..589bfe40ca 100644
--- a/java/driver-manager/pom.xml
+++ b/java/driver-manager/pom.xml
@@ -28,6 +28,12 @@
adbc-core
+
+
+ org.checkerframework
+ checker-qual
+
+
org.assertj
diff --git a/java/driver-manager/src/main/java/org/apache/arrow/adbc/drivermanager/AdbcDriverManager.java b/java/driver-manager/src/main/java/org/apache/arrow/adbc/drivermanager/AdbcDriverManager.java
index c2e20efd02..5068fb9a1f 100644
--- a/java/driver-manager/src/main/java/org/apache/arrow/adbc/drivermanager/AdbcDriverManager.java
+++ b/java/driver-manager/src/main/java/org/apache/arrow/adbc/drivermanager/AdbcDriverManager.java
@@ -27,6 +27,7 @@
import org.apache.arrow.adbc.core.AdbcException;
import org.apache.arrow.adbc.core.AdbcStatusCode;
import org.apache.arrow.memory.BufferAllocator;
+import org.checkerframework.checker.nullness.qual.Nullable;
/** Instantiate connections to ABDC databases generically based on driver name. */
public final class AdbcDriverManager {
@@ -38,10 +39,13 @@ private AdbcDriverManager() {
driverFactoryFunctions = new ConcurrentHashMap<>();
final ServiceLoader serviceLoader =
ServiceLoader.load(AdbcDriverFactory.class);
- serviceLoader.forEach(
- driverFactory ->
- driverFactoryFunctions.putIfAbsent(
- driverFactory.getClass().getCanonicalName(), driverFactory::getDriver));
+ for (AdbcDriverFactory driverFactory : serviceLoader) {
+ final @Nullable String className = driverFactory.getClass().getCanonicalName();
+ if (className == null) {
+ throw new RuntimeException("Class has no canonical name");
+ }
+ driverFactoryFunctions.putIfAbsent(className, driverFactory::getDriver);
+ }
}
/**
@@ -77,7 +81,7 @@ public AdbcDatabase connect(
* fully-qualified class name of an AdbcDriverFactory class.
* @return A function to construct an AdbcDriver from a BufferAllocator, or null if not found.
*/
- Function lookupDriver(String driverFactoryName) {
+ @Nullable Function lookupDriver(String driverFactoryName) {
return driverFactoryFunctions.get(driverFactoryName);
}
diff --git a/java/driver/flight-sql-validation/src/test/java/org/apache/arrow/adbc/driver/flightsql/FlightSqlQuirks.java b/java/driver/flight-sql-validation/src/test/java/org/apache/arrow/adbc/driver/flightsql/FlightSqlQuirks.java
index d3f79889ec..fe01a18cd6 100644
--- a/java/driver/flight-sql-validation/src/test/java/org/apache/arrow/adbc/driver/flightsql/FlightSqlQuirks.java
+++ b/java/driver/flight-sql-validation/src/test/java/org/apache/arrow/adbc/driver/flightsql/FlightSqlQuirks.java
@@ -36,8 +36,8 @@ public class FlightSqlQuirks extends SqlValidationQuirks {
static String getFlightLocation() {
final String location = System.getenv(FLIGHT_SQL_LOCATION_ENV_VAR);
- Assumptions.assumeFalse(
- location == null || location.isEmpty(),
+ Assumptions.assumeTrue(
+ location != null && !location.isEmpty(),
"Flight SQL server not found, set " + FLIGHT_SQL_LOCATION_ENV_VAR);
return location;
}
diff --git a/java/driver/flight-sql/pom.xml b/java/driver/flight-sql/pom.xml
index ceceaa8249..e410c2a14e 100644
--- a/java/driver/flight-sql/pom.xml
+++ b/java/driver/flight-sql/pom.xml
@@ -67,6 +67,12 @@
adbc-sql
+
+
+ org.checkerframework
+ checker-qual
+
+
org.assertj
diff --git a/java/driver/flight-sql/src/main/java/org/apache/arrow/adbc/driver/flightsql/FlightInfoReader.java b/java/driver/flight-sql/src/main/java/org/apache/arrow/adbc/driver/flightsql/FlightInfoReader.java
index 6850be0639..1de006f204 100644
--- a/java/driver/flight-sql/src/main/java/org/apache/arrow/adbc/driver/flightsql/FlightInfoReader.java
+++ b/java/driver/flight-sql/src/main/java/org/apache/arrow/adbc/driver/flightsql/FlightInfoReader.java
@@ -34,6 +34,7 @@
import org.apache.arrow.vector.ipc.ArrowReader;
import org.apache.arrow.vector.ipc.message.ArrowRecordBatch;
import org.apache.arrow.vector.types.pojo.Schema;
+import org.checkerframework.checker.nullness.qual.Nullable;
/** An ArrowReader that wraps a FlightInfo. */
public class FlightInfoReader extends ArrowReader {
@@ -45,6 +46,8 @@ public class FlightInfoReader extends ArrowReader {
private FlightStream currentStream;
private long bytesRead;
+ @SuppressWarnings(
+ "method.invocation") // Checker Framework does not like the ensureInitialized call
FlightInfoReader(
BufferAllocator allocator,
FlightSqlClientWithCallOptions client,
@@ -118,8 +121,12 @@ private FlightStream tryLoadNextStream(FlightEndpoint endpoint) throws IOExcepti
Collections.shuffle(locations);
IOException failure = null;
for (final Location location : locations) {
+ final @Nullable FlightSqlClientWithCallOptions client = clientCache.get(location);
+ if (client == null) {
+ throw new IllegalStateException("Could not connect to " + location);
+ }
try {
- return Objects.requireNonNull(clientCache.get(location)).getStream(endpoint.getTicket());
+ return client.getStream(endpoint.getTicket());
} catch (RuntimeException e) {
// Also handles CompletionException (from clientCache#get), FlightRuntimeException
if (failure == null) {
@@ -131,6 +138,9 @@ private FlightStream tryLoadNextStream(FlightEndpoint endpoint) throws IOExcepti
}
}
}
+ if (failure == null) {
+ throw new IllegalStateException("FlightEndpoint had no locations");
+ }
throw Objects.requireNonNull(failure);
}
}
diff --git a/java/driver/flight-sql/src/main/java/org/apache/arrow/adbc/driver/flightsql/FlightSqlConnection.java b/java/driver/flight-sql/src/main/java/org/apache/arrow/adbc/driver/flightsql/FlightSqlConnection.java
index c079060ec8..586d31e039 100644
--- a/java/driver/flight-sql/src/main/java/org/apache/arrow/adbc/driver/flightsql/FlightSqlConnection.java
+++ b/java/driver/flight-sql/src/main/java/org/apache/arrow/adbc/driver/flightsql/FlightSqlConnection.java
@@ -55,24 +55,26 @@
import org.apache.arrow.util.AutoCloseables;
import org.apache.arrow.vector.VectorSchemaRoot;
import org.apache.arrow.vector.ipc.ArrowReader;
+import org.checkerframework.checker.initialization.qual.UnknownInitialization;
+import org.checkerframework.checker.nullness.qual.Nullable;
public class FlightSqlConnection implements AdbcConnection {
private final BufferAllocator allocator;
- private final AtomicInteger counter;
+ private final AtomicInteger counter = new AtomicInteger(0);
private final FlightSqlClientWithCallOptions client;
private final SqlQuirks quirks;
private final Map parameters;
private final LoadingCache clientCache;
// Cached data to use across additional connections.
- private ClientCookieMiddleware.Factory cookieMiddlewareFactory;
+ private ClientCookieMiddleware.@Nullable Factory cookieMiddlewareFactory;
private CallOption[] callOptions;
// Used to cache the InputStream content as a byte array since
// subsequent connections may need to use it but it is supplied as a stream.
- private byte[] mtlsCertChainBytes;
- private byte[] mtlsPrivateKeyBytes;
- private byte[] tlsRootCertsBytes;
+ private byte @Nullable [] mtlsCertChainBytes;
+ private byte @Nullable [] mtlsPrivateKeyBytes;
+ private byte @Nullable [] tlsRootCertsBytes;
FlightSqlConnection(
BufferAllocator allocator,
@@ -81,16 +83,18 @@ public class FlightSqlConnection implements AdbcConnection {
Map parameters)
throws AdbcException {
this.allocator = allocator;
- this.counter = new AtomicInteger(0);
this.quirks = quirks;
this.parameters = parameters;
+ this.callOptions = new CallOption[0];
FlightSqlClient flightSqlClient = new FlightSqlClient(createInitialConnection(location));
this.client = new FlightSqlClientWithCallOptions(flightSqlClient, callOptions);
this.clientCache =
Caffeine.newBuilder()
.expireAfterAccess(5, TimeUnit.MINUTES)
.removalListener(
- (Location key, FlightSqlClientWithCallOptions value, RemovalCause cause) -> {
+ (@Nullable Location key,
+ @Nullable FlightSqlClientWithCallOptions value,
+ RemovalCause cause) -> {
if (value == null) return;
try {
value.close();
@@ -153,7 +157,7 @@ public AdbcStatement bulkIngest(String targetTableName, BulkIngestMode mode)
}
@Override
- public ArrowReader getInfo(int[] infoCodes) throws AdbcException {
+ public ArrowReader getInfo(int @Nullable [] infoCodes) throws AdbcException {
try (InfoMetadataBuilder builder = new InfoMetadataBuilder(allocator, client, infoCodes)) {
try (final VectorSchemaRoot root = builder.build()) {
return RootArrowReader.fromRoot(allocator, root);
@@ -195,24 +199,28 @@ public String toString() {
* Initialize cached data to share between connections and create, test, and authenticate the
* first connection.
*/
- private FlightClient createInitialConnection(Location location) throws AdbcException {
+ private FlightClient createInitialConnection(
+ @UnknownInitialization FlightSqlConnection this, Location location) throws AdbcException {
// Setup cached pre-connection properties.
try {
- final InputStream mtlsCertChain =
- FlightSqlConnectionProperties.MTLS_CERT_CHAIN.get(parameters);
- if (mtlsCertChain != null) {
- this.mtlsCertChainBytes = inputStreamToBytes(mtlsCertChain);
- }
+ if (parameters != null) {
+ final InputStream mtlsCertChain =
+ FlightSqlConnectionProperties.MTLS_CERT_CHAIN.get(parameters);
+ if (mtlsCertChain != null) {
+ this.mtlsCertChainBytes = inputStreamToBytes(mtlsCertChain);
+ }
- final InputStream mtlsPrivateKey =
- FlightSqlConnectionProperties.MTLS_PRIVATE_KEY.get(parameters);
- if (mtlsPrivateKey != null) {
- this.mtlsPrivateKeyBytes = inputStreamToBytes(mtlsPrivateKey);
- }
+ final InputStream mtlsPrivateKey =
+ FlightSqlConnectionProperties.MTLS_PRIVATE_KEY.get(parameters);
+ if (mtlsPrivateKey != null) {
+ this.mtlsPrivateKeyBytes = inputStreamToBytes(mtlsPrivateKey);
+ }
- final InputStream tlsRootCerts = FlightSqlConnectionProperties.TLS_ROOT_CERTS.get(parameters);
- if (tlsRootCerts != null) {
- this.tlsRootCertsBytes = inputStreamToBytes(tlsRootCerts);
+ final InputStream tlsRootCerts =
+ FlightSqlConnectionProperties.TLS_ROOT_CERTS.get(parameters);
+ if (tlsRootCerts != null) {
+ this.tlsRootCertsBytes = inputStreamToBytes(tlsRootCerts);
+ }
}
} catch (IOException ex) {
throw new AdbcException(
@@ -227,10 +235,12 @@ private FlightClient createInitialConnection(Location location) throws AdbcExcep
0);
}
- final boolean useCookieMiddleware =
- Boolean.TRUE.equals(FlightSqlConnectionProperties.WITH_COOKIE_MIDDLEWARE.get(parameters));
- if (useCookieMiddleware) {
- this.cookieMiddlewareFactory = new ClientCookieMiddleware.Factory();
+ if (parameters != null) {
+ final boolean useCookieMiddleware =
+ Boolean.TRUE.equals(FlightSqlConnectionProperties.WITH_COOKIE_MIDDLEWARE.get(parameters));
+ if (useCookieMiddleware) {
+ this.cookieMiddlewareFactory = new ClientCookieMiddleware.Factory();
+ }
}
// Build the client using the above properties.
@@ -284,7 +294,11 @@ private FlightClient createInitialConnection(Location location) throws AdbcExcep
}
/** Returns a yet-to-be authenticated FlightClient */
- private FlightClient buildClient(Location location) throws AdbcException {
+ private FlightClient buildClient(
+ @UnknownInitialization FlightSqlConnection this, Location location) throws AdbcException {
+ if (allocator == null) {
+ throw new IllegalStateException("Internal error: allocator was not initialized");
+ }
final FlightClient.Builder builder =
FlightClient.builder()
.allocator(
@@ -327,13 +341,15 @@ private FlightClient buildClient(Location location) throws AdbcException {
builder.trustedCertificates(new ByteArrayInputStream(tlsRootCertsBytes));
}
- if (Boolean.TRUE.equals(FlightSqlConnectionProperties.TLS_SKIP_VERIFY.get(parameters))) {
- builder.verifyServer(false);
- }
+ if (parameters != null) {
+ if (Boolean.TRUE.equals(FlightSqlConnectionProperties.TLS_SKIP_VERIFY.get(parameters))) {
+ builder.verifyServer(false);
+ }
- String hostnameOverride = FlightSqlConnectionProperties.TLS_OVERRIDE_HOSTNAME.get(parameters);
- if (hostnameOverride != null) {
- builder.overrideHostname(hostnameOverride);
+ String hostnameOverride = FlightSqlConnectionProperties.TLS_OVERRIDE_HOSTNAME.get(parameters);
+ if (hostnameOverride != null) {
+ builder.overrideHostname(hostnameOverride);
+ }
}
// Setup cookies if needed.
diff --git a/java/driver/flight-sql/src/main/java/org/apache/arrow/adbc/driver/flightsql/FlightSqlDriverUtil.java b/java/driver/flight-sql/src/main/java/org/apache/arrow/adbc/driver/flightsql/FlightSqlDriverUtil.java
index a4ea23dd0b..ae776d9e87 100644
--- a/java/driver/flight-sql/src/main/java/org/apache/arrow/adbc/driver/flightsql/FlightSqlDriverUtil.java
+++ b/java/driver/flight-sql/src/main/java/org/apache/arrow/adbc/driver/flightsql/FlightSqlDriverUtil.java
@@ -24,13 +24,18 @@
import org.apache.arrow.adbc.core.ErrorDetail;
import org.apache.arrow.flight.FlightRuntimeException;
import org.apache.arrow.flight.FlightStatusCode;
+import org.checkerframework.checker.nullness.qual.Nullable;
final class FlightSqlDriverUtil {
private FlightSqlDriverUtil() {
throw new AssertionError("Do not instantiate this class");
}
- static String prefixExceptionMessage(final String s) {
+ static String prefixExceptionMessage(final @Nullable String s) {
+ // Allow null since Throwable#getMessage may be null
+ if (s == null) {
+ return "[Flight SQL] (No or unknown error)";
+ }
return "[Flight SQL] " + s;
}
diff --git a/java/driver/flight-sql/src/main/java/org/apache/arrow/adbc/driver/flightsql/FlightSqlStatement.java b/java/driver/flight-sql/src/main/java/org/apache/arrow/adbc/driver/flightsql/FlightSqlStatement.java
index 77cb2622d1..6a00ca2346 100644
--- a/java/driver/flight-sql/src/main/java/org/apache/arrow/adbc/driver/flightsql/FlightSqlStatement.java
+++ b/java/driver/flight-sql/src/main/java/org/apache/arrow/adbc/driver/flightsql/FlightSqlStatement.java
@@ -40,6 +40,7 @@
import org.apache.arrow.vector.VectorSchemaRoot;
import org.apache.arrow.vector.types.pojo.Field;
import org.apache.arrow.vector.types.pojo.Schema;
+import org.checkerframework.checker.nullness.qual.Nullable;
public class FlightSqlStatement implements AdbcStatement {
private final BufferAllocator allocator;
@@ -48,11 +49,11 @@ public class FlightSqlStatement implements AdbcStatement {
private final SqlQuirks quirks;
// State for SQL queries
- private String sqlQuery;
- private FlightSqlClient.PreparedStatement preparedStatement;
+ private @Nullable String sqlQuery;
+ private FlightSqlClient.@Nullable PreparedStatement preparedStatement;
// State for bulk ingest
- private BulkState bulkOperation;
- private VectorSchemaRoot bindRoot;
+ private @Nullable BulkState bulkOperation;
+ private @Nullable VectorSchemaRoot bindRoot;
FlightSqlStatement(
BufferAllocator allocator,
@@ -64,6 +65,9 @@ public class FlightSqlStatement implements AdbcStatement {
this.clientCache = clientCache;
this.quirks = quirks;
this.sqlQuery = null;
+ this.preparedStatement = null;
+ this.bulkOperation = null;
+ this.bindRoot = null;
}
static FlightSqlStatement ingestRoot(
@@ -76,9 +80,7 @@ static FlightSqlStatement ingestRoot(
Objects.requireNonNull(targetTableName);
final FlightSqlStatement statement =
new FlightSqlStatement(allocator, client, clientCache, quirks);
- statement.bulkOperation = new BulkState();
- statement.bulkOperation.mode = mode;
- statement.bulkOperation.targetTable = targetTableName;
+ statement.bulkOperation = new BulkState(mode, targetTableName);
return statement;
}
@@ -96,7 +98,8 @@ public void bind(VectorSchemaRoot root) {
bindRoot = root;
}
- private void createBulkTable() throws AdbcException {
+ private void createBulkTable(BulkState bulkOperation, VectorSchemaRoot bindRoot)
+ throws AdbcException {
final StringBuilder create = new StringBuilder("CREATE TABLE ");
create.append(bulkOperation.targetTable);
create.append(" (");
@@ -128,20 +131,21 @@ private void createBulkTable() throws AdbcException {
}
}
- private UpdateResult executeBulk() throws AdbcException {
+ private UpdateResult executeBulk(BulkState bulkOperation) throws AdbcException {
if (bindRoot == null) {
throw AdbcException.invalidState("[Flight SQL] Must call bind() before bulk insert");
}
+ final VectorSchemaRoot bindParams = bindRoot;
if (bulkOperation.mode == BulkIngestMode.CREATE) {
- createBulkTable();
+ createBulkTable(bulkOperation, bindParams);
}
// XXX: potential injection
final StringBuilder insert = new StringBuilder("INSERT INTO ");
insert.append(bulkOperation.targetTable);
insert.append(" VALUES (");
- for (int col = 0; col < bindRoot.getFieldVectors().size(); col++) {
+ for (int col = 0; col < bindParams.getFieldVectors().size(); col++) {
if (col > 0) {
insert.append(", ");
}
@@ -163,7 +167,7 @@ private UpdateResult executeBulk() throws AdbcException {
}
try {
try {
- statement.setParameters(new NonOwningRoot(bindRoot));
+ statement.setParameters(new NonOwningRoot(bindParams));
statement.executeUpdate();
} finally {
statement.close();
@@ -177,7 +181,7 @@ private UpdateResult executeBulk() throws AdbcException {
}
throw FlightSqlDriverUtil.fromFlightException(e);
}
- return new UpdateResult(bindRoot.getRowCount());
+ return new UpdateResult(bindParams.getRowCount());
}
@FunctionalInterface
@@ -191,13 +195,14 @@ private R execute(
throws AdbcException {
try {
if (preparedStatement != null) {
+ FlightSqlClient.PreparedStatement prepared = preparedStatement;
// TODO: This binds only the LAST row
// See https://lists.apache.org/thread/47zfk3xooojckvfjq2h6ldlqkjrqnsjt
// "[DISC] Flight SQL: clarifying prepared statements with parameters and result sets"
if (bindRoot != null) {
- preparedStatement.setParameters(new NonOwningRoot(bindRoot));
+ prepared.setParameters(new NonOwningRoot(bindRoot));
}
- return doPrepared.execute(preparedStatement);
+ return doPrepared.execute(prepared);
} else {
return doRegular.execute(client);
}
@@ -212,8 +217,8 @@ private FlightInfo executeFlightInfo() throws AdbcException {
} else if (sqlQuery == null) {
throw AdbcException.invalidState("[Flight SQL] Must setSqlQuery() before execute");
}
- return execute(
- FlightSqlClient.PreparedStatement::execute, (client) -> client.execute(sqlQuery));
+ final String query = sqlQuery;
+ return execute(FlightSqlClient.PreparedStatement::execute, (client) -> client.execute(query));
}
@Override
@@ -253,18 +258,20 @@ public Schema executeSchema() throws AdbcException {
} else if (sqlQuery == null) {
throw AdbcException.invalidState("[Flight SQL] Must setSqlQuery() before execute");
}
+ final String query = sqlQuery;
return execute(
FlightSqlClient.PreparedStatement::getResultSetSchema,
- (client) -> client.getExecuteSchema(sqlQuery).getSchema());
+ (client) -> client.getExecuteSchema(query).getSchema());
}
@Override
public UpdateResult executeUpdate() throws AdbcException {
if (bulkOperation != null) {
- return executeBulk();
+ return executeBulk(bulkOperation);
} else if (sqlQuery == null) {
throw AdbcException.invalidState("[Flight SQL] Must setSqlQuery() before executeUpdate");
}
+ final String query = sqlQuery;
long updatedRows =
execute(
(preparedStatement) -> {
@@ -275,7 +282,7 @@ public UpdateResult executeUpdate() throws AdbcException {
throw FlightSqlDriverUtil.fromFlightException(e);
}
},
- (client) -> client.executeUpdate(sqlQuery));
+ (client) -> client.executeUpdate(query));
return new UpdateResult(updatedRows);
}
@@ -303,12 +310,20 @@ public void prepare() throws AdbcException {
@Override
public void close() throws Exception {
- AutoCloseables.close(preparedStatement);
+ // TODO(https://github.com/apache/arrow/issues/39814): this is annotated wrongly upstream
+ if (preparedStatement != null) {
+ AutoCloseables.close(preparedStatement);
+ }
}
private static final class BulkState {
- public BulkIngestMode mode;
+ BulkIngestMode mode;
String targetTable;
+
+ public BulkState(BulkIngestMode mode, String targetTableName) {
+ this.mode = mode;
+ this.targetTable = targetTableName;
+ }
}
/** A VectorSchemaRoot which does not own its data. */
diff --git a/java/driver/flight-sql/src/main/java/org/apache/arrow/adbc/driver/flightsql/InfoMetadataBuilder.java b/java/driver/flight-sql/src/main/java/org/apache/arrow/adbc/driver/flightsql/InfoMetadataBuilder.java
index aef679f63c..063b70665e 100644
--- a/java/driver/flight-sql/src/main/java/org/apache/arrow/adbc/driver/flightsql/InfoMetadataBuilder.java
+++ b/java/driver/flight-sql/src/main/java/org/apache/arrow/adbc/driver/flightsql/InfoMetadataBuilder.java
@@ -38,6 +38,7 @@
import org.apache.arrow.vector.VarCharVector;
import org.apache.arrow.vector.VectorSchemaRoot;
import org.apache.arrow.vector.complex.DenseUnionVector;
+import org.checkerframework.checker.nullness.qual.Nullable;
/** Helper class to track state needed to build up the info structure. */
final class InfoMetadataBuilder implements AutoCloseable {
@@ -45,6 +46,7 @@ final class InfoMetadataBuilder implements AutoCloseable {
private static final Map<Integer, Integer> ADBC_TO_FLIGHT_SQL_CODES = new HashMap<>();
private static final Map<Integer, AddInfo> SUPPORTED_CODES = new HashMap<>();
+ private final BufferAllocator allocator;
private final Collection<Integer> requestedCodes;
private final FlightSqlClientWithCallOptions client;
private VectorSchemaRoot root;
@@ -80,7 +82,10 @@ interface AddInfo {
}
InfoMetadataBuilder(
- BufferAllocator allocator, FlightSqlClientWithCallOptions client, int[] infoCodes) {
+ BufferAllocator allocator,
+ FlightSqlClientWithCallOptions client,
+ int @Nullable [] infoCodes) {
+ this.allocator = allocator;
if (infoCodes == null) {
this.requestedCodes = new ArrayList<>(SUPPORTED_CODES.keySet());
this.requestedCodes.add(AdbcInfoCode.DRIVER_NAME.getValue());
@@ -145,7 +150,12 @@ VectorSchemaRoot build() throws AdbcException {
root.setRowCount(dstIndex);
VectorSchemaRoot result = root;
- root = null;
+ try {
+ root = VectorSchemaRoot.create(StandardSchemas.GET_INFO_SCHEMA, allocator);
+ } catch (RuntimeException e) {
+ result.close();
+ throw e;
+ }
return result;
}
diff --git a/java/driver/jdbc-validation-postgresql/src/test/java/org/apache/arrow/adbc/driver/jdbc/postgresql/PostgreSqlTypeTest.java b/java/driver/jdbc-validation-postgresql/src/test/java/org/apache/arrow/adbc/driver/jdbc/postgresql/PostgreSqlTypeTest.java
index 5b8357d770..5fd8f17c51 100644
--- a/java/driver/jdbc-validation-postgresql/src/test/java/org/apache/arrow/adbc/driver/jdbc/postgresql/PostgreSqlTypeTest.java
+++ b/java/driver/jdbc-validation-postgresql/src/test/java/org/apache/arrow/adbc/driver/jdbc/postgresql/PostgreSqlTypeTest.java
@@ -85,27 +85,27 @@ protected void timestamp4WithoutTimeZoneType() throws Exception {
protected void timestamp3WithoutTimeZoneType() throws Exception {
final Schema schema = connection.getTableSchema(null, null, "adbc_alltypes");
assertThat(schema.findField("timestamp_without_time_zone_p3_t").getType())
- .isEqualTo(new ArrowType.Timestamp(TimeUnit.MICROSECOND, null));
+ .isEqualTo(new ArrowType.Timestamp(TimeUnit.MILLISECOND, null));
}
@Test
protected void timestamp2WithoutTimeZoneType() throws Exception {
final Schema schema = connection.getTableSchema(null, null, "adbc_alltypes");
assertThat(schema.findField("timestamp_without_time_zone_p2_t").getType())
- .isEqualTo(new ArrowType.Timestamp(TimeUnit.MICROSECOND, null));
+ .isEqualTo(new ArrowType.Timestamp(TimeUnit.MILLISECOND, null));
}
@Test
protected void timestamp1WithoutTimeZoneType() throws Exception {
final Schema schema = connection.getTableSchema(null, null, "adbc_alltypes");
assertThat(schema.findField("timestamp_without_time_zone_p1_t").getType())
- .isEqualTo(new ArrowType.Timestamp(TimeUnit.MICROSECOND, null));
+ .isEqualTo(new ArrowType.Timestamp(TimeUnit.MILLISECOND, null));
}
@Test
protected void timestamp0WithoutTimeZoneType() throws Exception {
final Schema schema = connection.getTableSchema(null, null, "adbc_alltypes");
assertThat(schema.findField("timestamp_without_time_zone_p0_t").getType())
- .isEqualTo(new ArrowType.Timestamp(TimeUnit.MICROSECOND, null));
+ .isEqualTo(new ArrowType.Timestamp(TimeUnit.SECOND, null));
}
}
diff --git a/java/driver/jdbc/pom.xml b/java/driver/jdbc/pom.xml
index 6b9eda4afe..5a415c38f1 100644
--- a/java/driver/jdbc/pom.xml
+++ b/java/driver/jdbc/pom.xml
@@ -51,6 +51,12 @@
adbc-sql
+
+ <dependency>
+ <groupId>org.checkerframework</groupId>
+ <artifactId>checker-qual</artifactId>
+ </dependency>
+
org.assertj
diff --git a/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/InfoMetadataBuilder.java b/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/InfoMetadataBuilder.java
index ae5ec226fc..3fcab19c1e 100644
--- a/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/InfoMetadataBuilder.java
+++ b/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/InfoMetadataBuilder.java
@@ -20,6 +20,7 @@
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.SQLException;
+import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
@@ -35,12 +36,14 @@
import org.apache.arrow.vector.VarCharVector;
import org.apache.arrow.vector.VectorSchemaRoot;
import org.apache.arrow.vector.complex.DenseUnionVector;
+import org.checkerframework.checker.nullness.qual.Nullable;
/** Helper class to track state needed to build up the info structure. */
final class InfoMetadataBuilder implements AutoCloseable {
private static final byte STRING_VALUE_TYPE_ID = (byte) 0;
private static final byte BIGINT_VALUE_TYPE_ID = (byte) 2;
private static final Map<Integer, AddInfo> SUPPORTED_CODES = new HashMap<>();
+ private final BufferAllocator allocator;
private final Collection<Integer> requestedCodes;
private final DatabaseMetaData dbmd;
private VectorSchemaRoot root;
@@ -85,12 +88,14 @@ interface AddInfo {
});
}
- InfoMetadataBuilder(BufferAllocator allocator, Connection connection, int[] infoCodes)
+ InfoMetadataBuilder(BufferAllocator allocator, Connection connection, int @Nullable [] infoCodes)
throws SQLException {
- this.requestedCodes =
- infoCodes == null
- ? SUPPORTED_CODES.keySet()
- : IntStream.of(infoCodes).boxed().collect(Collectors.toList());
+ this.allocator = allocator;
+ if (infoCodes == null) {
+ this.requestedCodes = new ArrayList<>(SUPPORTED_CODES.keySet());
+ } else {
+ this.requestedCodes = IntStream.of(infoCodes).boxed().collect(Collectors.toList());
+ }
this.root = VectorSchemaRoot.create(StandardSchemas.GET_INFO_SCHEMA, allocator);
this.dbmd = connection.getMetaData();
this.infoCodes = (UInt4Vector) root.getVector(0);
@@ -130,7 +135,12 @@ VectorSchemaRoot build() throws SQLException {
}
root.setRowCount(rowIndex);
VectorSchemaRoot result = root;
- root = null;
+ try {
+ root = VectorSchemaRoot.create(StandardSchemas.GET_INFO_SCHEMA, allocator);
+ } catch (RuntimeException e) {
+ result.close();
+ throw e;
+ }
return result;
}
diff --git a/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/JdbcArrowReader.java b/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/JdbcArrowReader.java
index aba972a9a2..5d53e46162 100644
--- a/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/JdbcArrowReader.java
+++ b/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/JdbcArrowReader.java
@@ -32,6 +32,7 @@
import org.apache.arrow.vector.ipc.ArrowReader;
import org.apache.arrow.vector.ipc.message.ArrowRecordBatch;
import org.apache.arrow.vector.types.pojo.Schema;
+import org.checkerframework.checker.nullness.qual.Nullable;
/** An ArrowReader that wraps a JDBC ResultSet. */
public class JdbcArrowReader extends ArrowReader {
@@ -39,7 +40,9 @@ public class JdbcArrowReader extends ArrowReader {
private final Schema schema;
private long bytesRead;
- JdbcArrowReader(BufferAllocator allocator, ResultSet resultSet, Schema overrideSchema)
+ // ensureInitialized() call isn't annotated right
+ @SuppressWarnings({"under.initialization", "method.invocation"})
+ JdbcArrowReader(BufferAllocator allocator, ResultSet resultSet, @Nullable Schema overrideSchema)
throws AdbcException {
super(allocator);
final JdbcToArrowConfig config = makeJdbcConfig(allocator);
diff --git a/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/JdbcBindReader.java b/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/JdbcBindReader.java
index 167d3f2ddc..0875b610c9 100644
--- a/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/JdbcBindReader.java
+++ b/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/JdbcBindReader.java
@@ -31,13 +31,14 @@
import org.apache.arrow.vector.ipc.ArrowReader;
import org.apache.arrow.vector.ipc.message.ArrowRecordBatch;
import org.apache.arrow.vector.types.pojo.Schema;
+import org.checkerframework.checker.nullness.qual.Nullable;
/** An Arrow reader that binds parameters. */
final class JdbcBindReader extends ArrowReader {
private final PreparedStatement statement;
private final JdbcParameterBinder binder;
- private ResultSet currentResultSet;
- private ArrowVectorIterator currentSource;
+ private @Nullable ResultSet currentResultSet;
+ private @Nullable ArrowVectorIterator currentSource;
JdbcBindReader(
BufferAllocator allocator, PreparedStatement statement, VectorSchemaRoot bindParameters) {
@@ -53,6 +54,9 @@ public boolean loadNextBatch() throws IOException {
return false;
}
}
+ if (currentSource == null) {
+ throw new IllegalStateException("Source was null after advancing reader");
+ }
try (final VectorSchemaRoot root = currentSource.next()) {
try (final ArrowRecordBatch batch = new VectorUnloader(root).getRecordBatch()) {
@@ -85,6 +89,9 @@ protected Schema readSchema() throws IOException {
if (!advance()) {
throw new IOException("Parameter set is empty!");
}
+ if (currentResultSet == null) {
+ throw new IllegalStateException("Driver returned null result set");
+ }
return JdbcToArrowUtils.jdbcToArrowSchema(
currentResultSet.getMetaData(), JdbcToArrowUtils.getUtcCalendar());
} catch (SQLException e) {
@@ -95,8 +102,10 @@ protected Schema readSchema() throws IOException {
private boolean advance() throws IOException {
try {
if (binder.next()) {
- if (currentResultSet != null) {
+ if (currentSource != null) {
currentSource.close();
+ }
+ if (currentResultSet != null) {
currentResultSet.close();
}
currentResultSet = statement.executeQuery();
diff --git a/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/JdbcConnection.java b/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/JdbcConnection.java
index 8f66c154fa..2987815d14 100644
--- a/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/JdbcConnection.java
+++ b/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/JdbcConnection.java
@@ -21,6 +21,7 @@
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
+import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -49,6 +50,7 @@
import org.apache.arrow.vector.types.pojo.FieldType;
import org.apache.arrow.vector.types.pojo.Schema;
import org.apache.arrow.vector.util.Text;
+import org.checkerframework.checker.nullness.qual.Nullable;
public class JdbcConnection implements AdbcConnection {
private final BufferAllocator allocator;
@@ -90,7 +92,7 @@ public AdbcStatement bulkIngest(String targetTableName, BulkIngestMode mode)
}
@Override
- public ArrowReader getInfo(int[] infoCodes) throws AdbcException {
+ public ArrowReader getInfo(int @Nullable [] infoCodes) throws AdbcException {
try {
try (final VectorSchemaRoot root =
new InfoMetadataBuilder(allocator, connection, infoCodes).build()) {
@@ -134,8 +136,15 @@ static final class Statistic {
short key;
long value;
boolean multiColumn = false;
+
+ public Statistic(String table, String column) {
+ this.table = table;
+ this.column = column;
+ }
}
+ // We use null keys intentionally with HashMap, but the annotations don't technically allow this
+ @SuppressWarnings("argument")
@Override
public ArrowReader getStatistics(
String catalogPattern, String dbSchemaPattern, String tableNamePattern, boolean approximate)
@@ -163,7 +172,7 @@ public ArrowReader getStatistics(
Map<String, Map<String, Map<String, Statistic>>> allStatistics = new HashMap<>();
while (rs.next()) {
- String catalog = rs.getString(1);
+ @Nullable String catalog = rs.getString(1);
String schema = rs.getString(2);
String table = rs.getString(3);
String index = rs.getString(6);
@@ -171,6 +180,15 @@ public ArrowReader getStatistics(
String column = rs.getString(9);
long cardinality = rs.getLong(11);
+ if (table == null || column == null) {
+ throw new AdbcException(
+ JdbcDriverUtil.prefixExceptionMessage("JDBC driver returned null table/column name"),
+ null,
+ AdbcStatusCode.INTERNAL,
+ null,
+ 0);
+ }
+
if (!allStatistics.containsKey(catalog)) {
allStatistics.put(catalog, new HashMap<>());
}
@@ -181,7 +199,8 @@ public ArrowReader getStatistics(
}
Map schemaStats = catalogStats.get(schema);
- Statistic statistic = schemaStats.getOrDefault(index, new Statistic());
+ Statistic statistic = schemaStats.getOrDefault(index, new Statistic(table, column));
+ assert statistic != null; // for checker-framework
if (schemaStats.containsKey(index)) {
// Multi-column index, ignore it
statistic.multiColumn = true;
@@ -315,7 +334,10 @@ public Schema getTableSchema(String catalog, String dbSchema, String tableName)
.getMetaData()
.getColumns(catalog, dbSchema, tableName, /*columnNamePattern*/ null)) {
while (rs.next()) {
- final String fieldName = rs.getString("COLUMN_NAME");
+ @Nullable String fieldName = rs.getString("COLUMN_NAME");
+ if (fieldName == null) {
+ fieldName = "";
+ }
final JdbcFieldInfoExtra fieldInfoExtra = new JdbcFieldInfoExtra(rs);
final ArrowType arrowType = quirks.getTypeConverter().apply(fieldInfoExtra);
@@ -329,12 +351,10 @@ public Schema getTableSchema(String catalog, String dbSchema, String tableName)
final Field field =
new Field(
fieldName,
- new FieldType(
- fieldInfoExtra.isNullable() != DatabaseMetaData.columnNoNulls,
- arrowType, /*dictionary*/
- null, /*metadata*/
- null),
- /*children*/ null);
+ fieldInfoExtra.isNullable() == DatabaseMetaData.columnNoNulls
+ ? FieldType.notNullable(arrowType)
+ : FieldType.nullable(arrowType),
+ Collections.emptyList());
fields.add(field);
}
} catch (SQLException e) {
diff --git a/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/JdbcDataSourceDatabase.java b/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/JdbcDataSourceDatabase.java
index 6348e05c9c..2f6b7154cb 100644
--- a/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/JdbcDataSourceDatabase.java
+++ b/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/JdbcDataSourceDatabase.java
@@ -26,22 +26,23 @@
import org.apache.arrow.adbc.core.AdbcDatabase;
import org.apache.arrow.adbc.core.AdbcException;
import org.apache.arrow.memory.BufferAllocator;
+import org.checkerframework.checker.nullness.qual.Nullable;
/** An instance of a database based on a {@link DataSource}. */
public final class JdbcDataSourceDatabase implements AdbcDatabase {
private final BufferAllocator allocator;
private final DataSource dataSource;
- private final String username;
- private final String password;
+ private final @Nullable String username;
+ private final @Nullable String password;
private final JdbcQuirks quirks;
private final AtomicInteger counter;
- private Connection connection;
+ private @Nullable Connection connection;
JdbcDataSourceDatabase(
BufferAllocator allocator,
DataSource dataSource,
- String username,
- String password,
+ @Nullable String username,
+ @Nullable String password,
JdbcQuirks quirks)
throws AdbcException {
this.allocator = Objects.requireNonNull(allocator);
@@ -55,12 +56,13 @@ public final class JdbcDataSourceDatabase implements AdbcDatabase {
@Override
public AdbcConnection connect() throws AdbcException {
+ @Nullable Connection conn = this.connection;
try {
- if (connection == null) {
+ if (conn == null) {
if (username != null && password != null) {
- connection = dataSource.getConnection(username, password);
+ conn = this.connection = dataSource.getConnection(username, password);
} else {
- connection = dataSource.getConnection();
+ conn = this.connection = dataSource.getConnection();
}
}
} catch (SQLException e) {
@@ -70,7 +72,7 @@ public AdbcConnection connect() throws AdbcException {
return new JdbcConnection(
allocator.newChildAllocator(
"adbc-jdbc-datasource-connection-" + count, 0, allocator.getLimit()),
- connection,
+ conn,
quirks);
}
diff --git a/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/JdbcDriver.java b/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/JdbcDriver.java
index 14d4f0bee3..6d5fd74f3d 100644
--- a/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/JdbcDriver.java
+++ b/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/JdbcDriver.java
@@ -25,13 +25,16 @@
import org.apache.arrow.adbc.core.AdbcException;
import org.apache.arrow.adbc.sql.SqlQuirks;
import org.apache.arrow.memory.BufferAllocator;
+import org.checkerframework.checker.nullness.qual.Nullable;
/** An ADBC driver wrapping the JDBC API. */
public class JdbcDriver implements AdbcDriver {
/** A parameter for creating an {@link AdbcDatabase} from a {@link DataSource}. */
public static final String PARAM_DATASOURCE = "adbc.jdbc.datasource";
+
/** A parameter for specifying backend-specific configuration (type: {@link JdbcQuirks}). */
public static final String PARAM_JDBC_QUIRKS = "adbc.jdbc.quirks";
+
/**
* A parameter for specifying a URI to connect to.
*
@@ -85,8 +88,8 @@ public AdbcDatabase open(Map parameters) throws AdbcException {
return new JdbcDataSourceDatabase(allocator, dataSource, username, password, jdbcQuirks);
}
- private static <T> T getParam(Class<T> klass, Map<String, Object> parameters, String... choices)
- throws AdbcException {
+ private static <T> @Nullable T getParam(
+ Class<T> klass, Map<String, Object> parameters, String... choices) throws AdbcException {
Object result = null;
for (String choice : choices) {
Object value = parameters.get(choice);
diff --git a/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/JdbcDriverUtil.java b/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/JdbcDriverUtil.java
index b2b65d7744..c4f3927726 100644
--- a/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/JdbcDriverUtil.java
+++ b/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/JdbcDriverUtil.java
@@ -23,6 +23,7 @@
import java.util.Set;
import org.apache.arrow.adbc.core.AdbcException;
import org.apache.arrow.adbc.core.AdbcStatusCode;
+import org.checkerframework.checker.nullness.qual.Nullable;
final class JdbcDriverUtil {
// Do our best to properly map database-specific errors to NOT_FOUND status.
@@ -41,11 +42,14 @@ private JdbcDriverUtil() {
throw new AssertionError("Do not instantiate this class");
}
- static String prefixExceptionMessage(final String s) {
+ static String prefixExceptionMessage(final @Nullable String s) {
+ if (s == null) {
+ return "[JDBC] (No or unknown error)";
+ }
return "[JDBC] " + s;
}
- static AdbcStatusCode guessStatusCode(String sqlState) {
+ static AdbcStatusCode guessStatusCode(@Nullable String sqlState) {
if (sqlState == null) {
return AdbcStatusCode.UNKNOWN;
} else if (SQLSTATE_TABLE_NOT_FOUND.contains(sqlState)) {
@@ -56,10 +60,11 @@ static AdbcStatusCode guessStatusCode(String sqlState) {
static AdbcException fromSqlException(SQLException e) {
// Unwrap an unknown exception with a known cause inside of it
- if (isUnknown(e)
- && e.getCause() instanceof SQLException
- && !isUnknown((SQLException) e.getCause())) {
- return fromSqlException((SQLException) e.getCause());
+ if (isUnknown(e)) {
+ final Throwable cause = e.getCause();
+ if (cause instanceof SQLException && !isUnknown((SQLException) cause)) {
+ return fromSqlException((SQLException) cause);
+ }
}
// Only JDBC-prefix the message if it is actually JDBC specific
diff --git a/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/JdbcQuirks.java b/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/JdbcQuirks.java
index 8702c06f56..39a9e32773 100644
--- a/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/JdbcQuirks.java
+++ b/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/JdbcQuirks.java
@@ -20,12 +20,13 @@
import org.apache.arrow.adapter.jdbc.JdbcToArrowUtils;
import org.apache.arrow.adbc.driver.jdbc.adapter.JdbcToArrowTypeConverter;
import org.apache.arrow.adbc.sql.SqlQuirks;
+import org.checkerframework.checker.nullness.qual.Nullable;
/** Backend-specific quirks for the JDBC adapter. */
public class JdbcQuirks {
final String backendName;
JdbcToArrowTypeConverter typeConverter;
- SqlQuirks sqlQuirks;
+ @Nullable SqlQuirks sqlQuirks;
public JdbcQuirks(String backendName) {
this.backendName = Objects.requireNonNull(backendName);
@@ -45,7 +46,7 @@ public JdbcToArrowTypeConverter getTypeConverter() {
}
/** The SQL syntax quirks. */
- public SqlQuirks getSqlQuirks() {
+ public @Nullable SqlQuirks getSqlQuirks() {
return sqlQuirks;
}
diff --git a/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/JdbcStatement.java b/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/JdbcStatement.java
index fd39e6d08b..339c31adc1 100644
--- a/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/JdbcStatement.java
+++ b/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/JdbcStatement.java
@@ -22,6 +22,7 @@
import java.sql.ParameterMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
@@ -36,6 +37,7 @@
import org.apache.arrow.adbc.core.AdbcStatement;
import org.apache.arrow.adbc.core.AdbcStatusCode;
import org.apache.arrow.adbc.core.BulkIngestMode;
+import org.apache.arrow.adbc.sql.SqlQuirks;
import org.apache.arrow.memory.BufferAllocator;
import org.apache.arrow.util.AutoCloseables;
import org.apache.arrow.vector.VectorSchemaRoot;
@@ -43,6 +45,7 @@
import org.apache.arrow.vector.types.pojo.ArrowType;
import org.apache.arrow.vector.types.pojo.Field;
import org.apache.arrow.vector.types.pojo.Schema;
+import org.checkerframework.checker.nullness.qual.Nullable;
public class JdbcStatement implements AdbcStatement {
private final BufferAllocator allocator;
@@ -50,13 +53,13 @@ public class JdbcStatement implements AdbcStatement {
private final JdbcQuirks quirks;
// State for SQL queries
- private Statement statement;
- private String sqlQuery;
- private ArrowReader reader;
- private ResultSet resultSet;
+ private @Nullable Statement statement;
+ private @Nullable String sqlQuery;
+ private @Nullable ArrowReader reader;
+ private @Nullable ResultSet resultSet;
// State for bulk ingest
- private BulkState bulkOperation;
- private VectorSchemaRoot bindRoot;
+ private @Nullable BulkState bulkOperation;
+ private @Nullable VectorSchemaRoot bindRoot;
JdbcStatement(BufferAllocator allocator, Connection connection, JdbcQuirks quirks) {
this.allocator = allocator;
@@ -73,7 +76,5 @@ static JdbcStatement ingestRoot(
BulkIngestMode mode) {
Objects.requireNonNull(targetTableName);
final JdbcStatement statement = new JdbcStatement(allocator, connection, quirks);
- statement.bulkOperation = new BulkState();
- statement.bulkOperation.mode = mode;
- statement.bulkOperation.targetTable = targetTableName;
+ statement.bulkOperation = new BulkState(mode, targetTableName);
return statement;
@@ -93,7 +96,8 @@ public void bind(VectorSchemaRoot root) {
bindRoot = root;
}
- private void createBulkTable() throws AdbcException {
+ private void createBulkTable(BulkState bulkOperation, VectorSchemaRoot bindRoot)
+ throws AdbcException {
final StringBuilder create = new StringBuilder("CREATE TABLE ");
create.append(bulkOperation.targetTable);
create.append(" (");
@@ -104,7 +108,13 @@ private void createBulkTable() throws AdbcException {
final Field field = bindRoot.getVector(col).getField();
create.append(field.getName());
create.append(' ');
- String typeName = quirks.getSqlQuirks().getArrowToSqlTypeNameMapping().apply(field.getType());
+ @Nullable SqlQuirks sqlQuirks = quirks.getSqlQuirks();
+ if (sqlQuirks == null) {
+ throw AdbcException.invalidState(
+ JdbcDriverUtil.prefixExceptionMessage(
+ "Must create driver with SqlQuirks to use bulk ingestion"));
+ }
+ String typeName = sqlQuirks.getArrowToSqlTypeNameMapping().apply(field.getType());
if (typeName == null) {
throw AdbcException.notImplemented(
"[JDBC] Cannot generate CREATE TABLE statement for field " + field);
@@ -124,13 +134,14 @@ private void createBulkTable() throws AdbcException {
}
}
- private UpdateResult executeBulk() throws AdbcException {
+ private UpdateResult executeBulk(BulkState bulkOperation) throws AdbcException {
if (bindRoot == null) {
throw AdbcException.invalidState("[JDBC] Must call bind() before bulk insert");
}
+ VectorSchemaRoot bind = bindRoot;
if (bulkOperation.mode == BulkIngestMode.CREATE) {
- createBulkTable();
+ createBulkTable(bulkOperation, bind);
}
// XXX: potential injection
@@ -138,7 +149,7 @@ private UpdateResult executeBulk() throws AdbcException {
final StringBuilder insert = new StringBuilder("INSERT INTO ");
insert.append(bulkOperation.targetTable);
insert.append(" VALUES (");
- for (int col = 0; col < bindRoot.getFieldVectors().size(); col++) {
+ for (int col = 0; col < bind.getFieldVectors().size(); col++) {
if (col > 0) {
insert.append(", ");
}
@@ -156,7 +167,7 @@ private UpdateResult executeBulk() throws AdbcException {
try {
try {
final JdbcParameterBinder binder =
- JdbcParameterBinder.builder(statement, bindRoot).bindAll().build();
+ JdbcParameterBinder.builder(statement, bind).bindAll().build();
statement.clearBatch();
while (binder.next()) {
statement.addBatch();
@@ -168,7 +179,7 @@ private UpdateResult executeBulk() throws AdbcException {
} catch (SQLException e) {
throw JdbcDriverUtil.fromSqlException(e);
}
- return new UpdateResult(bindRoot.getRowCount());
+ return new UpdateResult(bind.getRowCount());
}
private void invalidatePriorQuery() throws AdbcException {
@@ -202,10 +213,12 @@ private void invalidatePriorQuery() throws AdbcException {
@Override
public UpdateResult executeUpdate() throws AdbcException {
if (bulkOperation != null) {
- return executeBulk();
+ return executeBulk(bulkOperation);
} else if (sqlQuery == null) {
throw AdbcException.invalidState("[JDBC] Must setSqlQuery() first");
}
+ String query = sqlQuery;
+
long affectedRows = 0;
try {
invalidatePriorQuery();
@@ -226,7 +239,7 @@ public UpdateResult executeUpdate() throws AdbcException {
statement =
connection.createStatement(
ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
- affectedRows = statement.executeUpdate(sqlQuery);
+ affectedRows = statement.executeUpdate(query);
}
} catch (SQLException e) {
throw JdbcDriverUtil.fromSqlException(e);
@@ -241,6 +254,8 @@ public QueryResult executeQuery() throws AdbcException {
} else if (sqlQuery == null) {
throw AdbcException.invalidState("[JDBC] Must setSqlQuery() first");
}
+ String query = sqlQuery;
+
try {
invalidatePriorQuery();
if (statement instanceof PreparedStatement) {
@@ -255,13 +270,13 @@ public QueryResult executeQuery() throws AdbcException {
statement =
connection.createStatement(
ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
- resultSet = statement.executeQuery(sqlQuery);
+ resultSet = statement.executeQuery(query);
reader = new JdbcArrowReader(allocator, resultSet, /*overrideSchema*/ null);
}
} catch (SQLException e) {
throw JdbcDriverUtil.fromSqlException(e);
}
- return new QueryResult(/*affectedRows=*/ -1, reader);
+ return new QueryResult(/* affectedRows */ -1, reader);
}
@Override
@@ -271,6 +286,8 @@ public Schema executeSchema() throws AdbcException {
} else if (sqlQuery == null) {
throw AdbcException.invalidState("[JDBC] Must setSqlQuery() first");
}
+ String query = sqlQuery;
+
try {
invalidatePriorQuery();
final PreparedStatement preparedStatement;
@@ -283,13 +300,21 @@ public Schema executeSchema() throws AdbcException {
ownedStatement = null;
} else {
// new statement
- preparedStatement = connection.prepareStatement(sqlQuery);
+ preparedStatement = connection.prepareStatement(query);
ownedStatement = preparedStatement;
}
final JdbcToArrowConfig config = JdbcArrowReader.makeJdbcConfig(allocator);
- final Schema schema =
- JdbcToArrowUtils.jdbcToArrowSchema(preparedStatement.getMetaData(), config);
+ @Nullable ResultSetMetaData rsmd = preparedStatement.getMetaData();
+ if (rsmd == null) {
+ throw new AdbcException(
+ JdbcDriverUtil.prefixExceptionMessage("JDBC driver returned null ResultSetMetaData"),
+ /*cause*/ null,
+ AdbcStatusCode.INTERNAL,
+ null,
+ 0);
+ }
+ final Schema schema = JdbcToArrowUtils.jdbcToArrowSchema(rsmd, config);
if (ownedStatement != null) {
ownedStatement.close();
}
@@ -332,12 +357,14 @@ public void prepare() throws AdbcException {
if (sqlQuery == null) {
throw AdbcException.invalidArgument("[JDBC] Must setSqlQuery(String) before prepare()");
}
+ String query = sqlQuery;
if (resultSet != null) {
resultSet.close();
}
+
statement =
connection.prepareStatement(
- sqlQuery, ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
+ query, ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
} catch (SQLException e) {
throw JdbcDriverUtil.fromSqlException(e);
}
@@ -349,7 +376,12 @@ public void close() throws Exception {
}
private static final class BulkState {
- public BulkIngestMode mode;
+ BulkIngestMode mode;
String targetTable;
+
+ BulkState(BulkIngestMode mode, String targetTable) {
+ this.mode = mode;
+ this.targetTable = targetTable;
+ }
}
}
diff --git a/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/ObjectMetadataBuilder.java b/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/ObjectMetadataBuilder.java
index 909aec59ec..3fefc48369 100644
--- a/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/ObjectMetadataBuilder.java
+++ b/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/ObjectMetadataBuilder.java
@@ -29,8 +29,9 @@
import java.util.function.Predicate;
import java.util.regex.Pattern;
import org.apache.arrow.adbc.core.AdbcConnection;
+import org.apache.arrow.adbc.core.AdbcException;
+import org.apache.arrow.adbc.core.AdbcStatusCode;
import org.apache.arrow.adbc.core.StandardSchemas;
-import org.apache.arrow.memory.ArrowBuf;
import org.apache.arrow.memory.BufferAllocator;
import org.apache.arrow.util.AutoCloseables;
import org.apache.arrow.vector.IntVector;
@@ -43,6 +44,7 @@
import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter;
import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter;
import org.apache.arrow.vector.complex.writer.VarCharWriter;
+import org.checkerframework.checker.nullness.qual.Nullable;
/** Helper class to track state needed to build up the object metadata structure. */
final class ObjectMetadataBuilder implements AutoCloseable {
@@ -139,11 +141,19 @@ final class ObjectMetadataBuilder implements AutoCloseable {
this.constraintColumnUsageStructWriter.varChar("fk_column_name");
}
- VectorSchemaRoot build() throws SQLException {
+ VectorSchemaRoot build() throws AdbcException, SQLException {
try (final ResultSet rs = dbmd.getCatalogs()) {
int catalogCount = 0;
while (rs.next()) {
final String catalogName = rs.getString(1);
+ if (catalogName == null) {
+ throw new AdbcException(
+ JdbcDriverUtil.prefixExceptionMessage("JDBC driver returned null catalog name"),
+ null,
+ AdbcStatusCode.INVALID_DATA,
+ null,
+ 0);
+ }
if (!catalogPattern.test(catalogName)) continue;
addCatalogRow(catalogCount, catalogName);
catalogCount++;
@@ -156,11 +166,16 @@ VectorSchemaRoot build() throws SQLException {
root.setRowCount(catalogCount);
}
VectorSchemaRoot result = root;
- root = null;
+ try {
+ root = VectorSchemaRoot.create(StandardSchemas.GET_OBJECTS_SCHEMA, allocator);
+ } catch (RuntimeException e) {
+ result.close();
+ throw e;
+ }
return result;
}
- private void addCatalogRow(int rowIndex, String catalogName) throws SQLException {
+ private void addCatalogRow(int rowIndex, String catalogName) throws AdbcException, SQLException {
catalogNames.setSafe(rowIndex, catalogName.getBytes(StandardCharsets.UTF_8));
if (depth == AdbcConnection.GetObjectsDepth.CATALOGS) {
catalogDbSchemas.setNull(rowIndex);
@@ -171,12 +186,20 @@ private void addCatalogRow(int rowIndex, String catalogName) throws SQLException
}
}
- private int buildDbSchemas(int rowIndex, String catalogName) throws SQLException {
+ private int buildDbSchemas(int rowIndex, String catalogName) throws AdbcException, SQLException {
int dbSchemaCount = 0;
// TODO: get tables with no schema
try (final ResultSet rs = dbmd.getSchemas(catalogName, dbSchemaPattern)) {
while (rs.next()) {
final String dbSchemaName = rs.getString(1);
+ if (dbSchemaName == null) {
+ throw new AdbcException(
+ JdbcDriverUtil.prefixExceptionMessage("JDBC driver returned null schema name"),
+ null,
+ AdbcStatusCode.INVALID_DATA,
+ null,
+ 0);
+ }
addDbSchemaRow(rowIndex + dbSchemaCount, catalogName, dbSchemaName);
dbSchemaCount++;
}
@@ -185,7 +208,7 @@ private int buildDbSchemas(int rowIndex, String catalogName) throws SQLException
}
private void addDbSchemaRow(int rowIndex, String catalogName, String dbSchemaName)
- throws SQLException {
+ throws AdbcException, SQLException {
dbSchemas.setIndexDefined(rowIndex);
dbSchemaNames.setSafe(rowIndex, dbSchemaName.getBytes(StandardCharsets.UTF_8));
if (depth == AdbcConnection.GetObjectsDepth.DB_SCHEMAS) {
@@ -198,14 +221,23 @@ private void addDbSchemaRow(int rowIndex, String catalogName, String dbSchemaNam
}
private int buildTables(int rowIndex, String catalogName, String dbSchemaName)
- throws SQLException {
+ throws AdbcException, SQLException {
int tableCount = 0;
try (final ResultSet rs =
dbmd.getTables(catalogName, dbSchemaName, tableNamePattern, tableTypesFilter)) {
while (rs.next()) {
- final String tableName = rs.getString(3);
- final String tableType = rs.getString(4);
+ final @Nullable String tableName = rs.getString(3);
+ final @Nullable String tableType = rs.getString(4);
+ if (tableName == null || tableType == null) {
+ throw new AdbcException(
+ JdbcDriverUtil.prefixExceptionMessage("JDBC driver returned null table name/type"),
+ null,
+ AdbcStatusCode.INTERNAL,
+ null,
+ 0);
+ }
+
tables.setIndexDefined(rowIndex + tableCount);
tableNames.setSafe(rowIndex + tableCount, tableName.getBytes(StandardCharsets.UTF_8));
tableTypes.setSafe(rowIndex + tableCount, tableType.getBytes(StandardCharsets.UTF_8));
@@ -216,7 +248,7 @@ private int buildTables(int rowIndex, String catalogName, String dbSchemaName)
// 1. Primary keys
try (final ResultSet pk = dbmd.getPrimaryKeys(catalogName, dbSchemaName, tableName)) {
String constraintName = null;
- List<String> constraintColumns = new ArrayList<>();
+ List<@Nullable String> constraintColumns = new ArrayList<>();
while (pk.next()) {
constraintName = pk.getString(6);
String columnName = pk.getString(4);
@@ -232,8 +264,8 @@ private int buildTables(int rowIndex, String catalogName, String dbSchemaName)
// 2. Foreign keys ("imported" keys)
try (final ResultSet fk = dbmd.getImportedKeys(catalogName, dbSchemaName, tableName)) {
- List<String> names = new ArrayList<>();
- List<List<String>> columns = new ArrayList<>();
+ List<@Nullable String> names = new ArrayList<>();
+ List<List<@Nullable String>> columns = new ArrayList<>();
List<List<ReferencedColumn>> references = new ArrayList<>();
while (fk.next()) {
String keyName = fk.getString(12);
@@ -245,11 +277,19 @@ private int buildTables(int rowIndex, String catalogName, String dbSchemaName)
references.add(new ArrayList<>());
}
columns.get(columns.size() - 1).add(keyColumn);
- final ReferencedColumn reference = new ReferencedColumn();
- reference.catalog = fk.getString(1);
- reference.dbSchema = fk.getString(2);
- reference.table = fk.getString(3);
- reference.column = fk.getString(4);
+ final @Nullable String fkTableName = fk.getString(3);
+ final @Nullable String fkColumnName = fk.getString(4);
+ if (fkTableName == null || fkColumnName == null) {
+ throw new AdbcException(
+ JdbcDriverUtil.prefixExceptionMessage(
+ "JDBC driver returned null table/column name"),
+ null,
+ AdbcStatusCode.INTERNAL,
+ null,
+ 0);
+ }
+ final ReferencedColumn reference =
+ new ReferencedColumn(fk.getString(1), fk.getString(2), fkTableName, fkColumnName);
references.get(references.size() - 1).add(reference);
}
@@ -261,16 +301,26 @@ private int buildTables(int rowIndex, String catalogName, String dbSchemaName)
// 3. UNIQUE constraints
try (final ResultSet uq =
dbmd.getIndexInfo(catalogName, dbSchemaName, tableName, true, false)) {
- Map<String, ArrayList<String>> uniqueConstraints = new HashMap<>();
+ Map<String, ArrayList<@Nullable String>> uniqueConstraints = new HashMap<>();
while (uq.next()) {
- String constraintName = uq.getString(6);
- String columnName = uq.getString(9);
+ @Nullable String constraintName = uq.getString(6);
+ @Nullable String columnName = uq.getString(9);
int columnIndex = uq.getInt(8);
+ if (constraintName == null || columnName == null) {
+ throw new AdbcException(
+ JdbcDriverUtil.prefixExceptionMessage(
+ "JDBC driver returned null constraint/column name"),
+ null,
+ AdbcStatusCode.INTERNAL,
+ null,
+ 0);
+ }
+
if (!uniqueConstraints.containsKey(constraintName)) {
uniqueConstraints.put(constraintName, new ArrayList<>());
}
- ArrayList<String> uniqueColumns = uniqueConstraints.get(constraintName);
+ ArrayList<@Nullable String> uniqueColumns = uniqueConstraints.get(constraintName);
while (uniqueColumns.size() < columnIndex) uniqueColumns.add(null);
uniqueColumns.set(columnIndex - 1, columnName);
}
@@ -299,14 +349,22 @@ private int buildTables(int rowIndex, String catalogName, String dbSchemaName)
}
private int buildColumns(int rowIndex, String catalogName, String dbSchemaName, String tableName)
- throws SQLException {
+ throws AdbcException, SQLException {
int columnCount = 0;
try (final ResultSet rs =
dbmd.getColumns(catalogName, dbSchemaName, tableName, columnNamePattern)) {
while (rs.next()) {
- final String columnName = rs.getString(4);
+ final @Nullable String columnName = rs.getString(4);
+ if (columnName == null) {
+ throw new AdbcException(
+ JdbcDriverUtil.prefixExceptionMessage("JDBC driver returned null column name"),
+ null,
+ AdbcStatusCode.INTERNAL,
+ null,
+ 0);
+ }
final int ordinalPosition = rs.getInt(17);
- final String remarks = rs.getString(12);
+ final @Nullable String remarks = rs.getString(12);
final int xdbcDataType = rs.getInt(5);
// TODO: other JDBC metadata
@@ -324,27 +382,28 @@ private int buildColumns(int rowIndex, String catalogName, String dbSchemaName,
return columnCount;
}
- private void writeVarChar(VarCharWriter writer, String value) {
- byte[] bytes = value.getBytes(StandardCharsets.UTF_8);
- try (ArrowBuf tempBuf = allocator.buffer(bytes.length)) {
- tempBuf.setBytes(0, bytes, 0, bytes.length);
- writer.writeVarChar(0, bytes.length, tempBuf);
- }
- }
-
private void addConstraint(
- String constraintName,
+ @Nullable String constraintName,
String constraintType,
- List<String> constraintColumns,
+ List<@Nullable String> constraintColumns,
List<ReferencedColumn> referencedColumns) {
tableConstraintsStructWriter.start();
- writeVarChar(this.constraintNamesWriter, constraintName);
- writeVarChar(this.constraintTypesWriter, constraintType);
+ if (constraintName == null) {
+ this.constraintNamesWriter.writeNull();
+ } else {
+ this.constraintNamesWriter.writeVarChar(constraintName);
+ }
+ this.constraintTypesWriter.writeVarChar(constraintType);
constraintColumnNamesWriter.startList();
- for (final String constraintColumn : constraintColumns) {
- writeVarChar(constraintColumnNamesWriter.varChar(), constraintColumn);
+ for (final @Nullable String constraintColumn : constraintColumns) {
+ VarCharWriter writer = constraintColumnNamesWriter.varChar();
+ if (constraintColumn == null) {
+ writer.writeNull();
+ } else {
+ writer.writeVarChar(constraintColumn);
+ }
}
constraintColumnNamesWriter.endList();
@@ -352,11 +411,17 @@ private void addConstraint(
for (ReferencedColumn referencedColumn : referencedColumns) {
constraintColumnUsageStructWriter.start();
if (referencedColumn.catalog != null) {
- writeVarChar(constraintColumnUsageFkCatalogsWriter, referencedColumn.catalog);
+ constraintColumnUsageFkCatalogsWriter.writeVarChar(referencedColumn.catalog);
+ } else {
+ constraintColumnUsageFkCatalogsWriter.writeNull();
+ }
+ if (referencedColumn.dbSchema != null) {
+ constraintColumnUsageFkDbSchemasWriter.writeVarChar(referencedColumn.dbSchema);
+ } else {
+ constraintColumnUsageFkDbSchemasWriter.writeNull();
}
- writeVarChar(constraintColumnUsageFkDbSchemasWriter, referencedColumn.dbSchema);
- writeVarChar(constraintColumnUsageFkTablesWriter, referencedColumn.table);
- writeVarChar(constraintColumnUsageFkColumnsWriter, referencedColumn.column);
+ constraintColumnUsageFkTablesWriter.writeVarChar(referencedColumn.table);
+ constraintColumnUsageFkColumnsWriter.writeVarChar(referencedColumn.column);
constraintColumnUsageStructWriter.end();
}
constraintColumnUsageWriter.endList();
@@ -370,7 +435,7 @@ public void close() throws Exception {
}
/** Turn a SQL-style pattern (%, _) to a regex. */
- String translatePattern(String filter) {
+ static String translatePattern(String filter) {
StringBuilder builder = new StringBuilder(filter.length());
builder.append("^");
for (char c : filter.toCharArray()) {
@@ -387,9 +452,26 @@ String translatePattern(String filter) {
}
static class ReferencedColumn {
- String catalog;
- String dbSchema;
+ @Nullable String catalog;
+ @Nullable String dbSchema;
String table;
String column;
+
+ public ReferencedColumn(
+ @Nullable String catalog, @Nullable String dbSchema, String table, String column)
+ throws AdbcException {
+ this.catalog = catalog;
+ this.dbSchema = dbSchema;
+ if (table == null || column == null) {
+ throw new AdbcException(
+ JdbcDriverUtil.prefixExceptionMessage("JDBC driver returned null table/column name"),
+ null,
+ AdbcStatusCode.INTERNAL,
+ null,
+ 0);
+ }
+ this.table = table;
+ this.column = column;
+ }
}
}
diff --git a/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/StandardJdbcQuirks.java b/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/StandardJdbcQuirks.java
index e87283cb4e..d4c55566cb 100644
--- a/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/StandardJdbcQuirks.java
+++ b/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/StandardJdbcQuirks.java
@@ -16,17 +16,15 @@
*/
package org.apache.arrow.adbc.driver.jdbc;
-import java.sql.Types;
-import org.apache.arrow.adapter.jdbc.JdbcToArrowUtils;
-import org.apache.arrow.adbc.driver.jdbc.adapter.JdbcFieldInfoExtra;
+import org.apache.arrow.adbc.driver.jdbc.adapter.JdbcToArrowTypeConverters;
import org.apache.arrow.adbc.sql.SqlQuirks;
-import org.apache.arrow.vector.types.TimeUnit;
-import org.apache.arrow.vector.types.Types.MinorType;
import org.apache.arrow.vector.types.pojo.ArrowType;
public final class StandardJdbcQuirks {
public static final JdbcQuirks MS_SQL_SERVER =
- JdbcQuirks.builder("Microsoft SQL Server").typeConverter(StandardJdbcQuirks::mssql).build();
+ JdbcQuirks.builder("Microsoft SQL Server")
+ .typeConverter(JdbcToArrowTypeConverters.MICROSOFT_SQL_SERVER)
+ .build();
public static final JdbcQuirks POSTGRESQL =
JdbcQuirks.builder("PostgreSQL")
.sqlQuirks(
@@ -40,41 +38,6 @@ public final class StandardJdbcQuirks {
arrowType);
}))
.build())
- .typeConverter(StandardJdbcQuirks::postgresql)
+ .typeConverter(JdbcToArrowTypeConverters.POSTGRESQL)
.build();
- private static final int MS_SQL_TYPE_DATETIMEOFFSET = -155;
-
- private static ArrowType mssql(JdbcFieldInfoExtra field) {
- switch (field.getJdbcType()) {
- case Types.TIME:
- return MinorType.TIMENANO.getType();
- case Types.TIMESTAMP:
- // DATETIME2
- // Precision is "100 nanoseconds" -> TimeUnit is NANOSECOND
- return MinorType.TIMESTAMPNANO.getType();
- case MS_SQL_TYPE_DATETIMEOFFSET:
- // DATETIMEOFFSET
- // Precision is "100 nanoseconds" -> TimeUnit is NANOSECOND
- return new ArrowType.Timestamp(TimeUnit.NANOSECOND, "UTC");
- default:
- return JdbcToArrowUtils.getArrowTypeFromJdbcType(field.getFieldInfo(), /*calendar*/ null);
- }
- }
-
- private static ArrowType postgresql(JdbcFieldInfoExtra field) {
- switch (field.getJdbcType()) {
- case Types.TIME:
- return MinorType.TIMEMICRO.getType();
- case Types.TIMESTAMP:
- if ("timestamptz".equals(field.getTypeName())) {
- return new ArrowType.Timestamp(TimeUnit.MICROSECOND, "UTC");
- } else if ("timestamp".equals(field.getTypeName())) {
- return MinorType.TIMESTAMPMICRO.getType();
- }
- // Unknown type
- return null;
- default:
- return JdbcToArrowUtils.getArrowTypeFromJdbcType(field.getFieldInfo(), /*calendar*/ null);
- }
- }
}
diff --git a/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/UrlDataSource.java b/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/UrlDataSource.java
index f74b97f448..51266ca006 100644
--- a/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/UrlDataSource.java
+++ b/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/UrlDataSource.java
@@ -25,15 +25,17 @@
import java.util.Objects;
import java.util.logging.Logger;
import javax.sql.DataSource;
+import org.checkerframework.checker.nullness.qual.Nullable;
/** Adapt a JDBC URL to the DataSource interface. */
class UrlDataSource implements DataSource {
final String target;
- PrintWriter logWriter;
+ @Nullable PrintWriter logWriter;
int loginTimeout;
UrlDataSource(String target) {
this.target = Objects.requireNonNull(target);
+ this.logWriter = null;
}
@Override
@@ -47,7 +49,8 @@ public Connection getConnection(String username, String password) throws SQLExce
}
@Override
- public PrintWriter getLogWriter() throws SQLException {
+ @SuppressWarnings("override.return")
+ public @Nullable PrintWriter getLogWriter() throws SQLException {
return logWriter;
}
diff --git a/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/adapter/JdbcFieldInfoExtra.java b/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/adapter/JdbcFieldInfoExtra.java
index b6741a7cc6..a1e4080f49 100644
--- a/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/adapter/JdbcFieldInfoExtra.java
+++ b/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/adapter/JdbcFieldInfoExtra.java
@@ -20,6 +20,7 @@
import java.sql.ResultSet;
import java.sql.SQLException;
import org.apache.arrow.adapter.jdbc.JdbcFieldInfo;
+import org.checkerframework.checker.nullness.qual.Nullable;
/**
* Information about a column from JDBC for inferring column type.
@@ -31,8 +32,8 @@ public final class JdbcFieldInfoExtra {
final JdbcFieldInfo info;
final String typeName;
final int numPrecRadix;
- final String remarks;
- final String columnDef;
+ final @Nullable String remarks;
+ final @Nullable String columnDef;
final int sqlDataType;
final int sqlDatetimeSub;
final int charOctetLength;
@@ -46,7 +47,11 @@ public final class JdbcFieldInfoExtra {
*/
public JdbcFieldInfoExtra(ResultSet rs) throws SQLException {
final int dataType = rs.getInt("DATA_TYPE");
- this.typeName = rs.getString("TYPE_NAME");
+ final @Nullable String maybeTypeName = rs.getString("TYPE_NAME");
+ if (maybeTypeName == null) {
+ throw new RuntimeException("Field " + "TYPE_NAME" + " was null");
+ }
+ this.typeName = maybeTypeName;
final int columnSize = rs.getInt("COLUMN_SIZE");
final int decimalDigits = rs.getInt("DECIMAL_DIGITS");
this.numPrecRadix = rs.getInt("NUM_PREC_RADIX");
@@ -79,11 +84,11 @@ public int getNumPrecRadix() {
return numPrecRadix;
}
- public String getRemarks() {
+ public @Nullable String getRemarks() {
return remarks;
}
- public String getColumnDef() {
+ public @Nullable String getColumnDef() {
return columnDef;
}
diff --git a/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/adapter/JdbcToArrowTypeConverters.java b/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/adapter/JdbcToArrowTypeConverters.java
index 928bfdf8ee..ba8b6fb3a2 100644
--- a/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/adapter/JdbcToArrowTypeConverters.java
+++ b/java/driver/jdbc/src/main/java/org/apache/arrow/adbc/driver/jdbc/adapter/JdbcToArrowTypeConverters.java
@@ -20,22 +20,26 @@
import java.sql.Types;
import org.apache.arrow.adapter.jdbc.JdbcToArrowUtils;
import org.apache.arrow.vector.types.TimeUnit;
+import org.apache.arrow.vector.types.Types.MinorType;
import org.apache.arrow.vector.types.pojo.ArrowType;
public final class JdbcToArrowTypeConverters {
public static final JdbcToArrowTypeConverter MICROSOFT_SQL_SERVER =
JdbcToArrowTypeConverters::mssql;
public static final JdbcToArrowTypeConverter POSTGRESQL = JdbcToArrowTypeConverters::postgresql;
+ private static final int MS_SQL_TYPE_DATETIMEOFFSET = -155;
private static ArrowType mssql(JdbcFieldInfoExtra field) {
switch (field.getJdbcType()) {
+ case Types.TIME:
+ return MinorType.TIMENANO.getType();
+ case Types.TIMESTAMP:
// DATETIME2
// Precision is "100 nanoseconds" -> TimeUnit is NANOSECOND
- case Types.TIMESTAMP:
- return new ArrowType.Timestamp(TimeUnit.NANOSECOND, /*timezone*/ null);
+ return MinorType.TIMESTAMPNANO.getType();
+ case MS_SQL_TYPE_DATETIMEOFFSET:
// DATETIMEOFFSET
// Precision is "100 nanoseconds" -> TimeUnit is NANOSECOND
- case -155:
return new ArrowType.Timestamp(TimeUnit.NANOSECOND, "UTC");
default:
return JdbcToArrowUtils.getArrowTypeFromJdbcType(field.getFieldInfo(), /*calendar*/ null);
@@ -44,6 +48,8 @@ private static ArrowType mssql(JdbcFieldInfoExtra field) {
private static ArrowType postgresql(JdbcFieldInfoExtra field) {
switch (field.getJdbcType()) {
+ case Types.TIME:
+ return MinorType.TIMEMICRO.getType();
case Types.TIMESTAMP:
{
int decimalDigits = field.getScale();
@@ -58,7 +64,8 @@ private static ArrowType postgresql(JdbcFieldInfoExtra field) {
unit = TimeUnit.NANOSECOND;
} else {
// Negative precision?
- return null;
+ throw new UnsupportedOperationException(
+ "Cannot convert type to Arrow Timestamp (precision is negative)");
}
if ("timestamptz".equals(field.getTypeName())) {
return new ArrowType.Timestamp(unit, "UTC");
@@ -66,7 +73,7 @@ private static ArrowType postgresql(JdbcFieldInfoExtra field) {
return new ArrowType.Timestamp(unit, /*timezone*/ null);
}
// Unknown type
- return null;
+ throw new UnsupportedOperationException("Cannot convert type to Arrow Timestamp");
}
default:
return JdbcToArrowUtils.getArrowTypeFromJdbcType(field.getFieldInfo(), /*calendar*/ null);
diff --git a/java/driver/validation/src/main/java/org/apache/arrow/adbc/driver/testsuite/AbstractConnectionMetadataTest.java b/java/driver/validation/src/main/java/org/apache/arrow/adbc/driver/testsuite/AbstractConnectionMetadataTest.java
index 6a5fc9053d..f31588e68c 100644
--- a/java/driver/validation/src/main/java/org/apache/arrow/adbc/driver/testsuite/AbstractConnectionMetadataTest.java
+++ b/java/driver/validation/src/main/java/org/apache/arrow/adbc/driver/testsuite/AbstractConnectionMetadataTest.java
@@ -323,8 +323,7 @@ public void getTableSchema() throws Exception {
final Schema schema =
new Schema(
Arrays.asList(
- Field.nullable(
- quirks.caseFoldColumnName("INTS"), new ArrowType.Int(32, /*signed=*/ true)),
+ Field.nullable(quirks.caseFoldColumnName("INTS"), new ArrowType.Int(32, true)),
Field.nullable(quirks.caseFoldColumnName("STRS"), new ArrowType.Utf8())));
try (final VectorSchemaRoot root = VectorSchemaRoot.create(schema, allocator);
final AdbcStatement stmt = connection.bulkIngest(tableName, BulkIngestMode.CREATE)) {
diff --git a/java/driver/validation/src/main/java/org/apache/arrow/adbc/driver/testsuite/AbstractTransactionTest.java b/java/driver/validation/src/main/java/org/apache/arrow/adbc/driver/testsuite/AbstractTransactionTest.java
index 29265ba7e3..e6aaa6e6c4 100644
--- a/java/driver/validation/src/main/java/org/apache/arrow/adbc/driver/testsuite/AbstractTransactionTest.java
+++ b/java/driver/validation/src/main/java/org/apache/arrow/adbc/driver/testsuite/AbstractTransactionTest.java
@@ -85,9 +85,7 @@ void toggleAutoCommit() throws Exception {
@Test
void rollback() throws Exception {
final Schema schema =
- new Schema(
- Collections.singletonList(
- Field.nullable("ints", new ArrowType.Int(32, /*signed=*/ true))));
+ new Schema(Collections.singletonList(Field.nullable("ints", new ArrowType.Int(32, true))));
connection.setAutoCommit(false);
try (VectorSchemaRoot root = VectorSchemaRoot.create(schema, allocator)) {
@@ -116,9 +114,7 @@ void rollback() throws Exception {
@Test
void commit() throws Exception {
final Schema schema =
- new Schema(
- Collections.singletonList(
- Field.nullable("ints", new ArrowType.Int(32, /*signed=*/ true))));
+ new Schema(Collections.singletonList(Field.nullable("ints", new ArrowType.Int(32, true))));
final String tableName = quirks.caseFoldTableName("temptable");
connection.setAutoCommit(false);
@@ -149,9 +145,7 @@ void commit() throws Exception {
@Test
void enableAutoCommitAlsoCommits() throws Exception {
final Schema schema =
- new Schema(
- Collections.singletonList(
- Field.nullable("ints", new ArrowType.Int(32, /*signed=*/ true))));
+ new Schema(Collections.singletonList(Field.nullable("ints", new ArrowType.Int(32, true))));
final String tableName = quirks.caseFoldTableName("temptable");
connection.setAutoCommit(false);
diff --git a/java/driver/validation/src/main/java/org/apache/arrow/adbc/driver/testsuite/SqlTestUtil.java b/java/driver/validation/src/main/java/org/apache/arrow/adbc/driver/testsuite/SqlTestUtil.java
index 814d0a81c4..c0536e5cf5 100644
--- a/java/driver/validation/src/main/java/org/apache/arrow/adbc/driver/testsuite/SqlTestUtil.java
+++ b/java/driver/validation/src/main/java/org/apache/arrow/adbc/driver/testsuite/SqlTestUtil.java
@@ -79,8 +79,7 @@ public Schema ingestTableWithConstraints(
final Schema schema =
new Schema(
Arrays.asList(
- Field.notNullable(
- quirks.caseFoldColumnName("INTS"), new ArrowType.Int(32, /*signed=*/ true)),
+ Field.notNullable(quirks.caseFoldColumnName("INTS"), new ArrowType.Int(32, true)),
Field.nullable(quirks.caseFoldColumnName("INTS2"), new ArrowType.Int(32, true))));
try (final VectorSchemaRoot root = VectorSchemaRoot.create(schema, allocator)) {
final IntVector ints = (IntVector) root.getVector(0);
@@ -133,8 +132,7 @@ public void ingestTablesWithReferentialConstraint(
new Schema(
Collections.singletonList(
Field.notNullable(
- quirks.caseFoldColumnName("PRODUCT_ID"),
- new ArrowType.Int(32, /*signed=*/ true))));
+ quirks.caseFoldColumnName("PRODUCT_ID"), new ArrowType.Int(32, true))));
final Schema dependentSchema =
new Schema(
diff --git a/java/pom.xml b/java/pom.xml
index 5b02f67f6c..5521bb3079 100644
--- a/java/pom.xml
+++ b/java/pom.xml
@@ -135,6 +135,13 @@
${adbc.version}
+
+      <dependency>
+        <groupId>org.checkerframework</groupId>
+        <artifactId>checker-qual</artifactId>
+        <version>3.42.0</version>
+      </dependency>
+
org.assertj
@@ -279,15 +286,26 @@
org.apache.maven.plugins
maven-compiler-plugin
- 3.10.1
+ 3.11.0
1.8
-
+ UTF-8
+
-XDcompilePolicy=simple
-Xplugin:ErrorProne -Xep:NullAway:ERROR -XepOpt:NullAway:AnnotatedPackages=com.uber
+
+ -Xmaxerrs
+ 10000
+ -Xmaxwarns
+ 10000
+ -AskipDefs=.*Test
+ -AatfDoNotCache
+ -AprintVerboseGenerics
+ -AprintAllQualifiers
+ -Astubs=.checker-framework/:stubs
-
+
com.google.errorprone
error_prone_core
@@ -298,7 +316,15 @@
nullaway
0.10.10
+              <path>
+                <groupId>org.checkerframework</groupId>
+                <artifactId>checker</artifactId>
+                <version>3.42.0</version>
+              </path>
+            <annotationProcessors>
+              <annotationProcessor>org.checkerframework.checker.nullness.NullnessChecker</annotationProcessor>
+            </annotationProcessors>
diff --git a/java/sql/pom.xml b/java/sql/pom.xml
index 9894210994..37904359b7 100644
--- a/java/sql/pom.xml
+++ b/java/sql/pom.xml
@@ -28,6 +28,12 @@
arrow-vector
+
+    <dependency>
+      <groupId>org.checkerframework</groupId>
+      <artifactId>checker-qual</artifactId>
+    </dependency>
+
org.assertj
diff --git a/java/sql/src/main/java/org/apache/arrow/adbc/sql/SqlQuirks.java b/java/sql/src/main/java/org/apache/arrow/adbc/sql/SqlQuirks.java
index 007f699c0f..fc86435474 100644
--- a/java/sql/src/main/java/org/apache/arrow/adbc/sql/SqlQuirks.java
+++ b/java/sql/src/main/java/org/apache/arrow/adbc/sql/SqlQuirks.java
@@ -19,51 +19,53 @@
import java.util.function.Function;
import org.apache.arrow.vector.types.pojo.ArrowType;
+import org.checkerframework.checker.nullness.qual.Nullable;
/** Parameters to pass to SQL-based drivers to account for driver/vendor-specific SQL quirks. */
public final class SqlQuirks {
- public static final Function<ArrowType, String> DEFAULT_ARROW_TYPE_TO_SQL_TYPE_NAME_MAPPING =
- (arrowType) -> {
- switch (arrowType.getTypeID()) {
- case Null:
- case Struct:
- case List:
- case LargeList:
- case FixedSizeList:
- case Union:
- case Map:
- return null;
- case Int:
- // TODO:
- return "INT";
- case FloatingPoint:
- return null;
- case Utf8:
- return "CLOB";
- case LargeUtf8:
- case Binary:
- case LargeBinary:
- case FixedSizeBinary:
- case Bool:
- case Decimal:
- case Date:
- case Time:
- case Timestamp:
- case Interval:
- case Duration:
- case NONE:
- default:
- return null;
- }
- };
- Function<ArrowType, String> arrowToSqlTypeNameMapping;
+ public static final Function<ArrowType, @Nullable String>
+ DEFAULT_ARROW_TYPE_TO_SQL_TYPE_NAME_MAPPING =
+ (arrowType) -> {
+ switch (arrowType.getTypeID()) {
+ case Null:
+ case Struct:
+ case List:
+ case LargeList:
+ case FixedSizeList:
+ case Union:
+ case Map:
+ return null;
+ case Int:
+ // TODO:
+ return "INT";
+ case FloatingPoint:
+ return null;
+ case Utf8:
+ return "CLOB";
+ case LargeUtf8:
+ case Binary:
+ case LargeBinary:
+ case FixedSizeBinary:
+ case Bool:
+ case Decimal:
+ case Date:
+ case Time:
+ case Timestamp:
+ case Interval:
+ case Duration:
+ case NONE:
+ default:
+ return null;
+ }
+ };
+ Function<ArrowType, @Nullable String> arrowToSqlTypeNameMapping;
public SqlQuirks() {
this.arrowToSqlTypeNameMapping = DEFAULT_ARROW_TYPE_TO_SQL_TYPE_NAME_MAPPING;
}
/** The mapping from Arrow type to SQL type name, used to build queries. */
- public Function<ArrowType, String> getArrowToSqlTypeNameMapping() {
+ public Function<ArrowType, @Nullable String> getArrowToSqlTypeNameMapping() {
return arrowToSqlTypeNameMapping;
}
@@ -73,9 +75,10 @@ public static Builder builder() {
}
public static final class Builder {
- Function<ArrowType, String> arrowToSqlTypeNameMapping;
+ @Nullable Function<ArrowType, @Nullable String> arrowToSqlTypeNameMapping;
- public Builder arrowToSqlTypeNameMapping(Function<ArrowType, String> mapper) {
+ public Builder arrowToSqlTypeNameMapping(
+ @Nullable Function<ArrowType, @Nullable String> mapper) {
this.arrowToSqlTypeNameMapping = mapper;
return this;
}