From 26fb3c6c36b0b54b87cf58a2746609f76dea946a Mon Sep 17 00:00:00 2001 From: Dominik Przybysz <132913826+sfc-gh-dprzybysz@users.noreply.github.com> Date: Thu, 13 Jun 2024 12:26:10 +0200 Subject: [PATCH 01/10] SNOW-1452552: Expose vector dimension in column metadata (#1788) --- .../client/core/SFResultSetMetaData.java | 20 +++++++++++ .../client/jdbc/SnowflakeColumnMetadata.java | 13 +++++-- .../jdbc/SnowflakeDatabaseMetaData.java | 3 ++ .../jdbc/SnowflakeResultSetMetaData.java | 18 ++++++++++ .../jdbc/SnowflakeResultSetMetaDataV1.java | 10 ++++++ .../snowflake/client/jdbc/SnowflakeUtil.java | 16 +++++++-- .../client/jdbc/DatabaseMetaDataLatestIT.java | 36 +++++++++++++++++++ .../client/jdbc/SnowflakeUtilTest.java | 3 +- 8 files changed, 114 insertions(+), 5 deletions(-) diff --git a/src/main/java/net/snowflake/client/core/SFResultSetMetaData.java b/src/main/java/net/snowflake/client/core/SFResultSetMetaData.java index 30a680030..c39b4ec86 100644 --- a/src/main/java/net/snowflake/client/core/SFResultSetMetaData.java +++ b/src/main/java/net/snowflake/client/core/SFResultSetMetaData.java @@ -37,6 +37,8 @@ public class SFResultSetMetaData { private List precisions; + private List dimensions; + private List scales; private List nullables; @@ -143,6 +145,7 @@ public SFResultSetMetaData( this.columnTypeNames = new ArrayList<>(this.columnCount); this.columnTypes = new ArrayList<>(this.columnCount); this.precisions = new ArrayList<>(this.columnCount); + this.dimensions = new ArrayList<>(this.columnCount); this.scales = new ArrayList<>(this.columnCount); this.nullables = new ArrayList<>(this.columnCount); this.columnSrcDatabases = new ArrayList<>(this.columnCount); @@ -156,6 +159,7 @@ public SFResultSetMetaData( columnNames.add(columnMetadata.get(colIdx).getName()); columnTypeNames.add(columnMetadata.get(colIdx).getTypeName()); precisions.add(calculatePrecision(columnMetadata.get(colIdx))); + dimensions.add(calculateDimension(columnMetadata.get(colIdx))); 
columnTypes.add(columnMetadata.get(colIdx).getType()); scales.add(columnMetadata.get(colIdx).getScale()); nullables.add( @@ -200,6 +204,14 @@ private Integer calculatePrecision(SnowflakeColumnMetadata columnMetadata) { } } + private Integer calculateDimension(SnowflakeColumnMetadata columnMetadata) { + int columnType = columnMetadata.getType(); + if (columnType == SnowflakeUtil.EXTRA_TYPES_VECTOR) { + return columnMetadata.getDimension(); + } + return 0; + } + private Integer calculateDisplaySize(SnowflakeColumnMetadata columnMetadata) { int columnType = columnMetadata.getType(); switch (columnType) { @@ -403,6 +415,14 @@ public int getPrecision(int column) { } } + public int getDimension(int column) { + if (dimensions != null && dimensions.size() >= column && column > 0) { + return dimensions.get(column - 1); + } else { + return 0; + } + } + public boolean isSigned(int column) { return (columnTypes.get(column - 1) == Types.INTEGER || columnTypes.get(column - 1) == Types.DECIMAL diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeColumnMetadata.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeColumnMetadata.java index 9f182772e..9f1cd272e 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeColumnMetadata.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeColumnMetadata.java @@ -28,6 +28,7 @@ public class SnowflakeColumnMetadata implements Serializable { private String columnSrcDatabase; private boolean isAutoIncrement; + private int dimension; // vector type contains dimension @SnowflakeJdbcInternalApi public SnowflakeColumnMetadata( @@ -44,7 +45,8 @@ public SnowflakeColumnMetadata( String columnSrcDatabase, String columnSrcSchema, String columnSrcTable, - boolean isAutoIncrement) { + boolean isAutoIncrement, + int dimension) { this.name = name; this.type = type; this.nullable = nullable; @@ -59,11 +61,12 @@ public SnowflakeColumnMetadata( this.columnSrcSchema = columnSrcSchema; this.columnSrcTable = columnSrcTable; 
this.isAutoIncrement = isAutoIncrement; + this.dimension = dimension; } /** * @deprecated Use {@link SnowflakeColumnMetadata#SnowflakeColumnMetadata(String, int, boolean, - * int, int, int, String, boolean, SnowflakeType, List, String, String, String, boolean)} + * int, int, int, String, boolean, SnowflakeType, List, String, String, String, boolean, int)} * instead */ @Deprecated @@ -194,6 +197,11 @@ public void setAutoIncrement(boolean autoIncrement) { isAutoIncrement = autoIncrement; } + @SnowflakeJdbcInternalApi + public int getDimension() { + return dimension; + } + public String toString() { StringBuilder sBuilder = new StringBuilder(); @@ -209,6 +217,7 @@ public String toString() { sBuilder.append(",schema=").append(columnSrcSchema); sBuilder.append(",table=").append(columnSrcTable); sBuilder.append((",isAutoIncrement=")).append(isAutoIncrement); + sBuilder.append((",dimension=")).append(dimension); return sBuilder.toString(); } diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeDatabaseMetaData.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeDatabaseMetaData.java index ff5e0529f..b50646ea7 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeDatabaseMetaData.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeDatabaseMetaData.java @@ -1865,6 +1865,9 @@ public boolean next() throws SQLException { || columnMetadata.getType() == Types.TIME || columnMetadata.getType() == Types.TIMESTAMP) { columnSize = columnMetadata.getPrecision(); + } else if (columnMetadata.getType() == SnowflakeUtil.EXTRA_TYPES_VECTOR) { + // For VECTOR Snowflake type we consider dimension as the column size + columnSize = columnMetadata.getDimension(); } nextRow[6] = columnSize; diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetMetaData.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetMetaData.java index dcc5250b5..7de89e6f5 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetMetaData.java +++ 
b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetMetaData.java @@ -13,4 +13,22 @@ public interface SnowflakeResultSetMetaData { int getInternalColumnType(int column) throws SQLException; List getColumnFields(int column) throws SQLException; + + /** + * Get vector dimension + * + * @param column column index + * @return vector dimension when the column is vector type or 0 when it is not vector type + * @throws SQLException when cannot get column dimension + */ + int getDimension(int column) throws SQLException; + + /** + * Get vector dimension + * + * @param columnName column name + * @return vector dimension when the column is vector type or 0 when it is not vector type + * @throws SQLException when cannot get column dimension + */ + int getDimension(String columnName) throws SQLException; } diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetMetaDataV1.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetMetaDataV1.java index 0a88b1ebd..b8cdb236b 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetMetaDataV1.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeResultSetMetaDataV1.java @@ -85,6 +85,16 @@ public List getColumnFields(int column) throws SQLException { () -> resultSetMetaData.getColumnFields(column)); } + @Override + public int getDimension(int column) throws SQLException { + return resultSetMetaData.getDimension(column); + } + + @Override + public int getDimension(String columnName) throws SQLException { + return resultSetMetaData.getDimension(getColumnIndex(columnName) + 1); + } + @Override public T unwrap(Class iface) throws SQLException { logger.trace(" T unwrap(Class iface)", false); diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java index bdcf2af61..8c848032e 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java @@ 
-181,6 +181,15 @@ public static SnowflakeColumnMetadata extractColumnMetadata( int precision = colNode.path("precision").asInt(); int scale = colNode.path("scale").asInt(); int length = colNode.path("length").asInt(); + int dimension = + colNode + .path("dimension") + .asInt(); // vector dimension when checking columns via connection.getMetadata + int vectorDimension = + colNode + .path("vectorDimension") + .asInt(); // dimension when checking columns via resultSet.getMetadata + int finalVectorDimension = dimension > 0 ? dimension : vectorDimension; boolean fixed = colNode.path("fixed").asBoolean(); JsonNode udtOutputType = colNode.path("outputType"); JsonNode extColTypeNameNode = colNode.path("extTypeName"); @@ -223,7 +232,8 @@ public static SnowflakeColumnMetadata extractColumnMetadata( colSrcDatabase, colSrcSchema, colSrcTable, - isAutoIncrement); + isAutoIncrement, + finalVectorDimension); } static ColumnTypeInfo getSnowflakeType( @@ -560,7 +570,9 @@ static List describeFixedViewColumns( "", // database "", // schema "", - false)); // isAutoincrement + false, // isAutoincrement + 0 // dimension + )); } return rowType; diff --git a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataLatestIT.java b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataLatestIT.java index d3176f8b2..24d3940d7 100644 --- a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataLatestIT.java @@ -2342,4 +2342,40 @@ public void testKeywordsCount() throws SQLException { assertEquals(43, metaData.getSQLKeywords().split(",").length); } } + /** Added in > 3.16.1 */ + @Test + public void testVectorDimension() throws SQLException { + try (Connection connection = getConnection(); + Statement statement = connection.createStatement()) { + statement.execute( + "create or replace table JDBC_VECTOR(text_col varchar(32), float_vec VECTOR(FLOAT, 256), int_vec VECTOR(INT, 16))"); + DatabaseMetaData metaData = 
connection.getMetaData(); + try (ResultSet resultSet = + metaData.getColumns( + connection.getCatalog(), + connection.getSchema().replaceAll("_", "\\\\_"), + "JDBC\\_VECTOR", + null)) { + assertTrue(resultSet.next()); + assertEquals(32, resultSet.getObject("COLUMN_SIZE")); + assertTrue(resultSet.next()); + assertEquals(256, resultSet.getObject("COLUMN_SIZE")); + assertTrue(resultSet.next()); + assertEquals(16, resultSet.getObject("COLUMN_SIZE")); + assertFalse(resultSet.next()); + } + + try (ResultSet resultSet = + statement.executeQuery("Select text_col, float_vec, int_vec from JDBC_VECTOR")) { + SnowflakeResultSetMetaData unwrapResultSetMetadata = + resultSet.getMetaData().unwrap(SnowflakeResultSetMetaData.class); + assertEquals(0, unwrapResultSetMetadata.getDimension("TEXT_COL")); + assertEquals(0, unwrapResultSetMetadata.getDimension(1)); + assertEquals(256, unwrapResultSetMetadata.getDimension("FLOAT_VEC")); + assertEquals(256, unwrapResultSetMetadata.getDimension(2)); + assertEquals(16, unwrapResultSetMetadata.getDimension("INT_VEC")); + assertEquals(16, unwrapResultSetMetadata.getDimension(3)); + } + } + } } diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeUtilTest.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeUtilTest.java index 23b96dc6c..1110ce4df 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeUtilTest.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeUtilTest.java @@ -93,7 +93,8 @@ private static SnowflakeColumnMetadata createExpectedMetadata( rootNode.path("database").asText(), rootNode.path("schema").asText(), rootNode.path("table").asText(), - false); + false, + rootNode.path("dimension").asInt()); return expectedColumnMetadata; } From 6a95dccda713969cc8681f2c3bfa5791b4d8b97e Mon Sep 17 00:00:00 2001 From: Juan Martinez Ramirez <126511805+sfc-gh-jmartinez@users.noreply.github.com> Date: Mon, 17 Jun 2024 23:18:15 -0600 Subject: [PATCH 02/10] SNOW-1016467: Run tests on Windows in Github actions. 
(#1766) --- .github/workflows/build-test.yml | 35 +++- FIPS/pom.xml | 46 +++-- .../net/snowflake/client/RunningOnWinMac.java | 0 ci/test_windows.bat | 160 ++++++++++++++++++ pom.xml | 104 ++++++++---- 5 files changed, 291 insertions(+), 54 deletions(-) create mode 100644 FIPS/src/test/java/net/snowflake/client/RunningOnWinMac.java create mode 100644 ci/test_windows.bat diff --git a/.github/workflows/build-test.yml b/.github/workflows/build-test.yml index 6c3022b75..b3c7c8bc2 100644 --- a/.github/workflows/build-test.yml +++ b/.github/workflows/build-test.yml @@ -36,9 +36,40 @@ jobs: WHITESOURCE_API_KEY: ${{ secrets.WHITESOURCE_API_KEY }} run: ./ci/build.sh + test-windows: + needs: build + name: ${{ matrix.cloud }} Windows java ${{ matrix.javaVersion }} JDBC${{ matrix.additionalMavenProfile }} ${{ matrix.category }} + runs-on: windows-latest + strategy: + fail-fast: false + matrix: + cloud: [ 'AWS' ] + javaVersion: [ '8', '11', '17'] + category: ['TestCategoryResultSet,TestCategoryOthers,TestCategoryLoader', 'TestCategoryConnection,TestCategoryStatement', 'TestCategoryArrow,TestCategoryCore', 'TestCategoryFips'] + additionalMavenProfile: ['', '-Dthin-jar'] + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-java@v4 + with: + java-version: ${{ matrix.javaVersion }} + distribution: 'temurin' + cache: maven + - uses: actions/setup-python@v4 + with: + python-version: '3.7' + architecture: 'x64' + - name: Tests + shell: cmd + env: + PARAMETERS_SECRET: ${{ secrets.PARAMETERS_SECRET }} + CLOUD_PROVIDER: ${{ matrix.cloud }} + JDBC_TEST_CATEGORY: ${{ matrix.category }} + ADDITIONAL_MAVEN_PROFILE: ${{ matrix.additionalMavenProfile }} + run: ci\\test_windows.bat + test-mac: needs: build - name: ${{ matrix.cloud }} JDBC${{ matrix.additionalMavenProfile }} ${{ matrix.category }} / Test on Mac(java ${{ matrix.javaVersion }}, ${{ matrix.cloud }} ) + name: ${{ matrix.cloud }} Mac java ${{ matrix.javaVersion }} JDBC${{ matrix.additionalMavenProfile }} ${{ matrix.category 
}} runs-on: macos-13 strategy: fail-fast: false @@ -71,7 +102,7 @@ jobs: test-linux: needs: build - name: ${{ matrix.cloud }} JDBC${{ matrix.additionalMavenProfile }} ${{ matrix.category }} on ${{ matrix.image }} + name: ${{ matrix.cloud }} Linux java on ${{ matrix.image }} JDBC${{ matrix.additionalMavenProfile }} ${{ matrix.category }} runs-on: ubuntu-latest strategy: fail-fast: false diff --git a/FIPS/pom.xml b/FIPS/pom.xml index e9934e0b5..b7ae7edcc 100644 --- a/FIPS/pom.xml +++ b/FIPS/pom.xml @@ -323,23 +323,6 @@ - - org.codehaus.mojo - exec-maven-plugin - ${version.plugin.exec} - - - check-shaded-content - verify - - exec - - - ${basedir}/scripts/check_content.sh - - - - @@ -662,6 +645,35 @@ + + check-content + + + !windows + + + + + + org.codehaus.mojo + exec-maven-plugin + ${version.plugin.exec} + + + check-shaded-content + verify + + exec + + + ${basedir}/scripts/check_content.sh + + + + + + + java-9 diff --git a/FIPS/src/test/java/net/snowflake/client/RunningOnWinMac.java b/FIPS/src/test/java/net/snowflake/client/RunningOnWinMac.java new file mode 100644 index 000000000..e69de29bb diff --git a/ci/test_windows.bat b/ci/test_windows.bat new file mode 100644 index 000000000..4f32f7564 --- /dev/null +++ b/ci/test_windows.bat @@ -0,0 +1,160 @@ +REM +REM Tests JDBC Driver on Windows +REM +setlocal +setlocal EnableDelayedExpansion +python -m venv venv +call venv\scripts\activate +pip install -U snowflake-connector-python + +cd %GITHUB_WORKSPACE% + +if "%CLOUD_PROVIDER%"=="AZURE" ( + set ENCODED_PARAMETERS_FILE=.github/workflows/parameters_azure.json.gpg +) else if "%CLOUD_PROVIDER%"=="GCP" ( + set ENCODED_PARAMETERS_FILE=.github/workflows/parameters_gcp.json.gpg +) else if "%CLOUD_PROVIDER%"=="AWS" ( + set ENCODED_PARAMETERS_FILE=.github/workflows/parameters_aws.json.gpg +) else ( + echo === unknown cloud provider + exit /b 1 +) + +gpg --quiet --batch --yes --decrypt --passphrase=%PARAMETERS_SECRET% --output parameters.json %ENCODED_PARAMETERS_FILE% + +REM DON'T 
FORGET TO include @echo off here or the password may be leaked! +echo @echo off>parameters.bat +jq -r ".testconnection | to_entries | map(\"set \(.key)=\(.value)\") | .[]" parameters.json >> parameters.bat +call parameters.bat +if %ERRORLEVEL% NEQ 0 ( + echo === failed to set the test parameters + exit /b 1 +) +echo @echo off>parametersorg.bat +jq -r ".orgconnection | to_entries | map(\"set \(.key)=\(.value)\") | .[]" parameters.json >> parametersorg.bat +call parametersorg.bat +if %ERRORLEVEL% NEQ 0 ( + echo === failed to set the org parameters + exit /b 1 +) +set SNOWFLAKE_TEST_SCHEMA=%RUNNER_TRACKING_ID:-=_%_%GITHUB_SHA% +set TARGET_SCHEMA_NAME=%SNOWFLAKE_TEST_SCHEMA% + +echo [INFO] Account: %SNOWFLAKE_TEST_ACCOUNT% +echo [INFO] User : %SNOWFLAKE_TEST_USER% +echo [INFO] Database: %SNOWFLAKE_TEST_DATABASE% +echo [INFO] Schema: %SNOWFLAKE_TEST_SCHEMA% +echo [INFO] Warehouse: %SNOWFLAKE_TEST_WAREHOUSE% +echo [INFO] Role: %SNOWFLAKE_TEST_ROLE% + +echo [INFO] Creating schema %SNOWFLAKE_TEST_SCHEMA% +pushd %GITHUB_WORKSPACE%\ci\container +python create_schema.py +popd + +REM setup log + +set CLIENT_LOG_DIR_PATH=%GITHUB_WORKSPACE%\jenkins_rt_logs +echo "[INFO] CLIENT_LOG_DIR_PATH=%CLIENT_LOG_DIR_PATH%" + +set CLIENT_LOG_FILE_PATH=%CLIENT_LOG_DIR_PATH%\ssnowflake_ssm_rt.log +echo "[INFO] CLIENT_LOG_FILE_PATH=%CLIENT_LOG_FILE_PATH%" + +set CLIENT_KNOWN_SSM_FILE_PATH=%CLIENT_LOG_DIR_PATH%\rt_jenkins_log_known_ssm.txt +echo "[INFO] CLIENT_KNOWN_SSM_FILE_PATH=%CLIENT_KNOWN_SSM_FILE_PATH%" + +REM To close log analyze, just set ENABLE_CLIENT_LOG_ANALYZE to not "true", e.g. "false". +set ENABLE_CLIENT_LOG_ANALYZE=true + +REM The new complex password we use for jenkins test +set SNOWFLAKE_TEST_PASSWORD_NEW="ThisIsRandomPassword123!" 
+ +set LOG_PROPERTY_FILE=%GITHUB_WORKSPACE%\src\test\resources\logging.properties + +echo "[INFO] LOG_PROPERTY_FILE=%LOG_PROPERTY_FILE%" + +set CLIENT_DRIVER_NAME=JDBC + +powershell -Command "(Get-Content %LOG_PROPERTY_FILE%) | Foreach-Object { $_ -replace '^java.util.logging.FileHandler.pattern.*', 'java.util.logging.FileHandler.pattern = %CLIENT_LOG_FILE_PATH%' } | Set-Content %LOG_PROPERTY_FILE%" + +echo "[INFO] Create log directory" + +IF NOT EXIST %CLIENT_LOG_DIR_PATH% MD %CLIENT_LOG_DIR_PATH% 2>nul + +echo "[INFO] Delete ssm file" +IF EXIST "%CLIENT_KNOWN_SSM_FILE_PATH%" DEL /F /Q "%CLIENT_KNOWN_SSM_FILE_PATH%" + +echo "[INFO] Create ssm file" +echo.>"%CLIENT_KNOWN_SSM_FILE_PATH%" + +echo "[INFO] Finish log setup" +REM end setup log + +for /F "tokens=1,* delims==" %%i in ('set ^| findstr /I /R "^SNOWFLAKE_[^=]*$" ^| findstr /I /V /R "^SNOWFLAKE_PASS_[^=]*$" ^| sort') do ( + echo %%i=%%j +) + +echo [INFO] Starting hang_webserver.py 12345 +pushd %GITHUB_WORKSPACE%\ci\container +start /b python hang_webserver.py 12345 > hang_webserver.out 2>&1 +popd + +echo [INFO] Testing + +set MVNW_EXE=%GITHUB_WORKSPACE%\mvnw.cmd + +REM Avoid connection timeouts +set MAVEN_OPTS="-Dhttp.keepAlive=false -Dmaven.wagon.http.pool=false -Dmaven.wagon.http.retryHandler.class=standard -Dmaven.wagon.http.retryHandler.count=3 -Dmaven.wagon.httpconnectionManager.ttlSeconds=120" +echo "MAVEN OPTIONS %MAVEN_OPTS%" + +REM Avoid connection timeout on plugin dependency fetch or fail-fast when dependency cannot be fetched +cmd /c %MVNW_EXE% --batch-mode --show-version dependency:go-offline + +echo list = "%JDBC_TEST_CATEGORY%" +for %%a in ("%JDBC_TEST_CATEGORY:,=" "%") do ( + echo "Current category to execute" %%a + if /i %%a=="TestCategoryFips" ( + pushd FIPS + echo "[INFO] Run Fips tests" + cmd /c %MVNW_EXE% -B -DjenkinsIT ^ + -Djava.io.tmpdir=%GITHUB_WORKSPACE% ^ + -Djacoco.skip.instrument=false ^ + -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn ^ 
+ -Dnot-self-contained-jar ^ + verify ^ + --batch-mode --show-version > log.txt & type log.txt + echo "[INFO] Check for test execution status" + find /i /c "BUILD FAILURE" log.txt > NUL + set isfound=!errorlevel! + if !isfound! equ 0 ( + echo [ERROR] Failed run %%a test + exit /b 1 + ) else ( + echo [INFO] Success run %%a test + ) + popd ) else ( + echo "[INFO] Run %%a tests" + cmd /c %MVNW_EXE% -B -DjenkinsIT ^ + -Djava.io.tmpdir=%GITHUB_WORKSPACE% ^ + -Djacoco.skip.instrument=false ^ + -DtestCategory=net.snowflake.client.category.%%a ^ + -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn ^ + -Dnot-self-contained-jar %ADDITIONAL_MAVEN_PROFILE% ^ + verify ^ + --batch-mode --show-version > log.txt & type log.txt + echo "[INFO] Check for test execution status" + find /i /c "BUILD FAILURE" log.txt > NUL + set isfound=!errorlevel! + if !isfound! equ 0 ( + echo [ERROR] Failed run %%a test + exit /b 1 + ) else ( + echo [INFO] Success run %%a test + ) + ) +) + +echo [INFO] Dropping schema %SNOWFLAKE_TEST_SCHEMA% +pushd %GITHUB_WORKSPACE%\ci\container +python drop_schema.py +popd diff --git a/pom.xml b/pom.xml index 9e5f5c744..19b5ad10e 100644 --- a/pom.xml +++ b/pom.xml @@ -746,25 +746,6 @@ - - org.codehaus.mojo - exec-maven-plugin - - - check-shaded-content - - exec - - verify - - ${basedir}/ci/scripts/check_content.sh - - -thin - - - - - @@ -1087,22 +1068,6 @@ - - org.codehaus.mojo - exec-maven-plugin - - - check-shaded-content - - exec - - verify - - ${basedir}/ci/scripts/check_content.sh - - - - @@ -1231,6 +1196,75 @@ + + + check-content + + + !windows + + + !thin-jar + + + + + + org.codehaus.mojo + exec-maven-plugin + ${version.plugin.exec} + + + check-shaded-content + + exec + + verify + + ${basedir}/ci/scripts/check_content.sh + + + + + + + + + + check-content-thin + + + !windows + + + thin-jar + + + + + + org.codehaus.mojo + exec-maven-plugin + ${version.plugin.exec} + + + check-shaded-content + + exec + + verify + + 
${basedir}/ci/scripts/check_content.sh + + -thin + + + + + + + + qa1IT From 9f438a956d58bc94b03140d3c57f8058d5862b56 Mon Sep 17 00:00:00 2001 From: Dominik Przybysz <132913826+sfc-gh-dprzybysz@users.noreply.github.com> Date: Thu, 20 Jun 2024 10:46:33 +0200 Subject: [PATCH 03/10] SNOW-1488827: Fix SFTrustManagerIT tests (#1793) --- .../client/core/SFTrustManagerIT.java | 137 +++++++++++------- .../client/jdbc/ConnectionLatestIT.java | 4 +- 2 files changed, 85 insertions(+), 56 deletions(-) diff --git a/src/test/java/net/snowflake/client/core/SFTrustManagerIT.java b/src/test/java/net/snowflake/client/core/SFTrustManagerIT.java index 0a9d96dd2..f30cd88e1 100644 --- a/src/test/java/net/snowflake/client/core/SFTrustManagerIT.java +++ b/src/test/java/net/snowflake/client/core/SFTrustManagerIT.java @@ -20,9 +20,12 @@ import java.util.ArrayList; import java.util.List; import java.util.concurrent.TimeUnit; +import javax.net.ssl.SSLHandshakeException; import net.snowflake.client.category.TestCategoryCore; import net.snowflake.client.jdbc.BaseJDBCTest; import net.snowflake.client.jdbc.telemetryOOB.TelemetryService; +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; import org.apache.http.HttpResponse; import org.apache.http.client.HttpClient; import org.apache.http.client.methods.HttpGet; @@ -32,22 +35,37 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TemporaryFolder; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +@RunWith(Parameterized.class) @Category(TestCategoryCore.class) public class SFTrustManagerIT extends BaseJDBCTest { - private static final String[] TARGET_HOSTS = { - "storage.googleapis.com", - "ocspssd.us-east-1.snowflakecomputing.com/ocsp/fetch", - "sfcsupport.snowflakecomputing.com", - "sfcsupport.us-east-1.snowflakecomputing.com", - "sfcsupport.eu-central-1.snowflakecomputing.com", - "sfc-dev1-regression.s3.amazonaws.com", - 
"sfc-ds2-customer-stage.s3.amazonaws.com", - "snowflake.okta.com", - "sfcdev2.blob.core.windows.net" - }; + private static final SFLogger logger = SFLoggerFactory.getLogger(SFTrustManagerIT.class); + + public SFTrustManagerIT(String host) { + this.host = host; + } + + @Parameterized.Parameters(name = "host={0}") + public static Object[][] data() { + return new Object[][] { + // this host generates many "SSLHandshake Certificate Revocation + // check failed. Could not retrieve OCSP Response." when running in parallel CI builds + // {"storage.googleapis.com"}, + {"ocspssd.us-east-1.snowflakecomputing.com/ocsp/fetch"}, + {"sfcsupport.snowflakecomputing.com"}, + {"sfcsupport.us-east-1.snowflakecomputing.com"}, + {"sfcsupport.eu-central-1.snowflakecomputing.com"}, + {"sfc-dev1-regression.s3.amazonaws.com"}, + {"sfc-ds2-customer-stage.s3.amazonaws.com"}, + {"snowflake.okta.com"}, + {"sfcdev2.blob.core.windows.net"} + }; + } private boolean defaultState; + private final String host; @Before public void setUp() { @@ -83,15 +101,13 @@ public void tearDown() throws InterruptedException { public void testOcsp() throws Throwable { System.setProperty( SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_ENABLED, Boolean.TRUE.toString()); - for (String host : TARGET_HOSTS) { - HttpClient client = - HttpUtil.buildHttpClient( - new HttpClientSettingsKey(OCSPMode.FAIL_CLOSED), - null, // default OCSP response cache file - false // enable decompression - ); - accessHost(host, client); - } + HttpClient client = + HttpUtil.buildHttpClient( + new HttpClientSettingsKey(OCSPMode.FAIL_CLOSED), + null, // default OCSP response cache file + false // enable decompression + ); + accessHost(host, client); } /** @@ -104,15 +120,13 @@ public void testOcspWithFileCache() throws Throwable { System.setProperty( SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_ENABLED, Boolean.FALSE.toString()); File ocspCacheFile = tmpFolder.newFile(); - for (String host : TARGET_HOSTS) { - HttpClient client = - 
HttpUtil.buildHttpClient( - new HttpClientSettingsKey(OCSPMode.FAIL_CLOSED), - ocspCacheFile, // a temp OCSP response cache file - false // enable decompression - ); - accessHost(host, client); - } + HttpClient client = + HttpUtil.buildHttpClient( + new HttpClientSettingsKey(OCSPMode.FAIL_CLOSED), + ocspCacheFile, // a temp OCSP response cache file + false // enable decompression + ); + accessHost(host, client); } /** OCSP tests for the Snowflake and AWS S3 HTTPS connections using the server cache. */ @@ -121,15 +135,13 @@ public void testOcspWithServerCache() throws Throwable { System.setProperty( SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_ENABLED, Boolean.TRUE.toString()); File ocspCacheFile = tmpFolder.newFile(); - for (String host : TARGET_HOSTS) { - HttpClient client = - HttpUtil.buildHttpClient( - new HttpClientSettingsKey(OCSPMode.FAIL_CLOSED), - ocspCacheFile, // a temp OCSP response cache file - false // enable decompression - ); - accessHost(host, client); - } + HttpClient client = + HttpUtil.buildHttpClient( + new HttpClientSettingsKey(OCSPMode.FAIL_CLOSED), + ocspCacheFile, // a temp OCSP response cache file + false // enable decompression + ); + accessHost(host, client); } /** @@ -141,15 +153,13 @@ public void testOcspWithoutServerCache() throws Throwable { System.setProperty( SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_ENABLED, Boolean.FALSE.toString()); File ocspCacheFile = tmpFolder.newFile(); - for (String host : TARGET_HOSTS) { - HttpClient client = - HttpUtil.buildHttpClient( - new HttpClientSettingsKey(OCSPMode.FAIL_OPEN), - ocspCacheFile, // a temp OCSP response cache file - false // enable decompression - ); - accessHost(host, client); - } + HttpClient client = + HttpUtil.buildHttpClient( + new HttpClientSettingsKey(OCSPMode.FAIL_OPEN), + ocspCacheFile, // a temp OCSP response cache file + false // enable decompression + ); + accessHost(host, client); } /** OCSP tests for the Snowflake and AWS S3 HTTPS connections using the server cache. 
*/ @@ -159,7 +169,6 @@ public void testInvalidCacheFile() throws Throwable { SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_ENABLED, Boolean.TRUE.toString()); // a file under never exists. File ocspCacheFile = new File("NEVER_EXISTS", "NEVER_EXISTS"); - String host = TARGET_HOSTS[0]; HttpClient client = HttpUtil.buildHttpClient( new HttpClientSettingsKey(OCSPMode.FAIL_CLOSED), @@ -169,22 +178,40 @@ public void testInvalidCacheFile() throws Throwable { accessHost(host, client); } - private static void accessHost(String host, HttpClient client) throws IOException { - int statusCode = -1; - - HttpGet httpRequest = new HttpGet(String.format("https://%s:443/", host)); - HttpResponse response = client.execute(httpRequest); - statusCode = response.getStatusLine().getStatusCode(); + private static void accessHost(String host, HttpClient client) + throws IOException, InterruptedException { + HttpResponse response = executeWithRetries(host, client); await() .atMost(Duration.ofSeconds(10)) .until(() -> response.getStatusLine().getStatusCode(), not(equalTo(-1))); + assertThat( String.format("response code for %s", host), - statusCode, + response.getStatusLine().getStatusCode(), anyOf(equalTo(200), equalTo(400), equalTo(403), equalTo(404), equalTo(513))); } + private static HttpResponse executeWithRetries(String host, HttpClient client) + throws IOException, InterruptedException { + // There is one host that causes SSLHandshakeException very often - let's retry + int maxRetries = host.equals("storage.googleapis.com") ? 
5 : 0; + int retries = 0; + HttpGet httpRequest = new HttpGet(String.format("https://%s:443/", host)); + while (true) { + try { + return client.execute(httpRequest); + } catch (SSLHandshakeException e) { + logger.warn("SSL handshake failed (host = {}, retries={}}", host, retries, e); + ++retries; + if (retries >= maxRetries) { + throw e; + } + Thread.sleep(retries * 1000); + } + } + } + /** * TODO: we should re-enable this https://snowflakecomputing.atlassian.net/browse/SNOW-146911 * Revoked certificate test. @Test public void testRevokedCertificate() throws Throwable { diff --git a/src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java b/src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java index 02ba5a983..0e7ab4648 100644 --- a/src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java @@ -427,8 +427,10 @@ public void testQueryStatusErrorMessageAndErrorCodeChangeOnAsyncQuery() throws S await() .atMost(Duration.ofSeconds(10)) .until(() -> sfResultSet.getStatusV2().getStatus(), equalTo(QueryStatus.RUNNING)); + + // it may take more time to finish the test when running in parallel in CI builds await() - .atMost(Duration.ofSeconds(50)) + .atMost(Duration.ofSeconds(360)) .until(() -> sfResultSet.getStatusV2().getStatus(), equalTo(QueryStatus.SUCCESS)); } } From 184d6ffbc16b9a922537f9a5529ec2f7199e110d Mon Sep 17 00:00:00 2001 From: John Yun <140559986+sfc-gh-ext-simba-jy@users.noreply.github.com> Date: Fri, 21 Jun 2024 02:35:01 +0900 Subject: [PATCH 04/10] SNOW-1016470: Increase code coverage in JDBC part1 (#1759) --- .../client/core/ExecTimeTelemetryData.java | 2 +- .../java/net/snowflake/client/TestUtil.java | 16 +++ .../config/SFClientConfigParserTest.java | 1 + .../core/ExecTimeTelemetryDataTest.java | 84 ++++++++++++++ .../client/core/QueryContextCacheTest.java | 8 ++ .../client/core/SQLInputOutputTest.java | 42 +++++++ .../client/core/bind/BindExceptionTest.java | 23 
++++ .../snowflake/client/jdbc/BaseJDBCTest.java | 5 + .../client/jdbc/ResultSetAlreadyClosedIT.java | 37 +++++- .../client/jdbc/ResultSetLatestIT.java | 62 +++++++++- .../client/jdbc/SnowflakeTypeTest.java | 108 ++++++++++++++++++ .../storage/SnowflakeAzureClientLatestIT.java | 14 ++- ...ogicalConnectionAlreadyClosedLatestIT.java | 1 + .../pooling/LogicalConnectionLatestIT.java | 77 +++++++++++++ 14 files changed, 466 insertions(+), 14 deletions(-) create mode 100644 src/test/java/net/snowflake/client/core/ExecTimeTelemetryDataTest.java create mode 100644 src/test/java/net/snowflake/client/core/SQLInputOutputTest.java create mode 100644 src/test/java/net/snowflake/client/core/bind/BindExceptionTest.java create mode 100644 src/test/java/net/snowflake/client/jdbc/SnowflakeTypeTest.java diff --git a/src/main/java/net/snowflake/client/core/ExecTimeTelemetryData.java b/src/main/java/net/snowflake/client/core/ExecTimeTelemetryData.java index a9d40a054..91d45f29f 100644 --- a/src/main/java/net/snowflake/client/core/ExecTimeTelemetryData.java +++ b/src/main/java/net/snowflake/client/core/ExecTimeTelemetryData.java @@ -154,7 +154,7 @@ public String generateTelemetry() { value.put("ProcessResultChunkStart", this.processResultChunk.getStart()); value.put("ProcessResultChunkEnd", this.processResultChunk.getEnd()); value.put("CreateResultSetStart", this.createResultSet.getStart()); - value.put("CreatResultSetEnd", this.createResultSet.getEnd()); + value.put("CreateResultSetEnd", this.createResultSet.getEnd()); value.put("QueryEnd", this.query.getEnd()); value.put("BatchID", this.batchId); value.put("QueryID", this.queryId); diff --git a/src/test/java/net/snowflake/client/TestUtil.java b/src/test/java/net/snowflake/client/TestUtil.java index afed53dd7..76487bcb4 100644 --- a/src/test/java/net/snowflake/client/TestUtil.java +++ b/src/test/java/net/snowflake/client/TestUtil.java @@ -5,9 +5,12 @@ import static org.hamcrest.CoreMatchers.is; import static 
org.hamcrest.MatcherAssert.assertThat; +import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; +import java.sql.SQLException; import java.sql.Statement; import java.util.Arrays; import java.util.List; @@ -128,4 +131,17 @@ public static void withRandomSchema( statement.execute("DROP SCHEMA " + customSchema); } } + + public interface MethodRaisesSQLException { + void run() throws SQLException; + } + + public static void expectSnowflakeLoggedFeatureNotSupportedException(MethodRaisesSQLException f) { + try { + f.run(); + fail("must raise exception"); + } catch (SQLException ex) { + assertEquals(ex.getClass().getSimpleName(), "SnowflakeLoggedFeatureNotSupportedException"); + } + } } diff --git a/src/test/java/net/snowflake/client/config/SFClientConfigParserTest.java b/src/test/java/net/snowflake/client/config/SFClientConfigParserTest.java index 413b732ff..a00784f68 100644 --- a/src/test/java/net/snowflake/client/config/SFClientConfigParserTest.java +++ b/src/test/java/net/snowflake/client/config/SFClientConfigParserTest.java @@ -47,6 +47,7 @@ public void testLoadSFClientConfigValidPath() throws IOException { SFClientConfigParser.loadSFClientConfig(configFilePath.toString()); assertEquals("info", actualConfig.getCommonProps().getLogLevel()); assertEquals("/jdbc.log", actualConfig.getCommonProps().getLogPath()); + assertEquals("config.json", actualConfig.getConfigFilePath()); } @Test diff --git a/src/test/java/net/snowflake/client/core/ExecTimeTelemetryDataTest.java b/src/test/java/net/snowflake/client/core/ExecTimeTelemetryDataTest.java new file mode 100644 index 000000000..f7ad06b46 --- /dev/null +++ b/src/test/java/net/snowflake/client/core/ExecTimeTelemetryDataTest.java @@ -0,0 +1,84 @@ +package net.snowflake.client.core; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static 
org.junit.Assert.assertNull; + +import net.minidev.json.JSONObject; +import net.minidev.json.parser.JSONParser; +import net.minidev.json.parser.ParseException; +import net.snowflake.client.jdbc.telemetryOOB.TelemetryService; +import org.junit.Test; + +public class ExecTimeTelemetryDataTest { + + @Test + public void testExecTimeTelemetryData() throws ParseException { + ExecTimeTelemetryData execTimeTelemetryData = new ExecTimeTelemetryData(); + execTimeTelemetryData.sendData = true; + execTimeTelemetryData.setBindStart(); + execTimeTelemetryData.setOCSPStatus(true); + execTimeTelemetryData.setBindEnd(); + execTimeTelemetryData.setHttpClientStart(); + execTimeTelemetryData.setHttpClientEnd(); + execTimeTelemetryData.setGzipStart(); + execTimeTelemetryData.setGzipEnd(); + execTimeTelemetryData.setQueryEnd(); + execTimeTelemetryData.setQueryId("queryid"); + execTimeTelemetryData.setProcessResultChunkStart(); + execTimeTelemetryData.setProcessResultChunkEnd(); + execTimeTelemetryData.setResponseIOStreamStart(); + execTimeTelemetryData.setResponseIOStreamEnd(); + execTimeTelemetryData.setCreateResultSetStart(); + execTimeTelemetryData.setCreateResultSetEnd(); + execTimeTelemetryData.incrementRetryCount(); + execTimeTelemetryData.setRequestId("mockId"); + execTimeTelemetryData.addRetryLocation("retry"); + + String telemetry = execTimeTelemetryData.generateTelemetry(); + JSONParser parser = new JSONParser(JSONParser.MODE_JSON_SIMPLE); + JSONObject json = (JSONObject) parser.parse(telemetry); + assertNotNull(json.get("BindStart")); + assertNotNull(json.get("BindEnd")); + assertEquals(json.get("ocspEnabled"), true); + assertNotNull(json.get("HttpClientStart")); + assertNotNull(json.get("HttpClientEnd")); + assertNotNull(json.get("GzipStart")); + assertNotNull(json.get("GzipEnd")); + assertNotNull(json.get("QueryEnd")); + assertEquals(json.get("QueryID"), "queryid"); + assertNotNull(json.get("ProcessResultChunkStart")); + assertNotNull(json.get("ProcessResultChunkEnd")); + 
assertNotNull(json.get("ResponseIOStreamStart")); + assertNotNull(json.get("CreateResultSetStart")); + assertNotNull(json.get("CreateResultSetEnd")); + assertNotNull(json.get("ElapsedQueryTime")); + assertNotNull(json.get("ElapsedResultProcessTime")); + assertNull(json.get("QueryFunction")); + assertNull(json.get("BatchID")); + assertEquals(((Long) json.get("RetryCount")).intValue(), 1); + assertEquals(json.get("RequestID"), "mockId"); + assertEquals(json.get("RetryLocations"), "retry"); + assertEquals(json.get("Urgent"), true); + assertEquals(json.get("eventType"), "ExecutionTimeRecord"); + } + + @Test + public void testRetryLocation() throws ParseException { + TelemetryService.enableHTAP(); + ExecTimeTelemetryData execTimeTelemetryData = + new ExecTimeTelemetryData("queryFunction", "batchId"); + execTimeTelemetryData.addRetryLocation("hello"); + execTimeTelemetryData.addRetryLocation("world"); + execTimeTelemetryData.sendData = true; + String telemetry = execTimeTelemetryData.generateTelemetry(); + + JSONParser parser = new JSONParser(JSONParser.MODE_JSON_SIMPLE); + JSONObject json = (JSONObject) parser.parse(telemetry); + assertEquals(json.get("QueryFunction"), "queryFunction"); + assertEquals(json.get("BatchID"), "batchId"); + assertNotNull(json.get("QueryStart")); + assertEquals(json.get("RetryLocations"), "hello, world"); + TelemetryService.disableHTAP(); + } +} diff --git a/src/test/java/net/snowflake/client/core/QueryContextCacheTest.java b/src/test/java/net/snowflake/client/core/QueryContextCacheTest.java index cd841b474..862dd1c40 100644 --- a/src/test/java/net/snowflake/client/core/QueryContextCacheTest.java +++ b/src/test/java/net/snowflake/client/core/QueryContextCacheTest.java @@ -6,6 +6,9 @@ import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertEquals; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; import org.junit.Test; @@ -217,6 +220,11 
@@ public void testSerializeRequestAndDeserializeResponseDataWithNullContext() thro qcc.deserializeQueryContextDTO(requestData); assertCacheDataWithContext(null); + + QueryContextCache mockQcc = spy(qcc); + mockQcc.deserializeQueryContextDTO(null); + verify(mockQcc).clearCache(); + verify(mockQcc, times(2)).logCacheEntries(); } private void assertCacheData() { diff --git a/src/test/java/net/snowflake/client/core/SQLInputOutputTest.java b/src/test/java/net/snowflake/client/core/SQLInputOutputTest.java new file mode 100644 index 000000000..346d43c34 --- /dev/null +++ b/src/test/java/net/snowflake/client/core/SQLInputOutputTest.java @@ -0,0 +1,42 @@ +package net.snowflake.client.core; + +import static net.snowflake.client.TestUtil.expectSnowflakeLoggedFeatureNotSupportedException; +import static org.mockito.Mockito.mock; + +import java.sql.SQLData; +import org.junit.Test; + +public class SQLInputOutputTest { + + @Test + public void testBaseSQLUnSupportedException() { + BaseSqlInput sqlInput = new ArrowSqlInput(null, null, null, null); + expectSnowflakeLoggedFeatureNotSupportedException(sqlInput::readCharacterStream); + expectSnowflakeLoggedFeatureNotSupportedException(sqlInput::readAsciiStream); + expectSnowflakeLoggedFeatureNotSupportedException(sqlInput::readBinaryStream); + expectSnowflakeLoggedFeatureNotSupportedException(sqlInput::readRef); + expectSnowflakeLoggedFeatureNotSupportedException(sqlInput::readBlob); + expectSnowflakeLoggedFeatureNotSupportedException(sqlInput::readClob); + expectSnowflakeLoggedFeatureNotSupportedException(sqlInput::readArray); + expectSnowflakeLoggedFeatureNotSupportedException(sqlInput::readURL); + expectSnowflakeLoggedFeatureNotSupportedException(sqlInput::readNClob); + expectSnowflakeLoggedFeatureNotSupportedException(sqlInput::readNString); + expectSnowflakeLoggedFeatureNotSupportedException(sqlInput::readSQLXML); + expectSnowflakeLoggedFeatureNotSupportedException(sqlInput::readRowId); + } + + @Test + public void 
testJsonSqlOutPutUnSupportedTest() { + JsonSqlOutput sqloutput = new JsonSqlOutput(mock(SQLData.class), mock(SFBaseSession.class)); + expectSnowflakeLoggedFeatureNotSupportedException(() -> sqloutput.writeRef(null)); + expectSnowflakeLoggedFeatureNotSupportedException(() -> sqloutput.writeBlob(null)); + expectSnowflakeLoggedFeatureNotSupportedException(() -> sqloutput.writeClob(null)); + expectSnowflakeLoggedFeatureNotSupportedException(() -> sqloutput.writeStruct(null)); + expectSnowflakeLoggedFeatureNotSupportedException(() -> sqloutput.writeArray(null)); + expectSnowflakeLoggedFeatureNotSupportedException(() -> sqloutput.writeURL(null)); + expectSnowflakeLoggedFeatureNotSupportedException(() -> sqloutput.writeNString(null)); + expectSnowflakeLoggedFeatureNotSupportedException(() -> sqloutput.writeNClob(null)); + expectSnowflakeLoggedFeatureNotSupportedException(() -> sqloutput.writeRowId(null)); + expectSnowflakeLoggedFeatureNotSupportedException(() -> sqloutput.writeSQLXML(null)); + } +} diff --git a/src/test/java/net/snowflake/client/core/bind/BindExceptionTest.java b/src/test/java/net/snowflake/client/core/bind/BindExceptionTest.java new file mode 100644 index 000000000..f3ae88eee --- /dev/null +++ b/src/test/java/net/snowflake/client/core/bind/BindExceptionTest.java @@ -0,0 +1,23 @@ +package net.snowflake.client.core.bind; + +import static org.junit.Assert.assertEquals; + +import net.snowflake.client.jdbc.telemetry.TelemetryField; +import org.junit.Test; + +public class BindExceptionTest { + + @Test + public void testBindExceptionType() { + assertEquals(BindException.Type.SERIALIZATION.field, TelemetryField.FAILED_BIND_SERIALIZATION); + assertEquals(BindException.Type.UPLOAD.field, TelemetryField.FAILED_BIND_UPLOAD); + assertEquals(BindException.Type.OTHER.field, TelemetryField.FAILED_BIND_OTHER); + } + + @Test + public void testBindExceptionConstructor() { + BindException exception = new BindException("testException", BindException.Type.SERIALIZATION); + 
assertEquals(exception.getMessage(), "testException"); + assertEquals(exception.type.field, TelemetryField.FAILED_BIND_SERIALIZATION); + } +} diff --git a/src/test/java/net/snowflake/client/jdbc/BaseJDBCTest.java b/src/test/java/net/snowflake/client/jdbc/BaseJDBCTest.java index b8bacc82b..a326dea12 100644 --- a/src/test/java/net/snowflake/client/jdbc/BaseJDBCTest.java +++ b/src/test/java/net/snowflake/client/jdbc/BaseJDBCTest.java @@ -35,6 +35,7 @@ import javax.xml.transform.Result; import javax.xml.transform.Source; import net.snowflake.client.AbstractDriverIT; +import net.snowflake.client.core.SFException; public class BaseJDBCTest extends AbstractDriverIT { // Test UUID unique per session @@ -44,6 +45,10 @@ protected interface MethodRaisesSQLException { void run() throws SQLException; } + protected interface MethodRaisesSFException { + void run() throws SFException; + } + protected interface MethodRaisesSQLClientInfoException { void run() throws SQLClientInfoException; } diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetAlreadyClosedIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetAlreadyClosedIT.java index 292d71949..d2939cc8a 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetAlreadyClosedIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetAlreadyClosedIT.java @@ -22,8 +22,9 @@ public class ResultSetAlreadyClosedIT extends BaseJDBCTest { @Test public void testQueryResultSetAlreadyClosed() throws Throwable { try (Connection connection = getConnection(); - Statement statement = connection.createStatement(); - ResultSet resultSet = statement.executeQuery("select 1")) { + Statement statement = connection.createStatement()) { + ResultSet resultSet = statement.executeQuery("select 1"); + resultSet.close(); checkAlreadyClosed(resultSet); } } @@ -44,9 +45,18 @@ public void testMetadataResultSetAlreadyClosed() throws Throwable { } @Test - public void testEmptyResultSetAlreadyClosed() throws Throwable { - try (ResultSet 
resultSet = new SnowflakeResultSetV1.EmptyResultSet()) { + public void testResultSetAlreadyClosed() throws Throwable { + try (Connection connection = getConnection(); + Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery("SELECT 1")) { checkAlreadyClosed(resultSet); + } + } + + @Test + public void testEmptyResultSetAlreadyClosed() throws Throwable { + try (SnowflakeResultSetV1.EmptyResultSet resultSet = + new SnowflakeResultSetV1.EmptyResultSet()) { checkAlreadyClosedEmpty(resultSet); } } @@ -68,7 +78,6 @@ private void checkAlreadyClosed(ResultSet resultSet) throws SQLException { expectResultSetAlreadyClosedException(() -> resultSet.getDouble(1)); expectResultSetAlreadyClosedException(() -> resultSet.getBigDecimal(1)); expectResultSetAlreadyClosedException(() -> resultSet.getBytes(1)); - expectResultSetAlreadyClosedException(() -> resultSet.getString(1)); expectResultSetAlreadyClosedException(() -> resultSet.getDate(1)); expectResultSetAlreadyClosedException(() -> resultSet.getTime(1)); expectResultSetAlreadyClosedException(() -> resultSet.getTimestamp(1)); @@ -105,7 +114,13 @@ private void checkAlreadyClosed(ResultSet resultSet) throws SQLException { expectResultSetAlreadyClosedException(() -> resultSet.getBigDecimal("col1", 38)); expectResultSetAlreadyClosedException(resultSet::getWarnings); + expectResultSetAlreadyClosedException( + () -> resultSet.unwrap(SnowflakeBaseResultSet.class).getWarnings()); + expectResultSetAlreadyClosedException(resultSet::clearWarnings); + expectResultSetAlreadyClosedException( + () -> resultSet.unwrap(SnowflakeBaseResultSet.class).clearWarnings()); + expectResultSetAlreadyClosedException(resultSet::getMetaData); expectResultSetAlreadyClosedException(() -> resultSet.findColumn("col1")); @@ -119,11 +134,20 @@ private void checkAlreadyClosed(ResultSet resultSet) throws SQLException { expectResultSetAlreadyClosedException( () -> resultSet.setFetchDirection(ResultSet.FETCH_FORWARD)); 
expectResultSetAlreadyClosedException(() -> resultSet.setFetchSize(10)); + expectResultSetAlreadyClosedException( + () -> resultSet.unwrap(SnowflakeBaseResultSet.class).setFetchSize(10)); + expectResultSetAlreadyClosedException(resultSet::getFetchDirection); expectResultSetAlreadyClosedException(resultSet::getFetchSize); expectResultSetAlreadyClosedException(resultSet::getType); expectResultSetAlreadyClosedException(resultSet::getConcurrency); + expectResultSetAlreadyClosedException( + resultSet.unwrap(SnowflakeBaseResultSet.class)::getConcurrency); + expectResultSetAlreadyClosedException(resultSet::getHoldability); + expectResultSetAlreadyClosedException( + resultSet.unwrap(SnowflakeBaseResultSet.class)::getHoldability); + expectResultSetAlreadyClosedException(resultSet::getStatement); } @@ -133,7 +157,8 @@ private void checkAlreadyClosed(ResultSet resultSet) throws SQLException { * @param resultSet * @throws SQLException */ - private void checkAlreadyClosedEmpty(ResultSet resultSet) throws SQLException { + private void checkAlreadyClosedEmpty(SnowflakeResultSetV1.EmptyResultSet resultSet) + throws SQLException { resultSet.close(); resultSet.close(); // second close won't raise exception assertTrue(resultSet.isClosed()); diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetLatestIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetLatestIT.java index add205145..fb55a9780 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetLatestIT.java @@ -3,6 +3,7 @@ */ package net.snowflake.client.jdbc; +import static net.snowflake.client.TestUtil.expectSnowflakeLoggedFeatureNotSupportedException; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertArrayEquals; @@ -27,7 +28,6 @@ import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; -import 
java.sql.SQLFeatureNotSupportedException; import java.sql.Statement; import java.sql.Time; import java.sql.Timestamp; @@ -808,14 +808,64 @@ public void testCallStatementType() throws SQLException { * implemented for synchronous queries * */ @Test - public void testNewFeaturesNotSupported() throws SQLException { + public void testNewFeaturesNotSupportedExeceptions() throws SQLException { + try (Connection con = init(); + Statement statement = con.createStatement(); + ResultSet rs = statement.executeQuery("select 1")) { + expectSnowflakeLoggedFeatureNotSupportedException( + rs.unwrap(SnowflakeResultSet.class)::getQueryErrorMessage); + expectSnowflakeLoggedFeatureNotSupportedException( + rs.unwrap(SnowflakeResultSet.class)::getStatus); + expectSnowflakeLoggedFeatureNotSupportedException(() -> rs.getArray(1)); + expectSnowflakeLoggedFeatureNotSupportedException( + () -> rs.unwrap(SnowflakeBaseResultSet.class).getList(1, String.class)); + expectSnowflakeLoggedFeatureNotSupportedException( + () -> rs.unwrap(SnowflakeBaseResultSet.class).getArray(1, String.class)); + expectSnowflakeLoggedFeatureNotSupportedException( + () -> rs.unwrap(SnowflakeBaseResultSet.class).getMap(1, String.class)); + + expectSnowflakeLoggedFeatureNotSupportedException( + () -> rs.unwrap(SnowflakeBaseResultSet.class).getUnicodeStream(1)); + expectSnowflakeLoggedFeatureNotSupportedException( + () -> rs.unwrap(SnowflakeBaseResultSet.class).getUnicodeStream("column1")); + expectSnowflakeLoggedFeatureNotSupportedException( + () -> + rs.unwrap(SnowflakeBaseResultSet.class) + .updateAsciiStream("column1", new FakeInputStream(), 5L)); + expectSnowflakeLoggedFeatureNotSupportedException( + () -> + rs.unwrap(SnowflakeBaseResultSet.class) + .updateBinaryStream("column1", new FakeInputStream(), 5L)); + expectSnowflakeLoggedFeatureNotSupportedException( + () -> + rs.unwrap(SnowflakeBaseResultSet.class) + .updateCharacterStream("column1", new FakeReader(), 5L)); + + 
expectSnowflakeLoggedFeatureNotSupportedException( + () -> + rs.unwrap(SnowflakeBaseResultSet.class) + .updateAsciiStream(1, new FakeInputStream(), 5L)); + expectSnowflakeLoggedFeatureNotSupportedException( + () -> + rs.unwrap(SnowflakeBaseResultSet.class) + .updateBinaryStream(1, new FakeInputStream(), 5L)); + expectSnowflakeLoggedFeatureNotSupportedException( + () -> + rs.unwrap(SnowflakeBaseResultSet.class) + .updateCharacterStream(1, new FakeReader(), 5L)); + } + } + + @Test + public void testInvalidUnWrap() throws SQLException { try (Connection con = init(); ResultSet rs = con.createStatement().executeQuery("select 1")) { try { - rs.unwrap(SnowflakeResultSet.class).getQueryErrorMessage(); - } catch (SQLFeatureNotSupportedException ex) { - // catch SQLFeatureNotSupportedException - assertEquals("This function is only supported for asynchronous queries.", ex.getMessage()); + rs.unwrap(SnowflakeUtil.class); + } catch (SQLException ex) { + assertEquals( + ex.getMessage(), + "net.snowflake.client.jdbc.SnowflakeResultSetV1 not unwrappable from net.snowflake.client.jdbc.SnowflakeUtil"); } } } diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeTypeTest.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeTypeTest.java new file mode 100644 index 000000000..29c58b787 --- /dev/null +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeTypeTest.java @@ -0,0 +1,108 @@ +package net.snowflake.client.jdbc; + +import static net.snowflake.client.jdbc.SnowflakeType.convertStringToType; +import static net.snowflake.client.jdbc.SnowflakeType.getJavaType; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertThrows; + +import java.math.BigDecimal; +import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; +import java.sql.Types; +import org.junit.Test; + +public class SnowflakeTypeTest { + + @Test + public void testSnowflakeType() { + 
assertEquals(getJavaType(SnowflakeType.CHAR, false), SnowflakeType.JavaDataType.JAVA_STRING); + assertEquals(getJavaType(SnowflakeType.INTEGER, false), SnowflakeType.JavaDataType.JAVA_LONG); + assertEquals( + getJavaType(SnowflakeType.FIXED, false), SnowflakeType.JavaDataType.JAVA_BIGDECIMAL); + assertEquals( + getJavaType(SnowflakeType.TIMESTAMP, false), SnowflakeType.JavaDataType.JAVA_TIMESTAMP); + assertEquals(getJavaType(SnowflakeType.TIME, false), SnowflakeType.JavaDataType.JAVA_TIMESTAMP); + assertEquals( + getJavaType(SnowflakeType.TIMESTAMP_LTZ, false), SnowflakeType.JavaDataType.JAVA_TIMESTAMP); + assertEquals( + getJavaType(SnowflakeType.TIMESTAMP_NTZ, false), SnowflakeType.JavaDataType.JAVA_TIMESTAMP); + assertEquals( + getJavaType(SnowflakeType.TIMESTAMP_TZ, false), SnowflakeType.JavaDataType.JAVA_TIMESTAMP); + assertEquals(getJavaType(SnowflakeType.DATE, false), SnowflakeType.JavaDataType.JAVA_TIMESTAMP); + assertEquals( + getJavaType(SnowflakeType.BOOLEAN, false), SnowflakeType.JavaDataType.JAVA_BOOLEAN); + assertEquals(getJavaType(SnowflakeType.VECTOR, false), SnowflakeType.JavaDataType.JAVA_STRING); + assertEquals(getJavaType(SnowflakeType.BINARY, false), SnowflakeType.JavaDataType.JAVA_BYTES); + assertEquals(getJavaType(SnowflakeType.ANY, false), SnowflakeType.JavaDataType.JAVA_OBJECT); + assertEquals(getJavaType(SnowflakeType.OBJECT, true), SnowflakeType.JavaDataType.JAVA_OBJECT); + assertEquals(getJavaType(SnowflakeType.OBJECT, false), SnowflakeType.JavaDataType.JAVA_STRING); + assertEquals( + getJavaType(SnowflakeType.GEOMETRY, false), SnowflakeType.JavaDataType.JAVA_STRING); + } + + @Test + public void testConvertStringToType() { + assertEquals(convertStringToType(null), Types.NULL); + assertEquals(convertStringToType("decimal"), Types.DECIMAL); + assertEquals(convertStringToType("int"), Types.INTEGER); + assertEquals(convertStringToType("integer"), Types.INTEGER); + assertEquals(convertStringToType("byteint"), Types.INTEGER); + 
assertEquals(convertStringToType("smallint"), Types.SMALLINT); + assertEquals(convertStringToType("bigint"), Types.BIGINT); + assertEquals(convertStringToType("double"), Types.DOUBLE); + assertEquals(convertStringToType("double precision"), Types.DOUBLE); + assertEquals(convertStringToType("real"), Types.REAL); + assertEquals(convertStringToType("char"), Types.CHAR); + assertEquals(convertStringToType("character"), Types.CHAR); + assertEquals(convertStringToType("varbinary"), Types.VARBINARY); + assertEquals(convertStringToType("boolean"), Types.BOOLEAN); + assertEquals(convertStringToType("date"), Types.DATE); + assertEquals(convertStringToType("time"), Types.TIME); + assertEquals(convertStringToType("timestamp"), Types.TIMESTAMP); + assertEquals(convertStringToType("datetime"), Types.TIMESTAMP); + assertEquals(convertStringToType("timestamp_ntz"), Types.TIMESTAMP); + assertEquals(convertStringToType("timestamp_ltz"), Types.TIMESTAMP_WITH_TIMEZONE); + assertEquals(convertStringToType("timestamp_tz"), Types.TIMESTAMP_WITH_TIMEZONE); + assertEquals(convertStringToType("variant"), Types.OTHER); + assertEquals(convertStringToType("object"), Types.JAVA_OBJECT); + assertEquals(convertStringToType("vector"), SnowflakeUtil.EXTRA_TYPES_VECTOR); + assertEquals(convertStringToType("array"), Types.ARRAY); + assertEquals(convertStringToType("default"), Types.OTHER); + } + + @Test + public void testJavaSQLTypeFind() { + assertNull(SnowflakeType.JavaSQLType.find(200000)); + } + + @Test + public void testJavaSQLTypeLexicalValue() { + assertEquals(SnowflakeType.lexicalValue(1.0f, null, null, null, null), "0x1.0p0"); + assertEquals(SnowflakeType.lexicalValue(new BigDecimal(100.0), null, null, null, null), "100"); + assertEquals( + SnowflakeType.lexicalValue("random".getBytes(), null, null, null, null), "72616E646F6D"); + } + + @Test + public void testJavaTypeToSFType() throws SnowflakeSQLException { + assertEquals(SnowflakeType.javaTypeToSFType(0, null), SnowflakeType.ANY); + 
assertThrows( + SnowflakeSQLLoggedException.class, + () -> { + SnowflakeType.javaTypeToSFType(2000000, null); + }); + } + + @Test + public void testJavaTypeToClassName() throws SQLException { + assertEquals(SnowflakeType.javaTypeToClassName(Types.DECIMAL), BigDecimal.class.getName()); + assertEquals(SnowflakeType.javaTypeToClassName(Types.TIME), java.sql.Time.class.getName()); + assertEquals(SnowflakeType.javaTypeToClassName(Types.BOOLEAN), Boolean.class.getName()); + assertThrows( + SQLFeatureNotSupportedException.class, + () -> { + SnowflakeType.javaTypeToClassName(-2000000); + }); + } +} diff --git a/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClientLatestIT.java b/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClientLatestIT.java index c667b7a3f..93539005a 100644 --- a/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClientLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClientLatestIT.java @@ -1,8 +1,11 @@ package net.snowflake.client.jdbc.cloud.storage; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertThrows; import static org.junit.Assert.fail; +import static org.mockito.Mockito.spy; +import com.microsoft.azure.storage.blob.ListBlobItem; import java.sql.Connection; import java.sql.SQLException; import net.snowflake.client.ConditionalIgnoreRule; @@ -17,7 +20,6 @@ import org.junit.Test; public class SnowflakeAzureClientLatestIT extends BaseJDBCTest { - @Test @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) public void testAzureClientSetupInvalidEncryptionKeySize() throws SQLException { @@ -37,4 +39,14 @@ public void testAzureClientSetupInvalidEncryptionKeySize() throws SQLException { } } } + + @Test + public void testCloudExceptionTest() { + Iterable mockList = null; + AzureObjectSummariesIterator iterator = new AzureObjectSummariesIterator(mockList); + AzureObjectSummariesIterator spyIterator 
= spy(iterator); + UnsupportedOperationException ex = + assertThrows(UnsupportedOperationException.class, () -> spyIterator.remove()); + assertEquals(ex.getMessage(), "remove() method not supported"); + } } diff --git a/src/test/java/net/snowflake/client/pooling/LogicalConnectionAlreadyClosedLatestIT.java b/src/test/java/net/snowflake/client/pooling/LogicalConnectionAlreadyClosedLatestIT.java index ac50f7608..ce93928ac 100644 --- a/src/test/java/net/snowflake/client/pooling/LogicalConnectionAlreadyClosedLatestIT.java +++ b/src/test/java/net/snowflake/client/pooling/LogicalConnectionAlreadyClosedLatestIT.java @@ -49,5 +49,6 @@ public void testLogicalConnectionAlreadyClosed() throws SQLException { expectConnectionAlreadyClosedException(() -> logicalConnection.setSchema("fakedb")); expectConnectionAlreadyClosedException( () -> logicalConnection.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED)); + expectConnectionAlreadyClosedException(() -> logicalConnection.createArrayOf("faketype", null)); } } diff --git a/src/test/java/net/snowflake/client/pooling/LogicalConnectionLatestIT.java b/src/test/java/net/snowflake/client/pooling/LogicalConnectionLatestIT.java index bf05325e0..d25cdb485 100644 --- a/src/test/java/net/snowflake/client/pooling/LogicalConnectionLatestIT.java +++ b/src/test/java/net/snowflake/client/pooling/LogicalConnectionLatestIT.java @@ -6,8 +6,14 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertThrows; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; import java.sql.CallableStatement; import java.sql.Clob; @@ -370,6 +376,77 @@ public void testDatabaseMetaData() throws SQLException { 
} } + @Test + public void testLogicalConnectionWhenPhysicalConnectionThrowsErrors() throws SQLException { + Connection connection = mock(Connection.class); + SnowflakePooledConnection snowflakePooledConnection = mock(SnowflakePooledConnection.class); + when(snowflakePooledConnection.getPhysicalConnection()).thenReturn(connection); + SQLException sqlException = new SQLException("mocking error"); + when(connection.createStatement()).thenThrow(sqlException); + when(connection.createStatement(1, 2, 3)).thenThrow(sqlException); + + when(connection.prepareStatement("mocksql")).thenThrow(sqlException); + when(connection.prepareCall("mocksql")).thenThrow(sqlException); + when(connection.prepareCall("mocksql", 1, 2, 3)).thenThrow(sqlException); + when(connection.nativeSQL("mocksql")).thenThrow(sqlException); + when(connection.getAutoCommit()).thenThrow(sqlException); + when(connection.getMetaData()).thenThrow(sqlException); + when(connection.isReadOnly()).thenThrow(sqlException); + when(connection.getCatalog()).thenThrow(sqlException); + when(connection.getTransactionIsolation()).thenThrow(sqlException); + when(connection.getWarnings()).thenThrow(sqlException); + when(connection.prepareCall("mocksql", 1, 2)).thenThrow(sqlException); + when(connection.getTypeMap()).thenThrow(sqlException); + when(connection.getHoldability()).thenThrow(sqlException); + when(connection.createClob()).thenThrow(sqlException); + when(connection.getClientInfo("mocksql")).thenThrow(sqlException); + when(connection.getClientInfo()).thenThrow(sqlException); + when(connection.createArrayOf("mock", null)).thenThrow(sqlException); + when(connection.getSchema()).thenThrow(sqlException); + when(connection.getNetworkTimeout()).thenThrow(sqlException); + when(connection.isWrapperFor(Connection.class)).thenThrow(sqlException); + + doThrow(sqlException).when(connection).setAutoCommit(false); + doThrow(sqlException).when(connection).commit(); + doThrow(sqlException).when(connection).rollback(); + 
doThrow(sqlException).when(connection).setReadOnly(false); + doThrow(sqlException).when(connection).clearWarnings(); + doThrow(sqlException).when(connection).setSchema(null); + doThrow(sqlException).when(connection).abort(null); + doThrow(sqlException).when(connection).setNetworkTimeout(null, 1); + + LogicalConnection logicalConnection = new LogicalConnection(snowflakePooledConnection); + + assertThrows(SQLException.class, logicalConnection::createStatement); + assertThrows(SQLException.class, () -> logicalConnection.createStatement(1, 2, 3)); + assertThrows(SQLException.class, () -> logicalConnection.nativeSQL("mocksql")); + assertThrows(SQLException.class, logicalConnection::getAutoCommit); + assertThrows(SQLException.class, logicalConnection::getMetaData); + assertThrows(SQLException.class, logicalConnection::isReadOnly); + assertThrows(SQLException.class, logicalConnection::getCatalog); + assertThrows(SQLException.class, logicalConnection::getTransactionIsolation); + assertThrows(SQLException.class, logicalConnection::getWarnings); + assertThrows(SQLException.class, () -> logicalConnection.prepareCall("mocksql")); + assertThrows(SQLException.class, logicalConnection::getTypeMap); + assertThrows(SQLException.class, logicalConnection::getHoldability); + assertThrows(SQLException.class, logicalConnection::createClob); + assertThrows(SQLException.class, () -> logicalConnection.getClientInfo("mocksql")); + assertThrows(SQLException.class, logicalConnection::getClientInfo); + assertThrows(SQLException.class, () -> logicalConnection.createArrayOf("mock", null)); + assertThrows(SQLException.class, logicalConnection::getSchema); + assertThrows(SQLException.class, logicalConnection::getNetworkTimeout); + assertThrows(SQLException.class, () -> logicalConnection.isWrapperFor(Connection.class)); + assertThrows(SQLException.class, () -> logicalConnection.setAutoCommit(false)); + assertThrows(SQLException.class, logicalConnection::rollback); + assertThrows(SQLException.class, 
() -> logicalConnection.setReadOnly(false)); + assertThrows(SQLException.class, logicalConnection::clearWarnings); + assertThrows(SQLException.class, () -> logicalConnection.setSchema(null)); + assertThrows(SQLException.class, () -> logicalConnection.abort(null)); + assertThrows(SQLException.class, () -> logicalConnection.setNetworkTimeout(null, 1)); + + verify(snowflakePooledConnection, times(26)).fireConnectionErrorEvent(sqlException); + } + private SnowflakeConnectionPoolDataSource setProperties( SnowflakeConnectionPoolDataSource poolDataSource) { poolDataSource.setUrl(properties.get("uri")); From ab4880c4f0e95d4437602a2b98020b03e989d763 Mon Sep 17 00:00:00 2001 From: Juan Martinez Ramirez <126511805+sfc-gh-jmartinez@users.noreply.github.com> Date: Mon, 24 Jun 2024 00:26:48 -0600 Subject: [PATCH 05/10] SNOW-1016467: Enable matrix for Azure and GCP cloud providers. (#1777) --- .github/workflows/build-test.yml | 24 ++++++++---------- .github/workflows/parameters_azure.json.gpg | Bin 0 -> 408 bytes .../net/snowflake/client/RunningOnGCP.java | 12 +++++++++ .../client/jdbc/ConnectionFipsIT.java | 6 +++++ ci/test.sh | 1 + ci/test_windows.bat | 1 + .../net/snowflake/client/RunningNotOnAWS.java | 12 +++++++++ .../snowflake/client/RunningNotOnAzure.java | 12 +++++++++ .../net/snowflake/client/RunningNotOnGCP.java | 12 +++++++++ .../client/jdbc/ConnectionLatestIT.java | 7 +++++ 10 files changed, 74 insertions(+), 13 deletions(-) create mode 100644 .github/workflows/parameters_azure.json.gpg create mode 100644 FIPS/src/test/java/net/snowflake/client/RunningOnGCP.java create mode 100644 src/test/java/net/snowflake/client/RunningNotOnAWS.java create mode 100644 src/test/java/net/snowflake/client/RunningNotOnAzure.java create mode 100644 src/test/java/net/snowflake/client/RunningNotOnGCP.java diff --git a/.github/workflows/build-test.yml b/.github/workflows/build-test.yml index b3c7c8bc2..ad3d196af 100644 --- a/.github/workflows/build-test.yml +++ 
b/.github/workflows/build-test.yml @@ -38,20 +38,19 @@ jobs: test-windows: needs: build - name: ${{ matrix.cloud }} Windows java ${{ matrix.javaVersion }} JDBC${{ matrix.additionalMavenProfile }} ${{ matrix.category }} + name: ${{ matrix.runConfig.cloud }} Windows java ${{ matrix.runConfig.javaVersion }} JDBC${{ matrix.additionalMavenProfile }} ${{ matrix.category }} runs-on: windows-latest strategy: fail-fast: false matrix: - cloud: [ 'AWS' ] - javaVersion: [ '8', '11', '17'] + runConfig: [ {cloud: 'AWS', javaVersion: '8'}, {cloud: 'GCP', javaVersion: '11'}, {cloud: 'AZURE', javaVersion: '17'}] category: ['TestCategoryResultSet,TestCategoryOthers,TestCategoryLoader', 'TestCategoryConnection,TestCategoryStatement', 'TestCategoryArrow,TestCategoryCore', 'TestCategoryFips'] - additionalMavenProfile: ['', '-Dthin-jar'] + additionalMavenProfile: [''] steps: - uses: actions/checkout@v4 - uses: actions/setup-java@v4 with: - java-version: ${{ matrix.javaVersion }} + java-version: ${{ matrix.runConfig.javaVersion }} distribution: 'temurin' cache: maven - uses: actions/setup-python@v4 @@ -62,27 +61,26 @@ jobs: shell: cmd env: PARAMETERS_SECRET: ${{ secrets.PARAMETERS_SECRET }} - CLOUD_PROVIDER: ${{ matrix.cloud }} + CLOUD_PROVIDER: ${{ matrix.runConfig.cloud }} JDBC_TEST_CATEGORY: ${{ matrix.category }} ADDITIONAL_MAVEN_PROFILE: ${{ matrix.additionalMavenProfile }} run: ci\\test_windows.bat test-mac: needs: build - name: ${{ matrix.cloud }} Mac java ${{ matrix.javaVersion }} JDBC${{ matrix.additionalMavenProfile }} ${{ matrix.category }} + name: ${{ matrix.runConfig.cloud }} Mac java ${{ matrix.runConfig.javaVersion }} JDBC${{ matrix.additionalMavenProfile }} ${{ matrix.category }} runs-on: macos-13 strategy: fail-fast: false matrix: - cloud: [ 'AWS' ] - javaVersion: [ '8', '11', '17'] + runConfig: [ {cloud: 'AWS', javaVersion: '8'}, {cloud: 'GCP', javaVersion: '11'}, {cloud: 'AZURE', javaVersion: '17'}] category: 
['TestCategoryResultSet,TestCategoryOthers,TestCategoryLoader', 'TestCategoryConnection,TestCategoryStatement', 'TestCategoryArrow,TestCategoryCore', 'TestCategoryFips'] - additionalMavenProfile: ['', '-Dthin-jar'] + additionalMavenProfile: [''] steps: - uses: actions/checkout@v4 - uses: actions/setup-java@v4 with: - java-version: ${{ matrix.javaVersion }} + java-version: ${{ matrix.runConfig.javaVersion }} distribution: 'temurin' cache: maven - uses: actions/setup-python@v4 @@ -95,7 +93,7 @@ jobs: shell: bash env: PARAMETERS_SECRET: ${{ secrets.PARAMETERS_SECRET }} - CLOUD_PROVIDER: ${{ matrix.cloud }} + CLOUD_PROVIDER: ${{ matrix.runConfig.cloud }} JDBC_TEST_CATEGORY: ${{ matrix.category }} ADDITIONAL_MAVEN_PROFILE: ${{ matrix.additionalMavenProfile }} run: /usr/local/bin/bash ./ci/test_mac.sh @@ -108,7 +106,7 @@ jobs: fail-fast: false matrix: image: [ 'jdbc-centos7-openjdk8', 'jdbc-centos7-openjdk11', 'jdbc-centos7-openjdk17' ] - cloud: [ 'AWS' ] + cloud: [ 'AWS', 'AZURE', 'GCP' ] category: ['TestCategoryResultSet,TestCategoryOthers,TestCategoryLoader', 'TestCategoryConnection,TestCategoryStatement', 'TestCategoryArrow,TestCategoryCore', 'TestCategoryFips'] additionalMavenProfile: ['', '-Dthin-jar'] steps: diff --git a/.github/workflows/parameters_azure.json.gpg b/.github/workflows/parameters_azure.json.gpg new file mode 100644 index 0000000000000000000000000000000000000000..ea6fbdb51a554341c8fd56c7194cf189c785425a GIT binary patch literal 408 zcmV;J0cZY<4Fm}T2zK4?Ssz{?nE%qi#sRsA{{$a3C#~;|2eijdZwWFgnch=ra(Pf5 z@Y?x+8lNiVyXXT;lL)@O0@+kRk$|)A{C|>p;BlXx!E^(?hK1r~Clxm64G%2Q;Jr>j zZVVzaK#q}nXf5W!;vKIcywMkq|Cs_`h%vwQ(&ESG8?0iLtiR_f6;LBc=joyokd2~I zrM}s&j0``@^*wj`(IkZcEZdg^6Rc8^8%wZfyoDTGJR>Q1&laRSM!vZTP)Zyn0;#esG@hq52T~bW)*|80Je?f?be-ucovXwqV;1_)R*o*A7Az$e~T8# zdiT3Eer!fllrK%f!6+-zQIG{VD)Z{e=*KxDe6y~Ms@w7aV-vbMf(G5H75QuD^Rjol ClFH!# literal 0 HcmV?d00001 diff --git a/FIPS/src/test/java/net/snowflake/client/RunningOnGCP.java 
b/FIPS/src/test/java/net/snowflake/client/RunningOnGCP.java new file mode 100644 index 000000000..c902dc5f9 --- /dev/null +++ b/FIPS/src/test/java/net/snowflake/client/RunningOnGCP.java @@ -0,0 +1,12 @@ +/* + * Copyright (c) 2012-2024 Snowflake Computing Inc. All right reserved. + */ +package net.snowflake.client; + +/** Run tests only on specified cloud provider or ignore */ +public class RunningOnGCP implements ConditionalIgnoreRule.IgnoreCondition { + public boolean isSatisfied() { + String cloudProvider = TestUtil.systemGetEnv("CLOUD_PROVIDER"); + return cloudProvider != null && cloudProvider.equalsIgnoreCase("GCP"); + } +} diff --git a/FIPS/src/test/java/net/snowflake/client/jdbc/ConnectionFipsIT.java b/FIPS/src/test/java/net/snowflake/client/jdbc/ConnectionFipsIT.java index a10924432..c1509a6a8 100644 --- a/FIPS/src/test/java/net/snowflake/client/jdbc/ConnectionFipsIT.java +++ b/FIPS/src/test/java/net/snowflake/client/jdbc/ConnectionFipsIT.java @@ -21,6 +21,7 @@ import javax.net.ssl.HttpsURLConnection; import net.snowflake.client.AbstractDriverIT; import net.snowflake.client.ConditionalIgnoreRule; +import net.snowflake.client.RunningOnGCP; import net.snowflake.client.RunningOnGithubActions; import net.snowflake.client.category.TestCategoryFips; import net.snowflake.client.core.SecurityUtil; @@ -289,7 +290,12 @@ public void testConnectUsingKeyPair() throws Exception { DriverManager.getConnection(uri, properties).close(); } + /** + * Test case for connecting with FIPS and executing a query. 
+ * Currently ignored execution on GCP due to exception thrown "SSlException Could not generate XDH keypair" + */ @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGCP.class) public void connectWithFipsAndQuery() throws SQLException { try (Connection con = getConnection()) { Statement statement = con.createStatement(); diff --git a/ci/test.sh b/ci/test.sh index 49a999d41..03c66c502 100755 --- a/ci/test.sh +++ b/ci/test.sh @@ -58,6 +58,7 @@ for name in "${!TARGET_TEST_IMAGES[@]}"; do -e BUILD_NUMBER \ -e JDBC_TEST_CATEGORY \ -e ADDITIONAL_MAVEN_PROFILE \ + -e CLOUD_PROVIDER \ -e is_old_driver \ --add-host=snowflake.reg.local:${IP_ADDR} \ --add-host=s3testaccount.reg.local:${IP_ADDR} \ diff --git a/ci/test_windows.bat b/ci/test_windows.bat index 4f32f7564..4a5a8ebe3 100644 --- a/ci/test_windows.bat +++ b/ci/test_windows.bat @@ -46,6 +46,7 @@ echo [INFO] Database: %SNOWFLAKE_TEST_DATABASE% echo [INFO] Schema: %SNOWFLAKE_TEST_SCHEMA% echo [INFO] Warehouse: %SNOWFLAKE_TEST_WAREHOUSE% echo [INFO] Role: %SNOWFLAKE_TEST_ROLE% +echo [INFO] PROVIDER: %CLOUD_PROVIDER% echo [INFO] Creating schema %SNOWFLAKE_TEST_SCHEMA% pushd %GITHUB_WORKSPACE%\ci\container diff --git a/src/test/java/net/snowflake/client/RunningNotOnAWS.java b/src/test/java/net/snowflake/client/RunningNotOnAWS.java new file mode 100644 index 000000000..70f54ab8f --- /dev/null +++ b/src/test/java/net/snowflake/client/RunningNotOnAWS.java @@ -0,0 +1,12 @@ +/* + * Copyright (c) 2012-2024 Snowflake Computing Inc. All right reserved. 
+ */ +package net.snowflake.client; + +/** Run tests only on specified cloud provider or ignore */ +public class RunningNotOnAWS implements ConditionalIgnoreRule.IgnoreCondition { + public boolean isSatisfied() { + String cloudProvider = TestUtil.systemGetEnv("CLOUD_PROVIDER"); + return cloudProvider != null && !cloudProvider.equalsIgnoreCase("AWS"); + } +} diff --git a/src/test/java/net/snowflake/client/RunningNotOnAzure.java b/src/test/java/net/snowflake/client/RunningNotOnAzure.java new file mode 100644 index 000000000..e2a00966c --- /dev/null +++ b/src/test/java/net/snowflake/client/RunningNotOnAzure.java @@ -0,0 +1,12 @@ +/* + * Copyright (c) 2012-2024 Snowflake Computing Inc. All right reserved. + */ +package net.snowflake.client; + +/** Run tests only on specified cloud provider or ignore */ +public class RunningNotOnAzure implements ConditionalIgnoreRule.IgnoreCondition { + public boolean isSatisfied() { + String cloudProvider = TestUtil.systemGetEnv("CLOUD_PROVIDER"); + return cloudProvider != null && !cloudProvider.equalsIgnoreCase("Azure"); + } +} diff --git a/src/test/java/net/snowflake/client/RunningNotOnGCP.java b/src/test/java/net/snowflake/client/RunningNotOnGCP.java new file mode 100644 index 000000000..7a5c7aafb --- /dev/null +++ b/src/test/java/net/snowflake/client/RunningNotOnGCP.java @@ -0,0 +1,12 @@ +/* + * Copyright (c) 2012-2024 Snowflake Computing Inc. All right reserved. 
+ */ +package net.snowflake.client; + +/** Run tests only on specified cloud provider or ignore */ +public class RunningNotOnGCP implements ConditionalIgnoreRule.IgnoreCondition { + public boolean isSatisfied() { + String cloudProvider = TestUtil.systemGetEnv("CLOUD_PROVIDER"); + return cloudProvider != null && !cloudProvider.equalsIgnoreCase("GCP"); + } +} diff --git a/src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java b/src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java index 0e7ab4648..a76f7fdf2 100644 --- a/src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java @@ -47,6 +47,7 @@ import java.util.Properties; import java.util.concurrent.TimeUnit; import net.snowflake.client.ConditionalIgnoreRule; +import net.snowflake.client.RunningNotOnAWS; import net.snowflake.client.RunningOnGithubAction; import net.snowflake.client.TestUtil; import net.snowflake.client.category.TestCategoryConnection; @@ -1169,7 +1170,13 @@ public void testReadOnly() throws Throwable { } } + /** + * Test case for the method testDownloadStreamWithFileNotFoundException. This test verifies that a + * SQLException is thrown when attempting to download a file that does not exist. It verifies that + * the error code is ErrorCode.S3_OPERATION_ERROR so only runs on AWS. 
+ */ @Test + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningNotOnAWS.class) public void testDownloadStreamWithFileNotFoundException() throws SQLException { try (Connection connection = getConnection(); Statement statement = connection.createStatement()) { From 66279cad1dbf232d51640c581540f4df22f527ea Mon Sep 17 00:00:00 2001 From: Dominik Przybysz <132913826+sfc-gh-dprzybysz@users.noreply.github.com> Date: Mon, 24 Jun 2024 11:02:25 +0200 Subject: [PATCH 06/10] SNOW-1446174: Accept 513 next to 403 for OCSP tests (#1801) --- .../client/jdbc/ConnectionWithOCSPModeIT.java | 21 ++++++++++++------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/src/test/java/net/snowflake/client/jdbc/ConnectionWithOCSPModeIT.java b/src/test/java/net/snowflake/client/jdbc/ConnectionWithOCSPModeIT.java index 00978b0d5..04c9c9311 100644 --- a/src/test/java/net/snowflake/client/jdbc/ConnectionWithOCSPModeIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ConnectionWithOCSPModeIT.java @@ -24,6 +24,7 @@ import net.snowflake.client.category.TestCategoryConnection; import net.snowflake.client.core.SFOCSPException; import net.snowflake.client.core.SFTrustManager; +import org.hamcrest.Matcher; import org.junit.After; import org.junit.Before; import org.junit.Ignore; @@ -108,7 +109,7 @@ public void testValidityExpiredOCSPResponseFailOpen() { } catch (SQLException ex) { assertThat(ex, instanceOf(SnowflakeSQLException.class)); assertThat(ex.getErrorCode(), equalTo(NETWORK_ERROR.getMessageCode())); - assertThat(ex.getMessage(), containsString("HTTP status=403")); + assertThat(ex.getMessage(), httpStatus403Or513()); assertNull(ex.getCause()); } } @@ -146,7 +147,7 @@ public void testNoOCSPResponderURLFailOpen() { } catch (SQLException ex) { assertThat(ex, instanceOf(SnowflakeSQLException.class)); assertThat(ex.getErrorCode(), equalTo(NETWORK_ERROR.getMessageCode())); - assertThat(ex.getMessage(), containsString("HTTP status=403")); + assertThat(ex.getMessage(), 
httpStatus403Or513()); assertNull(ex.getCause()); } } @@ -183,7 +184,7 @@ public void testValidityExpiredOCSPResponseInsecure() { } catch (SQLException ex) { assertThat(ex, instanceOf(SnowflakeSQLException.class)); assertThat(ex.getErrorCode(), equalTo(NETWORK_ERROR.getMessageCode())); - assertThat(ex.getMessage(), containsString("HTTP status=403")); + assertThat(ex.getMessage(), httpStatus403Or513()); assertNull(ex.getCause()); } } @@ -198,7 +199,7 @@ public void testCertAttachedInvalidFailOpen() { } catch (SQLException ex) { assertThat(ex, instanceOf(SnowflakeSQLException.class)); assertThat(ex.getErrorCode(), equalTo(NETWORK_ERROR.getMessageCode())); - assertThat(ex.getMessage(), containsString("HTTP status=403")); + assertThat(ex.getMessage(), httpStatus403Or513()); assertNull(ex.getCause()); } } @@ -234,7 +235,7 @@ public void testUnknownOCSPCertFailOpen() { } catch (SQLException ex) { assertThat(ex, instanceOf(SnowflakeSQLException.class)); assertThat(ex.getErrorCode(), equalTo(NETWORK_ERROR.getMessageCode())); - assertThat(ex.getMessage(), containsString("HTTP status=403")); + assertThat(ex.getMessage(), httpStatus403Or513()); assertNull(ex.getCause()); } } @@ -293,7 +294,7 @@ public void testOCSPCacheServerTimeoutFailOpen() { } catch (SQLException ex) { assertThat(ex, instanceOf(SnowflakeSQLException.class)); assertThat(ex.getErrorCode(), equalTo(NETWORK_ERROR.getMessageCode())); - assertThat(ex.getMessage(), containsString("HTTP status=403")); + assertThat(ex.getMessage(), httpStatus403Or513()); assertNull(ex.getCause()); } } @@ -332,7 +333,7 @@ public void testOCSPResponderTimeoutFailOpen() { } catch (SQLException ex) { assertThat(ex, instanceOf(SnowflakeSQLException.class)); assertThat(ex.getErrorCode(), equalTo(NETWORK_ERROR.getMessageCode())); - assertThat(ex.getMessage(), containsString("HTTP status=403")); + assertThat(ex.getMessage(), httpStatus403Or513()); assertNull(ex.getCause()); } } @@ -368,7 +369,7 @@ public void testOCSPResponder403FailOpen() 
{ } catch (SQLException ex) { assertThat(ex, instanceOf(SnowflakeSQLException.class)); assertThat(ex.getErrorCode(), equalTo(NETWORK_ERROR.getMessageCode())); - assertThat(ex.getMessage(), containsString("HTTP status=403")); + assertThat(ex.getMessage(), httpStatus403Or513()); assertNull(ex.getCause()); } } @@ -429,4 +430,8 @@ public void testWrongHost() { instanceOf(SSLHandshakeException.class))); } } + + private static Matcher httpStatus403Or513() { + return anyOf(containsString("HTTP status=403"), containsString("HTTP status=513")); + } } From 6d11e4ffb9536b1e4eee3d50d1904c3d0b642bf4 Mon Sep 17 00:00:00 2001 From: Dominik Przybysz <132913826+sfc-gh-dprzybysz@users.noreply.github.com> Date: Mon, 24 Jun 2024 15:16:46 +0200 Subject: [PATCH 07/10] SNOW-1495591: Support getObject on vector column (#1799) --- .../client/core/SFArrowResultSet.java | 6 ++- .../client/core/SFJsonResultSet.java | 2 + .../core/arrow/VectorTypeConverter.java | 7 ++- .../client/jdbc/ResultSetVectorLatestIT.java | 49 +++++++++++++++++++ 4 files changed, 62 insertions(+), 2 deletions(-) diff --git a/src/main/java/net/snowflake/client/core/SFArrowResultSet.java b/src/main/java/net/snowflake/client/core/SFArrowResultSet.java index 02f16fff1..74e4c41db 100644 --- a/src/main/java/net/snowflake/client/core/SFArrowResultSet.java +++ b/src/main/java/net/snowflake/client/core/SFArrowResultSet.java @@ -37,6 +37,7 @@ import net.snowflake.client.jdbc.SnowflakeResultSetSerializableV1; import net.snowflake.client.jdbc.SnowflakeSQLException; import net.snowflake.client.jdbc.SnowflakeSQLLoggedException; +import net.snowflake.client.jdbc.SnowflakeUtil; import net.snowflake.client.jdbc.telemetry.Telemetry; import net.snowflake.client.jdbc.telemetry.TelemetryData; import net.snowflake.client.jdbc.telemetry.TelemetryField; @@ -559,6 +560,10 @@ public Timestamp getTimestamp(int columnIndex, TimeZone tz) throws SFException { @Override public Object getObject(int columnIndex) throws SFException { + int type = 
resultSetMetaData.getColumnType(columnIndex); + if (type == SnowflakeUtil.EXTRA_TYPES_VECTOR) { + return getString(columnIndex); + } ArrowVectorConverter converter = currentChunkIterator.getCurrentConverter(columnIndex - 1); int index = currentChunkIterator.getCurrentRowInRecordBatch(); wasNull = converter.isNull(index); @@ -566,7 +571,6 @@ public Object getObject(int columnIndex) throws SFException { converter.setUseSessionTimezone(useSessionTimezone); converter.setSessionTimeZone(sessionTimeZone); Object obj = converter.toObject(index); - int type = resultSetMetaData.getColumnType(columnIndex); boolean isStructuredType = resultSetMetaData.isStructuredTypeColumn(columnIndex); if (type == Types.STRUCT && isStructuredType) { if (converter instanceof VarCharConverter) { diff --git a/src/main/java/net/snowflake/client/core/SFJsonResultSet.java b/src/main/java/net/snowflake/client/core/SFJsonResultSet.java index 2232eea00..1011870df 100644 --- a/src/main/java/net/snowflake/client/core/SFJsonResultSet.java +++ b/src/main/java/net/snowflake/client/core/SFJsonResultSet.java @@ -18,6 +18,7 @@ import net.snowflake.client.core.json.Converters; import net.snowflake.client.jdbc.ErrorCode; import net.snowflake.client.jdbc.FieldMetadata; +import net.snowflake.client.jdbc.SnowflakeUtil; import net.snowflake.client.log.SFLogger; import net.snowflake.client.log.SFLoggerFactory; @@ -53,6 +54,7 @@ public Object getObject(int columnIndex) throws SFException { switch (type) { case Types.VARCHAR: case Types.CHAR: + case SnowflakeUtil.EXTRA_TYPES_VECTOR: return getString(columnIndex); case Types.BINARY: diff --git a/src/main/java/net/snowflake/client/core/arrow/VectorTypeConverter.java b/src/main/java/net/snowflake/client/core/arrow/VectorTypeConverter.java index 2e9dbd82d..ae7a492a0 100644 --- a/src/main/java/net/snowflake/client/core/arrow/VectorTypeConverter.java +++ b/src/main/java/net/snowflake/client/core/arrow/VectorTypeConverter.java @@ -1,5 +1,6 @@ package 
net.snowflake.client.core.arrow; +import java.util.List; import net.snowflake.client.core.DataConversionContext; import net.snowflake.client.core.SFException; import net.snowflake.client.jdbc.SnowflakeType; @@ -22,6 +23,10 @@ public Object toObject(int index) throws SFException { @Override public String toString(int index) throws SFException { - return vector.getObject(index).toString(); + List object = vector.getObject(index); + if (object == null) { + return null; + } + return object.toString(); } } diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetVectorLatestIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetVectorLatestIT.java index 5af26db35..bbc145516 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetVectorLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetVectorLatestIT.java @@ -151,10 +151,59 @@ public void testGetFloatVectorFromTable() throws SQLException { } } + /** Added in > 3.16.1 */ + @Test + public void testGetVectorViaGetStringIsEqualToTheGetObject() throws SQLException { + try (Connection con = BaseJDBCTest.getConnection(); + Statement stmt = con.createStatement()) { + enforceQueryResultFormat(stmt); + Integer[] intVector = {-1, 5}; + Float[] floatVector = {-1.2f, 5.1f, 15.87f}; + try (ResultSet resultSet = + stmt.executeQuery( + "select " + + vectorToString(intVector, "int") + + ", " + + vectorToString(floatVector, "float") + + ", " + + nullVectorToString("int") + + ", " + + nullVectorToString("float"))) { + + assertTrue(resultSet.next()); + assertGetObjectAndGetStringBeTheSame(resultSet, "[-1,5]", 1); + String floatArrayRepresentation = + "json".equals(queryResultFormat) + // in json we have slightly different format that we accept in the result + ? 
"[-1.200000,5.100000,15.870000]" + : "[-1.2,5.1,15.87]"; + assertGetObjectAndGetStringBeTheSame(resultSet, floatArrayRepresentation, 2); + assertGetObjectAndGetStringAreNull(resultSet, 3); + assertGetObjectAndGetStringAreNull(resultSet, 4); + } + } + } + + private static void assertGetObjectAndGetStringBeTheSame( + ResultSet resultSet, String intArrayRepresentation, int columnIndex) throws SQLException { + assertEquals(intArrayRepresentation, resultSet.getString(columnIndex)); + assertEquals(intArrayRepresentation, resultSet.getObject(columnIndex)); + } + + private static void assertGetObjectAndGetStringAreNull(ResultSet resultSet, int columnIndex) + throws SQLException { + assertNull(resultSet.getString(columnIndex)); + assertNull(resultSet.getObject(columnIndex)); + } + private String vectorToString(T[] vector, String vectorType) { return Arrays.toString(vector) + "::vector(" + vectorType + ", " + vector.length + ")"; } + private String nullVectorToString(String vectorType) { + return "null::vector(" + vectorType + ", 2)"; + } + private void enforceQueryResultFormat(Statement stmt) throws SQLException { String sql = String.format( From ccee1b1dead6d2cb39a270b1cd5bfea874c2b8cc Mon Sep 17 00:00:00 2001 From: Przemyslaw Motacki Date: Mon, 24 Jun 2024 16:33:46 +0200 Subject: [PATCH 08/10] SNOW-1454054 - Read connection configuration from file. (#1780) * SNOW-1454054 - Read connection configuration from file. 
--- parent-pom.xml | 4 + .../client/config/ConnectionParameters.java | 26 +++ .../config/SFConnectionConfigParser.java | 149 ++++++++++++++++++ .../client/jdbc/SnowflakeDriver.java | 49 +++++- .../client/jdbc/SnowflakeSQLException.java | 4 + .../net/snowflake/client/RunningNotOnWin.java | 9 ++ .../config/SFConnectionConfigParserTest.java | 133 ++++++++++++++++ .../FileConnectionConfigurationLatestIT.java | 52 ++++++ thin_public_pom.xml | 4 + 9 files changed, 426 insertions(+), 4 deletions(-) create mode 100644 src/main/java/net/snowflake/client/config/ConnectionParameters.java create mode 100644 src/main/java/net/snowflake/client/config/SFConnectionConfigParser.java create mode 100644 src/test/java/net/snowflake/client/RunningNotOnWin.java create mode 100644 src/test/java/net/snowflake/client/config/SFConnectionConfigParserTest.java create mode 100644 src/test/java/net/snowflake/client/jdbc/FileConnectionConfigurationLatestIT.java diff --git a/parent-pom.xml b/parent-pom.xml index 1c5ab3c2f..8642fe429 100644 --- a/parent-pom.xml +++ b/parent-pom.xml @@ -528,6 +528,10 @@ com.fasterxml.jackson.core jackson-databind + + com.fasterxml.jackson.dataformat + jackson-dataformat-toml + com.google.api gax diff --git a/src/main/java/net/snowflake/client/config/ConnectionParameters.java b/src/main/java/net/snowflake/client/config/ConnectionParameters.java new file mode 100644 index 000000000..5fa97ac91 --- /dev/null +++ b/src/main/java/net/snowflake/client/config/ConnectionParameters.java @@ -0,0 +1,26 @@ +/* + * Copyright (c) 2012-2024 Snowflake Computing Inc. All rights reserved. 
+ */ +package net.snowflake.client.config; + +import java.util.Properties; +import net.snowflake.client.core.SnowflakeJdbcInternalApi; + +@SnowflakeJdbcInternalApi +public class ConnectionParameters { + private final String url; + private final Properties params; + + public ConnectionParameters(String uri, Properties params) { + this.url = uri; + this.params = params; + } + + public String getUrl() { + return url; + } + + public Properties getParams() { + return params; + } +} diff --git a/src/main/java/net/snowflake/client/config/SFConnectionConfigParser.java b/src/main/java/net/snowflake/client/config/SFConnectionConfigParser.java new file mode 100644 index 000000000..9040fa392 --- /dev/null +++ b/src/main/java/net/snowflake/client/config/SFConnectionConfigParser.java @@ -0,0 +1,149 @@ +package net.snowflake.client.config; + +import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetEnv; + +import com.fasterxml.jackson.dataformat.toml.TomlMapper; +import com.google.common.base.Strings; +import java.io.File; +import java.io.IOException; +import java.nio.charset.Charset; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.attribute.PosixFileAttributeView; +import java.nio.file.attribute.PosixFilePermission; +import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; +import java.util.Optional; +import java.util.Properties; +import net.snowflake.client.core.Constants; +import net.snowflake.client.core.SnowflakeJdbcInternalApi; +import net.snowflake.client.jdbc.SnowflakeSQLException; +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; + +@SnowflakeJdbcInternalApi +public class SFConnectionConfigParser { + + private static final SFLogger logger = SFLoggerFactory.getLogger(SFConnectionConfigParser.class); + private static final TomlMapper mapper = new TomlMapper(); + public static final String SNOWFLAKE_HOME_KEY = "SNOWFLAKE_HOME"; + public static final 
String SNOWFLAKE_DIR = ".snowflake"; + public static final String SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY = + "SNOWFLAKE_DEFAULT_CONNECTION_NAME"; + public static final String DEFAULT = "default"; + public static final String SNOWFLAKE_TOKEN_FILE_PATH = "/snowflake/session/token"; + + private static Map loadDefaultConnectionConfiguration( + String defaultConnectionName) throws SnowflakeSQLException { + String configDirectory = + Optional.ofNullable(systemGetEnv(SNOWFLAKE_HOME_KEY)) + .orElse(Paths.get(System.getProperty("user.home"), SNOWFLAKE_DIR).toString()); + Path configFilePath = Paths.get(configDirectory, "connections.toml"); + + if (Files.exists(configFilePath)) { + logger.debug( + "Reading connection parameters from file using key: {} []", + configFilePath, + defaultConnectionName); + Map parametersMap = readParametersMap(configFilePath); + Map defaultConnectionParametersMap = parametersMap.get(defaultConnectionName); + return defaultConnectionParametersMap; + } else { + logger.debug("Connection configuration file does not exist"); + return new HashMap<>(); + } + } + + private static Map readParametersMap(Path configFilePath) + throws SnowflakeSQLException { + try { + File file = new File(configFilePath.toUri()); + varifyFilePermissionSecure(configFilePath); + return mapper.readValue(file, Map.class); + } catch (IOException ex) { + throw new SnowflakeSQLException(ex, "Problem during reading a configuration file."); + } + } + + private static void varifyFilePermissionSecure(Path configFilePath) + throws IOException, SnowflakeSQLException { + if (Constants.getOS() != Constants.OS.WINDOWS) { + PosixFileAttributeView posixFileAttributeView = + Files.getFileAttributeView(configFilePath, PosixFileAttributeView.class); + if (!posixFileAttributeView.readAttributes().permissions().stream() + .allMatch( + o -> + Arrays.asList(PosixFilePermission.OWNER_WRITE, PosixFilePermission.OWNER_READ) + .contains(o))) { + logger.error( + "Reading from file {} is not safe because 
of insufficient permissions", configFilePath); + throw new SnowflakeSQLException( + String.format( + "Reading from file %s is not safe because of insufficient permissions", + configFilePath)); + } + } + } + + public static ConnectionParameters buildConnectionParameters() throws SnowflakeSQLException { + String defaultConnectionName = + Optional.ofNullable(systemGetEnv(SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY)).orElse(DEFAULT); + Map fileConnectionConfiguration = + loadDefaultConnectionConfiguration(defaultConnectionName); + + if (fileConnectionConfiguration != null && !fileConnectionConfiguration.isEmpty()) { + Properties conectionProperties = new Properties(); + conectionProperties.putAll(fileConnectionConfiguration); + + String url = + Optional.ofNullable(fileConnectionConfiguration.get("account")) + .map(ac -> createUrl(ac, fileConnectionConfiguration)) + .orElse(null); + logger.debug("Url created using parameters from connection configuration file: {}", url); + + if ("oauth".equals(fileConnectionConfiguration.get("authenticator")) + && fileConnectionConfiguration.get("token") == null) { + Path path = + Paths.get( + Optional.ofNullable(fileConnectionConfiguration.get("token_file_path")) + .orElse(SNOWFLAKE_TOKEN_FILE_PATH)); + logger.debug("Token used in connect is read from file: {}", path); + try { + String token = new String(Files.readAllBytes(path), Charset.defaultCharset()); + if (!token.isEmpty()) { + putPropertyIfNotNull(conectionProperties, "token", token.trim()); + } else { + logger.warn("The token has empty value"); + } + } catch (IOException ex) { + throw new SnowflakeSQLException(ex, "There is a problem during reading token from file"); + } + } + return new ConnectionParameters(url, conectionProperties); + } else { + return null; + } + } + + private static String createUrl(String account, Map fileConnectionConfiguration) { + String host = String.format("%s.snowflakecomputing.com", account); + String port = fileConnectionConfiguration.get("port"); + 
String protocol = fileConnectionConfiguration.get("protocol"); + if (Strings.isNullOrEmpty(port)) { + if ("https".equals(protocol)) { + port = "443"; + } else { + port = "80"; + } + } + return String.format("jdbc:snowflake://%s:%s", host, port); + } + + private static void putPropertyIfNotNull(Properties props, Object key, Object value) { + if (key != null && value != null) { + props.put(key, value); + } + } +} diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java index 6baba4a57..73f201ac2 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java @@ -14,7 +14,12 @@ import java.sql.SQLFeatureNotSupportedException; import java.util.List; import java.util.Properties; +import net.snowflake.client.config.ConnectionParameters; +import net.snowflake.client.config.SFConnectionConfigParser; import net.snowflake.client.core.SecurityUtil; +import net.snowflake.client.core.SnowflakeJdbcInternalApi; +import net.snowflake.client.log.SFLogger; +import net.snowflake.client.log.SFLoggerFactory; import net.snowflake.common.core.ResourceBundleManager; import net.snowflake.common.core.SqlState; @@ -26,6 +31,8 @@ * loading */ public class SnowflakeDriver implements Driver { + private static final SFLogger logger = SFLoggerFactory.getLogger(SnowflakeDriver.class); + public static final String AUTO_CONNECTION_STRING_PREFIX = "jdbc:snowflake:auto"; static SnowflakeDriver INSTANCE; public static final Properties EMPTY_PROPERTIES = new Properties(); @@ -200,18 +207,52 @@ public boolean acceptsURL(String url) { */ @Override public Connection connect(String url, Properties info) throws SQLException { - if (url == null) { + ConnectionParameters connectionParameters = + overrideByFileConnectionParametersIfAutoConfiguration(url, info); + + if (connectionParameters.getUrl() == null) { // expected return format per the JDBC spec for 
java.sql.Driver#connect() throw new SnowflakeSQLException("Unable to connect to url of 'null'."); } - if (!SnowflakeConnectString.hasSupportedPrefix(url)) { + if (!SnowflakeConnectString.hasSupportedPrefix(connectionParameters.getUrl())) { return null; // expected return format per the JDBC spec for java.sql.Driver#connect() } - SnowflakeConnectString conStr = SnowflakeConnectString.parse(url, info); + SnowflakeConnectString conStr = + SnowflakeConnectString.parse( + connectionParameters.getUrl(), connectionParameters.getParams()); if (!conStr.isValid()) { throw new SnowflakeSQLException("Connection string is invalid. Unable to parse."); } - return new SnowflakeConnectionV1(url, info); + return new SnowflakeConnectionV1( + connectionParameters.getUrl(), connectionParameters.getParams()); + } + + private static ConnectionParameters overrideByFileConnectionParametersIfAutoConfiguration( + String url, Properties info) throws SnowflakeSQLException { + if (url != null && url.contains(AUTO_CONNECTION_STRING_PREFIX)) { + // Connect using connection configuration file + ConnectionParameters connectionParameters = + SFConnectionConfigParser.buildConnectionParameters(); + if (connectionParameters == null) { + throw new SnowflakeSQLException( + "Unavailable connection configuration parameters expected for auto configuration using file"); + } + return connectionParameters; + } else { + return new ConnectionParameters(url, info); + } + } + + /** + * Connect method using connection configuration file + * + * @return connection + * @throws SQLException if failed to create a snowflake connection + */ + @SnowflakeJdbcInternalApi + public Connection connect() throws SQLException { + logger.debug("Execute internal method connect() without parameters"); + return connect(AUTO_CONNECTION_STRING_PREFIX, null); } @Override diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeSQLException.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeSQLException.java index 
660e83134..a88829ec6 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeSQLException.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeSQLException.java @@ -172,6 +172,10 @@ public SnowflakeSQLException(String reason) { super(reason); } + public SnowflakeSQLException(Throwable ex, String message) { + super(message, ex); + } + public String getQueryId() { return queryId; } diff --git a/src/test/java/net/snowflake/client/RunningNotOnWin.java b/src/test/java/net/snowflake/client/RunningNotOnWin.java new file mode 100644 index 000000000..ce5cdf7d1 --- /dev/null +++ b/src/test/java/net/snowflake/client/RunningNotOnWin.java @@ -0,0 +1,9 @@ +package net.snowflake.client; + +import net.snowflake.client.core.Constants; + +public class RunningNotOnWin implements ConditionalIgnoreRule.IgnoreCondition { + public boolean isSatisfied() { + return Constants.getOS() != Constants.OS.WINDOWS; + } +} diff --git a/src/test/java/net/snowflake/client/config/SFConnectionConfigParserTest.java b/src/test/java/net/snowflake/client/config/SFConnectionConfigParserTest.java new file mode 100644 index 000000000..e68e68fa0 --- /dev/null +++ b/src/test/java/net/snowflake/client/config/SFConnectionConfigParserTest.java @@ -0,0 +1,133 @@ +package net.snowflake.client.config; + +import static net.snowflake.client.config.SFConnectionConfigParser.SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY; +import static net.snowflake.client.config.SFConnectionConfigParser.SNOWFLAKE_HOME_KEY; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertThrows; +import static org.junit.Assume.assumeFalse; + +import com.fasterxml.jackson.dataformat.toml.TomlMapper; +import java.io.File; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.attribute.FileAttribute; +import 
java.nio.file.attribute.PosixFilePermission; +import java.nio.file.attribute.PosixFilePermissions; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; +import net.snowflake.client.RunningNotOnLinuxMac; +import net.snowflake.client.core.Constants; +import net.snowflake.client.jdbc.SnowflakeSQLException; +import net.snowflake.client.jdbc.SnowflakeUtil; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +public class SFConnectionConfigParserTest { + + private Path tempPath = null; + private TomlMapper tomlMapper = new TomlMapper(); + + @Before + public void setUp() throws IOException { + tempPath = Files.createTempDirectory(".snowflake"); + } + + @After + public void close() throws IOException { + SnowflakeUtil.systemUnsetEnv(SNOWFLAKE_HOME_KEY); + SnowflakeUtil.systemUnsetEnv(SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY); + Files.walk(tempPath).map(Path::toFile).forEach(File::delete); + Files.delete(tempPath); + } + + @Test + public void testLoadSFConnectionConfigWrongConfigurationName() + throws SnowflakeSQLException, IOException { + SnowflakeUtil.systemSetEnv(SNOWFLAKE_HOME_KEY, tempPath.toString()); + SnowflakeUtil.systemSetEnv(SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY, "unknown"); + prepareConnectionConfigurationTomlFile(null, true); + ConnectionParameters connectionParameters = + SFConnectionConfigParser.buildConnectionParameters(); + assertNull(connectionParameters); + } + + @Test + public void testLoadSFConnectionConfigInValidPath() throws SnowflakeSQLException, IOException { + SnowflakeUtil.systemSetEnv(SNOWFLAKE_HOME_KEY, Paths.get("unknownPath").toString()); + prepareConnectionConfigurationTomlFile(null, true); + assertNull(SFConnectionConfigParser.buildConnectionParameters()); + } + + @Test + public void testLoadSFConnectionConfigWithTokenFromFile() + throws SnowflakeSQLException, IOException { + SnowflakeUtil.systemSetEnv(SNOWFLAKE_HOME_KEY, tempPath.toString()); + 
SnowflakeUtil.systemSetEnv(SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY, "default"); + File tokenFile = new File(Paths.get(tempPath.toString(), "token").toUri()); + prepareConnectionConfigurationTomlFile( + Collections.singletonMap("token_file_path", tokenFile.toString()), true); + + ConnectionParameters data = SFConnectionConfigParser.buildConnectionParameters(); + assertNotNull(data); + assertEquals(tokenFile.toString(), data.getParams().get("token_file_path")); + assertEquals("testToken", data.getParams().get("token")); + } + + @Test + public void testThrowErrorWhenWrongPermissionsForTokenFile() throws IOException { + SnowflakeUtil.systemSetEnv(SNOWFLAKE_HOME_KEY, tempPath.toString()); + File tokenFile = new File(Paths.get(tempPath.toString(), "token").toUri()); + prepareConnectionConfigurationTomlFile( + Collections.singletonMap("token_file_path", tokenFile.toString()), false); + assumeFalse(RunningNotOnLinuxMac.isNotRunningOnLinuxMac()); + assertThrows( + SnowflakeSQLException.class, () -> SFConnectionConfigParser.buildConnectionParameters()); + } + + private void prepareConnectionConfigurationTomlFile( + Map moreParameters, boolean onlyUserPermission) throws IOException { + Path path = Paths.get(tempPath.toString(), "connections.toml"); + Path filePath = createFilePathWithPermission(path, onlyUserPermission); + File file = filePath.toFile(); + + Map configuration = new HashMap(); + Map configurationParams = new HashMap(); + configurationParams.put("account", "snowaccount.us-west-2.aws"); + configurationParams.put("user", "user1"); + configurationParams.put("token", "testToken"); + configurationParams.put("port", "443"); + + if (moreParameters != null) { + moreParameters.forEach((k, v) -> configurationParams.put(k, v)); + } + configuration.put("default", configurationParams); + tomlMapper.writeValue(file, configuration); + + if (configurationParams.containsKey("token_file_path")) { + Path tokenFilePath = + createFilePathWithPermission( + 
Paths.get(configurationParams.get("token_file_path").toString()), onlyUserPermission); + Files.write(tokenFilePath, "token_from_file".getBytes()); + } + } + + private Path createFilePathWithPermission(Path path, boolean onlyUserPermission) + throws IOException { + if (Constants.getOS() != Constants.OS.WINDOWS) { + FileAttribute> fileAttribute = + onlyUserPermission + ? PosixFilePermissions.asFileAttribute(PosixFilePermissions.fromString("rw-------")) + : PosixFilePermissions.asFileAttribute(PosixFilePermissions.fromString("rwxrw----")); + return Files.createFile(path, fileAttribute); + } else { + return Files.createFile(path); + } + } +} diff --git a/src/test/java/net/snowflake/client/jdbc/FileConnectionConfigurationLatestIT.java b/src/test/java/net/snowflake/client/jdbc/FileConnectionConfigurationLatestIT.java new file mode 100644 index 000000000..734446c92 --- /dev/null +++ b/src/test/java/net/snowflake/client/jdbc/FileConnectionConfigurationLatestIT.java @@ -0,0 +1,52 @@ +/* + * Copyright (c) 2012-2020 Snowflake Computing Inc. All right reserved. 
+ */ +package net.snowflake.client.jdbc; + +import static net.snowflake.client.config.SFConnectionConfigParser.SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY; + +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import org.junit.After; +import org.junit.Assert; +import org.junit.Ignore; +import org.junit.Test; + +/** This test could be run only on environment where file connection.toml is configured */ +@Ignore +public class FileConnectionConfigurationLatestIT { + + @After + public void cleanUp() { + SnowflakeUtil.systemUnsetEnv(SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY); + } + + @Test + public void testThrowExceptionIfConfigurationDoesNotExist() { + SnowflakeUtil.systemSetEnv("SNOWFLAKE_DEFAULT_CONNECTION_NAME", "non-existent"); + Assert.assertThrows(SnowflakeSQLException.class, () -> SnowflakeDriver.INSTANCE.connect()); + } + + @Test + public void testSimpleConnectionUsingFileConfigurationToken() throws SQLException { + verifyConnectionToSnowflake("aws-oauth"); + } + + @Test + public void testSimpleConnectionUsingFileConfigurationTokenFromFile() throws SQLException { + verifyConnectionToSnowflake("aws-oauth-file"); + } + + private static void verifyConnectionToSnowflake(String connectionName) throws SQLException { + SnowflakeUtil.systemSetEnv(SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY, connectionName); + try (Connection con = + DriverManager.getConnection(SnowflakeDriver.AUTO_CONNECTION_STRING_PREFIX, null); + Statement statement = con.createStatement(); + ResultSet resultSet = statement.executeQuery("show parameters")) { + Assert.assertTrue(resultSet.next()); + } + } +} diff --git a/thin_public_pom.xml b/thin_public_pom.xml index 239e31e34..e15a4e3c4 100644 --- a/thin_public_pom.xml +++ b/thin_public_pom.xml @@ -140,6 +140,10 @@ com.fasterxml.jackson.core jackson-databind + + com.fasterxml.jackson.dataformat + jackson-dataformat-toml + com.google.api gax From
a4db3096c3282eb0c8aa7b86229d063a4f9ae694 Mon Sep 17 00:00:00 2001 From: Piotr Bulawa Date: Tue, 25 Jun 2024 09:57:34 +0200 Subject: [PATCH 09/10] SNOW-1490931: Preparation for Java 21 (#1796) --- .github/workflows/build-test.yml | 6 +++--- Jenkinsfile | 2 +- ci/_init.sh | 5 ++++- ci/image/Dockerfile.jdbc-centos7-openjdk-test | 20 ++++++++++++++++++- parent-pom.xml | 7 +++++++ 5 files changed, 34 insertions(+), 6 deletions(-) diff --git a/.github/workflows/build-test.yml b/.github/workflows/build-test.yml index ad3d196af..90b03180f 100644 --- a/.github/workflows/build-test.yml +++ b/.github/workflows/build-test.yml @@ -43,7 +43,7 @@ jobs: strategy: fail-fast: false matrix: - runConfig: [ {cloud: 'AWS', javaVersion: '8'}, {cloud: 'GCP', javaVersion: '11'}, {cloud: 'AZURE', javaVersion: '17'}] + runConfig: [ {cloud: 'AWS', javaVersion: '8'}, {cloud: 'GCP', javaVersion: '11'}, {cloud: 'AZURE', javaVersion: '17'}, {cloud: 'AWS', javaVersion: '21'}] category: ['TestCategoryResultSet,TestCategoryOthers,TestCategoryLoader', 'TestCategoryConnection,TestCategoryStatement', 'TestCategoryArrow,TestCategoryCore', 'TestCategoryFips'] additionalMavenProfile: [''] steps: @@ -73,7 +73,7 @@ jobs: strategy: fail-fast: false matrix: - runConfig: [ {cloud: 'AWS', javaVersion: '8'}, {cloud: 'GCP', javaVersion: '11'}, {cloud: 'AZURE', javaVersion: '17'}] + runConfig: [ {cloud: 'AWS', javaVersion: '8'}, {cloud: 'GCP', javaVersion: '11'}, {cloud: 'AZURE', javaVersion: '17'}, {cloud: 'AWS', javaVersion: '21'}] category: ['TestCategoryResultSet,TestCategoryOthers,TestCategoryLoader', 'TestCategoryConnection,TestCategoryStatement', 'TestCategoryArrow,TestCategoryCore', 'TestCategoryFips'] additionalMavenProfile: [''] steps: @@ -105,7 +105,7 @@ jobs: strategy: fail-fast: false matrix: - image: [ 'jdbc-centos7-openjdk8', 'jdbc-centos7-openjdk11', 'jdbc-centos7-openjdk17' ] + image: [ 'jdbc-centos7-openjdk8', 'jdbc-centos7-openjdk11', 'jdbc-centos7-openjdk17', 'jdbc-centos7-openjdk21' ] cloud: [ 
'AWS', 'AZURE', 'GCP' ] category: ['TestCategoryResultSet,TestCategoryOthers,TestCategoryLoader', 'TestCategoryConnection,TestCategoryStatement', 'TestCategoryArrow,TestCategoryCore', 'TestCategoryFips'] additionalMavenProfile: ['', '-Dthin-jar'] diff --git a/Jenkinsfile b/Jenkinsfile index 5e62aab1b..8e5925b8c 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -56,7 +56,7 @@ timestamps { e.printStackTrace() } - jdkToParams = ['openjdk8': 'jdbc-centos7-openjdk8', 'openjdk11': 'jdbc-centos7-openjdk11', 'openjdk17': 'jdbc-centos7-openjdk17'].collectEntries { jdk, image -> + jdkToParams = ['openjdk8': 'jdbc-centos7-openjdk8', 'openjdk11': 'jdbc-centos7-openjdk11', 'openjdk17': 'jdbc-centos7-openjdk17', 'openjdk21': 'jdbc-centos7-openjdk21'].collectEntries { jdk, image -> return [(jdk): [ string(name: 'client_git_branch', value: scmInfo.GIT_BRANCH), string(name: 'client_git_commit', value: scmInfo.GIT_COMMIT), diff --git a/ci/_init.sh b/ci/_init.sh index c91f03c31..5df299949 100755 --- a/ci/_init.sh +++ b/ci/_init.sh @@ -1,4 +1,4 @@ -#!/usr/bin/env bash +#!/usr/bin/env bash set -e export PLATFORM=$(echo $(uname) | tr '[:upper:]' '[:lower:]') @@ -23,6 +23,7 @@ declare -A TEST_IMAGE_NAMES=( [$DRIVER_NAME-centos7-openjdk8]=$DOCKER_REGISTRY_NAME/client-$DRIVER_NAME-centos7-openjdk8-test:$TEST_IMAGE_VERSION [$DRIVER_NAME-centos7-openjdk11]=$DOCKER_REGISTRY_NAME/client-$DRIVER_NAME-centos7-openjdk11-test:$TEST_IMAGE_VERSION [$DRIVER_NAME-centos7-openjdk17]=$DOCKER_REGISTRY_NAME/client-$DRIVER_NAME-centos7-openjdk17-test:$TEST_IMAGE_VERSION + [$DRIVER_NAME-centos7-openjdk21]=$DOCKER_REGISTRY_NAME/client-$DRIVER_NAME-centos7-openjdk21-test:$TEST_IMAGE_VERSION ) export TEST_IMAGE_NAMES @@ -30,11 +31,13 @@ declare -A TEST_IMAGE_DOCKERFILES=( [$DRIVER_NAME-centos7-openjdk8]=jdbc-centos7-openjdk-test [$DRIVER_NAME-centos7-openjdk11]=jdbc-centos7-openjdk-test [$DRIVER_NAME-centos7-openjdk17]=jdbc-centos7-openjdk-test + [$DRIVER_NAME-centos7-openjdk21]=jdbc-centos7-openjdk-test )
declare -A TEST_IMAGE_BUILD_ARGS=( [$DRIVER_NAME-centos7-openjdk8]="--target jdbc-centos7-openjdk-yum --build-arg=JDK_PACKAGE=java-1.8.0-openjdk-devel" [$DRIVER_NAME-centos7-openjdk11]="--target jdbc-centos7-openjdk-yum --build-arg=JDK_PACKAGE=java-11-openjdk-devel" # pragma: allowlist secret [$DRIVER_NAME-centos7-openjdk17]="--target jdbc-centos7-openjdk17" + [$DRIVER_NAME-centos7-openjdk21]="--target jdbc-centos7-openjdk21" ) diff --git a/ci/image/Dockerfile.jdbc-centos7-openjdk-test b/ci/image/Dockerfile.jdbc-centos7-openjdk-test index 15e351530..e6adfb975 100644 --- a/ci/image/Dockerfile.jdbc-centos7-openjdk-test +++ b/ci/image/Dockerfile.jdbc-centos7-openjdk-test @@ -88,4 +88,22 @@ RUN export JAVA_HOME=/opt/jdk-17 && \ -Dnot-self-contained-jar \ --batch-mode --fail-never compile && \ mv $HOME/.m2 /home/user && \ - chmod -R 777 /home/user/.m2 \ No newline at end of file + chmod -R 777 /home/user/.m2 + +###### OpenJDK 21 from archive (not available in yum) +FROM jdbc-centos7-openjdk-base AS jdbc-centos7-openjdk21 + +# Java +RUN curl -o - https://download.java.net/java/GA/jdk21.0.2/f2283984656d49d69e91c558476027ac/13/GPL/openjdk-21.0.2_linux-x64_bin.tar.gz | tar xfz - -C /opt && \ + ln -s /opt/jdk-21.0.2 /opt/jdk-21 + +RUN sed -i /usr/local/bin/entrypoint.sh -e '/^exec/i export JAVA_HOME=/opt/jdk-21' +RUN sed -i /usr/local/bin/entrypoint.sh -e '/^exec/i export PATH=$JAVA_HOME/bin:$PATH' + +RUN export JAVA_HOME=/opt/jdk-21 && \ + cd /root && \ + mvn -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn \ + -Dnot-self-contained-jar \ + --batch-mode --fail-never compile && \ + mv $HOME/.m2 /home/user && \ + chmod -R 777 /home/user/.m2 diff --git a/parent-pom.xml b/parent-pom.xml index 8642fe429..db166fa9d 100644 --- a/parent-pom.xml +++ b/parent-pom.xml @@ -28,6 +28,7 @@ 1.74 1.0.2.4 1.0.5 + 1.14.17 1.1 3.33.0 1.2 @@ -482,6 +483,12 @@ ${mockito.version} test + + net.bytebuddy + byte-buddy + ${bytebuddy.version} + test + 
org.awaitility awaitility From d4504f8b7780a2504712b21569bb29ccaf755073 Mon Sep 17 00:00:00 2001 From: Dawid Heyman Date: Fri, 28 Jun 2024 13:30:37 +0200 Subject: [PATCH 10/10] SNOW-1163203: Increased Max LOB size in metadata (#1806) --- .../client/core/ObjectMapperFactory.java | 5 +- .../jdbc/SnowflakeDatabaseMetaData.java | 22 +- .../client/jdbc/SnowflakeStatementV1.java | 5 +- .../client/jdbc/DatabaseMetaDataIT.java | 8 +- .../jdbc/DatabaseMetaDataInternalIT.java | 9 +- .../DatabaseMetaDataInternalLatestIT.java | 8 +- .../client/jdbc/DatabaseMetaDataLatestIT.java | 20 +- .../client/jdbc/LobSizeLatestIT.java | 253 +++++++++++++++++ .../client/jdbc/MaxLobSizeLatestIT.java | 262 ++---------------- .../client/jdbc/ResultSetLatestIT.java | 9 +- 10 files changed, 336 insertions(+), 265 deletions(-) create mode 100644 src/test/java/net/snowflake/client/jdbc/LobSizeLatestIT.java diff --git a/src/main/java/net/snowflake/client/core/ObjectMapperFactory.java b/src/main/java/net/snowflake/client/core/ObjectMapperFactory.java index e7b1056ba..0f9a7b01f 100644 --- a/src/main/java/net/snowflake/client/core/ObjectMapperFactory.java +++ b/src/main/java/net/snowflake/client/core/ObjectMapperFactory.java @@ -11,8 +11,9 @@ */ public class ObjectMapperFactory { @SnowflakeJdbcInternalApi - // Snowflake allows up to 16M string size and returns base64 encoded value that makes it up to 23M - public static final int DEFAULT_MAX_JSON_STRING_LEN = 23_000_000; + // Snowflake allows up to 128M (after updating Max LOB size) string size and returns base64 + // encoded value that makes it up to 180M + public static final int DEFAULT_MAX_JSON_STRING_LEN = 180_000_000; @SnowflakeJdbcInternalApi public static final String MAX_JSON_STRING_LENGTH_JVM = diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeDatabaseMetaData.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeDatabaseMetaData.java index b50646ea7..acfb3e4f7 100644 --- 
a/src/main/java/net/snowflake/client/jdbc/SnowflakeDatabaseMetaData.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeDatabaseMetaData.java @@ -35,6 +35,7 @@ import java.util.Collections; import java.util.HashSet; import java.util.List; +import java.util.Optional; import java.util.Set; import java.util.regex.Pattern; import net.snowflake.client.core.ObjectMapperFactory; @@ -140,6 +141,12 @@ public class SnowflakeDatabaseMetaData implements DatabaseMetaData { "VECTOR", "VIEW"); + private static final String MAX_VARCHAR_BINARY_SIZE_PARAM_NAME = + "VARCHAR_AND_BINARY_MAX_SIZE_IN_RESULT"; + + // Defaults to 16MB + private static final int DEFAULT_MAX_LOB_SIZE = 16777216; + private final Connection connection; private final SFBaseSession session; @@ -911,14 +918,17 @@ public boolean supportsOpenStatementsAcrossRollback() throws SQLException { public int getMaxBinaryLiteralLength() throws SQLException { logger.trace("int getMaxBinaryLiteralLength()", false); raiseSQLExceptionIfConnectionIsClosed(); - return 8388608; + return getMaxCharLiteralLength() / 2; // hex instead of octal, thus divided by 2 } @Override public int getMaxCharLiteralLength() throws SQLException { logger.trace("int getMaxCharLiteralLength()", false); raiseSQLExceptionIfConnectionIsClosed(); - return 16777216; + Optional maxLiteralLengthFromSession = + Optional.ofNullable( + (Integer) session.getOtherParameter(MAX_VARCHAR_BINARY_SIZE_PARAM_NAME)); + return maxLiteralLengthFromSession.orElse(DEFAULT_MAX_LOB_SIZE); } @Override @@ -1348,9 +1358,9 @@ else if (i == 0) { typeName.substring(typeName.indexOf('(') + 1, typeName.indexOf(')'))); nextRow[16] = char_octet_len; } else if (type == Types.CHAR || type == Types.VARCHAR) { - nextRow[16] = 16777216; + nextRow[16] = getMaxCharLiteralLength(); } else if (type == Types.BINARY || type == Types.VARBINARY) { - nextRow[16] = 8388608; + nextRow[16] = getMaxBinaryLiteralLength(); } } else { nextRow[16] = null; @@ -3570,9 +3580,9 @@ public ResultSet 
getFunctionColumns( typeName.substring(typeName.indexOf('(') + 1, typeName.indexOf(')'))); nextRow[13] = char_octet_len; } else if (type == Types.CHAR || type == Types.VARCHAR) { - nextRow[13] = 16777216; + nextRow[13] = getMaxCharLiteralLength(); } else if (type == Types.BINARY || type == Types.VARBINARY) { - nextRow[13] = 8388608; + nextRow[13] = getMaxBinaryLiteralLength(); } } else { nextRow[13] = null; diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeStatementV1.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeStatementV1.java index 3d8b8c464..5016c175b 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeStatementV1.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeStatementV1.java @@ -68,9 +68,6 @@ class SnowflakeStatementV1 implements Statement, SnowflakeStatement { // timeout in seconds private int queryTimeout = 0; - // max field size limited to 16MB - private final int maxFieldSize = 16777216; - SFBaseStatement sfBaseStatement; private boolean poolable; @@ -640,7 +637,7 @@ public ResultSet getGeneratedKeys() throws SQLException { public int getMaxFieldSize() throws SQLException { logger.trace("getMaxFieldSize()", false); raiseSQLExceptionIfStatementIsClosed(); - return maxFieldSize; + return connection.getMetaData().getMaxCharLiteralLength(); } @Override diff --git a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataIT.java b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataIT.java index 0a52b3df1..2ea144f3c 100644 --- a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataIT.java +++ b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataIT.java @@ -59,6 +59,10 @@ public class DatabaseMetaDataIT extends BaseJDBCTest { + " $$\n" + " ;"; + public static final int EXPECTED_MAX_CHAR_LENGTH = 16777216; + + public static final int EXPECTED_MAX_BINARY_LENGTH = 8388608; + @Test public void testGetConnection() throws SQLException { try (Connection connection = getConnection()) { @@ -698,9 +702,9 @@ public 
void testDatabaseMetadata() throws SQLException { assertEquals("$", metaData.getExtraNameCharacters()); assertEquals("\"", metaData.getIdentifierQuoteString()); assertEquals(0, getSizeOfResultSet(metaData.getIndexInfo(null, null, null, true, true))); - assertEquals(8388608, metaData.getMaxBinaryLiteralLength()); + assertEquals(EXPECTED_MAX_BINARY_LENGTH, metaData.getMaxBinaryLiteralLength()); assertEquals(255, metaData.getMaxCatalogNameLength()); - assertEquals(16777216, metaData.getMaxCharLiteralLength()); + assertEquals(EXPECTED_MAX_CHAR_LENGTH, metaData.getMaxCharLiteralLength()); assertEquals(255, metaData.getMaxColumnNameLength()); assertEquals(0, metaData.getMaxColumnsInGroupBy()); assertEquals(0, metaData.getMaxColumnsInIndex()); diff --git a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataInternalIT.java b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataInternalIT.java index d0eed3c5f..ec590b066 100644 --- a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataInternalIT.java +++ b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataInternalIT.java @@ -3,6 +3,8 @@ */ package net.snowflake.client.jdbc; +import static net.snowflake.client.jdbc.DatabaseMetaDataIT.EXPECTED_MAX_BINARY_LENGTH; +import static net.snowflake.client.jdbc.DatabaseMetaDataIT.verifyResultSetMetaDataColumns; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; @@ -59,7 +61,7 @@ static void initMetaData(Connection con) throws SQLException { st.execute("create or replace database JDBC_DB2"); st.execute("create or replace schema JDBC_SCHEMA21"); st.execute("create or replace table JDBC_TBL211(colA string)"); - st.execute("create or replace table JDBC_BIN(bin1 binary, bin2 binary(100))"); + st.execute("create or replace table JDBC_BIN(bin1 binary(8388608), bin2 binary(100))"); // st.execute("create or replace table JDBC_TBL211(colA string(25) NOT NULL DEFAULT // 'defstring')"); @@ -111,7 
+113,7 @@ public void testGetColumn() throws SQLException { resultSet = databaseMetaData.getColumns(null, "JDBC_SCHEMA21", "JDBC_BIN", "BIN1"); resultSet.next(); - assertEquals(8388608, resultSet.getInt("COLUMN_SIZE")); + assertEquals(EXPECTED_MAX_BINARY_LENGTH, resultSet.getInt("COLUMN_SIZE")); assertEquals(1, getSizeOfResultSet(resultSet) + 1); resultSet = databaseMetaData.getColumns(null, "JDBC_SCHEMA21", "JDBC_BIN", "BIN2"); @@ -187,8 +189,7 @@ public void testGetFunctions() throws SQLException { // test each column return the right value resultSet = databaseMetaData.getFunctions("JDBC_DB1", "JDBC_SCHEMA11", "JDBCFUNCTEST111"); - DatabaseMetaDataIT.verifyResultSetMetaDataColumns( - resultSet, DBMetadataResultSetMetadata.GET_FUNCTIONS); + verifyResultSetMetaDataColumns(resultSet, DBMetadataResultSetMetadata.GET_FUNCTIONS); resultSet.next(); assertEquals("JDBC_DB1", resultSet.getString("FUNCTION_CAT")); assertEquals("JDBC_SCHEMA11", resultSet.getString("FUNCTION_SCHEM")); diff --git a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataInternalLatestIT.java b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataInternalLatestIT.java index 97e67683a..15701ca17 100644 --- a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataInternalLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataInternalLatestIT.java @@ -91,7 +91,7 @@ public void testGetFunctionColumns() throws SQLException { + "sharedCol decimal)"); statement.execute( "create or replace function JDBC_DB1.JDBC_SCHEMA11.FUNC112 " - + "() RETURNS TABLE(colA string, colB decimal, bin2 binary, sharedCol decimal) COMMENT= 'returns " + + "() RETURNS TABLE(colA string(16777216), colB decimal, bin2 binary(8388608), sharedCol decimal) COMMENT= 'returns " + "table of 4 columns'" + " as 'select JDBC_DB1.JDBC_SCHEMA11.JDBC_TBL111.colA, JDBC_DB1.JDBC_SCHEMA11.JDBC_TBL111.colB, " + "JDBC_DB1.JDBC_SCHEMA11.BIN_TABLE.bin2, JDBC_DB1.JDBC_SCHEMA11.BIN_TABLE.sharedCol from JDBC_DB1" @@ -173,7 
+173,8 @@ public void testGetFunctionColumns() throws SQLException { assertEquals(10, resultSet.getInt("RADIX")); assertEquals(DatabaseMetaData.functionNullableUnknown, resultSet.getInt("NULLABLE")); assertEquals("returns table of 4 columns", resultSet.getString("REMARKS")); - assertEquals(16777216, resultSet.getInt("CHAR_OCTET_LENGTH")); + assertEquals( + databaseMetaData.getMaxCharLiteralLength(), resultSet.getInt("CHAR_OCTET_LENGTH")); assertEquals(1, resultSet.getInt("ORDINAL_POSITION")); assertEquals("", resultSet.getString("IS_NULLABLE")); assertEquals( @@ -213,7 +214,8 @@ public void testGetFunctionColumns() throws SQLException { assertEquals(10, resultSet.getInt("RADIX")); assertEquals(DatabaseMetaData.functionNullableUnknown, resultSet.getInt("NULLABLE")); assertEquals("returns table of 4 columns", resultSet.getString("REMARKS")); - assertEquals(8388608, resultSet.getInt("CHAR_OCTET_LENGTH")); + assertEquals( + databaseMetaData.getMaxBinaryLiteralLength(), resultSet.getInt("CHAR_OCTET_LENGTH")); assertEquals(3, resultSet.getInt("ORDINAL_POSITION")); assertEquals("", resultSet.getString("IS_NULLABLE")); assertEquals( diff --git a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataLatestIT.java b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataLatestIT.java index 24d3940d7..bebe3d8f4 100644 --- a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataLatestIT.java @@ -3,6 +3,8 @@ */ package net.snowflake.client.jdbc; +import static net.snowflake.client.jdbc.DatabaseMetaDataIT.EXPECTED_MAX_BINARY_LENGTH; +import static net.snowflake.client.jdbc.DatabaseMetaDataIT.EXPECTED_MAX_CHAR_LENGTH; import static net.snowflake.client.jdbc.DatabaseMetaDataIT.verifyResultSetMetaDataColumns; import static net.snowflake.client.jdbc.SnowflakeDatabaseMetaData.NumericFunctionsSupported; import static net.snowflake.client.jdbc.SnowflakeDatabaseMetaData.StringFunctionsSupported; @@ -772,8 
+774,8 @@ public void testGetFunctionColumns() throws Exception { "create or replace table JDBC_TBL111(colA string, colB decimal, colC " + "timestamp)"); /* Create a UDF that returns a table made up of 4 columns from 2 different tables, joined together */ statement.execute( - "create or replace function FUNC112 () RETURNS TABLE(colA string, colB decimal, bin2" - + " binary, sharedCol decimal) COMMENT= 'returns table of 4 columns' as 'select" + "create or replace function FUNC112 () RETURNS TABLE(colA string(16777216), colB decimal, bin2 " + + "binary(8388608) , sharedCol decimal) COMMENT= 'returns table of 4 columns' as 'select" + " JDBC_TBL111.colA, JDBC_TBL111.colB, BIN_TABLE.bin2, BIN_TABLE.sharedCol from" + " JDBC_TBL111 inner join BIN_TABLE on JDBC_TBL111.colB =BIN_TABLE.sharedCol'"); DatabaseMetaData metaData = connection.getMetaData(); @@ -877,7 +879,7 @@ public void testGetFunctionColumns() throws Exception { assertEquals(DatabaseMetaData.functionNullableUnknown, resultSet.getInt("NULLABLE")); assertEquals("returns table of 4 columns", resultSet.getString("REMARKS")); // char octet length column is not supported and always returns 0 - assertEquals(16777216, resultSet.getInt("CHAR_OCTET_LENGTH")); + assertEquals(EXPECTED_MAX_CHAR_LENGTH, resultSet.getInt("CHAR_OCTET_LENGTH")); assertEquals(1, resultSet.getInt("ORDINAL_POSITION")); // is_nullable column is not supported and always returns empty string assertEquals("", resultSet.getString("IS_NULLABLE")); @@ -927,7 +929,7 @@ public void testGetFunctionColumns() throws Exception { assertEquals(DatabaseMetaData.functionNullableUnknown, resultSet.getInt("NULLABLE")); assertEquals("returns table of 4 columns", resultSet.getString("REMARKS")); // char octet length column is not supported and always returns 0 - assertEquals(8388608, resultSet.getInt("CHAR_OCTET_LENGTH")); + assertEquals(EXPECTED_MAX_BINARY_LENGTH, resultSet.getInt("CHAR_OCTET_LENGTH")); assertEquals(3, resultSet.getInt("ORDINAL_POSITION")); // 
is_nullable column is not supported and always returns empty string assertEquals("", resultSet.getString("IS_NULLABLE")); @@ -1222,8 +1224,8 @@ public void testGetColumns() throws Throwable { statement.execute( "create or replace table " + targetTable - + "(C1 int, C2 varchar(100), C3 string default '', C4 number(18,4), C5 double," - + " C6 boolean, C7 date not null, C8 time, C9 timestamp_ntz(7), C10 binary,C11" + + "(C1 int, C2 varchar(100), C3 string(16777216) default '', C4 number(18,4), C5 double," + + " C6 boolean, C7 date not null, C8 time, C9 timestamp_ntz(7), C10 binary(8388608),C11" + " variant, C12 timestamp_ltz(8), C13 timestamp_tz(3))"); DatabaseMetaData metaData = connection.getMetaData(); @@ -1290,14 +1292,14 @@ public void testGetColumns() throws Throwable { assertEquals("C3", resultSet.getString("COLUMN_NAME")); assertEquals(Types.VARCHAR, resultSet.getInt("DATA_TYPE")); assertEquals("VARCHAR", resultSet.getString("TYPE_NAME")); - assertEquals(16777216, resultSet.getInt("COLUMN_SIZE")); + assertEquals(EXPECTED_MAX_CHAR_LENGTH, resultSet.getInt("COLUMN_SIZE")); assertEquals(0, resultSet.getInt("DECIMAL_DIGITS")); assertEquals(0, resultSet.getInt("NUM_PREC_RADIX")); assertEquals(ResultSetMetaData.columnNullable, resultSet.getInt("NULLABLE")); assertEquals("", resultSet.getString("REMARKS")); assertEquals("", resultSet.getString("COLUMN_DEF")); - assertEquals(16777216, resultSet.getInt("CHAR_OCTET_LENGTH")); + assertEquals(EXPECTED_MAX_CHAR_LENGTH, resultSet.getInt("CHAR_OCTET_LENGTH")); assertEquals(3, resultSet.getInt("ORDINAL_POSITION")); assertEquals("YES", resultSet.getString("IS_NULLABLE")); assertNull(resultSet.getString("SCOPE_CATALOG")); @@ -1465,7 +1467,7 @@ public void testGetColumns() throws Throwable { assertEquals("C10", resultSet.getString("COLUMN_NAME")); assertEquals(Types.BINARY, resultSet.getInt("DATA_TYPE")); assertEquals("BINARY", resultSet.getString("TYPE_NAME")); - assertEquals(8388608, resultSet.getInt("COLUMN_SIZE")); + 
assertEquals(EXPECTED_MAX_BINARY_LENGTH, resultSet.getInt("COLUMN_SIZE")); assertEquals(0, resultSet.getInt("DECIMAL_DIGITS")); assertEquals(0, resultSet.getInt("NUM_PREC_RADIX")); assertEquals(ResultSetMetaData.columnNullable, resultSet.getInt("NULLABLE")); diff --git a/src/test/java/net/snowflake/client/jdbc/LobSizeLatestIT.java b/src/test/java/net/snowflake/client/jdbc/LobSizeLatestIT.java new file mode 100644 index 000000000..33ab5e772 --- /dev/null +++ b/src/test/java/net/snowflake/client/jdbc/LobSizeLatestIT.java @@ -0,0 +1,253 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All right reserved. + */ +package net.snowflake.client.jdbc; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import java.io.File; +import java.io.IOException; +import java.io.PrintWriter; +import java.nio.file.Files; +import java.nio.file.Path; +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import net.snowflake.client.category.TestCategoryStatement; +import net.snowflake.client.core.ObjectMapperFactory; +import net.snowflake.client.core.UUIDUtils; +import org.apache.commons.text.RandomStringGenerator; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +@RunWith(Parameterized.class) +@Category(TestCategoryStatement.class) +public class LobSizeLatestIT extends BaseJDBCTest { + + // Max LOB size is testable from version 3.15.0 and above. 
+ private static int maxLobSize = 16 * 1024 * 1024; + private static int largeLobSize = maxLobSize / 2; + private static int mediumLobSize = largeLobSize / 2; + private static int originLobSize = mediumLobSize / 2; + private static int smallLobSize = 16; + + private static Map LobSizeStringValues = + new HashMap() { + { + put(smallLobSize, generateRandomString(smallLobSize)); + put(originLobSize, generateRandomString(originLobSize)); + put(mediumLobSize, generateRandomString(mediumLobSize)); + put(largeLobSize, generateRandomString(largeLobSize)); + put(maxLobSize, generateRandomString(maxLobSize)); + } + }; + + @BeforeClass + public static void setUp() { + System.setProperty( + // the max json string should be ~1.33 for Arrow response so let's use 1.5 to be sure + ObjectMapperFactory.MAX_JSON_STRING_LENGTH_JVM, Integer.toString((int) (maxLobSize * 1.5))); + } + + @Parameterized.Parameters(name = "lobSize={0}, resultFormat={1}") + public static Collection data() { + int[] lobSizes = + new int[] {smallLobSize, originLobSize, mediumLobSize, largeLobSize, maxLobSize}; + String[] resultFormats = new String[] {"Arrow", "JSON"}; + List ret = new ArrayList<>(); + for (int i = 0; i < lobSizes.length; i++) { + for (int j = 0; j < resultFormats.length; j++) { + ret.add(new Object[] {lobSizes[i], resultFormats[j]}); + } + } + return ret; + } + + private final int lobSize; + + private final String resultFormat; + + public LobSizeLatestIT(int lobSize, String resultFormat) throws SQLException { + this.lobSize = lobSize; + this.resultFormat = resultFormat; + + try (Connection con = BaseJDBCTest.getConnection(); + Statement stmt = con.createStatement()) { + createTable(lobSize, stmt); + } + } + + private static String tableName = "my_lob_test"; + private static String executeInsert = "insert into " + tableName + " (c1, c2, c3) values ("; + private static String executePreparedStatementInsert = executeInsert + "?, ?, ?)"; + private static String selectQuery = "select * from " + 
tableName + " where c3="; + + private static String generateRandomString(int stringSize) { + RandomStringGenerator randomStringGenerator = + new RandomStringGenerator.Builder().withinRange('a', 'z').build(); + return randomStringGenerator.generate(stringSize); + } + + private static void setResultFormat(Statement stmt, String format) throws SQLException { + stmt.execute("alter session set jdbc_query_result_format = '" + format + "'"); + } + + private void createTable(int lobSize, Statement stmt) throws SQLException { + String createTableQuery = + "create or replace table " + + tableName + + " (c1 varchar, c2 varchar(" + + lobSize + + "), c3 varchar)"; + stmt.execute(createTableQuery); + } + + private void insertQuery(String varCharValue, String uuidValue, Statement stmt) + throws SQLException { + stmt.executeUpdate(executeInsert + "'abc', '" + varCharValue + "', '" + uuidValue + "')"); + } + + private void preparedInsertQuery(String varCharValue, String uuidValue, Connection con) + throws SQLException { + try (PreparedStatement pstmt = con.prepareStatement(executePreparedStatementInsert)) { + pstmt.setString(1, "abc"); + pstmt.setString(2, varCharValue); + pstmt.setString(3, uuidValue); + + pstmt.execute(); + } + } + + @AfterClass + public static void tearDown() throws SQLException { + try (Connection con = BaseJDBCTest.getConnection(); + Statement stmt = con.createStatement()) { + stmt.execute("Drop table if exists " + tableName); + } + } + + @Test + public void testStandardInsertAndSelectWithMaxLobSizeEnabled() throws SQLException { + try (Connection con = BaseJDBCTest.getConnection(); + Statement stmt = con.createStatement()) { + setResultFormat(stmt, resultFormat); + + String varCharValue = LobSizeStringValues.get(lobSize); + String uuidValue = UUIDUtils.getUUID().toString(); + insertQuery(varCharValue, uuidValue, stmt); + + try (ResultSet rs = stmt.executeQuery(selectQuery + "'" + uuidValue + "'")) { + assertTrue(rs.next()); + assertEquals("abc", 
rs.getString(1)); + assertEquals(varCharValue, rs.getString(2)); + assertEquals(uuidValue, rs.getString(3)); + } + } + } + + @Test + public void testPreparedInsertWithMaxLobSizeEnabled() throws SQLException { + try (Connection con = BaseJDBCTest.getConnection(); + Statement stmt = con.createStatement()) { + setResultFormat(stmt, resultFormat); + + String maxVarCharValue = LobSizeStringValues.get(lobSize); + String uuidValue = UUIDUtils.getUUID().toString(); + preparedInsertQuery(maxVarCharValue, uuidValue, con); + + try (ResultSet rs = stmt.executeQuery(selectQuery + "'" + uuidValue + "'")) { + assertTrue(rs.next()); + assertEquals("abc", rs.getString(1)); + assertEquals(maxVarCharValue, rs.getString(2)); + assertEquals(uuidValue, rs.getString(3)); + } + } + } + + @Test + public void testPutAndGet() throws IOException, SQLException { + File tempFile = File.createTempFile("LobSizeTest", ".csv"); + // Delete file when JVM shuts down + tempFile.deleteOnExit(); + + String filePath = tempFile.getPath(); + String filePathEscaped = filePath.replace("\\", "\\\\"); + String fileName = tempFile.getName(); + + String varCharValue = LobSizeStringValues.get(lobSize); + String uuidValue = UUIDUtils.getUUID().toString(); + String fileInput = "abc," + varCharValue + "," + uuidValue; + + // Print data to new temporary file + try (PrintWriter out = new PrintWriter(filePath)) { + out.println(fileInput); + } + + try (Connection con = BaseJDBCTest.getConnection(); + Statement stmt = con.createStatement()) { + setResultFormat(stmt, resultFormat); + + // Test PUT + String sqlPut = "PUT 'file://" + filePathEscaped + "' @%" + tableName; + + stmt.execute(sqlPut); + + try (ResultSet rsPut = stmt.getResultSet()) { + assertTrue(rsPut.next()); + assertEquals(fileName, rsPut.getString(1)); + assertEquals(fileName + ".gz", rsPut.getString(2)); + assertEquals("GZIP", rsPut.getString(6)); + assertEquals("UPLOADED", rsPut.getString(7)); + } + + try (ResultSet rsFiles = stmt.executeQuery("ls @%" + 
tableName)) { + // ResultSet should return a row with the zipped file name + assertTrue(rsFiles.next()); + assertEquals(fileName + ".gz", rsFiles.getString(1)); + } + + String copyInto = + "copy into " + + tableName + + " from @%" + + tableName + + " file_format=(type=csv compression='gzip')"; + stmt.execute(copyInto); + + // Check that results are copied into table correctly + try (ResultSet rsCopy = stmt.executeQuery(selectQuery + "'" + uuidValue + "'")) { + assertTrue(rsCopy.next()); + assertEquals("abc", rsCopy.getString(1)); + assertEquals(varCharValue, rsCopy.getString(2)); + assertEquals(uuidValue, rsCopy.getString(3)); + } + + // Test Get + Path tempDir = Files.createTempDirectory("MaxLobTest"); + // Delete tempDir when JVM shuts down + tempDir.toFile().deleteOnExit(); + String pathToTempDir = tempDir.toString().replace("\\", "\\\\"); + + String getSql = "get @%" + tableName + " 'file://" + pathToTempDir + "'"; + stmt.execute(getSql); + + try (ResultSet rsGet = stmt.getResultSet()) { + assertTrue(rsGet.next()); + assertEquals(fileName + ".gz", rsGet.getString(1)); + assertEquals("DOWNLOADED", rsGet.getString(3)); + } + } + } +} diff --git a/src/test/java/net/snowflake/client/jdbc/MaxLobSizeLatestIT.java b/src/test/java/net/snowflake/client/jdbc/MaxLobSizeLatestIT.java index 8fd874a9c..8962b8141 100644 --- a/src/test/java/net/snowflake/client/jdbc/MaxLobSizeLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/MaxLobSizeLatestIT.java @@ -1,252 +1,48 @@ -/* - * Copyright (c) 2024 Snowflake Computing Inc. All right reserved. 
- */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.text.IsEmptyString.emptyOrNullString; -import java.io.File; -import java.io.IOException; -import java.io.PrintWriter; -import java.nio.file.Files; -import java.nio.file.Path; import java.sql.Connection; -import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import net.snowflake.client.category.TestCategoryStatement; -import net.snowflake.client.core.ObjectMapperFactory; -import net.snowflake.client.core.UUIDUtils; -import org.apache.commons.text.RandomStringGenerator; -import org.junit.AfterClass; -import org.junit.BeforeClass; +import net.snowflake.client.ConditionalIgnoreRule; +import net.snowflake.client.RunningOnGithubAction; +import org.hamcrest.CoreMatchers; +import org.junit.Assert; import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -@RunWith(Parameterized.class) -@Category(TestCategoryStatement.class) public class MaxLobSizeLatestIT extends BaseJDBCTest { - // Max LOB size is testable from version 3.15.0 and above. 
- private static int maxLobSize = 16 * 1024 * 1024; - private static int largeLobSize = maxLobSize / 2; - private static int mediumLobSize = largeLobSize / 2; - private static int originLobSize = mediumLobSize / 2; - private static int smallLobSize = 16; - - private static Map LobSizeStringValues = - new HashMap() { - { - put(smallLobSize, generateRandomString(smallLobSize)); - put(originLobSize, generateRandomString(originLobSize)); - put(mediumLobSize, generateRandomString(mediumLobSize)); - put(largeLobSize, generateRandomString(largeLobSize)); - put(maxLobSize, generateRandomString(maxLobSize)); - } - }; - - @BeforeClass - public static void setUp() { - System.setProperty( - // the max json string should be ~1.33 for Arrow response so let's use 1.5 to be sure - ObjectMapperFactory.MAX_JSON_STRING_LENGTH_JVM, Integer.toString((int) (maxLobSize * 1.5))); - } - - @Parameterized.Parameters(name = "lobSize={0}, resultFormat={1}") - public static Collection data() { - int[] lobSizes = - new int[] {smallLobSize, originLobSize, mediumLobSize, largeLobSize, maxLobSize}; - String[] resultFormats = new String[] {"Arrow", "JSON"}; - List ret = new ArrayList<>(); - for (int i = 0; i < lobSizes.length; i++) { - for (int j = 0; j < resultFormats.length; j++) { - ret.add(new Object[] {lobSizes[i], resultFormats[j]}); - } - } - return ret; - } - - private final int lobSize; - - private final String resultFormat; - - public MaxLobSizeLatestIT(int lobSize, String resultFormat) throws SQLException { - this.lobSize = lobSize; - this.resultFormat = resultFormat; - - try (Connection con = BaseJDBCTest.getConnection(); - Statement stmt = con.createStatement()) { - createTable(lobSize, stmt); - } - } - - private static String tableName = "my_lob_test"; - private static String executeInsert = "insert into " + tableName + " (c1, c2, c3) values ("; - private static String executePreparedStatementInsert = executeInsert + "?, ?, ?)"; - private static String selectQuery = "select * from " + 
tableName + " where c3="; - - private static String generateRandomString(int stringSize) { - RandomStringGenerator randomStringGenerator = - new RandomStringGenerator.Builder().withinRange('a', 'z').build(); - return randomStringGenerator.generate(stringSize); - } - - private static void setResultFormat(Statement stmt, String format) throws SQLException { - stmt.execute("alter session set jdbc_query_result_format = '" + format + "'"); - } - - private void createTable(int lobSize, Statement stmt) throws SQLException { - String createTableQuery = - "create or replace table " - + tableName - + " (c1 varchar, c2 varchar(" - + lobSize - + "), c3 varchar)"; - stmt.execute(createTableQuery); - } - - private void insertQuery(String varCharValue, String uuidValue, Statement stmt) - throws SQLException { - stmt.executeUpdate(executeInsert + "'abc', '" + varCharValue + "', '" + uuidValue + "')"); - } - - private void preparedInsertQuery(String varCharValue, String uuidValue, Connection con) - throws SQLException { - try (PreparedStatement pstmt = con.prepareStatement(executePreparedStatementInsert)) { - pstmt.setString(1, "abc"); - pstmt.setString(2, varCharValue); - pstmt.setString(3, uuidValue); - - pstmt.execute(); - } - } - - @AfterClass - public static void tearDown() throws SQLException { - try (Connection con = BaseJDBCTest.getConnection(); - Statement stmt = con.createStatement()) { - stmt.execute("Drop table if exists " + tableName); - } - } - + /** + * Available since 3.17.0 + * + * @throws SQLException + */ @Test - public void testStandardInsertAndSelectWithMaxLobSizeEnabled() throws SQLException { + @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + public void testIncreasedMaxLobSize() throws SQLException { try (Connection con = BaseJDBCTest.getConnection(); Statement stmt = con.createStatement()) { - setResultFormat(stmt, resultFormat); - - String varCharValue = LobSizeStringValues.get(lobSize); - String uuidValue = 
UUIDUtils.getUUID().toString(); - insertQuery(varCharValue, uuidValue, stmt); - - try (ResultSet rs = stmt.executeQuery(selectQuery + "'" + uuidValue + "'")) { - assertTrue(rs.next()); - assertEquals("abc", rs.getString(1)); - assertEquals(varCharValue, rs.getString(2)); - assertEquals(uuidValue, rs.getString(3)); + stmt.execute("alter session set FEATURE_INCREASED_MAX_LOB_SIZE_IN_MEMORY='ENABLED'"); + stmt.execute("alter session set ENABLE_LARGE_VARCHAR_AND_BINARY_IN_RESULT=false"); + try { + stmt.execute("select randstr(20000000, random()) as large_str"); + } catch (SnowflakeSQLException e) { + assertThat(e.getMessage(), CoreMatchers.containsString("exceeds supported length")); } - } - } - - @Test - public void testPreparedInsertWithMaxLobSizeEnabled() throws SQLException { - try (Connection con = BaseJDBCTest.getConnection(); - Statement stmt = con.createStatement()) { - setResultFormat(stmt, resultFormat); - - String maxVarCharValue = LobSizeStringValues.get(lobSize); - String uuidValue = UUIDUtils.getUUID().toString(); - preparedInsertQuery(maxVarCharValue, uuidValue, con); - - try (ResultSet rs = stmt.executeQuery(selectQuery + "'" + uuidValue + "'")) { - assertTrue(rs.next()); - assertEquals("abc", rs.getString(1)); - assertEquals(maxVarCharValue, rs.getString(2)); - assertEquals(uuidValue, rs.getString(3)); - } - } - } - - @Test - public void testPutAndGet() throws IOException, SQLException { - File tempFile = File.createTempFile("LobSizeTest", ".csv"); - // Delete file when JVM shuts down - tempFile.deleteOnExit(); - - String filePath = tempFile.getPath(); - String filePathEscaped = filePath.replace("\\", "\\\\"); - String fileName = tempFile.getName(); - - String varCharValue = LobSizeStringValues.get(lobSize); - String uuidValue = UUIDUtils.getUUID().toString(); - String fileInput = "abc," + varCharValue + "," + uuidValue; - - // Print data to new temporary file - try (PrintWriter out = new PrintWriter(filePath)) { - out.println(fileInput); - } - - try 
(Connection con = BaseJDBCTest.getConnection(); - Statement stmt = con.createStatement()) { - setResultFormat(stmt, resultFormat); - - // Test PUT - String sqlPut = "PUT 'file://" + filePathEscaped + "' @%" + tableName; - - stmt.execute(sqlPut); - - try (ResultSet rsPut = stmt.getResultSet()) { - assertTrue(rsPut.next()); - assertEquals(fileName, rsPut.getString(1)); - assertEquals(fileName + ".gz", rsPut.getString(2)); - assertEquals("GZIP", rsPut.getString(6)); - assertEquals("UPLOADED", rsPut.getString(7)); - } - - try (ResultSet rsFiles = stmt.executeQuery("ls @%" + tableName)) { - // ResultSet should return a row with the zipped file name - assertTrue(rsFiles.next()); - assertEquals(fileName + ".gz", rsFiles.getString(1)); - } - - String copyInto = - "copy into " - + tableName - + " from @%" - + tableName - + " file_format=(type=csv compression='gzip')"; - stmt.execute(copyInto); - - // Check that results are copied into table correctly - try (ResultSet rsCopy = stmt.executeQuery(selectQuery + "'" + uuidValue + "'")) { - assertTrue(rsCopy.next()); - assertEquals("abc", rsCopy.getString(1)); - assertEquals(varCharValue, rsCopy.getString(2)); - assertEquals(uuidValue, rsCopy.getString(3)); - } - - // Test Get - Path tempDir = Files.createTempDirectory("MaxLobTest"); - // Delete tempDir when JVM shuts down - tempDir.toFile().deleteOnExit(); - String pathToTempDir = tempDir.toString().replace("\\", "\\\\"); - - String getSql = "get @%" + tableName + " 'file://" + pathToTempDir + "'"; - stmt.execute(getSql); - try (ResultSet rsGet = stmt.getResultSet()) { - assertTrue(rsGet.next()); - assertEquals(fileName + ".gz", rsGet.getString(1)); - assertEquals("DOWNLOADED", rsGet.getString(3)); + stmt.execute("alter session set ENABLE_LARGE_VARCHAR_AND_BINARY_IN_RESULT=true"); + try (ResultSet resultSet = + stmt.executeQuery("select randstr(20000000, random()) as large_str")) { + Assert.assertTrue(resultSet.next()); + assertThat(resultSet.getString(1), 
is(not(emptyOrNullString()))); + } finally { + stmt.execute("alter session unset ENABLE_LARGE_VARCHAR_AND_BINARY_IN_RESULT"); + stmt.execute("alter session unset FEATURE_INCREASED_MAX_LOB_SIZE_IN_MEMORY"); } } } diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetLatestIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetLatestIT.java index fb55a9780..efd185926 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetLatestIT.java @@ -744,7 +744,7 @@ public void testCallStatementType() throws SQLException { try { String sp = "CREATE OR REPLACE PROCEDURE \"SP_ZSDLEADTIME_ARCHIVE_DAILY\"()\n" - + "RETURNS VARCHAR(16777216)\n" + + "RETURNS VARCHAR\n" + "LANGUAGE SQL\n" + "EXECUTE AS CALLER\n" + "AS \n" @@ -793,7 +793,6 @@ public void testCallStatementType() throws SQLException { assertEquals("SP_ZSDLEADTIME_ARCHIVE_DAILY", resultSetMetaData.getColumnName(1)); assertEquals("VARCHAR", resultSetMetaData.getColumnTypeName(1)); assertEquals(0, resultSetMetaData.getScale(1)); - assertEquals(16777216, resultSetMetaData.getPrecision(1)); } } finally { statement.execute("drop procedure if exists SP_ZSDLEADTIME_ARCHIVE_DAILY()"); @@ -1048,6 +1047,12 @@ public void testLargeStringRetrieval() throws SQLException { int colLength = 16777216; try (Connection con = getConnection(); Statement statement = con.createStatement()) { + SFBaseSession session = con.unwrap(SnowflakeConnectionV1.class).getSFBaseSession(); + Integer maxVarcharSize = + (Integer) session.getOtherParameter("VARCHAR_AND_BINARY_MAX_SIZE_IN_RESULT"); + if (maxVarcharSize != null) { + colLength = maxVarcharSize; + } statement.execute("create or replace table " + tableName + " (c1 string(" + colLength + "))"); statement.execute( "insert into " + tableName + " select randstr(" + colLength + ", random())");