-
Notifications
You must be signed in to change notification settings - Fork 4.2k
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
destination-redshift: Fix StackOverflowError with eager rendering of nested jooq function call sql. (#33877) Signed-off-by: Gireesh Sreepathi <gisripa@gmail.com>
- Loading branch information
Showing
5 changed files
with
366 additions
and
14 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
137 changes: 137 additions & 0 deletions
137
...o/airbyte/integrations/destination/redshift/typing_deduping/RedshiftSqlGeneratorTest.java
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,137 @@ | ||
/* | ||
* Copyright (c) 2023 Airbyte, Inc., all rights reserved. | ||
*/ | ||
|
||
package io.airbyte.integrations.destination.redshift.typing_deduping; | ||
|
||
import static org.junit.jupiter.api.Assertions.*; | ||
|
||
import io.airbyte.commons.resources.MoreResources; | ||
import io.airbyte.integrations.base.destination.typing_deduping.AirbyteProtocolType; | ||
import io.airbyte.integrations.base.destination.typing_deduping.AirbyteType; | ||
import io.airbyte.integrations.base.destination.typing_deduping.Array; | ||
import io.airbyte.integrations.base.destination.typing_deduping.ColumnId; | ||
import io.airbyte.integrations.base.destination.typing_deduping.StreamConfig; | ||
import io.airbyte.integrations.base.destination.typing_deduping.StreamId; | ||
import io.airbyte.integrations.base.destination.typing_deduping.Struct; | ||
import io.airbyte.integrations.destination.redshift.RedshiftSQLNameTransformer; | ||
import io.airbyte.protocol.models.v0.DestinationSyncMode; | ||
import io.airbyte.protocol.models.v0.SyncMode; | ||
import java.io.IOException; | ||
import java.time.Instant; | ||
import java.util.Arrays; | ||
import java.util.LinkedHashMap; | ||
import java.util.List; | ||
import java.util.Optional; | ||
import java.util.Random; | ||
import org.jooq.DSLContext; | ||
import org.jooq.conf.Settings; | ||
import org.jooq.impl.DSL; | ||
import org.junit.jupiter.api.BeforeEach; | ||
import org.junit.jupiter.api.Test; | ||
|
||
public class RedshiftSqlGeneratorTest { | ||
|
||
private static final Random RANDOM = new Random(); | ||
|
||
private static final RedshiftSqlGenerator redshiftSqlGenerator = new RedshiftSqlGenerator(new RedshiftSQLNameTransformer()) { | ||
|
||
// Override only for tests to print formatted SQL. The actual implementation should use unformatted | ||
// to save bytes. | ||
@Override | ||
protected DSLContext getDslContext() { | ||
return DSL.using(getDialect(), new Settings().withRenderFormatted(true)); | ||
} | ||
|
||
}; | ||
|
||
private StreamId streamId; | ||
|
||
private StreamConfig incrementalDedupStream; | ||
|
||
private StreamConfig incrementalAppendStream; | ||
|
||
@BeforeEach | ||
public void setup() { | ||
streamId = new StreamId("test_schema", "users_final", "test_schema", "users_raw", "test_schema", "users_final"); | ||
final ColumnId id1 = redshiftSqlGenerator.buildColumnId("id1"); | ||
final ColumnId id2 = redshiftSqlGenerator.buildColumnId("id2"); | ||
List<ColumnId> primaryKey = List.of(id1, id2); | ||
ColumnId cursor = redshiftSqlGenerator.buildColumnId("updated_at"); | ||
|
||
LinkedHashMap<ColumnId, AirbyteType> columns = new LinkedHashMap<>(); | ||
columns.put(id1, AirbyteProtocolType.INTEGER); | ||
columns.put(id2, AirbyteProtocolType.INTEGER); | ||
columns.put(cursor, AirbyteProtocolType.TIMESTAMP_WITH_TIMEZONE); | ||
columns.put(redshiftSqlGenerator.buildColumnId("struct"), new Struct(new LinkedHashMap<>())); | ||
columns.put(redshiftSqlGenerator.buildColumnId("array"), new Array(AirbyteProtocolType.UNKNOWN)); | ||
columns.put(redshiftSqlGenerator.buildColumnId("string"), AirbyteProtocolType.STRING); | ||
columns.put(redshiftSqlGenerator.buildColumnId("number"), AirbyteProtocolType.NUMBER); | ||
columns.put(redshiftSqlGenerator.buildColumnId("integer"), AirbyteProtocolType.INTEGER); | ||
columns.put(redshiftSqlGenerator.buildColumnId("boolean"), AirbyteProtocolType.BOOLEAN); | ||
columns.put(redshiftSqlGenerator.buildColumnId("timestamp_with_timezone"), AirbyteProtocolType.TIMESTAMP_WITH_TIMEZONE); | ||
columns.put(redshiftSqlGenerator.buildColumnId("timestamp_without_timezone"), AirbyteProtocolType.TIMESTAMP_WITHOUT_TIMEZONE); | ||
columns.put(redshiftSqlGenerator.buildColumnId("time_with_timezone"), AirbyteProtocolType.TIME_WITH_TIMEZONE); | ||
columns.put(redshiftSqlGenerator.buildColumnId("time_without_timezone"), AirbyteProtocolType.TIME_WITHOUT_TIMEZONE); | ||
columns.put(redshiftSqlGenerator.buildColumnId("date"), AirbyteProtocolType.DATE); | ||
columns.put(redshiftSqlGenerator.buildColumnId("unknown"), AirbyteProtocolType.UNKNOWN); | ||
columns.put(redshiftSqlGenerator.buildColumnId("_ab_cdc_deleted_at"), AirbyteProtocolType.TIMESTAMP_WITH_TIMEZONE); | ||
incrementalDedupStream = new StreamConfig( | ||
streamId, | ||
SyncMode.INCREMENTAL, | ||
DestinationSyncMode.APPEND_DEDUP, | ||
primaryKey, | ||
Optional.of(cursor), | ||
columns); | ||
incrementalAppendStream = new StreamConfig( | ||
streamId, | ||
SyncMode.INCREMENTAL, | ||
DestinationSyncMode.APPEND, | ||
primaryKey, | ||
Optional.of(cursor), | ||
columns); | ||
} | ||
|
||
@Test | ||
public void testTypingAndDeduping() throws IOException { | ||
String expectedSql = MoreResources.readResource("typing_deduping_with_cdc.sql"); | ||
String generatedSql = | ||
redshiftSqlGenerator.updateTable(incrementalDedupStream, "unittest", Optional.of(Instant.parse("2023-02-15T18:35:24.00Z")), false); | ||
List<String> expectedSqlLines = Arrays.stream(expectedSql.split("\n")).map(String::trim).toList(); | ||
List<String> generatedSqlLines = Arrays.stream(generatedSql.split("\n")).map(String::trim).toList(); | ||
System.out.println(generatedSql); | ||
assertEquals(expectedSqlLines, generatedSqlLines); | ||
} | ||
|
||
@Test | ||
public void test2000ColumnSql() { | ||
final ColumnId id1 = redshiftSqlGenerator.buildColumnId("id1"); | ||
final ColumnId id2 = redshiftSqlGenerator.buildColumnId("id2"); | ||
List<ColumnId> primaryKey = List.of(id1, id2); | ||
ColumnId cursor = redshiftSqlGenerator.buildColumnId("updated_at"); | ||
|
||
LinkedHashMap<ColumnId, AirbyteType> columns = new LinkedHashMap<>(); | ||
columns.put(id1, AirbyteProtocolType.INTEGER); | ||
columns.put(id2, AirbyteProtocolType.INTEGER); | ||
columns.put(cursor, AirbyteProtocolType.TIMESTAMP_WITH_TIMEZONE); | ||
|
||
for (int i = 0; i < 2000; i++) { | ||
final String columnName = RANDOM | ||
.ints('a', 'z' + 1) | ||
.limit(15) | ||
.collect(StringBuilder::new, StringBuilder::appendCodePoint, StringBuilder::append) | ||
.toString(); | ||
columns.put(redshiftSqlGenerator.buildColumnId(columnName), AirbyteProtocolType.STRING); | ||
} | ||
String generatedSql = redshiftSqlGenerator.updateTable(new StreamConfig( | ||
streamId, | ||
SyncMode.INCREMENTAL, | ||
DestinationSyncMode.APPEND_DEDUP, | ||
primaryKey, | ||
Optional.of(cursor), | ||
columns), "unittest", Optional.of(Instant.parse("2023-02-15T18:35:24.00Z")), false); | ||
// This should not throw an exception. | ||
assertFalse(generatedSql.isEmpty()); | ||
} | ||
|
||
} |
Oops, something went wrong.