Skip to content

Commit

Permalink
🎉 Source Zendesk Chat: engagements data — fix infinite looping + gradle…
Browse files Browse the repository at this point in the history
…w format (#18121)

* fix infinite looping in chats

* added a more meaningful variable name

* bump docker version

* auto-bump connector version

* run format

Co-authored-by: Roberto Bonnet <robertojuarezwp@gmail.com>
Co-authored-by: Octavia Squidington III <octavia-squidington-iii@users.noreply.github.com>
  • Loading branch information
3 people authored Oct 18, 2022
1 parent 740bbcd commit 70dd9a8
Show file tree
Hide file tree
Showing 30 changed files with 246 additions and 233 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -1147,7 +1147,7 @@
- name: Zendesk Chat
sourceDefinitionId: 40d24d0f-b8f9-4fe0-9e6c-b06c0f3f45e4
dockerRepository: airbyte/source-zendesk-chat
dockerImageTag: 0.1.10
dockerImageTag: 0.1.11
documentationUrl: https://docs.airbyte.com/integrations/sources/zendesk-chat
icon: zendesk.svg
sourceType: api
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11736,7 +11736,7 @@
supportsNormalization: false
supportsDBT: false
supported_destination_sync_modes: []
- dockerImage: "airbyte/source-zendesk-chat:0.1.10"
- dockerImage: "airbyte/source-zendesk-chat:0.1.11"
spec:
documentationUrl: "https://docs.airbyte.com/integrations/sources/zendesk-chat"
connectionSpecification:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -74,9 +74,9 @@ void testSchema() {
DefaultBigQueryDenormalizedRecordFormatter rf = new DefaultBigQueryDenormalizedRecordFormatter(
jsonNodeSchema, new BigQuerySQLNameTransformer());
final Field subFields = Field.newBuilder("big_query_array", LegacySQLTypeName.RECORD,
Field.of("domain", LegacySQLTypeName.STRING),
Field.of("grants", LegacySQLTypeName.RECORD,
Field.newBuilder("big_query_array", StandardSQLTypeName.STRING).setMode(Mode.REPEATED).build()))
Field.of("domain", LegacySQLTypeName.STRING),
Field.of("grants", LegacySQLTypeName.RECORD,
Field.newBuilder("big_query_array", StandardSQLTypeName.STRING).setMode(Mode.REPEATED).build()))
.setMode(Mode.REPEATED).build();
final Schema expectedResult = Schema.of(
Field.newBuilder("accepts_marketing_updated_at", LegacySQLTypeName.DATETIME).setMode(Mode.NULLABLE).build(),
Expand Down Expand Up @@ -147,8 +147,8 @@ void testSchemaWithInvalidArrayType() {
final Schema expectedResult = Schema.of(
Field.of("name", LegacySQLTypeName.STRING),
Field.newBuilder("permission_list", LegacySQLTypeName.RECORD,
Field.of("domain", LegacySQLTypeName.STRING),
Field.newBuilder("grants", LegacySQLTypeName.STRING).setMode(Mode.REPEATED).build())
Field.of("domain", LegacySQLTypeName.STRING),
Field.newBuilder("grants", LegacySQLTypeName.STRING).setMode(Mode.REPEATED).build())
.setMode(Mode.REPEATED).build(),
Field.of("_airbyte_ab_id", LegacySQLTypeName.STRING),
Field.of("_airbyte_emitted_at", LegacySQLTypeName.TIMESTAMP));
Expand Down Expand Up @@ -181,8 +181,8 @@ void testSchemaWithNestedDatetimeInsideNullObject() {
final Schema expectedResult = Schema.of(
Field.newBuilder("name", LegacySQLTypeName.STRING).setMode(Mode.NULLABLE).build(),
Field.newBuilder("appointment", LegacySQLTypeName.RECORD,
Field.newBuilder("street", LegacySQLTypeName.STRING).setMode(Mode.NULLABLE).build(),
Field.newBuilder("expTime", LegacySQLTypeName.DATETIME).setMode(Mode.NULLABLE).build())
Field.newBuilder("street", LegacySQLTypeName.STRING).setMode(Mode.NULLABLE).build(),
Field.newBuilder("expTime", LegacySQLTypeName.DATETIME).setMode(Mode.NULLABLE).build())
.setMode(Mode.NULLABLE).build(),
Field.of("_airbyte_ab_id", LegacySQLTypeName.STRING),
Field.of("_airbyte_emitted_at", LegacySQLTypeName.TIMESTAMP));
Expand Down Expand Up @@ -213,8 +213,8 @@ void formatRecord_objectType() throws JsonProcessingException {
final DefaultBigQueryDenormalizedRecordFormatter rf = new DefaultBigQueryDenormalizedRecordFormatter(
jsonNodeSchema, new BigQuerySQLNameTransformer());
final JsonNode objectNode = mapper.readTree("""
{"name":"data"}
""");
{"name":"data"}
""");
final AirbyteRecordMessage airbyteRecordMessage = new AirbyteRecordMessage();
airbyteRecordMessage.setEmittedAt(1602637589000L);
airbyteRecordMessage.setData(objectNode);
Expand All @@ -234,8 +234,8 @@ void formatRecord_containsRefDefinition() throws JsonProcessingException {
jsonNodeSchema, new BigQuerySQLNameTransformer());
rf.fieldsContainRefDefinitionValue.add("name");
final JsonNode objectNode = mapper.readTree("""
{"name":"data"}
""");
{"name":"data"}
""");
final AirbyteRecordMessage airbyteRecordMessage = new AirbyteRecordMessage();
airbyteRecordMessage.setEmittedAt(1602637589000L);
airbyteRecordMessage.setData(objectNode);
Expand All @@ -254,8 +254,8 @@ void formatRecord_objectWithArray() throws JsonProcessingException {
DefaultBigQueryDenormalizedRecordFormatter rf = new DefaultBigQueryDenormalizedRecordFormatter(
jsonNodeSchema, new BigQuerySQLNameTransformer());
final JsonNode objectNode = mapper.readTree("""
{"object_with_arrays":["array_3"]}
""");
{"object_with_arrays":["array_3"]}
""");
final AirbyteRecordMessage airbyteRecordMessage = new AirbyteRecordMessage();
airbyteRecordMessage.setEmittedAt(1602637589000L);
airbyteRecordMessage.setData(objectNode);
Expand All @@ -276,7 +276,7 @@ void formatRecordNotObject_thenThrowsError() throws JsonProcessingException {
DefaultBigQueryDenormalizedRecordFormatter rf = new DefaultBigQueryDenormalizedRecordFormatter(
jsonNodeSchema, new BigQuerySQLNameTransformer());
final JsonNode arrayNode = mapper.readTree("""
["one"]""");
["one"]""");

final AirbyteRecordMessage airbyteRecordMessage = new AirbyteRecordMessage();
airbyteRecordMessage.setEmittedAt(1602637589000L);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,16 +32,16 @@ void surroundArraysByObjects() {
void formatArrayItems() throws JsonProcessingException {
final JsonNode expectedArrayNode = mapper.readTree(
"""
[
{"big_query_array": ["one", "two"]},
{"big_query_array": ["one", "two"]}
]
""");
[
{"big_query_array": ["one", "two"]},
{"big_query_array": ["one", "two"]}
]
""");
final List<JsonNode> arrayNodes = List.of(
mapper.readTree("""
["one", "two"]"""),
mapper.readTree("""
["one", "two"]"""));
mapper.readTree("""
["one", "two"]"""),
mapper.readTree("""
["one", "two"]"""));

final JsonNode result = formatter.formatArrayItems(arrayNodes);

Expand All @@ -50,10 +50,10 @@ void formatArrayItems() throws JsonProcessingException {

@Test
void formatArrayItems_notArray() throws JsonProcessingException {
final JsonNode objectNodeInput = mapper.readTree("""
{"type":"object","items":{"type":"integer"}}""");
final JsonNode expectedResult = mapper.readTree("""
[{"type":"object","items":{"type":"integer"}}]""");
final JsonNode objectNodeInput = mapper.readTree("""
{"type":"object","items":{"type":"integer"}}""");
final JsonNode expectedResult = mapper.readTree("""
[{"type":"object","items":{"type":"integer"}}]""");

final JsonNode result = formatter.formatArrayItems(List.of(objectNodeInput));

Expand All @@ -64,26 +64,27 @@ void formatArrayItems_notArray() throws JsonProcessingException {
void findArrays() throws JsonProcessingException {
final JsonNode schemaArrays = getSchemaArrays();
final List<JsonNode> expectedResult = List.of(
mapper.readTree("""
{"type":["array"],"items":{"type":"integer"}}"""),
mapper.readTree("""
{"type":["array"],"items":{"type":["array"],"items":{"type":"integer"}}}"""),
mapper.readTree("""
{"type":["array"],"items":{"type":"integer"}}"""),
mapper.readTree("""
{"type":["array"],"items":{"type":["array"],"items":{"type":["array"],"items":{"type":"integer"}}}}"""),
mapper.readTree("""
{"type":["array"],"items":{"type":["array"],"items":{"type":"integer"}}}"""),
mapper.readTree("""
{"type":["array"],"items":{"type":"integer"}}"""),
mapper.readTree("""
mapper.readTree("""
{"type":["array"],"items":{"type":"integer"}}"""),
mapper.readTree("""
{"type":["array"],"items":{"type":["array"],"items":{"type":"integer"}}}"""),
mapper.readTree("""
{"type":["array"],"items":{"type":"integer"}}"""),
mapper.readTree("""
{"type":["array"],"items":{"type":["array"],"items":{"type":["array"],"items":{"type":"integer"}}}}"""),
mapper.readTree("""
{"type":["array"],"items":{"type":["array"],"items":{"type":"integer"}}}"""),
mapper.readTree("""
{"type":["array"],"items":{"type":"integer"}}"""),
mapper.readTree(
"""
{"type":["array"],"items":{"type":["array"],"items":{"type":["array"],"items":{"type":["array"],"items":{"type":"integer"}}}}}"""),
mapper.readTree("""
{"type":["array"],"items":{"type":["array"],"items":{"type":["array"],"items":{"type":"integer"}}}}"""),
mapper.readTree("""
{"type":["array"],"items":{"type":["array"],"items":{"type":"integer"}}}"""),
mapper.readTree("""
{"type":["array"],"items":{"type":"integer"}}"""));
mapper.readTree("""
{"type":["array"],"items":{"type":["array"],"items":{"type":["array"],"items":{"type":"integer"}}}}"""),
mapper.readTree("""
{"type":["array"],"items":{"type":["array"],"items":{"type":"integer"}}}"""),
mapper.readTree("""
{"type":["array"],"items":{"type":"integer"}}"""));

final List<JsonNode> result = formatter.findArrays(schemaArrays);
assertEquals(expectedResult, result);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,19 +4,14 @@

package io.airbyte.integrations.destination.bigquery.formatter.arrayformater;

import static io.airbyte.integrations.destination.bigquery.formatter.util.FormatterUtil.NESTED_ARRAY_FIELD;
import static io.airbyte.integrations.destination.bigquery.formatter.util.FormatterUtil.TYPE_FIELD;
import static io.airbyte.integrations.destination.bigquery.util.BigQueryDenormalizedTestSchemaUtils.getExpectedSchemaArraysLegacy;
import static io.airbyte.integrations.destination.bigquery.util.BigQueryDenormalizedTestSchemaUtils.getSchemaArrays;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.JsonNodeType;
import java.util.List;
import org.junit.jupiter.api.Test;

Expand All @@ -38,26 +33,27 @@ void surroundArraysByObjects() {
void findArrays() throws JsonProcessingException {
final JsonNode schemaArrays = getSchemaArrays();
final List<JsonNode> expectedResult = List.of(
mapper.readTree("""
{"type":["array"],"items":{"type":"integer"}}"""),
mapper.readTree("""
{"type":["array"],"items":{"type":["array"],"items":{"type":"integer"}}}"""),
mapper.readTree("""
{"type":["array"],"items":{"type":"integer"}}"""),
mapper.readTree("""
{"type":["array"],"items":{"type":["array"],"items":{"type":["array"],"items":{"type":"integer"}}}}"""),
mapper.readTree("""
{"type":["array"],"items":{"type":["array"],"items":{"type":"integer"}}}"""),
mapper.readTree("""
{"type":["array"],"items":{"type":"integer"}}"""),
mapper.readTree("""
mapper.readTree("""
{"type":["array"],"items":{"type":"integer"}}"""),
mapper.readTree("""
{"type":["array"],"items":{"type":["array"],"items":{"type":"integer"}}}"""),
mapper.readTree("""
{"type":["array"],"items":{"type":"integer"}}"""),
mapper.readTree("""
{"type":["array"],"items":{"type":["array"],"items":{"type":["array"],"items":{"type":"integer"}}}}"""),
mapper.readTree("""
{"type":["array"],"items":{"type":["array"],"items":{"type":"integer"}}}"""),
mapper.readTree("""
{"type":["array"],"items":{"type":"integer"}}"""),
mapper.readTree(
"""
{"type":["array"],"items":{"type":["array"],"items":{"type":["array"],"items":{"type":["array"],"items":{"type":"integer"}}}}}"""),
mapper.readTree("""
{"type":["array"],"items":{"type":["array"],"items":{"type":["array"],"items":{"type":"integer"}}}}"""),
mapper.readTree("""
{"type":["array"],"items":{"type":["array"],"items":{"type":"integer"}}}"""),
mapper.readTree("""
{"type":["array"],"items":{"type":"integer"}}"""));
mapper.readTree("""
{"type":["array"],"items":{"type":["array"],"items":{"type":["array"],"items":{"type":"integer"}}}}"""),
mapper.readTree("""
{"type":["array"],"items":{"type":["array"],"items":{"type":"integer"}}}"""),
mapper.readTree("""
{"type":["array"],"items":{"type":"integer"}}"""));

final List<JsonNode> result = formatter.findArrays(schemaArrays);

Expand All @@ -74,13 +70,13 @@ void findArrays_null() {
void formatArrayItems() throws JsonProcessingException {
final JsonNode expectedArrayNode = mapper.readTree(
"""
{"big_query_array": [["one", "two"], ["one", "two"]]}
""");
{"big_query_array": [["one", "two"], ["one", "two"]]}
""");
final List<JsonNode> arrayNodes = List.of(
mapper.readTree("""
["one", "two"]"""),
mapper.readTree("""
["one", "two"]"""));
mapper.readTree("""
["one", "two"]"""),
mapper.readTree("""
["one", "two"]"""));

final JsonNode result = formatter.formatArrayItems(arrayNodes);

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,33 +20,33 @@ class FormatterUtilTest {
void isAirbyteArray_typeIsNull() throws JsonProcessingException {
final JsonNode arrayNode = mapper.readTree(
"""
["one", "two"]""");
["one", "two"]""");

final boolean result = FormatterUtil.isAirbyteArray(arrayNode);
assertFalse(result);
}

@Test
void isAirbyteArray_typeFieldIsArray() throws JsonProcessingException {
final JsonNode arrayNode = mapper.readTree("""
{"type":["array"],"items":{"type":"integer"}}""");
final JsonNode arrayNode = mapper.readTree("""
{"type":["array"],"items":{"type":"integer"}}""");

boolean result = FormatterUtil.isAirbyteArray(arrayNode);
assertTrue(result);
}

@Test
void isAirbyteArray_typeFieldIsNotArray() throws JsonProcessingException {
final JsonNode objectNode = mapper.readTree("""
{"type":"object"}""");
final JsonNode objectNode = mapper.readTree("""
{"type":"object"}""");
final boolean result = FormatterUtil.isAirbyteArray(objectNode);
assertFalse(result);
}

@Test
void isAirbyteArray_textIsNotArray() throws JsonProcessingException {
final JsonNode arrayNode = mapper.readTree("""
{"type":["notArrayText"]}""");
final JsonNode arrayNode = mapper.readTree("""
{"type":["notArrayText"]}""");
final boolean result = FormatterUtil.isAirbyteArray(arrayNode);
assertFalse(result);
}
Expand Down
Original file line number Diff line number Diff line change
@@ -1,3 +1,7 @@
/*
* Copyright (c) 2022 Airbyte, Inc., all rights reserved.
*/

package io.airbyte.integrations.destination.elasticsearch;

import com.fasterxml.jackson.databind.JsonNode;
Expand All @@ -12,6 +16,7 @@
import org.testcontainers.elasticsearch.ElasticsearchContainer;

public abstract class SshElasticsearchDestinationAcceptanceTest extends ElasticsearchDestinationAcceptanceTest {

private static final Network network = Network.newNetwork();
private static final SshBastionContainer bastion = new SshBastionContainer();
private static ElasticsearchContainer container;
Expand Down Expand Up @@ -61,4 +66,5 @@ public static void afterAll() {
container.close();
bastion.getContainer().close();
}

}
Original file line number Diff line number Diff line change
@@ -1,3 +1,7 @@
/*
* Copyright (c) 2022 Airbyte, Inc., all rights reserved.
*/

package io.airbyte.integrations.destination.elasticsearch;

import io.airbyte.integrations.base.ssh.SshTunnel;
Expand All @@ -7,4 +11,5 @@ public class SshKeyElasticsearchDestinationAcceptanceTest extends SshElasticsear
public SshTunnel.TunnelMethod getTunnelMethod() {
return SshTunnel.TunnelMethod.SSH_KEY_AUTH;
}

}
Original file line number Diff line number Diff line change
@@ -1,8 +1,13 @@
/*
* Copyright (c) 2022 Airbyte, Inc., all rights reserved.
*/

package io.airbyte.integrations.destination.elasticsearch;

import io.airbyte.integrations.base.ssh.SshTunnel;

public class SshPasswordElasticsearchDestinationAcceptanceTest extends SshElasticsearchDestinationAcceptanceTest {
public class SshPasswordElasticsearchDestinationAcceptanceTest extends SshElasticsearchDestinationAcceptanceTest {

@Override
public SshTunnel.TunnelMethod getTunnelMethod() {
return SshTunnel.TunnelMethod.SSH_PASSWORD_AUTH;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -86,9 +86,9 @@ protected JsonNode getFailCheckConfig() {

@Override
protected List<JsonNode> retrieveRecords(TestDestinationEnv testEnv,
String streamName,
String namespace,
JsonNode streamSchema) {
String streamName,
String namespace,
JsonNode streamSchema) {
var key = redisNameTransformer.keyName(namespace, streamName);
return redisCache.getAll(key).stream()
.sorted(Comparator.comparing(RedisRecord::getTimestamp))
Expand All @@ -97,7 +97,6 @@ protected List<JsonNode> retrieveRecords(TestDestinationEnv testEnv,
.collect(Collectors.toList());
}


@Override
protected boolean implementsNamespaces() {
return true;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -317,7 +317,6 @@ def load_dataframes(self, fp, skip_data=False) -> Iterable:
logger.error(error_msg)
raise ConfigurationError(error_msg) from err


reader_options = {**self._reader_options}
try:
if self._reader_format == "csv":
Expand Down
Loading

0 comments on commit 70dd9a8

Please sign in to comment.