Map number(integer) into an integer rather than a float (#20730)
* Fix failing test

* Bigquery-denormalized update spec plus destination_definitions.yaml to fix integration tests

* sanity

* Update version number and release note

* auto-bump connector version

Co-authored-by: Octavia Squidington III <octavia-squidington-iii@users.noreply.github.com>
rodireich and octavia-squidington-iii authored Jan 4, 2023
1 parent 06ef1e9 commit 6a68d1f
Showing 5 changed files with 20 additions and 16 deletions.
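The functional change is small but user-visible: the denormalized BigQuery destination now creates an `INT64` column for a JSON schema `number` field whose `airbyte_type` is `integer` (or `big_integer`), instead of a floating-point column. A minimal sketch of that decision, using a hypothetical `resolveNumberType` helper and assuming `FLOAT64` stays the default for plain numbers:

```java
import com.fasterxml.jackson.databind.JsonNode;
import com.google.cloud.bigquery.StandardSQLTypeName;
import org.apache.commons.lang3.StringUtils;

public final class NumberTypeMappingSketch {

  // Hypothetical helper mirroring the new NUMBER branch: a "number" field whose
  // airbyte_type is "integer" or "big_integer" now maps to INT64; anything else
  // keeps a floating-point column (FLOAT64 is assumed as the default here).
  static StandardSQLTypeName resolveNumberType(final JsonNode airbyteType) {
    if (airbyteType != null
        && StringUtils.equalsAnyIgnoreCase(airbyteType.asText(), "big_integer", "integer")) {
      return StandardSQLTypeName.INT64;
    }
    return StandardSQLTypeName.FLOAT64;
  }
}
```

The remaining files in the diff are the usual version-bump plumbing: the connector definition and spec entries, the Dockerfile label, and the docs changelog.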
@@ -57,7 +57,7 @@
- name: BigQuery (denormalized typed struct)
destinationDefinitionId: 079d5540-f236-4294-ba7c-ade8fd918496
dockerRepository: airbyte/destination-bigquery-denormalized
- dockerImageTag: 1.2.9
+ dockerImageTag: 1.2.10
documentationUrl: https://docs.airbyte.com/integrations/destinations/bigquery
icon: bigquery.svg
resourceRequirements:
@@ -831,7 +831,7 @@
- "overwrite"
- "append"
- "append_dedup"
- - dockerImage: "airbyte/destination-bigquery-denormalized:1.2.9"
+ - dockerImage: "airbyte/destination-bigquery-denormalized:1.2.10"
spec:
documentationUrl: "https://docs.airbyte.com/integrations/destinations/bigquery"
connectionSpecification:
@@ -1025,7 +1025,7 @@
order: 5
supportsIncremental: true
supportsNormalization: false
- supportsDBT: true
+ supportsDBT: false
supported_destination_sync_modes:
- "overwrite"
- "append"
@@ -17,5 +17,5 @@ ENV ENABLE_SENTRY true

COPY --from=build /airbyte /airbyte

- LABEL io.airbyte.version=1.2.9
+ LABEL io.airbyte.version=1.2.10
LABEL io.airbyte.name=airbyte/destination-bigquery-denormalized
@@ -40,6 +40,7 @@
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
import java.util.stream.Collectors;
+ import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -68,15 +69,15 @@ private ArrayFormatter getArrayFormatter() {
return arrayFormatter;
}

- public void setArrayFormatter(ArrayFormatter arrayFormatter) {
+ public void setArrayFormatter(final ArrayFormatter arrayFormatter) {
this.arrayFormatter = arrayFormatter;
this.jsonSchema = formatJsonSchema(this.originalJsonSchema.deepCopy());
this.bigQuerySchema = getBigQuerySchema(jsonSchema);
}

@Override
protected JsonNode formatJsonSchema(final JsonNode jsonSchema) {
- var modifiedJsonSchema = jsonSchema.deepCopy(); // Issue #5912 is reopened (PR #11166) formatAllOfAndAnyOfFields(namingResolver, jsonSchema);
+ final var modifiedJsonSchema = jsonSchema.deepCopy(); // Issue #5912 is reopened (PR #11166) formatAllOfAndAnyOfFields(namingResolver, jsonSchema);
getArrayFormatter().populateEmptyArrays(modifiedJsonSchema);
getArrayFormatter().surroundArraysByObjects(modifiedJsonSchema);
return modifiedJsonSchema;
@@ -117,7 +118,7 @@ private JsonNode formatData(final FieldList fields, final JsonNode root) {
if (fields == null) {
return root;
}
- JsonNode formattedData;
+ final JsonNode formattedData;
if (root.isObject()) {
formattedData = getObjectNode(fields, root);
} else if (root.isArray()) {
@@ -151,7 +152,7 @@ private JsonNode getArrayNode(final FieldList fields, final JsonNode root) {
} else {
subFields = arrayField.getSubFields();
}
- List<JsonNode> arrayItems = MoreIterators.toList(root.elements()).stream()
+ final List<JsonNode> arrayItems = MoreIterators.toList(root.elements()).stream()
.map(p -> formatData(subFields, p))
.toList();

@@ -245,15 +246,15 @@ private static JsonNode getFileDefinition(final JsonNode fieldDefinition) {
}

private static JsonNode allOfAndAnyOfFieldProcessing(final String fieldName, final JsonNode fieldDefinition) {
- ObjectReader reader = mapper.readerFor(new TypeReference<List<JsonNode>>() {});
- List<JsonNode> list;
+ final ObjectReader reader = mapper.readerFor(new TypeReference<List<JsonNode>>() {});
+ final List<JsonNode> list;
try {
list = reader.readValue(fieldDefinition.get(fieldName));
- } catch (IOException e) {
+ } catch (final IOException e) {
throw new IllegalStateException(
String.format("Failed to read and process the following field - %s", fieldDefinition));
}
- ObjectNode objectNode = mapper.createObjectNode();
+ final ObjectNode objectNode = mapper.createObjectNode();
list.forEach(field -> {
objectNode.set("big_query_" + field.get("type").asText(), field);
});
@@ -268,8 +269,8 @@ private static JsonNode allOfAndAnyOfFieldProcessing(final String fieldName, fin
private static Builder getField(final StandardNameTransformer namingResolver, final String key, final JsonNode fieldDefinition) {
final String fieldName = namingResolver.getIdentifier(key);
final Builder builder = Field.newBuilder(fieldName, StandardSQLTypeName.STRING);
- JsonNode updatedFileDefinition = getFileDefinition(fieldDefinition);
- JsonNode type = updatedFileDefinition.get(TYPE_FIELD);
+ final JsonNode updatedFileDefinition = getFileDefinition(fieldDefinition);
+ final JsonNode type = updatedFileDefinition.get(TYPE_FIELD);
final JsonNode airbyteType = updatedFileDefinition.get(AIRBYTE_TYPE);
final List<JsonSchemaType> fieldTypes = getTypes(fieldName, type);
for (int i = 0; i < fieldTypes.size(); i++) {
@@ -288,7 +289,9 @@ private static Builder getField(final StandardNameTransformer namingResolver, fi
builder.setType(primaryType.getBigQueryType());
}
case NUMBER -> {
- if (airbyteType != null && airbyteType.asText().equals("big_integer")) {
+ if (airbyteType != null
+     && StringUtils.equalsAnyIgnoreCase(airbyteType.asText(),
+         "big_integer", "integer")) {
builder.setType(StandardSQLTypeName.INT64);
} else {
builder.setType(primaryType.getBigQueryType());
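The widened condition above leans on `StringUtils.equalsAnyIgnoreCase` from Apache Commons Lang 3, which is what the new import at the top of the file is for. A quick, self-contained illustration of what the check now accepts (the field values below are made up for the example, not taken from the PR):

```java
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.commons.lang3.StringUtils;

public final class AirbyteTypeCheckDemo {

  public static void main(final String[] args) {
    final ObjectMapper mapper = new ObjectMapper();

    // A field definition roughly as it might arrive from a source catalog.
    final ObjectNode fieldDefinition = mapper.createObjectNode()
        .put("type", "number")
        .put("airbyte_type", "integer");

    // True when airbyte_type matches any of the listed values, ignoring case;
    // this is the condition that now routes the field to INT64 instead of a float.
    final boolean mapsToInt64 = StringUtils.equalsAnyIgnoreCase(
        fieldDefinition.get("airbyte_type").asText(), "big_integer", "integer");

    System.out.println(mapsToInt64 ? "INT64" : "FLOAT64"); // prints INT64
  }
}
```

Because both `integer` and `big_integer` pass the check, either designation lands on the same `INT64` column type.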
3 changes: 2 additions & 1 deletion docs/integrations/destinations/bigquery.md
@@ -98,7 +98,7 @@ Airbyte converts any invalid characters into `_` characters when writing data. H
|:------------------------------------|:--------------|:---------------------------|
| DATE | DATE | DATE |
| STRING (BASE64) | STRING | STRING |
- | NUMBER | FLOAT | FLOAT |
+ | NUMBER | FLOAT | NUMBER |
| OBJECT | STRING | RECORD |
| STRING | STRING | STRING |
| BOOLEAN | BOOLEAN | BOOLEAN |
@@ -191,6 +191,7 @@ Now that you have set up the BigQuery destination connector, check out the follo

| Version | Date | Pull Request | Subject |
|:--------|:-----------|:----------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------|
+ | 1.2.10 | 2023-01-04 | [#20730](https://github.com/airbytehq/airbyte/pull/20730) | An incoming source Number type will create a big query integer rather than a float. |
| 1.2.9 | 2022-12-14 | [#20501](https://github.com/airbytehq/airbyte/pull/20501) | Report GCS staging failures that occur during connection check |
| 1.2.8 | 2022-11-22 | [#19489](https://github.com/airbytehq/airbyte/pull/19489) | Added non-billable projects handle to check connection stage |
| 1.2.7 | 2022-11-11 | [#19358](https://github.com/airbytehq/airbyte/pull/19358) | Fixed check method to capture mismatch dataset location |