Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add Disable Failing Connections feature #10877

Closed
wants to merge 39 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
39 commits
Select commit Hold shift + click to select a range
11a0291
Add Disable Failing Connections feature
terencecho Mar 5, 2022
2d23680
Rename and cleanup
terencecho Mar 7, 2022
100aab5
list jobs based off connection id
terencecho Mar 9, 2022
600246d
Move variables to env config and update unit tests
terencecho Mar 10, 2022
2ae29b4
Fix env flag name
terencecho Mar 10, 2022
007d755
Merge branch 'master' into tcho/disable-failing-connections
terencecho Mar 10, 2022
f209000
Fix missing name changes
terencecho Mar 10, 2022
207863f
Add comments to unit test
terencecho Mar 10, 2022
26f7b3a
Address PR comments
terencecho Mar 11, 2022
dcd5636
Support multiple config types
terencecho Mar 11, 2022
95a8ad9
Update unit tests
terencecho Mar 13, 2022
dc61d12
Bump Airbyte version from 0.35.49-alpha to 0.35.50-alpha (#11050)
octavia-squidington-iii Mar 10, 2022
ade53bd
Remove the attemptId notion in the connectionManagerWorkflow (#10780)
benmoriceau Mar 10, 2022
0b64565
Revert "add /tmp emptyDir volume to connector pods (#10761)" (#11053)
pmossman Mar 11, 2022
f9b6b84
Put getState() and isRunning() call within same try/catch block (#11013)
pmossman Mar 11, 2022
5fd0eac
Revert "Remove the attemptId notion in the connectionManagerWorkflow …
benmoriceau Mar 11, 2022
9b3abe8
Bump Airbyte version from 0.35.50-alpha to 0.35.51-alpha (#11059)
octavia-squidington-iii Mar 11, 2022
b6ad760
Add more cloud metrics. (#10956)
davinchia Mar 11, 2022
366d1a0
Refactor Snowflake internal Staging as a base class for other staging…
ChristopheDuong Mar 11, 2022
274b0bd
Refactor Reporter App. (#11070)
davinchia Mar 11, 2022
5c1066d
Snowflake destination: added unit tests (#10699)
sashaNeshcheret Mar 11, 2022
f031ec1
Revert "Revert "Remove the attemptId notion in the connectionManagerW…
benmoriceau Mar 11, 2022
f94a8d3
add spotbugs (#10522)
cgardens Mar 11, 2022
c34815d
Remove deprecated FailureReason enum values (#10773)
pmossman Mar 11, 2022
6c649d9
Bump Airbyte version from 0.35.51-alpha to 0.35.52-alpha (#11075)
octavia-squidington-iii Mar 11, 2022
0e33cdc
destination-s3: add a test for listObjects permission on destination …
grishick Mar 11, 2022
3bde910
Revert "Revert "Revert "Remove the attemptId notion in the connection…
benmoriceau Mar 11, 2022
1108910
Revert "Jamakase/select default sync modes (#10320)" (#11080)
benmoriceau Mar 12, 2022
a1a1352
🎉 upgrade dbt to 1.0.0 (except for oracle and mysql) (#11051)
edgao Mar 12, 2022
9406559
Bump Airbyte version from 0.35.52-alpha to 0.35.53-alpha (#11085)
octavia-squidington-iii Mar 12, 2022
f2d274f
add pod name to logs when KubePodProcess fails because pod was not fo…
cgardens Mar 12, 2022
348fc6e
Reporter App Monitoring. (#11074)
davinchia Mar 13, 2022
5347ff1
Initial script to start removing zombie build instances. (#11088)
davinchia Mar 13, 2022
f5204d7
Extract secrets handling out of ConfigRepository (#8898)
cgardens Mar 13, 2022
c807df2
Add readmes to all modules (#8893)
cgardens Mar 13, 2022
f6bd217
Document where creds can be found for logging integration tests (#8733)
cgardens Mar 13, 2022
3563452
add helper method for creating postgres db (#6244)
cgardens Mar 13, 2022
3dacb6d
Add Disable Failing Connections feature
terencecho Mar 5, 2022
6c22bfe
Rename and cleanup
terencecho Mar 7, 2022
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
The table of contents is too big for display.
Diff view
Diff view
  •  
  •  
  •  
2 changes: 1 addition & 1 deletion .bumpversion.cfg
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
[bumpversion]
current_version = 0.35.49-alpha
current_version = 0.35.53-alpha
commit = False
tag = False
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(\-[a-z]+)?
Expand Down
3 changes: 2 additions & 1 deletion .env
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@


### SHARED ###
VERSION=0.35.49-alpha
VERSION=0.35.53-alpha

# When using the airbyte-db via default docker image
CONFIG_ROOT=/data
Expand Down Expand Up @@ -91,3 +91,4 @@ MAX_DISCOVER_WORKERS=5

### FEATURE FLAGS ###
NEW_SCHEDULER=false
AUTO_DISABLE_FAILING_CONNECTIONS=false
6 changes: 5 additions & 1 deletion .github/workflows/gradle.yml
Original file line number Diff line number Diff line change
Expand Up @@ -368,7 +368,6 @@ jobs:
label: ${{ needs.start-platform-new-scheduler-acceptance-runner.outputs.label }}
ec2-instance-id: ${{ needs.start-platform-new-scheduler-acceptance-runner.outputs.ec2-instance-id }}


# In case of self-hosted EC2 errors, remove this block.
stop-platform-build-runner:
name: "Platform: Stop Build EC2 Runner"
Expand Down Expand Up @@ -656,13 +655,16 @@ jobs:
- name: Run Logging Tests
run: ./tools/bin/cloud_storage_logging_test.sh
env:
# AWS_S3_INTEGRATION_TEST_CREDS can be found in LastPass as AWS_S3_INTEGRATION_TEST_CREDS
AWS_S3_INTEGRATION_TEST_CREDS: ${{ secrets.AWS_S3_INTEGRATION_TEST_CREDS }}
# GOOGLE_CLOUD_STORAGE_TEST_CREDS can be found in LastPass as "google cloud storage ( gcs ) test creds"
GOOGLE_CLOUD_STORAGE_TEST_CREDS: ${{ secrets.GOOGLE_CLOUD_STORAGE_TEST_CREDS }}

- name: Run Kubernetes End-to-End Acceptance Tests
env:
USER: root
HOME: /home/runner
# AWS_S3_INTEGRATION_TEST_CREDS can be found in LastPass as AWS_S3_INTEGRATION_TEST_CREDS
AWS_S3_INTEGRATION_TEST_CREDS: ${{ secrets.AWS_S3_INTEGRATION_TEST_CREDS }}
SECRET_STORE_GCP_CREDENTIALS: ${{ secrets.SECRET_STORE_GCP_CREDENTIALS }}
SECRET_STORE_GCP_PROJECT_ID: ${{ secrets.SECRET_STORE_GCP_PROJECT_ID }}
Expand All @@ -684,6 +686,7 @@ jobs:
env:
USER: root
HOME: /home/runner
# AWS_S3_INTEGRATION_TEST_CREDS can be found in LastPass as AWS_S3_INTEGRATION_TEST_CREDS
AWS_S3_INTEGRATION_TEST_CREDS: ${{ secrets.AWS_S3_INTEGRATION_TEST_CREDS }}
SECRET_STORE_GCP_CREDENTIALS: ${{ secrets.SECRET_STORE_GCP_CREDENTIALS }}
SECRET_STORE_GCP_PROJECT_ID: ${{ secrets.SECRET_STORE_GCP_PROJECT_ID }}
Expand Down Expand Up @@ -806,6 +809,7 @@ jobs:
env:
USER: root
HOME: /home/runner
# AWS_S3_INTEGRATION_TEST_CREDS can be found in LastPass as AWS_S3_INTEGRATION_TEST_CREDS
AWS_S3_INTEGRATION_TEST_CREDS: ${{ secrets.AWS_S3_INTEGRATION_TEST_CREDS }}
SECRET_STORE_GCP_CREDENTIALS: ${{ secrets.SECRET_STORE_GCP_CREDENTIALS }}
SECRET_STORE_GCP_PROJECT_ID: ${{ secrets.SECRET_STORE_GCP_PROJECT_ID }}
Expand Down
27 changes: 27 additions & 0 deletions .github/workflows/terminate-zombie-build-instances.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
name: Terminate Zombie Build Instances

on:
# NOTE(review): the `push` trigger runs this cleanup on every push in addition to the
# hourly schedule — confirm this is intentional and not leftover from testing.
push:
schedule:
- cron: "0 */1 * * *"

jobs:
terminate:
runs-on: ubuntu-latest
steps:
# NOTE(review): despite the workflow/job being named "terminate", this step only
# LISTS instances older than an hour (describe-instances piped through jq); no
# terminate-instances call is made here — verify whether termination is added later.
- name: List Instances Older Than an Hour
env:
AWS_ACCESS_KEY_ID: ${{ secrets.SELF_RUNNER_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.SELF_RUNNER_AWS_SECRET_ACCESS_KEY }}
# See https://github.com/aws/aws-cli/issues/5623
AWS_EC2_METADATA_DISABLED: true
run: |
set -euxo pipefail

aws configure set default.region us-east-2

# Since the AWS cli returns an ISO HH:MM timestamp, and Jq only accepts Z timestamps, we define a function toZ to convert this.
aws ec2 describe-instances --no-paginate --filters Name=instance-type,Values=c5.2xlarge Name=instance-state-name,Values=running \
--query 'Reservations[*].Instances[*].{Instance:InstanceId,LaunchTime:LaunchTime}' --output json \
| jq 'def toZ(str): str | (split("+")[0] + "Z") | fromdate ;
flatten | map( { InstanceId: .Instance, LaunchTime: toZ(.LaunchTime) } ) | map( select ( .LaunchTime < (now - 3600) ) )'
3 changes: 3 additions & 0 deletions airbyte-analytics/readme.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
# airbyte-analytics

Java library with shared code for telemetry tracking including Segment.
11 changes: 11 additions & 0 deletions airbyte-api/readme.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
# airbyte-api

Defines the OpenApi configuration for the Airbyte Configuration API. It also is responsible for generating the following from the API spec:
* Java API client
* Java API server - this generated code is used in `airbyte-server` to allow us to implement the Configuration API in a type safe way. See `ConfigurationApi.java` in `airbyte-server`
* API docs

## Key Files
* src/openapi/config.yaml - Defines the config API interface using OpenApi3
* AirbyteApiClient.java - wraps all api clients so that they can be dependency injected together
* PatchedLogsApi.java - fixes generated code for log api.
3 changes: 0 additions & 3 deletions airbyte-api/src/main/openapi/config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -3343,19 +3343,16 @@ components:
description: Indicates where the error originated. If not set, the origin of error is not well known.
type: string
enum:
- unknown # todo (parker) remove this in favor of leaving the failureOrigin unset
- source
- destination
- replication
- replication_worker # todo (parker) remove this in favor of replication
- persistence
- normalization
- dbt
AttemptFailureType:
description: Categorizes well known errors into types for programmatic handling. If not set, the type of error is not well known.
type: string
enum:
- unknown # todo (parker) remove this in favor of leaving the failureType unset
- config_error
- system_error
- manual_cancellation
Expand Down
4 changes: 2 additions & 2 deletions airbyte-bootloader/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,6 @@ ENV APPLICATION airbyte-bootloader

WORKDIR /app

ADD bin/${APPLICATION}-0.35.49-alpha.tar /app
ADD bin/${APPLICATION}-0.35.53-alpha.tar /app

ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.49-alpha/bin/${APPLICATION}"]
ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.53-alpha/bin/${APPLICATION}"]
6 changes: 6 additions & 0 deletions airbyte-bootloader/readme.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
# airbyte-bootloader

This application runs at start up for Airbyte. It is responsible for making sure that the environment is upgraded and in a good state. e.g. It makes sure the database has been migrated to the correct version.

## Entrypoint
* BootloaderApp.java - has the main method for running the bootloader.
Original file line number Diff line number Diff line change
Expand Up @@ -114,7 +114,7 @@ public void load() throws Exception {

final ConfigPersistence configPersistence = DatabaseConfigPersistence.createWithValidation(configDatabase);
final ConfigRepository configRepository =
new ConfigRepository(configPersistence, null, Optional.empty(), Optional.empty(), configDatabase);
new ConfigRepository(configPersistence, configDatabase);

createWorkspaceIfNoneExists(configRepository);
LOGGER.info("Default workspace created..");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,7 @@ void testBootloaderAppBlankDb() throws Exception {
container.getPassword(),
container.getJdbcUrl()).getInitialized();
val jobsMigrator = new JobsDatabaseMigrator(jobDatabase, this.getClass().getName());
assertEquals("0.35.5.001", jobsMigrator.getLatestMigration().getVersion().getVersion());
assertEquals("0.35.40.001", jobsMigrator.getLatestMigration().getVersion().getVersion());

val configDatabase = new ConfigsDatabaseInstance(
mockedConfigs.getConfigDatabaseUser(),
Expand Down
3 changes: 3 additions & 0 deletions airbyte-cli/readme.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
# airbyte-cli

Thin CLI over the Airbyte Configuration API to make it easier to interact with the API from the command line.
2 changes: 2 additions & 0 deletions airbyte-commons-cli/readme.md
Original file line number Diff line number Diff line change
@@ -1 +1,3 @@
# airbyte-commons-cli

This module houses utility functions for the `commons-cli` library. It is separate from `commons`, because it depends on external library `commons-cli` which we do not want to introduce as a dependency to every module.
3 changes: 3 additions & 0 deletions airbyte-commons-docker/readme.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
# airbyte-commons-docker

This module contains common helpers for interacting with Docker and Docker images from Java.
2 changes: 1 addition & 1 deletion airbyte-commons/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -3,5 +3,5 @@ plugins {
}

dependencies {
// Dependencies for this module should be specified in the top-level build.gradle.
// Dependencies for this module should be specified in the top-level build.gradle. See readme for more explanation.
}
7 changes: 7 additions & 0 deletions airbyte-commons/readme.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
# airbyte-commons

Common java helpers.

This submodule is inherited by all other java modules in the monorepo! It is therefore important that we do not add dependencies to it, as those dependencies will also be added to every java module. The only dependencies that this module uses are the ones declared in the `build.gradle` at the root of the Airbyte monorepo. In other words it only uses dependencies that are already shared across all modules. The `dependencies` section of the `build.gradle` of `airbyte-commons` should always be empty.

For other common java code that needs to be shared across modules that requires additional dependencies, we follow this convention: `airbyte-commons-<name of lib>`. See for example `airbyte-commons-cli` and `airbyte-commons-docker`.
Original file line number Diff line number Diff line change
Expand Up @@ -16,4 +16,11 @@ public boolean usesNewScheduler() {
return Boolean.parseBoolean(System.getenv("NEW_SCHEDULER"));
}

@Override
public boolean autoDisablesFailingConnections() {
  // Read and parse the env var exactly once so the logged value is guaranteed to
  // match the returned value (the original parsed AUTO_DISABLE_FAILING_CONNECTIONS twice).
  final boolean autoDisableFailingConnections = Boolean.parseBoolean(System.getenv("AUTO_DISABLE_FAILING_CONNECTIONS"));
  // Use SLF4J parameterized logging instead of string concatenation.
  log.info("Auto Disable Failing Connections: {}", autoDisableFailingConnections);
  return autoDisableFailingConnections;
}

}
Original file line number Diff line number Diff line change
Expand Up @@ -12,4 +12,6 @@ public interface FeatureFlags {

boolean usesNewScheduler();

boolean autoDisablesFailingConnections();

}
Original file line number Diff line number Diff line change
Expand Up @@ -6,9 +6,11 @@

import java.util.concurrent.Callable;
import java.util.function.Function;
import lombok.extern.slf4j.Slf4j;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@Slf4j
public class Exceptions {

private static final Logger LOGGER = LoggerFactory.getLogger(Exceptions.class);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,7 @@ public void testGetTailExists() throws IOException {
"line7",
"line8");

final Writer writer = new BufferedWriter(new FileWriter(stdoutFile.toString(), true));
final Writer writer = new BufferedWriter(new FileWriter(stdoutFile.toString(), StandardCharsets.UTF_8, true));

for (final String line : Iterables.concat(head, expectedTail)) {
writer.write(line + "\n");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
Expand Down Expand Up @@ -55,7 +56,7 @@ void testSerializeJsonNode() {
assertEquals(
"{\"test\":\"dGVzdA==\"}",
Jsons.serialize(Jsons.jsonNode(ImmutableMap.of(
"test", new BinaryNode("test".getBytes())))));
"test", new BinaryNode("test".getBytes(StandardCharsets.UTF_8))))));
}

@Test
Expand Down
8 changes: 8 additions & 0 deletions airbyte-config/init/readme.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
# airbyte-config:init

This module fulfills two responsibilities:
1. It is where we declare what connectors should ship with the Platform. See below for more instruction on how it works.
2. It contains the scripts and Dockerfile that allow the `docker-compose` version of Airbyte to mount the local filesystem. This is helpful in cases where a user wants to use a connector that interacts with (reads data from or writes data to) the local filesystem. e.g. `destination-local-json`.

## Declaring connectors that ship with the Platform
In order to have a connector ship with the Platform it must be present in the respective `source_definitions.yaml` or `destination_definitions.yaml` files in `src/main/resources/seed`. If a connector is added there, the build system will handle fetching its spec and adding it to `source_specs.yaml` or `destination_specs.yaml`. See the gradle tasks to understand how this all works. The logic for fetching the specs is in `airbyte-config:specs`.
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@
*/
public class YamlSeedConfigPersistence implements ConfigPersistence {

public static Class<?> DEFAULT_SEED_DEFINITION_RESOURCE_CLASS = SeedType.class;
public static final Class<?> DEFAULT_SEED_DEFINITION_RESOURCE_CLASS = SeedType.class;

private static final Map<AirbyteConfig, SeedType> CONFIG_SCHEMA_MAP = Map.of(
ConfigSchema.STANDARD_SOURCE_DEFINITION, SeedType.STANDARD_SOURCE_DEFINITION,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -203,7 +203,7 @@
- name: S3
destinationDefinitionId: 4816b78f-1489-44c1-9060-4b19d5fa9362
dockerRepository: airbyte/destination-s3
dockerImageTag: 0.2.9
dockerImageTag: 0.2.10
documentationUrl: https://docs.airbyte.io/integrations/destinations/s3
icon: s3.svg
resourceRequirements:
Expand All @@ -221,7 +221,7 @@
- name: Snowflake
destinationDefinitionId: 424892c4-daac-4491-b35d-c6688ba547ba
dockerRepository: airbyte/destination-snowflake
dockerImageTag: 0.4.17
dockerImageTag: 0.4.19
documentationUrl: https://docs.airbyte.io/integrations/destinations/snowflake
icon: snowflake.svg
resourceRequirements:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3454,7 +3454,7 @@
supported_destination_sync_modes:
- "append"
- "overwrite"
- dockerImage: "airbyte/destination-s3:0.2.9"
- dockerImage: "airbyte/destination-s3:0.2.10"
spec:
documentationUrl: "https://docs.airbyte.io/integrations/destinations/s3"
connectionSpecification:
Expand Down Expand Up @@ -3825,7 +3825,7 @@
supported_destination_sync_modes:
- "overwrite"
- "append"
- dockerImage: "airbyte/destination-snowflake:0.4.17"
- dockerImage: "airbyte/destination-snowflake:0.4.19"
spec:
documentationUrl: "https://docs.airbyte.io/integrations/destinations/snowflake"
connectionSpecification:
Expand All @@ -3844,9 +3844,12 @@
properties:
host:
description: "The host domain of the snowflake instance (must include the\
\ account, region, cloud environment, and end with snowflakecomputing.com)."
\ account, region, cloud environment, and end with snowflakecomputing.com).\
\ The account identifier differs depending on your cloud region, be sure\
\ to verify with Snowflake's documentation."
examples:
- "accountname.us-east-2.aws.snowflakecomputing.com"
- "accountname.snowflakecomputing.com"
type: "string"
title: "Host"
order: 0
Expand Down
12 changes: 12 additions & 0 deletions airbyte-config/models/src/main/java/io/airbyte/config/Configs.java
Original file line number Diff line number Diff line change
Expand Up @@ -249,6 +249,18 @@ public interface Configs {
*/
Map<String, String> getJobDefaultEnvMap();

/**
* Defines the number of consecutive job failures required before a connection is auto-disabled if
* the AUTO_DISABLE_FAILING_CONNECTIONS flag is set to true.
*/
int getMaxFailedJobsInARowBeforeConnectionDisable();

/**
* Defines the required number of days with only failed jobs before a connection is auto-disabled if
* the AUTO_DISABLE_FAILING_CONNECTIONS flag is set to true.
*/
int getMaxDaysOfOnlyFailedJobsBeforeConnectionDisable();

// Jobs - Kube only
/**
* Define the check job container's minimum CPU request. Defaults to
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -121,6 +121,9 @@ public class EnvConfigs implements Configs {
private static final String SHOULD_RUN_SYNC_WORKFLOWS = "SHOULD_RUN_SYNC_WORKFLOWS";
private static final String SHOULD_RUN_CONNECTION_MANAGER_WORKFLOWS = "SHOULD_RUN_CONNECTION_MANAGER_WORKFLOWS";

private static final String MAX_FAILED_JOBS_IN_A_ROW_BEFORE_CONNECTION_DISABLE = "MAX_FAILED_JOBS_IN_A_ROW_BEFORE_CONNECTION_DISABLE";
private static final String MAX_DAYS_OF_ONLY_FAILED_JOBS_BEFORE_CONNECTION_DISABLE = "MAX_DAYS_OF_ONLY_FAILED_JOBS_BEFORE_CONNECTION_DISABLE";

// job-type-specific overrides
public static final String SPEC_JOB_KUBE_NODE_SELECTORS = "SPEC_JOB_KUBE_NODE_SELECTORS";
public static final String CHECK_JOB_KUBE_NODE_SELECTORS = "CHECK_JOB_KUBE_NODE_SELECTORS";
Expand Down Expand Up @@ -178,6 +181,9 @@ public class EnvConfigs implements Configs {

public static final int DEFAULT_TEMPORAL_HISTORY_RETENTION_IN_DAYS = 30;

public static final int DEFAULT_FAILED_JOBS_IN_A_ROW_BEFORE_CONNECTION_DISABLE = 100;
public static final int DEFAULT_DAYS_OF_ONLY_FAILED_JOBS_BEFORE_CONNECTION_DISABLE = 14;

private final Function<String, String> getEnv;
private final Supplier<Set<String>> getAllEnvKeys;
private final LogConfigs logConfigs;
Expand Down Expand Up @@ -661,6 +667,16 @@ public Map<String, String> getJobDefaultEnvMap() {
return MoreMaps.merge(jobPrefixedEnvMap, jobSharedEnvMap);
}

@Override
public int getMaxFailedJobsInARowBeforeConnectionDisable() {
  // Number of consecutive failed jobs before a connection is auto-disabled (when the
  // AUTO_DISABLE_FAILING_CONNECTIONS feature flag is on); falls back to the default
  // (DEFAULT_FAILED_JOBS_IN_A_ROW_BEFORE_CONNECTION_DISABLE) when the env var is unset.
  return getEnvOrDefault(MAX_FAILED_JOBS_IN_A_ROW_BEFORE_CONNECTION_DISABLE, DEFAULT_FAILED_JOBS_IN_A_ROW_BEFORE_CONNECTION_DISABLE);
}

@Override
public int getMaxDaysOfOnlyFailedJobsBeforeConnectionDisable() {
  // Number of days containing only failed jobs before a connection is auto-disabled
  // (when the AUTO_DISABLE_FAILING_CONNECTIONS feature flag is on); falls back to the
  // default (DEFAULT_DAYS_OF_ONLY_FAILED_JOBS_BEFORE_CONNECTION_DISABLE) when unset.
  return getEnvOrDefault(MAX_DAYS_OF_ONLY_FAILED_JOBS_BEFORE_CONNECTION_DISABLE, DEFAULT_DAYS_OF_ONLY_FAILED_JOBS_BEFORE_CONNECTION_DISABLE);
}

@Override
public String getCheckJobMainContainerCpuRequest() {
return getEnvOrDefault(CHECK_JOB_MAIN_CONTAINER_CPU_REQUEST, getJobMainContainerCpuRequest());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Supplier;
Expand Down Expand Up @@ -89,7 +90,7 @@ public List<String> tailCloudLog(final LogConfigs configs, final String logPath,
final var poppedBlob = descendingTimestampBlobs.remove(0);
try (final var inMemoryData = new ByteArrayOutputStream()) {
poppedBlob.downloadTo(inMemoryData);
final var currFileLines = inMemoryData.toString().split("\n");
final var currFileLines = inMemoryData.toString(StandardCharsets.UTF_8).split("\n");
final List<String> currFileLinesReversed = Lists.reverse(List.of(currFileLines));
for (final var line : currFileLinesReversed) {
if (linesRead == numLines) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -117,7 +117,7 @@ public List<String> getJobLogFile(final WorkerEnvironment workerEnvironment, fin
*/
@VisibleForTesting
public void deleteLogs(final WorkerEnvironment workerEnvironment, final LogConfigs logConfigs, final String logPath) {
if (logPath == null || logPath.equals(Path.of(""))) {
if (logPath == null || logPath.equals("")) {
return;
}

Expand Down
Loading