From 11a02915874a549080aba0ec6ba4f145787586cc Mon Sep 17 00:00:00 2001 From: terencecho Date: Fri, 4 Mar 2022 16:54:52 -0800 Subject: [PATCH 01/38] Add Disable Failing Connections feature --- .env | 1 + .../features/EnvVariableFeatureFlags.java | 7 + .../commons/features/FeatureFlags.java | 2 + .../java/io/airbyte/workers/WorkerApp.java | 4 +- .../ConnectionManagerWorkflowImpl.java | 65 +++++---- .../activities/DisableActivity.java | 35 +++++ .../activities/DisableActivityImpl.java | 72 ++++++++++ .../activities/DisableActivityTest.java | 127 ++++++++++++++++++ docker-compose.yaml | 1 + 9 files changed, 286 insertions(+), 28 deletions(-) create mode 100644 airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/DisableActivity.java create mode 100644 airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/DisableActivityImpl.java create mode 100644 airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/DisableActivityTest.java diff --git a/.env b/.env index 88db98daffe4..06de0bfa30dd 100644 --- a/.env +++ b/.env @@ -91,3 +91,4 @@ MAX_DISCOVER_WORKERS=5 ### FEATURE FLAGS ### NEW_SCHEDULER=false +DISABLE_FAILING_CONNECTIONS=false diff --git a/airbyte-commons/src/main/java/io/airbyte/commons/features/EnvVariableFeatureFlags.java b/airbyte-commons/src/main/java/io/airbyte/commons/features/EnvVariableFeatureFlags.java index 04319ad82c62..f7214fde1d8e 100644 --- a/airbyte-commons/src/main/java/io/airbyte/commons/features/EnvVariableFeatureFlags.java +++ b/airbyte-commons/src/main/java/io/airbyte/commons/features/EnvVariableFeatureFlags.java @@ -16,4 +16,11 @@ public boolean usesNewScheduler() { return Boolean.parseBoolean(System.getenv("NEW_SCHEDULER")); } + @Override + public boolean disablesFailingConnections() { + log.info("Disable Failing Connections: " + Boolean.parseBoolean(System.getenv("DISABLE_FAILING_CONNECTIONS"))); + + return Boolean.parseBoolean(System.getenv("DISABLE_FAILING_CONNECTIONS")); + } + } diff --git a/airbyte-commons/src/main/java/io/airbyte/commons/features/FeatureFlags.java b/airbyte-commons/src/main/java/io/airbyte/commons/features/FeatureFlags.java index 02a5974c12ae..3a4f1c0ca11f 100644 --- a/airbyte-commons/src/main/java/io/airbyte/commons/features/FeatureFlags.java +++ b/airbyte-commons/src/main/java/io/airbyte/commons/features/FeatureFlags.java @@ -12,4 +12,6 @@ public interface FeatureFlags { boolean usesNewScheduler(); + boolean disablesFailingConnections(); + } diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/WorkerApp.java b/airbyte-workers/src/main/java/io/airbyte/workers/WorkerApp.java index 22a4444445b2..11b28ebe3f31 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/WorkerApp.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/WorkerApp.java @@ -54,6 +54,7 @@ import io.airbyte.workers.temporal.scheduling.ConnectionManagerWorkflowImpl; import io.airbyte.workers.temporal.scheduling.activities.ConfigFetchActivityImpl; import io.airbyte.workers.temporal.scheduling.activities.ConnectionDeletionActivityImpl; +import io.airbyte.workers.temporal.scheduling.activities.DisableActivityImpl; import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivityImpl; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivityImpl; import io.airbyte.workers.temporal.spec.SpecActivityImpl; @@ -177,7 +178,8 @@ private void registerConnectionManager(final WorkerFactory factory) { configRepository, jobCreator), new 
ConfigFetchActivityImpl(configRepository, jobPersistence, configs, () -> Instant.now().getEpochSecond()), - new ConnectionDeletionActivityImpl(connectionHelper)); + new ConnectionDeletionActivityImpl(connectionHelper), + new DisableActivityImpl(configRepository, jobPersistence)); } private void registerSync(final WorkerFactory factory) { diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java index 89786888ddb3..9fe45ffa1ddf 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java @@ -4,6 +4,8 @@ package io.airbyte.workers.temporal.scheduling; +import io.airbyte.commons.features.EnvVariableFeatureFlags; +import io.airbyte.commons.features.FeatureFlags; import io.airbyte.config.FailureReason; import io.airbyte.config.StandardSyncOutput; import io.airbyte.config.StandardSyncSummary; @@ -16,6 +18,8 @@ import io.airbyte.workers.temporal.scheduling.activities.ConfigFetchActivity.ScheduleRetrieverOutput; import io.airbyte.workers.temporal.scheduling.activities.ConnectionDeletionActivity; import io.airbyte.workers.temporal.scheduling.activities.ConnectionDeletionActivity.ConnectionDeletionInput; +import io.airbyte.workers.temporal.scheduling.activities.DisableActivity; +import io.airbyte.workers.temporal.scheduling.activities.DisableActivity.DisableActivityInput; import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity; import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity.GeneratedJobInput; import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity.SyncInput; @@ -42,6 +46,7 @@ import io.temporal.workflow.ChildWorkflowOptions; import io.temporal.workflow.Workflow; import java.time.Duration; +import java.time.Instant; import java.util.Set; import java.util.UUID; import java.util.function.Consumer; @@ -68,9 +73,13 @@ public class ConnectionManagerWorkflowImpl implements ConnectionManagerWorkflow Workflow.newActivityStub(ConfigFetchActivity.class, ActivityConfiguration.SHORT_ACTIVITY_OPTIONS); private final ConnectionDeletionActivity connectionDeletionActivity = Workflow.newActivityStub(ConnectionDeletionActivity.class, ActivityConfiguration.SHORT_ACTIVITY_OPTIONS); + private final DisableActivity disableActivity = + Workflow.newActivityStub(DisableActivity.class, ActivityConfiguration.SHORT_ACTIVITY_OPTIONS); private CancellationScope cancellableSyncWorkflow; + final FeatureFlags featureFlags = new EnvVariableFeatureFlags(); + private UUID connectionId; public ConnectionManagerWorkflowImpl() {} @@ -82,7 +91,7 @@ public void run(final ConnectionUpdaterInput connectionUpdaterInput) throws Retr cancellableSyncWorkflow = generateSyncWorkflowRunnable(connectionUpdaterInput); cancellableSyncWorkflow.run(); } catch (final CanceledFailure cf) { - // When a scope is cancelled temporal will thow a CanceledFailure as you can see here: + // When a scope is cancelled temporal will throw a CanceledFailure as you can see here: // https://github.com/temporalio/sdk-java/blob/master/temporal-sdk/src/main/java/io/temporal/workflow/CancellationScope.java#L72 // The naming is very misleading, it is not a failure but the expected behavior... 
} @@ -108,7 +117,7 @@ public void run(final ConnectionUpdaterInput connectionUpdaterInput) throws Retr } } - private CancellationScope generateSyncWorkflowRunnable(ConnectionUpdaterInput connectionUpdaterInput) { + private CancellationScope generateSyncWorkflowRunnable(final ConnectionUpdaterInput connectionUpdaterInput) { return Workflow.newCancellationScope(() -> { connectionId = connectionUpdaterInput.getConnectionId(); @@ -122,7 +131,7 @@ private CancellationScope generateSyncWorkflowRunnable(ConnectionUpdaterInput co // resetConnection flag to the next run so that that run can execute the actual reset workflowState.setResetConnection(connectionUpdaterInput.isResetConnection()); - Duration timeToWait = getTimeToWait(connectionUpdaterInput.getConnectionId()); + final Duration timeToWait = getTimeToWait(connectionUpdaterInput.getConnectionId()); Workflow.await(timeToWait, () -> skipScheduling() || connectionUpdaterInput.isFromFailure()); @@ -189,7 +198,7 @@ private CancellationScope generateSyncWorkflowRunnable(ConnectionUpdaterInput co }); } - private void reportSuccess(final ConnectionUpdaterInput connectionUpdaterInput, StandardSyncOutput standardSyncOutput) { + private void reportSuccess(final ConnectionUpdaterInput connectionUpdaterInput, final StandardSyncOutput standardSyncOutput) { workflowState.setSuccess(true); runMandatoryActivity(jobCreationAndStatusUpdateActivity::jobSuccess, new JobSuccessInput( workflowInternalState.getJobId(), @@ -199,7 +208,7 @@ private void reportSuccess(final ConnectionUpdaterInput connectionUpdaterInput, resetNewConnectionInput(connectionUpdaterInput); } - private void reportFailure(final ConnectionUpdaterInput connectionUpdaterInput, StandardSyncOutput standardSyncOutput) { + private void reportFailure(final ConnectionUpdaterInput connectionUpdaterInput, final StandardSyncOutput standardSyncOutput) { runMandatoryActivity(jobCreationAndStatusUpdateActivity::attemptFailure, new AttemptFailureInput( workflowInternalState.getJobId(), workflowInternalState.getAttemptId(), @@ -220,7 +229,10 @@ private void reportFailure(final ConnectionUpdaterInput connectionUpdaterInput, runMandatoryActivity(jobCreationAndStatusUpdateActivity::jobFailure, new JobFailureInput( connectionUpdaterInput.getJobId(), "Job failed after too many retries for connection " + connectionId)); - + if (featureFlags.disablesFailingConnections()) { + final DisableActivityInput disableActivityInput = new DisableActivityInput(connectionId, Instant.now()); + runMandatoryActivity(disableActivity::disableConnection, disableActivityInput); + } resetNewConnectionInput(connectionUpdaterInput); } } @@ -288,8 +300,8 @@ public WorkflowState getState() { @Override public JobInformation getJobInformation() { - Long jobId = workflowInternalState.getJobId(); - Integer attemptId = workflowInternalState.getAttemptId(); + final Long jobId = workflowInternalState.getJobId(); + final Integer attemptId = workflowInternalState.getAttemptId(); return new JobInformation( jobId == null ? NON_RUNNING_JOB_ID : jobId, attemptId == null ? NON_RUNNING_ATTEMPT_ID : attemptId); @@ -297,8 +309,8 @@ public JobInformation getJobInformation() { @Override public QuarantinedInformation getQuarantinedInformation() { - Long jobId = workflowInternalState.getJobId(); - Integer attemptId = workflowInternalState.getAttemptId(); + final Long jobId = workflowInternalState.getJobId(); + final Integer attemptId = workflowInternalState.getAttemptId(); return new QuarantinedInformation( connectionId, jobId == null ? 
NON_RUNNING_JOB_ID : jobId, @@ -335,10 +347,10 @@ private void prepareForNextRunAndContinueAsNew(final ConnectionUpdaterInput conn * * We aimed to use this method for call of the temporal activity. */ - private OUTPUT runMandatoryActivityWithOutput(Function mapper, INPUT input) { + private OUTPUT runMandatoryActivityWithOutput(final Function mapper, final INPUT input) { try { return mapper.apply(input); - } catch (Exception e) { + } catch (final Exception e) { log.error("Failed to run an activity for the connection " + connectionId, e); workflowState.setQuarantined(true); workflowState.setRetryFailedActivity(false); @@ -353,7 +365,7 @@ private OUTPUT runMandatoryActivityWithOutput(Function void runMandatoryActivity(Consumer consumer, INPUT input) { + private void runMandatoryActivity(final Consumer consumer, final INPUT input) { runMandatoryActivityWithOutput((inputInternal) -> { consumer.accept(inputInternal); return null; @@ -369,7 +381,7 @@ private void runMandatoryActivity(Consumer consumer, INPUT input) * * Wait time is infinite If the workflow is manual or disabled since we never want to schedule this. */ - private Duration getTimeToWait(UUID connectionId) { + private Duration getTimeToWait(final UUID connectionId) { // Scheduling final ScheduleRetrieverInput scheduleRetrieverInput = new ScheduleRetrieverInput(connectionId); @@ -402,7 +414,7 @@ private Long getOrCreateJobId(final ConnectionUpdaterInput connectionUpdaterInpu /** * Create a new attempt for a given jobId */ - private Integer createAttemptId(long jobId) { + private Integer createAttemptId(final long jobId) { final AttemptCreationOutput attemptCreationOutput = runMandatoryActivityWithOutput( jobCreationAndStatusUpdateActivity::createNewAttempt, @@ -417,8 +429,8 @@ private Integer createAttemptId(long jobId) { * job and will generate a different output if the job is a sync or a reset. */ private GeneratedJobInput getJobInput() { - Long jobId = workflowInternalState.getJobId(); - Integer attemptId = workflowInternalState.getAttemptId(); + final Long jobId = workflowInternalState.getJobId(); + final Integer attemptId = workflowInternalState.getAttemptId(); final SyncInput getSyncInputActivitySyncInput = new SyncInput( attemptId, jobId, @@ -456,8 +468,8 @@ private void reportJobStarting() { * since the latter is a long running workflow, in the future, using a different Node pool would * make sense. 
   */
-  private StandardSyncOutput runChildWorkflow(GeneratedJobInput jobInputs) {
-    int taskQueueChangeVersion =
+  private StandardSyncOutput runChildWorkflow(final GeneratedJobInput jobInputs) {
+    final int taskQueueChangeVersion =
         Workflow.getVersion("task_queue_change_from_connection_updater_to_sync", Workflow.DEFAULT_VERSION, TASK_QUEUE_CHANGE_CURRENT_VERSION);
 
     String taskQueue = TemporalJobType.SYNC.name();
@@ -465,7 +477,6 @@ private StandardSyncOutput runChildWorkflow(GeneratedJobInput jobInputs) {
     if (taskQueueChangeVersion < TASK_QUEUE_CHANGE_CURRENT_VERSION) {
       taskQueue = TemporalJobType.CONNECTION_UPDATER.name();
     }
-
     final SyncWorkflow childSync = Workflow.newChildWorkflowStub(SyncWorkflow.class,
         ChildWorkflowOptions.newBuilder()
             .setWorkflowId("sync_" + workflowInternalState.getJobId())
@@ -487,8 +498,8 @@ private StandardSyncOutput runChildWorkflow(GeneratedJobInput jobInputs) {
    *
    * @return True if the job failed, false otherwise
    */
-  private boolean getFailStatus(StandardSyncOutput standardSyncOutput) {
-    StandardSyncSummary standardSyncSummary = standardSyncOutput.getStandardSyncSummary();
+  private boolean getFailStatus(final StandardSyncOutput standardSyncOutput) {
+    final StandardSyncSummary standardSyncSummary = standardSyncOutput.getStandardSyncSummary();
 
     if (standardSyncSummary != null && standardSyncSummary.getStatus() == ReplicationStatus.FAILED) {
       workflowInternalState.getFailures().addAll(standardSyncOutput.getFailures());
@@ -513,12 +524,12 @@ private void deleteConnectionBeforeTerminatingTheWorkflow() {
   /**
    * Set a job as cancelled and continue as a reset if needed
    */
-  private void reportCancelledAndContinueWith(boolean isReset, ConnectionUpdaterInput connectionUpdaterInput) {
+  private void reportCancelledAndContinueWith(final boolean isReset, final ConnectionUpdaterInput connectionUpdaterInput) {
     workflowState.setContinueAsReset(isReset);
-    Long jobId = workflowInternalState.getJobId();
-    Integer attemptId = workflowInternalState.getAttemptId();
-    Set<FailureReason> failures = workflowInternalState.getFailures();
-    Boolean partialSuccess = workflowInternalState.getPartialSuccess();
+    final Long jobId = workflowInternalState.getJobId();
+    final Integer attemptId = workflowInternalState.getAttemptId();
+    final Set<FailureReason> failures = workflowInternalState.getFailures();
+    final Boolean partialSuccess = workflowInternalState.getPartialSuccess();
     runMandatoryActivity(jobCreationAndStatusUpdateActivity::jobCancelled, new JobCancelledInput(
         jobId,
diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/DisableActivity.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/DisableActivity.java
new file mode 100644
index 000000000000..2835f3ff37db
--- /dev/null
+++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/DisableActivity.java
@@ -0,0 +1,35 @@
+/*
+ * Copyright (c) 2021 Airbyte, Inc., all rights reserved.
+ */
+
+package io.airbyte.workers.temporal.scheduling.activities;
+
+import io.temporal.activity.ActivityInterface;
+import io.temporal.activity.ActivityMethod;
+import java.time.Instant;
+import java.util.UUID;
+import lombok.AllArgsConstructor;
+import lombok.Data;
+import lombok.NoArgsConstructor;
+
+@ActivityInterface
+public interface DisableActivity {
+
+  @Data
+  @NoArgsConstructor
+  @AllArgsConstructor
+  class DisableActivityInput {
+
+    private UUID connectionId;
+
+    private Instant currTimestamp;
+
+  }
+
+  /**
+   * Delete a connection
+   */
+  @ActivityMethod
+  void disableConnection(DisableActivityInput input);
+
+}
diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/DisableActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/DisableActivityImpl.java
new file mode 100644
index 000000000000..52ce1581b075
--- /dev/null
+++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/DisableActivityImpl.java
@@ -0,0 +1,72 @@
+/*
+ * Copyright (c) 2021 Airbyte, Inc., all rights reserved.
+ */
+
+package io.airbyte.workers.temporal.scheduling.activities;
+
+import com.google.common.annotations.VisibleForTesting;
+import io.airbyte.config.JobConfig.ConfigType;
+import io.airbyte.config.StandardSync;
+import io.airbyte.config.StandardSync.Status;
+import io.airbyte.config.persistence.ConfigRepository;
+import io.airbyte.scheduler.models.Job;
+import io.airbyte.scheduler.models.JobStatus;
+import io.airbyte.scheduler.persistence.JobPersistence;
+import io.airbyte.workers.temporal.exception.RetryableException;
+import java.time.temporal.ChronoUnit;
+import java.util.List;
+import lombok.AllArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+
+@AllArgsConstructor
+@Slf4j
+public class DisableActivityImpl implements DisableActivity {
+
+  @VisibleForTesting
+  public static final int MAX_FAILURE_JOBS_IN_A_ROW = 100;
+  @VisibleForTesting
+  public static final int MAX_DAYS_OF_STRAIGHT_FAILURE = 14;
+
+  private final ConfigRepository configRepository;
+  private JobPersistence jobPersistence;
+
+  // if no successful sync jobs in the last MAX_FAILURE_JOBS_IN_A_ROW job attempts or the last
+  // MAX_DAYS_OF_STRAIGHT_FAILURE days (minimum 1 job attempt): disable connection to prevent wasting
+  // resources
+
+  @Override
+  public void disableConnection(final DisableActivityInput input) {
+    try {
+      // lists job in descending order by created_at
+      final List<Job> jobs = jobPersistence.listJobs(ConfigType.SYNC,
+          input.getCurrTimestamp().minus(MAX_DAYS_OF_STRAIGHT_FAILURE, ChronoUnit.DAYS));
+      if (jobs.size() == 0)
+        return;
+
+      boolean shouldDisable = true;
+      int numFailures = 0;
+
+      // jobs are sorted by jobs' createdAt in ascending order
+      for (int i = jobs.size() - 1; i >= 0; i--) {
+        final JobStatus jobStatus = jobs.get(i).getStatus();
+        if (jobStatus == JobStatus.FAILED) {
+          numFailures++;
+          if (numFailures == MAX_FAILURE_JOBS_IN_A_ROW)
+            break;
+        } else if (jobStatus == JobStatus.SUCCEEDED) {
+          shouldDisable = false;
+          break;
+        }
+      }
+
+      if (shouldDisable) {
+        final StandardSync standardSync = configRepository.getStandardSync(input.getConnectionId());
+        standardSync.setStatus(Status.INACTIVE);
+        configRepository.writeStandardSync(standardSync);
+      }
+    } catch (final Exception e) {
+      throw new RetryableException(e);
+    }
+  }
+
+}
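The comment block in the new activity states the rule it encodes: disable the connection when there is no successful sync in the last MAX_FAILURE_JOBS_IN_A_ROW jobs or in the last MAX_DAYS_OF_STRAIGHT_FAILURE days (with at least one job run in that window). A minimal, self-contained sketch of that decision, assuming statuses are passed most recent first (the ordering a later patch in this series adopts); the class, method, and parameter names here are illustrative, not part of the patch:

import java.util.List;

class AutoDisableRuleSketch {

  enum JobStatus { SUCCEEDED, FAILED }

  // `statuses` is ordered most recent first and is already limited to the lookback window;
  // `maxFailuresInARow` stands in for MAX_FAILURE_JOBS_IN_A_ROW.
  static boolean shouldDisable(final List<JobStatus> statuses, final int maxFailuresInARow) {
    if (statuses.isEmpty()) {
      return false; // no jobs in the window: nothing to decide
    }
    int failuresInARow = 0;
    for (final JobStatus status : statuses) {
      if (status == JobStatus.FAILED) {
        failuresInARow++;
        if (failuresInARow >= maxFailuresInARow) {
          return true; // unbroken streak of recent failures
        }
      } else if (status == JobStatus.SUCCEEDED) {
        return false; // any success inside the window keeps the connection active
      }
    }
    return true; // at least one job ran in the window and none succeeded
  }

  public static void main(final String[] args) {
    System.out.println(shouldDisable(List.of(JobStatus.FAILED, JobStatus.FAILED), 2));    // true
    System.out.println(shouldDisable(List.of(JobStatus.FAILED, JobStatus.SUCCEEDED), 2)); // false
    System.out.println(shouldDisable(List.of(), 2));                                      // false
  }

}

A success anywhere in the window breaks the streak and keeps the connection enabled, and an empty window never disables — matching the "minimum 1 job attempt" caveat above.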
+ */ + +package io.airbyte.workers.temporal.scheduling.activities; + +import io.temporal.activity.ActivityInterface; +import io.temporal.activity.ActivityMethod; +import java.time.Instant; +import java.util.UUID; +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.NoArgsConstructor; + +@ActivityInterface +public interface DisableActivity { + + @Data + @NoArgsConstructor + @AllArgsConstructor + class DisableActivityInput { + + private UUID connectionId; + + private Instant currTimestamp; + + } + + /** + * Delete a connection + */ + @ActivityMethod + void disableConnection(DisableActivityInput input); + +} diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/DisableActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/DisableActivityImpl.java new file mode 100644 index 000000000000..52ce1581b075 --- /dev/null +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/DisableActivityImpl.java @@ -0,0 +1,72 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.workers.temporal.scheduling.activities; + +import com.google.common.annotations.VisibleForTesting; +import io.airbyte.config.JobConfig.ConfigType; +import io.airbyte.config.StandardSync; +import io.airbyte.config.StandardSync.Status; +import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.scheduler.models.Job; +import io.airbyte.scheduler.models.JobStatus; +import io.airbyte.scheduler.persistence.JobPersistence; +import io.airbyte.workers.temporal.exception.RetryableException; +import java.time.temporal.ChronoUnit; +import java.util.List; +import lombok.AllArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +@AllArgsConstructor +@Slf4j +public class DisableActivityImpl implements DisableActivity { + + @VisibleForTesting + public static final int MAX_FAILURE_JOBS_IN_A_ROW = 100; + @VisibleForTesting + public static final int MAX_DAYS_OF_STRAIGHT_FAILURE = 14; + + private final ConfigRepository configRepository; + private JobPersistence jobPersistence; + + // if no successful sync jobs in the last MAX_FAILURE_JOBS_IN_A_ROW job attempts or the last + // MAX_DAYS_OF_STRAIGHT_FAILURE days (minimum 1 job attempt): disable connection to prevent wasting + // resources + + @Override + public void disableConnection(final DisableActivityInput input) { + try { + // lists job in descending order by created_at + final List jobs = jobPersistence.listJobs(ConfigType.SYNC, + input.getCurrTimestamp().minus(MAX_DAYS_OF_STRAIGHT_FAILURE, ChronoUnit.DAYS)); + if (jobs.size() == 0) + return; + + boolean shouldDisable = true; + int numFailures = 0; + + // jobs are sorted by jobs' createdAt in ascending order + for (int i = jobs.size() - 1; i >= 0; i--) { + final JobStatus jobStatus = jobs.get(i).getStatus(); + if (jobStatus == JobStatus.FAILED) { + numFailures++; + if (numFailures == MAX_FAILURE_JOBS_IN_A_ROW) + break; + } else if (jobStatus == JobStatus.SUCCEEDED) { + shouldDisable = false; + break; + } + } + + if (shouldDisable) { + final StandardSync standardSync = configRepository.getStandardSync(input.getConnectionId()); + standardSync.setStatus(Status.INACTIVE); + configRepository.writeStandardSync(standardSync); + } + } catch (final Exception e) { + throw new RetryableException(e); + } + } + +} diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/DisableActivityTest.java 
b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/DisableActivityTest.java new file mode 100644 index 000000000000..d1e7afab3a8f --- /dev/null +++ b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/DisableActivityTest.java @@ -0,0 +1,127 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.workers.temporal.scheduling.activities; + +import static io.airbyte.workers.temporal.scheduling.activities.DisableActivityImpl.MAX_DAYS_OF_STRAIGHT_FAILURE; +import static io.airbyte.workers.temporal.scheduling.activities.DisableActivityImpl.MAX_FAILURE_JOBS_IN_A_ROW; + +import io.airbyte.config.JobConfig.ConfigType; +import io.airbyte.config.StandardSync; +import io.airbyte.config.StandardSync.Status; +import io.airbyte.config.persistence.ConfigNotFoundException; +import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.scheduler.models.Job; +import io.airbyte.scheduler.models.JobStatus; +import io.airbyte.scheduler.persistence.JobPersistence; +import io.airbyte.validation.json.JsonValidationException; +import io.airbyte.workers.temporal.scheduling.activities.DisableActivity.DisableActivityInput; +import java.io.IOException; +import java.time.Instant; +import java.time.temporal.ChronoUnit; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.UUID; +import org.assertj.core.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Nested; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +class DisableActivityTest { + + @Mock + private ConfigRepository mConfigRepository; + + @Mock + private JobPersistence mJobPersistence; + + @Mock + private Job mJobFailure; + + @Mock + private Job mJobSuccess; + + @InjectMocks + private DisableActivityImpl disableActivity; + + private final static UUID connectionId = UUID.randomUUID(); + private final static StandardSync standardSync = new StandardSync(); + private static final Instant currInstant = Instant.now(); + private static final DisableActivityInput input = new DisableActivityInput(connectionId, currInstant); + + @BeforeEach + void setUp() { + standardSync.setStatus(Status.ACTIVE); + } + + @Nested + class DisableConnectionTest { + + @Test + @DisplayName("Test that the connection is disabled after MAX_FAILURE_JOBS_IN_A_ROW straight failures") + public void testMaxFailuresInARow() throws IOException, JsonValidationException, ConfigNotFoundException { + // 1 success followed by MAX_FAILURE_JOBS_IN_A_ROW failures + final List jobs = new ArrayList<>(Collections.singletonList(mJobSuccess)); + jobs.addAll(Collections.nCopies(MAX_FAILURE_JOBS_IN_A_ROW, mJobFailure)); + + Mockito.when(mJobPersistence.listJobs(ConfigType.SYNC, currInstant.minus(MAX_DAYS_OF_STRAIGHT_FAILURE, ChronoUnit.DAYS))) + .thenReturn(jobs); + Mockito.when(mJobFailure.getStatus()).thenReturn(JobStatus.FAILED); + Mockito.when(mConfigRepository.getStandardSync(connectionId)) + .thenReturn(standardSync); + + disableActivity.disableConnection(input); + Assertions.assertThat(standardSync.getStatus()).isEqualTo(Status.INACTIVE); + } + + @Test + @DisplayName("Test that the connection is _not_ disabled after MAX_FAILURE_JOBS_IN_A_ROW - 1 straight failures") + 
public void testLessThanMaxFailuresInARow() throws IOException { + // 1 success followed by MAX_FAILURE_JOBS_IN_A_ROW-1 failures + final List jobs = new ArrayList<>(Collections.singletonList(mJobSuccess)); + jobs.addAll(Collections.nCopies(MAX_FAILURE_JOBS_IN_A_ROW - 1, mJobFailure)); + + Mockito.when(mJobPersistence.listJobs(ConfigType.SYNC, currInstant.minus(MAX_DAYS_OF_STRAIGHT_FAILURE, ChronoUnit.DAYS))) + .thenReturn(jobs); + Mockito.when(mJobFailure.getStatus()).thenReturn(JobStatus.FAILED); + Mockito.when(mJobSuccess.getStatus()).thenReturn(JobStatus.SUCCEEDED); + + disableActivity.disableConnection(input); + Assertions.assertThat(standardSync.getStatus()).isEqualTo(Status.ACTIVE); + } + + @Test + @DisplayName("Test that the connection is _not_ disabled after 0 jobs in last MAX_DAYS_OF_STRAIGHT_FAILURE days") + public void testNoRuns() throws IOException { + Mockito.when(mJobPersistence.listJobs(ConfigType.SYNC, currInstant.minus(MAX_DAYS_OF_STRAIGHT_FAILURE, ChronoUnit.DAYS))) + .thenReturn(Collections.emptyList()); + + disableActivity.disableConnection(input); + Assertions.assertThat(standardSync.getStatus()).isEqualTo(Status.ACTIVE); + } + + @Test + @DisplayName("Test that the connection is disabled after only failed jobs in last MAX_DAYS_OF_STRAIGHT_FAILURE days") + public void testOnlyFailuresInMaxDays() throws IOException, JsonValidationException, ConfigNotFoundException { + Mockito.when(mJobPersistence.listJobs(ConfigType.SYNC, currInstant.minus(MAX_DAYS_OF_STRAIGHT_FAILURE, ChronoUnit.DAYS))) + .thenReturn(Collections.singletonList(mJobFailure)); + Mockito.when(mJobFailure.getStatus()).thenReturn(JobStatus.FAILED); + Mockito.when(mConfigRepository.getStandardSync(connectionId)) + .thenReturn(standardSync); + + disableActivity.disableConnection(input); + Assertions.assertThat(standardSync.getStatus()).isEqualTo(Status.INACTIVE); + } + + } + +} diff --git a/docker-compose.yaml b/docker-compose.yaml index 0eccd58cc660..cbc77c4f57f3 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -100,6 +100,7 @@ services: - DATABASE_PASSWORD=${DATABASE_PASSWORD} - DATABASE_URL=${DATABASE_URL} - DATABASE_USER=${DATABASE_USER} + - DISABLE_FAILING_CONNECTIONS=${DISABLE_FAILING_CONNECTIONS} - JOB_MAIN_CONTAINER_CPU_LIMIT=${JOB_MAIN_CONTAINER_CPU_LIMIT} - JOB_MAIN_CONTAINER_CPU_REQUEST=${JOB_MAIN_CONTAINER_CPU_REQUEST} - JOB_MAIN_CONTAINER_MEMORY_LIMIT=${JOB_MAIN_CONTAINER_MEMORY_LIMIT} From 2d23680bad5296df111021067e6e332cb4de173d Mon Sep 17 00:00:00 2001 From: terencecho Date: Mon, 7 Mar 2022 14:09:40 -0800 Subject: [PATCH 02/38] Rename and cleanup --- .env | 2 +- .../java/io/airbyte/workers/WorkerApp.java | 4 ++-- .../ConnectionManagerWorkflowImpl.java | 13 ++++++------ ...ava => AutoDisableConnectionActivity.java} | 10 ++++++---- ...=> AutoDisableConnectionActivityImpl.java} | 9 ++------- ...=> AutoDisableConnectionActivityTest.java} | 20 +++++++++---------- docker-compose.yaml | 2 +- 7 files changed, 29 insertions(+), 31 deletions(-) rename airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/{DisableActivity.java => AutoDisableConnectionActivity.java} (56%) rename airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/{DisableActivityImpl.java => AutoDisableConnectionActivityImpl.java} (83%) rename airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/{DisableActivityTest.java => AutoDisableConnectionActivityTest.java} (86%) diff --git a/.env b/.env index 06de0bfa30dd..4c28012c6b5b 100644 --- a/.env +++ 
b/.env @@ -91,4 +91,4 @@ MAX_DISCOVER_WORKERS=5 ### FEATURE FLAGS ### NEW_SCHEDULER=false -DISABLE_FAILING_CONNECTIONS=false +AUTO_DISABLE_FAILING_CONNECTIONS=false diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/WorkerApp.java b/airbyte-workers/src/main/java/io/airbyte/workers/WorkerApp.java index 11b28ebe3f31..e29f8f3e6f6c 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/WorkerApp.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/WorkerApp.java @@ -52,9 +52,9 @@ import io.airbyte.workers.temporal.discover.catalog.DiscoverCatalogActivityImpl; import io.airbyte.workers.temporal.discover.catalog.DiscoverCatalogWorkflowImpl; import io.airbyte.workers.temporal.scheduling.ConnectionManagerWorkflowImpl; +import io.airbyte.workers.temporal.scheduling.activities.AutoDisableConnectionActivityImpl; import io.airbyte.workers.temporal.scheduling.activities.ConfigFetchActivityImpl; import io.airbyte.workers.temporal.scheduling.activities.ConnectionDeletionActivityImpl; -import io.airbyte.workers.temporal.scheduling.activities.DisableActivityImpl; import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivityImpl; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivityImpl; import io.airbyte.workers.temporal.spec.SpecActivityImpl; @@ -179,7 +179,7 @@ private void registerConnectionManager(final WorkerFactory factory) { jobCreator), new ConfigFetchActivityImpl(configRepository, jobPersistence, configs, () -> Instant.now().getEpochSecond()), new ConnectionDeletionActivityImpl(connectionHelper), - new DisableActivityImpl(configRepository, jobPersistence)); + new AutoDisableConnectionActivityImpl(configRepository, jobPersistence)); } private void registerSync(final WorkerFactory factory) { diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java index 9fe45ffa1ddf..cfbe4aa58a88 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java @@ -13,13 +13,13 @@ import io.airbyte.workers.helper.FailureHelper; import io.airbyte.workers.temporal.TemporalJobType; import io.airbyte.workers.temporal.exception.RetryableException; +import io.airbyte.workers.temporal.scheduling.activities.AutoDisableConnectionActivity; +import io.airbyte.workers.temporal.scheduling.activities.AutoDisableConnectionActivity.AutoDisableConnectionActivityInput; import io.airbyte.workers.temporal.scheduling.activities.ConfigFetchActivity; import io.airbyte.workers.temporal.scheduling.activities.ConfigFetchActivity.ScheduleRetrieverInput; import io.airbyte.workers.temporal.scheduling.activities.ConfigFetchActivity.ScheduleRetrieverOutput; import io.airbyte.workers.temporal.scheduling.activities.ConnectionDeletionActivity; import io.airbyte.workers.temporal.scheduling.activities.ConnectionDeletionActivity.ConnectionDeletionInput; -import io.airbyte.workers.temporal.scheduling.activities.DisableActivity; -import io.airbyte.workers.temporal.scheduling.activities.DisableActivity.DisableActivityInput; import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity; import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity.GeneratedJobInput; import 
io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity.SyncInput; @@ -73,8 +73,8 @@ public class ConnectionManagerWorkflowImpl implements ConnectionManagerWorkflow Workflow.newActivityStub(ConfigFetchActivity.class, ActivityConfiguration.SHORT_ACTIVITY_OPTIONS); private final ConnectionDeletionActivity connectionDeletionActivity = Workflow.newActivityStub(ConnectionDeletionActivity.class, ActivityConfiguration.SHORT_ACTIVITY_OPTIONS); - private final DisableActivity disableActivity = - Workflow.newActivityStub(DisableActivity.class, ActivityConfiguration.SHORT_ACTIVITY_OPTIONS); + private final AutoDisableConnectionActivity autoDisableConnectionActivity = + Workflow.newActivityStub(AutoDisableConnectionActivity.class, ActivityConfiguration.SHORT_ACTIVITY_OPTIONS); private CancellationScope cancellableSyncWorkflow; @@ -230,8 +230,9 @@ private void reportFailure(final ConnectionUpdaterInput connectionUpdaterInput, connectionUpdaterInput.getJobId(), "Job failed after too many retries for connection " + connectionId)); if (featureFlags.disablesFailingConnections()) { - final DisableActivityInput disableActivityInput = new DisableActivityInput(connectionId, Instant.now()); - runMandatoryActivity(disableActivity::disableConnection, disableActivityInput); + final AutoDisableConnectionActivityInput autoDisableConnectionActivityInput = + new AutoDisableConnectionActivityInput(connectionId, Instant.now()); + runMandatoryActivity(autoDisableConnectionActivity::autoDisableFailingConnection, autoDisableConnectionActivityInput); } resetNewConnectionInput(connectionUpdaterInput); } diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/DisableActivity.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivity.java similarity index 56% rename from airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/DisableActivity.java rename to airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivity.java index 2835f3ff37db..1edc876d5777 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/DisableActivity.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivity.java @@ -13,12 +13,12 @@ import lombok.NoArgsConstructor; @ActivityInterface -public interface DisableActivity { +public interface AutoDisableConnectionActivity { @Data @NoArgsConstructor @AllArgsConstructor - class DisableActivityInput { + class AutoDisableConnectionActivityInput { private UUID connectionId; @@ -27,9 +27,11 @@ class DisableActivityInput { } /** - * Delete a connection + * Disable a connection if no successful sync jobs in the last MAX_FAILURE_JOBS_IN_A_ROW job + * attempts or the last MAX_DAYS_OF_STRAIGHT_FAILURE days (minimum 1 job attempt): disable + * connection to prevent wasting resources */ @ActivityMethod - void disableConnection(DisableActivityInput input); + void autoDisableFailingConnection(AutoDisableConnectionActivityInput input); } diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/DisableActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityImpl.java similarity index 83% rename from airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/DisableActivityImpl.java rename to 
airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityImpl.java index 52ce1581b075..4348b8beb4eb 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/DisableActivityImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityImpl.java @@ -20,7 +20,7 @@ @AllArgsConstructor @Slf4j -public class DisableActivityImpl implements DisableActivity { +public class AutoDisableConnectionActivityImpl implements AutoDisableConnectionActivity { @VisibleForTesting public static final int MAX_FAILURE_JOBS_IN_A_ROW = 100; @@ -30,14 +30,9 @@ public class DisableActivityImpl implements DisableActivity { private final ConfigRepository configRepository; private JobPersistence jobPersistence; - // if no successful sync jobs in the last MAX_FAILURE_JOBS_IN_A_ROW job attempts or the last - // MAX_DAYS_OF_STRAIGHT_FAILURE days (minimum 1 job attempt): disable connection to prevent wasting - // resources - @Override - public void disableConnection(final DisableActivityInput input) { + public void autoDisableFailingConnection(final AutoDisableConnectionActivityInput input) { try { - // lists job in descending order by created_at final List jobs = jobPersistence.listJobs(ConfigType.SYNC, input.getCurrTimestamp().minus(MAX_DAYS_OF_STRAIGHT_FAILURE, ChronoUnit.DAYS)); if (jobs.size() == 0) diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/DisableActivityTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityTest.java similarity index 86% rename from airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/DisableActivityTest.java rename to airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityTest.java index d1e7afab3a8f..f64bf8203da8 100644 --- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/DisableActivityTest.java +++ b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityTest.java @@ -4,8 +4,8 @@ package io.airbyte.workers.temporal.scheduling.activities; -import static io.airbyte.workers.temporal.scheduling.activities.DisableActivityImpl.MAX_DAYS_OF_STRAIGHT_FAILURE; -import static io.airbyte.workers.temporal.scheduling.activities.DisableActivityImpl.MAX_FAILURE_JOBS_IN_A_ROW; +import static io.airbyte.workers.temporal.scheduling.activities.AutoDisableConnectionActivityImpl.MAX_DAYS_OF_STRAIGHT_FAILURE; +import static io.airbyte.workers.temporal.scheduling.activities.AutoDisableConnectionActivityImpl.MAX_FAILURE_JOBS_IN_A_ROW; import io.airbyte.config.JobConfig.ConfigType; import io.airbyte.config.StandardSync; @@ -16,7 +16,7 @@ import io.airbyte.scheduler.models.JobStatus; import io.airbyte.scheduler.persistence.JobPersistence; import io.airbyte.validation.json.JsonValidationException; -import io.airbyte.workers.temporal.scheduling.activities.DisableActivity.DisableActivityInput; +import io.airbyte.workers.temporal.scheduling.activities.AutoDisableConnectionActivity.AutoDisableConnectionActivityInput; import java.io.IOException; import java.time.Instant; import java.time.temporal.ChronoUnit; @@ -36,7 +36,7 @@ import org.mockito.junit.jupiter.MockitoExtension; @ExtendWith(MockitoExtension.class) -class DisableActivityTest { +class AutoDisableConnectionActivityTest { @Mock private 
ConfigRepository mConfigRepository; @@ -51,12 +51,12 @@ class DisableActivityTest { private Job mJobSuccess; @InjectMocks - private DisableActivityImpl disableActivity; + private AutoDisableConnectionActivityImpl disableActivity; private final static UUID connectionId = UUID.randomUUID(); private final static StandardSync standardSync = new StandardSync(); private static final Instant currInstant = Instant.now(); - private static final DisableActivityInput input = new DisableActivityInput(connectionId, currInstant); + private static final AutoDisableConnectionActivityInput input = new AutoDisableConnectionActivityInput(connectionId, currInstant); @BeforeEach void setUp() { @@ -79,7 +79,7 @@ public void testMaxFailuresInARow() throws IOException, JsonValidationException, Mockito.when(mConfigRepository.getStandardSync(connectionId)) .thenReturn(standardSync); - disableActivity.disableConnection(input); + disableActivity.autoDisableFailingConnection(input); Assertions.assertThat(standardSync.getStatus()).isEqualTo(Status.INACTIVE); } @@ -95,7 +95,7 @@ public void testLessThanMaxFailuresInARow() throws IOException { Mockito.when(mJobFailure.getStatus()).thenReturn(JobStatus.FAILED); Mockito.when(mJobSuccess.getStatus()).thenReturn(JobStatus.SUCCEEDED); - disableActivity.disableConnection(input); + disableActivity.autoDisableFailingConnection(input); Assertions.assertThat(standardSync.getStatus()).isEqualTo(Status.ACTIVE); } @@ -105,7 +105,7 @@ public void testNoRuns() throws IOException { Mockito.when(mJobPersistence.listJobs(ConfigType.SYNC, currInstant.minus(MAX_DAYS_OF_STRAIGHT_FAILURE, ChronoUnit.DAYS))) .thenReturn(Collections.emptyList()); - disableActivity.disableConnection(input); + disableActivity.autoDisableFailingConnection(input); Assertions.assertThat(standardSync.getStatus()).isEqualTo(Status.ACTIVE); } @@ -118,7 +118,7 @@ public void testOnlyFailuresInMaxDays() throws IOException, JsonValidationExcept Mockito.when(mConfigRepository.getStandardSync(connectionId)) .thenReturn(standardSync); - disableActivity.disableConnection(input); + disableActivity.autoDisableFailingConnection(input); Assertions.assertThat(standardSync.getStatus()).isEqualTo(Status.INACTIVE); } diff --git a/docker-compose.yaml b/docker-compose.yaml index cbc77c4f57f3..303954df5893 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -93,6 +93,7 @@ services: restart: unless-stopped environment: - AIRBYTE_VERSION=${VERSION} + - AUTO_DISABLE_FAILING_CONNECTIONS=${AUTO_DISABLE_FAILING_CONNECTIONS} - CONFIG_DATABASE_PASSWORD=${CONFIG_DATABASE_PASSWORD:-} - CONFIG_DATABASE_URL=${CONFIG_DATABASE_URL:-} - CONFIG_DATABASE_USER=${CONFIG_DATABASE_USER:-} @@ -100,7 +101,6 @@ services: - DATABASE_PASSWORD=${DATABASE_PASSWORD} - DATABASE_URL=${DATABASE_URL} - DATABASE_USER=${DATABASE_USER} - - DISABLE_FAILING_CONNECTIONS=${DISABLE_FAILING_CONNECTIONS} - JOB_MAIN_CONTAINER_CPU_LIMIT=${JOB_MAIN_CONTAINER_CPU_LIMIT} - JOB_MAIN_CONTAINER_CPU_REQUEST=${JOB_MAIN_CONTAINER_CPU_REQUEST} - JOB_MAIN_CONTAINER_MEMORY_LIMIT=${JOB_MAIN_CONTAINER_MEMORY_LIMIT} From 100aab5296dbe6e2f246670c47e85c91e22fabfc Mon Sep 17 00:00:00 2001 From: terencecho Date: Wed, 9 Mar 2022 00:32:58 -0800 Subject: [PATCH 03/38] list jobs based off connection id --- .../persistence/DefaultJobPersistence.java | 16 ++++ .../scheduler/persistence/JobPersistence.java | 11 ++- .../DefaultJobPersistenceTest.java | 46 ++++++++++++ .../java/io/airbyte/workers/WorkerApp.java | 3 +- .../ConnectionManagerWorkflowImpl.java | 10 +-- 
 .../AutoDisableConnectionActivityImpl.java    | 54 +++++++-------
 .../AutoDisableConnectionActivityTest.java    | 73 +++++++++----------
 7 files changed, 143 insertions(+), 70 deletions(-)

diff --git a/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/DefaultJobPersistence.java b/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/DefaultJobPersistence.java
index 048ef4279b76..8a75bd911105 100644
--- a/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/DefaultJobPersistence.java
+++ b/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/DefaultJobPersistence.java
@@ -5,6 +5,7 @@
 package io.airbyte.scheduler.persistence;
 
 import static io.airbyte.db.instance.jobs.jooq.Tables.ATTEMPTS;
+import static io.airbyte.db.instance.jobs.jooq.tables.Jobs.JOBS;
 
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.node.JsonNodeType;
@@ -26,6 +27,7 @@
 import io.airbyte.db.Database;
 import io.airbyte.db.ExceptionWrappingDatabase;
 import io.airbyte.db.instance.jobs.JobsDatabaseSchema;
+import io.airbyte.db.instance.jobs.jooq.enums.JobConfigType;
 import io.airbyte.db.jdbc.JdbcUtils;
 import io.airbyte.scheduler.models.Attempt;
 import io.airbyte.scheduler.models.AttemptStatus;
@@ -385,6 +387,20 @@ public List<Job> listJobsWithStatus(final ConfigType configType, final JobStatus status)
     return listJobsWithStatus(Sets.newHashSet(configType), status);
   }
 
+  @Override
+  public List<JobStatus> listJobStatusWithConnection(final UUID connectionId, final ConfigType configType, final Instant jobCreatedAtTimestamp)
+      throws IOException {
+    return jobDatabase.query(ctx -> ctx.select(JOBS.STATUS)
+        .from(JOBS)
+        .where(JOBS.SCOPE.eq(connectionId.toString()))
+        .and(JOBS.CONFIG_TYPE.eq(JobConfigType.sync)) // todo: can i change this to use configType?
+        .and(JOBS.CREATED_AT.greaterOrEqual(OffsetDateTime.ofInstant(jobCreatedAtTimestamp, ZoneOffset.UTC)))
+        .fetch()
+        .stream()
+        .flatMap(row -> Stream.of(JobStatus.valueOf(row.value1().getLiteral().toUpperCase())))
+        .collect(Collectors.toList()));
+  }
+
   @Override
   public Optional<Job> getLastReplicationJob(final UUID connectionId) throws IOException {
     return jobDatabase.query(ctx -> ctx
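A hedged sketch of how a caller might consume the new query: only listJobStatusWithConnection and the Airbyte types come from this patch, while the wrapper class, hasRecentSuccess, and the 14-day lookback are assumptions for illustration:

import io.airbyte.config.JobConfig.ConfigType;
import io.airbyte.scheduler.models.JobStatus;
import io.airbyte.scheduler.persistence.JobPersistence;
import java.io.IOException;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.List;
import java.util.UUID;

class ListJobStatusUsageSketch {

  // Returns true when the connection has at least one successful sync inside the lookback window.
  static boolean hasRecentSuccess(final JobPersistence persistence, final UUID connectionId, final int lookbackDays)
      throws IOException {
    final Instant cutoff = Instant.now().minus(lookbackDays, ChronoUnit.DAYS);
    // Statuses only (no attempt payloads), scoped to one connection and created after the cutoff —
    // exactly the shape the auto-disable check needs.
    final List<JobStatus> statuses = persistence.listJobStatusWithConnection(connectionId, ConfigType.SYNC, cutoff);
    return statuses.stream().anyMatch(status -> status == JobStatus.SUCCEEDED);
  }

}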
diff --git a/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/JobPersistence.java b/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/JobPersistence.java
index 2cfd994cb029..887119da2a15 100644
--- a/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/JobPersistence.java
+++ b/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/JobPersistence.java
@@ -145,7 +145,6 @@ public interface JobPersistence {
 
   List<Job> listJobs(Set<ConfigType> configTypes, String configId, int limit, int offset) throws IOException;
 
   /**
-   *
    * @param configType The type of job
    * @param attemptEndedAtTimestamp The timestamp after which you want the jobs
    * @return List of jobs that have attempts after the provided timestamp
@@ -161,6 +160,16 @@
 
   List<Job> listJobsWithStatus(JobConfig.ConfigType configType, JobStatus status) throws IOException;
 
+  /**
+   * @param connectionId The ID of the connection
+   * @param configType The type of job
+   * @param jobCreatedAtTimestamp The timestamp after which you want the jobs
+   * @return List of job statuses from a specific connection that have attempts after the provided
+   *         timestamp, sorted by jobs' createdAt in descending order
+   * @throws IOException
+   */
+  List<JobStatus> listJobStatusWithConnection(UUID connectionId, ConfigType configType, Instant jobCreatedAtTimestamp) throws IOException;
+
   Optional<Job> getLastReplicationJob(UUID connectionId) throws IOException;
 
   Optional<Job> getNextJob() throws IOException;
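Because the method is declared on the JobPersistence interface, callers can be unit-tested against a Mockito mock instead of a real jobs database — the activity test later in this patch does exactly this. A minimal sketch, assuming mockito-core is on the classpath; the stubbed statuses and cutoff are arbitrary example values:

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import io.airbyte.config.JobConfig.ConfigType;
import io.airbyte.scheduler.models.JobStatus;
import io.airbyte.scheduler.persistence.JobPersistence;
import java.io.IOException;
import java.time.Instant;
import java.util.List;
import java.util.UUID;

class StubbingSketch {

  void example() throws IOException {
    final JobPersistence persistence = mock(JobPersistence.class);
    final UUID connectionId = UUID.randomUUID();
    final Instant cutoff = Instant.parse("2022-02-24T00:00:00Z");

    // Most recent first, matching how the auto-disable activity consumes the list.
    when(persistence.listJobStatusWithConnection(connectionId, ConfigType.SYNC, cutoff))
        .thenReturn(List.of(JobStatus.FAILED, JobStatus.FAILED, JobStatus.SUCCEEDED));
  }

}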
diff --git a/airbyte-scheduler/persistence/src/test/java/io/airbyte/scheduler/persistence/DefaultJobPersistenceTest.java b/airbyte-scheduler/persistence/src/test/java/io/airbyte/scheduler/persistence/DefaultJobPersistenceTest.java
index 903cdc5e7fc9..f6d5d16499f6 100644
--- a/airbyte-scheduler/persistence/src/test/java/io/airbyte/scheduler/persistence/DefaultJobPersistenceTest.java
+++ b/airbyte-scheduler/persistence/src/test/java/io/airbyte/scheduler/persistence/DefaultJobPersistenceTest.java
@@ -1315,4 +1315,50 @@ private Job addStateToJob(final Job job) throws IOException, SQLException {
 
   }
 
+  @Test
+  @DisplayName("Should list all job statuses at different stages, as well as ones filtered by timestamp")
+  public void listJobStatusWithConnection() throws IOException {
+    final Instant now = Instant.parse("2021-01-01T00:00:00Z");
+    final Supplier<Instant> timeSupplier = incrementingSecondSupplier(now);
+
+    jobPersistence = new DefaultJobPersistence(jobDatabase, timeSupplier, 30, 500, 10);
+    final long syncJobId = jobPersistence.enqueueJob(SCOPE, SYNC_JOB_CONFIG).orElseThrow();
+    final int syncJobAttemptNumber0 = jobPersistence.createAttempt(syncJobId, LOG_PATH);
+    jobPersistence.failAttempt(syncJobId, syncJobAttemptNumber0);
+    final Path syncJobSecondAttemptLogPath = LOG_PATH.resolve("2");
+    final int syncJobAttemptNumber1 = jobPersistence.createAttempt(syncJobId, syncJobSecondAttemptLogPath);
+    jobPersistence.failAttempt(syncJobId, syncJobAttemptNumber1);
+
+    final List<JobStatus> jobStatuses = jobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, Instant.EPOCH);
+    assertEquals(jobStatuses.size(), 1);
+    assertEquals(JobStatus.INCOMPLETE, jobStatuses.get(0));
+
+    final Path syncJobThirdAttemptLogPath = LOG_PATH.resolve("3");
+    final int syncJobAttemptNumber2 = jobPersistence.createAttempt(syncJobId, syncJobThirdAttemptLogPath);
+    jobPersistence.failAttempt(syncJobId, syncJobAttemptNumber2);
+    jobPersistence.failJob(syncJobId);
+
+    final long newSyncJobId = jobPersistence.enqueueJob(SCOPE, SYNC_JOB_CONFIG).orElseThrow();
+    final int newSyncJobAttemptNumber0 = jobPersistence.createAttempt(newSyncJobId, LOG_PATH);
+    jobPersistence.failAttempt(newSyncJobId, newSyncJobAttemptNumber0);
+    final Path newSyncJobSecondAttemptLogPath = LOG_PATH.resolve("2");
+    final int newSyncJobAttemptNumber1 = jobPersistence.createAttempt(newSyncJobId, newSyncJobSecondAttemptLogPath);
+    jobPersistence.succeedAttempt(newSyncJobId, newSyncJobAttemptNumber1);
+
+    final List<JobStatus> allQueryJobStatuses = jobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, Instant.EPOCH);
+    assertEquals(2, allQueryJobStatuses.size());
+    assertEquals(JobStatus.FAILED, allQueryJobStatuses.get(0));
+    assertEquals(JobStatus.SUCCEEDED, allQueryJobStatuses.get(1));
+
+    final List<Job> jobs = jobPersistence.listJobs(ConfigType.SYNC, Instant.EPOCH);
+    final Long maxCreatedAtTimestamp = jobs.get(1).getCreatedAtInSecond();
+
+    // todo: look into why the older job isn't being filtered out
+    final List<JobStatus> secondQueryJobStatuses =
+        jobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, Instant.ofEpochSecond(maxCreatedAtTimestamp));
+    assertEquals(1, secondQueryJobStatuses.size());
+    assertEquals(JobStatus.SUCCEEDED, secondQueryJobStatuses.get(0));
+
+    assertEquals(0,
+        jobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, Instant.ofEpochSecond(maxCreatedAtTimestamp + 1)).size());
+  }
+
 }
diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/WorkerApp.java b/airbyte-workers/src/main/java/io/airbyte/workers/WorkerApp.java
index e29f8f3e6f6c..964a279a3de1 100644
--- a/airbyte-workers/src/main/java/io/airbyte/workers/WorkerApp.java
+++ b/airbyte-workers/src/main/java/io/airbyte/workers/WorkerApp.java
@@ -160,6 +160,7 @@ public void start() {
 
   private void registerConnectionManager(final WorkerFactory factory) {
     final JobCreator jobCreator = new DefaultJobCreator(jobPersistence, configRepository, defaultWorkerConfigs.getResourceRequirements());
+    final FeatureFlags featureFlags = new EnvVariableFeatureFlags();
 
     final Worker connectionUpdaterWorker =
         factory.newWorker(TemporalJobType.CONNECTION_UPDATER.toString(), getWorkerOptions(maxWorkers.getMaxSyncWorkers()));
@@ -179,7 +180,7 @@
             jobCreator),
         new ConfigFetchActivityImpl(configRepository, jobPersistence, configs, () -> Instant.now().getEpochSecond()),
         new ConnectionDeletionActivityImpl(connectionHelper),
-        new AutoDisableConnectionActivityImpl(configRepository, jobPersistence));
+        new AutoDisableConnectionActivityImpl(configRepository, jobPersistence, featureFlags));
   }
 
   private void registerSync(final WorkerFactory factory) {
diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java
index cfbe4aa58a88..6d7630a15d90 100644
--- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java
+++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java
@@ -4,8 +4,6 @@
 
 package io.airbyte.workers.temporal.scheduling;
 
-import io.airbyte.commons.features.EnvVariableFeatureFlags;
-import io.airbyte.commons.features.FeatureFlags;
 import io.airbyte.config.FailureReason;
 import io.airbyte.config.StandardSyncOutput;
 import io.airbyte.config.StandardSyncSummary;
@@ -60,6 +58,7 @@ public class ConnectionManagerWorkflowImpl implements ConnectionManagerWorkflow
   public static final int NON_RUNNING_ATTEMPT_ID = -1;
 
   private static final int TASK_QUEUE_CHANGE_CURRENT_VERSION = 1;
+  private static final int DISABLE_FAILING_CONNECTION_CHANGE_CURRENT_VERSION = 1;
 
   private WorkflowState workflowState = new WorkflowState(UUID.randomUUID(), new NoopStateListener());
 
@@ -78,8 +77,6 @@ public class ConnectionManagerWorkflowImpl implements ConnectionManagerWorkflow
 
   private CancellationScope cancellableSyncWorkflow;
 
-  final FeatureFlags featureFlags = new EnvVariableFeatureFlags();
-
   private UUID connectionId;
 
   public ConnectionManagerWorkflowImpl() {}
@@ -226,7 +226,10 @@ private void reportFailure(final ConnectionUpdaterInput connectionUpdaterInput,
       runMandatoryActivity(jobCreationAndStatusUpdateActivity::jobFailure, new JobFailureInput(
           connectionUpdaterInput.getJobId(),
           "Job failed after too many retries for connection " + connectionId));
-      if (featureFlags.disablesFailingConnections()) {
+      final int attemptCreationVersion =
+          Workflow.getVersion("disable_failing_connection", Workflow.DEFAULT_VERSION, DISABLE_FAILING_CONNECTION_CHANGE_CURRENT_VERSION);
+
+      if (attemptCreationVersion >= DISABLE_FAILING_CONNECTION_CHANGE_CURRENT_VERSION) {
         final AutoDisableConnectionActivityInput autoDisableConnectionActivityInput =
             new AutoDisableConnectionActivityInput(connectionId, Instant.now());
         runMandatoryActivity(autoDisableConnectionActivity::autoDisableFailingConnection, autoDisableConnectionActivityInput);
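The Workflow.getVersion call above is the Temporal idiom for changing already-deployed, deterministic workflow code: the first execution of this path records a version marker in the workflow history, replays of histories recorded before the change see Workflow.DEFAULT_VERSION and skip the new activity, and fresh executions take the new branch. A minimal sketch of the pattern, assuming it runs inside workflow code; only the change id string and the version constant mirror the patch, the rest of the names are illustrative:

import io.temporal.workflow.Workflow;

class VersionGateSketch {

  private static final int CURRENT_VERSION = 1;

  // Gate a newly introduced activity call behind a recorded workflow version.
  void maybeRunNewActivity(final Runnable newActivityCall) {
    final int version =
        Workflow.getVersion("disable_failing_connection", Workflow.DEFAULT_VERSION, CURRENT_VERSION);
    if (version >= CURRENT_VERSION) {
      newActivityCall.run(); // only executions recorded at or above the new version reach this
    }
  }

}

This is also why the direct featureFlags read is removed from the workflow in this commit: reading an environment variable inside workflow code is non-deterministic across replays, so the flag check moves into the activity below.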
diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityImpl.java
index 4348b8beb4eb..64b305b21930 100644
--- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityImpl.java
+++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityImpl.java
@@ -5,21 +5,19 @@
 package io.airbyte.workers.temporal.scheduling.activities;
 
 import com.google.common.annotations.VisibleForTesting;
+import io.airbyte.commons.features.FeatureFlags;
 import io.airbyte.config.JobConfig.ConfigType;
 import io.airbyte.config.StandardSync;
 import io.airbyte.config.StandardSync.Status;
 import io.airbyte.config.persistence.ConfigRepository;
-import io.airbyte.scheduler.models.Job;
 import io.airbyte.scheduler.models.JobStatus;
 import io.airbyte.scheduler.persistence.JobPersistence;
 import io.airbyte.workers.temporal.exception.RetryableException;
 import java.time.temporal.ChronoUnit;
 import java.util.List;
 import lombok.AllArgsConstructor;
-import lombok.extern.slf4j.Slf4j;
 
 @AllArgsConstructor
-@Slf4j
 public class AutoDisableConnectionActivityImpl implements AutoDisableConnectionActivity {
 
@@ -29,38 +27,42 @@ public class AutoDisableConnectionActivityImpl implements AutoDisableConnectionA
   private final ConfigRepository configRepository;
   private JobPersistence jobPersistence;
+  private FeatureFlags featureFlags;
 
   @Override
   public void autoDisableFailingConnection(final AutoDisableConnectionActivityInput input) {
-    try {
-      final List<Job> jobs = jobPersistence.listJobs(ConfigType.SYNC,
-          input.getCurrTimestamp().minus(MAX_DAYS_OF_STRAIGHT_FAILURE, ChronoUnit.DAYS));
-      if (jobs.size() == 0)
-        return;
+    if (featureFlags.disablesFailingConnections()) {
 
-      boolean shouldDisable = true;
-      int numFailures = 0;
+      try {
+        final List<JobStatus> jobStatuses = jobPersistence.listJobStatusWithConnection(input.getConnectionId(), ConfigType.SYNC,
+            input.getCurrTimestamp().minus(MAX_DAYS_OF_STRAIGHT_FAILURE, ChronoUnit.DAYS));
 
-      // jobs are sorted by jobs' createdAt in ascending order
-      for (int i = jobs.size() - 1; i >= 0; i--) {
-        final JobStatus jobStatus = jobs.get(i).getStatus();
-        if (jobStatus == JobStatus.FAILED) {
-          numFailures++;
-          if (numFailures == MAX_FAILURE_JOBS_IN_A_ROW)
+        if (jobStatuses.size() == 0)
+          return;
+
+        boolean shouldDisable = true;
+        int numFailures = 0;
+
+        // jobs are sorted from most recent to least recent
+        for (final JobStatus jobStatus : jobStatuses) {
+          if (jobStatus == JobStatus.FAILED) {
+            numFailures++;
+            if (numFailures == MAX_FAILURE_JOBS_IN_A_ROW)
+              break;
+          } else if (jobStatus == JobStatus.SUCCEEDED) {
+            shouldDisable = false;
             break;
-        } else if (jobStatus == JobStatus.SUCCEEDED) {
-          shouldDisable = false;
-          break;
+          }
         }
-      }
 
-      if (shouldDisable) {
-        final StandardSync standardSync = configRepository.getStandardSync(input.getConnectionId());
-        standardSync.setStatus(Status.INACTIVE);
-        configRepository.writeStandardSync(standardSync);
+        if (shouldDisable) {
+          final StandardSync standardSync = configRepository.getStandardSync(input.getConnectionId());
+          standardSync.setStatus(Status.INACTIVE);
+          configRepository.writeStandardSync(standardSync);
+        }
+      } catch (final Exception e) {
+        throw new RetryableException(e);
       }
-    } catch (final Exception e) {
-      throw new RetryableException(e);
     }
   }
 
 }
diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityTest.java
index f64bf8203da8..56f377605836 100644
--- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityTest.java
+++ b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityTest.java
@@ -7,12 +7,12 @@
 import static io.airbyte.workers.temporal.scheduling.activities.AutoDisableConnectionActivityImpl.MAX_DAYS_OF_STRAIGHT_FAILURE;
 import static io.airbyte.workers.temporal.scheduling.activities.AutoDisableConnectionActivityImpl.MAX_FAILURE_JOBS_IN_A_ROW;
 
+import io.airbyte.commons.features.FeatureFlags;
 import io.airbyte.config.JobConfig.ConfigType;
 import io.airbyte.config.StandardSync;
 import io.airbyte.config.StandardSync.Status;
 import io.airbyte.config.persistence.ConfigNotFoundException;
 import io.airbyte.config.persistence.ConfigRepository;
-import io.airbyte.scheduler.models.Job;
 import io.airbyte.scheduler.models.JobStatus;
 import io.airbyte.scheduler.persistence.JobPersistence;
 import io.airbyte.validation.json.JsonValidationException;
@@ -39,24 +39,22 @@ class AutoDisableConnectionActivityTest {
 
   @Mock
-  private ConfigRepository mConfigRepository;
+  private FeatureFlags mFeatureFlags;
 
   @Mock
-  private JobPersistence mJobPersistence;
-
-  @Mock
-  private Job mJobFailure;
+  private ConfigRepository mConfigRepository;
 
   @Mock
-
private Job mJobSuccess; + private JobPersistence mJobPersistence; @InjectMocks - private AutoDisableConnectionActivityImpl disableActivity; + private AutoDisableConnectionActivityImpl autoDisableActivity; - private final static UUID connectionId = UUID.randomUUID(); - private final static StandardSync standardSync = new StandardSync(); + private static final UUID CONNECTION_ID = UUID.randomUUID(); private static final Instant currInstant = Instant.now(); - private static final AutoDisableConnectionActivityInput input = new AutoDisableConnectionActivityInput(connectionId, currInstant); + private static final AutoDisableConnectionActivityInput ACTIVITY_INPUT = new AutoDisableConnectionActivityInput(CONNECTION_ID, currInstant); + + private final StandardSync standardSync = new StandardSync(); @BeforeEach void setUp() { @@ -64,61 +62,62 @@ void setUp() { } @Nested - class DisableConnectionTest { + class AutoDisableConnectionTest { @Test @DisplayName("Test that the connection is disabled after MAX_FAILURE_JOBS_IN_A_ROW straight failures") public void testMaxFailuresInARow() throws IOException, JsonValidationException, ConfigNotFoundException { - // 1 success followed by MAX_FAILURE_JOBS_IN_A_ROW failures - final List jobs = new ArrayList<>(Collections.singletonList(mJobSuccess)); - jobs.addAll(Collections.nCopies(MAX_FAILURE_JOBS_IN_A_ROW, mJobFailure)); - - Mockito.when(mJobPersistence.listJobs(ConfigType.SYNC, currInstant.minus(MAX_DAYS_OF_STRAIGHT_FAILURE, ChronoUnit.DAYS))) - .thenReturn(jobs); - Mockito.when(mJobFailure.getStatus()).thenReturn(JobStatus.FAILED); - Mockito.when(mConfigRepository.getStandardSync(connectionId)) + Mockito.when(mFeatureFlags.disablesFailingConnections()).thenReturn(true); + + // from most recent to least recent: MAX_FAILURE_JOBS_IN_A_ROW and 1 success + final List jobStatuses = new ArrayList<>(Collections.nCopies(MAX_FAILURE_JOBS_IN_A_ROW, JobStatus.FAILED)); + jobStatuses.add(JobStatus.SUCCEEDED); + + Mockito.when(mJobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, + currInstant.minus(MAX_DAYS_OF_STRAIGHT_FAILURE, ChronoUnit.DAYS))).thenReturn(jobStatuses); + Mockito.when(mConfigRepository.getStandardSync(CONNECTION_ID)) .thenReturn(standardSync); - disableActivity.autoDisableFailingConnection(input); + autoDisableActivity.autoDisableFailingConnection(ACTIVITY_INPUT); Assertions.assertThat(standardSync.getStatus()).isEqualTo(Status.INACTIVE); } @Test @DisplayName("Test that the connection is _not_ disabled after MAX_FAILURE_JOBS_IN_A_ROW - 1 straight failures") public void testLessThanMaxFailuresInARow() throws IOException { - // 1 success followed by MAX_FAILURE_JOBS_IN_A_ROW-1 failures - final List jobs = new ArrayList<>(Collections.singletonList(mJobSuccess)); - jobs.addAll(Collections.nCopies(MAX_FAILURE_JOBS_IN_A_ROW - 1, mJobFailure)); + // from most recent to least recent: MAX_FAILURE_JOBS_IN_A_ROW-1 and 1 success + final List jobStatuses = new ArrayList<>(Collections.nCopies(MAX_FAILURE_JOBS_IN_A_ROW - 1, JobStatus.FAILED)); + jobStatuses.add(JobStatus.SUCCEEDED); - Mockito.when(mJobPersistence.listJobs(ConfigType.SYNC, currInstant.minus(MAX_DAYS_OF_STRAIGHT_FAILURE, ChronoUnit.DAYS))) - .thenReturn(jobs); - Mockito.when(mJobFailure.getStatus()).thenReturn(JobStatus.FAILED); - Mockito.when(mJobSuccess.getStatus()).thenReturn(JobStatus.SUCCEEDED); + Mockito.when(mFeatureFlags.disablesFailingConnections()).thenReturn(true); + Mockito.when(mJobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, + 
currInstant.minus(MAX_DAYS_OF_STRAIGHT_FAILURE, ChronoUnit.DAYS))).thenReturn(jobStatuses); - disableActivity.autoDisableFailingConnection(input); + autoDisableActivity.autoDisableFailingConnection(ACTIVITY_INPUT); Assertions.assertThat(standardSync.getStatus()).isEqualTo(Status.ACTIVE); } @Test @DisplayName("Test that the connection is _not_ disabled after 0 jobs in last MAX_DAYS_OF_STRAIGHT_FAILURE days") public void testNoRuns() throws IOException { - Mockito.when(mJobPersistence.listJobs(ConfigType.SYNC, currInstant.minus(MAX_DAYS_OF_STRAIGHT_FAILURE, ChronoUnit.DAYS))) - .thenReturn(Collections.emptyList()); + Mockito.when(mFeatureFlags.disablesFailingConnections()).thenReturn(true); + Mockito.when(mJobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, + currInstant.minus(MAX_DAYS_OF_STRAIGHT_FAILURE, ChronoUnit.DAYS))).thenReturn(Collections.emptyList()); - disableActivity.autoDisableFailingConnection(input); + autoDisableActivity.autoDisableFailingConnection(ACTIVITY_INPUT); Assertions.assertThat(standardSync.getStatus()).isEqualTo(Status.ACTIVE); } @Test @DisplayName("Test that the connection is disabled after only failed jobs in last MAX_DAYS_OF_STRAIGHT_FAILURE days") public void testOnlyFailuresInMaxDays() throws IOException, JsonValidationException, ConfigNotFoundException { - Mockito.when(mJobPersistence.listJobs(ConfigType.SYNC, currInstant.minus(MAX_DAYS_OF_STRAIGHT_FAILURE, ChronoUnit.DAYS))) - .thenReturn(Collections.singletonList(mJobFailure)); - Mockito.when(mJobFailure.getStatus()).thenReturn(JobStatus.FAILED); - Mockito.when(mConfigRepository.getStandardSync(connectionId)) + Mockito.when(mFeatureFlags.disablesFailingConnections()).thenReturn(true); + Mockito.when(mJobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, + currInstant.minus(MAX_DAYS_OF_STRAIGHT_FAILURE, ChronoUnit.DAYS))).thenReturn(Collections.singletonList(JobStatus.FAILED)); + Mockito.when(mConfigRepository.getStandardSync(CONNECTION_ID)) .thenReturn(standardSync); - disableActivity.autoDisableFailingConnection(input); + autoDisableActivity.autoDisableFailingConnection(ACTIVITY_INPUT); Assertions.assertThat(standardSync.getStatus()).isEqualTo(Status.INACTIVE); } From 600246d37e05e83e3acd5b89dc12327c90efdf75 Mon Sep 17 00:00:00 2001 From: terencecho Date: Thu, 10 Mar 2022 14:16:58 -0800 Subject: [PATCH 04/38] Move variables to env config and update unit tests --- .../main/java/io/airbyte/config/Configs.java | 12 +++ .../java/io/airbyte/config/EnvConfigs.java | 16 ++++ .../persistence/DefaultJobPersistence.java | 21 ++-- .../DefaultJobPersistenceTest.java | 95 ++++++++++--------- .../java/io/airbyte/workers/WorkerApp.java | 2 +- .../AutoDisableConnectionActivityImpl.java | 15 +-- .../AutoDisableConnectionActivityTest.java | 36 +++---- 7 files changed, 114 insertions(+), 83 deletions(-) diff --git a/airbyte-config/models/src/main/java/io/airbyte/config/Configs.java b/airbyte-config/models/src/main/java/io/airbyte/config/Configs.java index 33c7e1e8497a..0a8b8ac758b9 100644 --- a/airbyte-config/models/src/main/java/io/airbyte/config/Configs.java +++ b/airbyte-config/models/src/main/java/io/airbyte/config/Configs.java @@ -242,6 +242,18 @@ public interface Configs { */ Map getJobDefaultEnvMap(); + /** + * Defines the number of consecutive job failures required before a connection is auto-disabled if + * the AUTO_DISABLE_FAILING_CONNECTIONS flag is set to true. 
+ */ + int getMaxFailedJobsInARowBeforeConnectionDisable(); + + /** + * Defines the required number of days with only failed jobs before a connection is auto-disabled if + * the AUTO_DISABLE_FAILING_CONNECTIONS flag is set to true. + */ + int getMaxDaysOfOnlyFailedJobsBeforeConnectionDisable(); + // Jobs - Kube only /** * Define the check job container's minimum CPU request. Defaults to diff --git a/airbyte-config/models/src/main/java/io/airbyte/config/EnvConfigs.java b/airbyte-config/models/src/main/java/io/airbyte/config/EnvConfigs.java index fe80f65856ad..f6a16f0e9364 100644 --- a/airbyte-config/models/src/main/java/io/airbyte/config/EnvConfigs.java +++ b/airbyte-config/models/src/main/java/io/airbyte/config/EnvConfigs.java @@ -117,6 +117,9 @@ public class EnvConfigs implements Configs { private static final String SHOULD_RUN_SYNC_WORKFLOWS = "SHOULD_RUN_SYNC_WORKFLOWS"; private static final String SHOULD_RUN_CONNECTION_MANAGER_WORKFLOWS = "SHOULD_RUN_CONNECTION_MANAGER_WORKFLOWS"; + private static final String MAX_FAILED_JOBS_IN_A_ROW_BEFORE_CONNECTION_DISABLE = "MAX_FAILED_JOBS_IN_A_ROW_BEFORE_CONNECTION_DISABLE"; + private static final String MAX_DAYS_OF_ONLY_FAILED_JOBS_BEFORE_CONNECTION_DISABLE = "MAX_DAYS_OF_STRAIGHT_FAILURE_BEFORE_CONNECTION_DISABLE"; + // job-type-specific overrides public static final String SPEC_JOB_KUBE_NODE_SELECTORS = "SPEC_JOB_KUBE_NODE_SELECTORS"; public static final String CHECK_JOB_KUBE_NODE_SELECTORS = "CHECK_JOB_KUBE_NODE_SELECTORS"; @@ -169,6 +172,9 @@ public class EnvConfigs implements Configs { public static final int DEFAULT_TEMPORAL_HISTORY_RETENTION_IN_DAYS = 30; + public static final int DEFAULT_FAILED_JOBS_IN_A_ROW_BEFORE_CONNECTION_DISABLE = 100; + public static final int DEFAULT_DAYS_OF_ONLY_FAILED_JOBS_BEFORE_CONNECTION_DISABLE = 14; + private final Function getEnv; private final Supplier> getAllEnvKeys; private final LogConfigs logConfigs; @@ -633,6 +639,16 @@ public Map getJobDefaultEnvMap() { .collect(Collectors.toMap(key -> key.replace(JOB_DEFAULT_ENV_PREFIX, ""), getEnv)); } + @Override + public int getMaxFailedJobsInARowBeforeConnectionDisable() { + return getEnvOrDefault(MAX_FAILED_JOBS_IN_A_ROW_BEFORE_CONNECTION_DISABLE, DEFAULT_FAILED_JOBS_IN_A_ROW_BEFORE_CONNECTION_DISABLE); + } + + @Override + public int getMaxDaysOfOnlyFailedJobsBeforeConnectionDisable() { + return getEnvOrDefault(MAX_DAYS_OF_ONLY_FAILED_JOBS_BEFORE_CONNECTION_DISABLE, DEFAULT_DAYS_OF_ONLY_FAILED_JOBS_BEFORE_CONNECTION_DISABLE); + } + @Override public String getCheckJobMainContainerCpuRequest() { return getEnvOrDefault(CHECK_JOB_MAIN_CONTAINER_CPU_REQUEST, getJobMainContainerCpuRequest()); diff --git a/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/DefaultJobPersistence.java b/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/DefaultJobPersistence.java index 8a75bd911105..0113ee6bf555 100644 --- a/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/DefaultJobPersistence.java +++ b/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/DefaultJobPersistence.java @@ -5,7 +5,6 @@ package io.airbyte.scheduler.persistence; import static io.airbyte.db.instance.jobs.jooq.Tables.ATTEMPTS; -import static io.airbyte.db.instance.jobs.jooq.tables.Jobs.JOBS; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.JsonNodeType; @@ -27,7 +26,6 @@ import io.airbyte.db.Database; import io.airbyte.db.ExceptionWrappingDatabase; import 
io.airbyte.db.instance.jobs.JobsDatabaseSchema; -import io.airbyte.db.instance.jobs.jooq.enums.JobConfigType; import io.airbyte.db.jdbc.JdbcUtils; import io.airbyte.scheduler.models.Attempt; import io.airbyte.scheduler.models.AttemptStatus; @@ -390,15 +388,18 @@ public List listJobsWithStatus(final ConfigType configType, final JobStatus @Override public List listJobStatusWithConnection(final UUID connectionId, final ConfigType configType, final Instant jobCreatedAtTimestamp) throws IOException { - return jobDatabase.query(ctx -> ctx.select(JOBS.STATUS) - .from(JOBS) - .where(JOBS.SCOPE.eq(connectionId.toString())) - .and(JOBS.CONFIG_TYPE.eq(JobConfigType.sync)) // todo: can i change this to use configType? - .and(JOBS.CREATED_AT.greaterOrEqual(OffsetDateTime.ofInstant(jobCreatedAtTimestamp, ZoneOffset.UTC))) - .fetch() + final LocalDateTime timeConvertedIntoLocalDateTime = LocalDateTime.ofInstant(jobCreatedAtTimestamp, ZoneOffset.UTC); + + final String JobStatusSelect = "SELECT status FROM jobs "; + return jobDatabase.query(ctx -> ctx + .fetch(JobStatusSelect + "WHERE " + + "CAST(scope AS VARCHAR) = ? AND " + + "CAST(config_type AS VARCHAR) = ? AND " + + "created_at >= ? ORDER BY created_at DESC", connectionId.toString(), + Sqls.toSqlName(configType), timeConvertedIntoLocalDateTime)) .stream() - .flatMap(row -> Stream.of(JobStatus.valueOf(row.value1().getLiteral().toUpperCase()))) - .collect(Collectors.toList())); + .flatMap(r -> Stream.of(JobStatus.valueOf(r.get("status", String.class).toUpperCase()))) + .collect(Collectors.toList()); } @Override diff --git a/airbyte-scheduler/persistence/src/test/java/io/airbyte/scheduler/persistence/DefaultJobPersistenceTest.java b/airbyte-scheduler/persistence/src/test/java/io/airbyte/scheduler/persistence/DefaultJobPersistenceTest.java index f6d5d16499f6..0c6dcb4fe956 100644 --- a/airbyte-scheduler/persistence/src/test/java/io/airbyte/scheduler/persistence/DefaultJobPersistenceTest.java +++ b/airbyte-scheduler/persistence/src/test/java/io/airbyte/scheduler/persistence/DefaultJobPersistenceTest.java @@ -1315,50 +1315,55 @@ private Job addStateToJob(final Job job) throws IOException, SQLException { } - @Test - @DisplayName("Should list all job statuses at different stages, as well as ones filtered by timestamp") - public void listJobStatusWithConnection() throws IOException { - final Instant now = Instant.parse("2021-01-01T00:00:00Z"); - final Supplier timeSupplier = incrementingSecondSupplier(now); - - jobPersistence = new DefaultJobPersistence(jobDatabase, timeSupplier, 30, 500, 10); - final long syncJobId = jobPersistence.enqueueJob(SCOPE, SYNC_JOB_CONFIG).orElseThrow(); - final int syncJobAttemptNumber0 = jobPersistence.createAttempt(syncJobId, LOG_PATH); - jobPersistence.failAttempt(syncJobId, syncJobAttemptNumber0); - final Path syncJobSecondAttemptLogPath = LOG_PATH.resolve("2"); - final int syncJobAttemptNumber1 = jobPersistence.createAttempt(syncJobId, syncJobSecondAttemptLogPath); - jobPersistence.failAttempt(syncJobId, syncJobAttemptNumber1); - - final List jobStatuses = jobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, Instant.EPOCH); - assertEquals(jobStatuses.size(), 1); - assertEquals(JobStatus.INCOMPLETE, jobStatuses.get(0)); - - final Path syncJobThirdAttemptLogPath = LOG_PATH.resolve("3"); - final int syncJobAttemptNumber2 = jobPersistence.createAttempt(syncJobId, syncJobThirdAttemptLogPath); - jobPersistence.failAttempt(syncJobId, syncJobAttemptNumber2); - jobPersistence.failJob(syncJobId); - - final long 
newSyncJobId = jobPersistence.enqueueJob(SCOPE, SYNC_JOB_CONFIG).orElseThrow(); - final int newSyncJobAttemptNumber0 = jobPersistence.createAttempt(newSyncJobId, LOG_PATH); - jobPersistence.failAttempt(newSyncJobId, newSyncJobAttemptNumber0); - final Path newSyncJobSecondAttemptLogPath = LOG_PATH.resolve("2"); - final int newSyncJobAttemptNumber1 = jobPersistence.createAttempt(newSyncJobId, newSyncJobSecondAttemptLogPath); - jobPersistence.succeedAttempt(newSyncJobId, newSyncJobAttemptNumber1); - - final List allQueryJobStatuses = jobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, Instant.EPOCH); - assertEquals(2, allQueryJobStatuses.size()); - assertEquals(JobStatus.FAILED, allQueryJobStatuses.get(0)); - assertEquals(JobStatus.SUCCEEDED, allQueryJobStatuses.get(1)); - - final List jobs = jobPersistence.listJobs(ConfigType.SYNC, Instant.EPOCH); - final Long maxCreatedAtTimestamp = jobs.get(1).getCreatedAtInSecond(); - - // todo: look into why the older job isn't being filtered out - final List secondQueryJobStatuses = jobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, Instant.ofEpochSecond(maxCreatedAtTimestamp)); - assertEquals(1, secondQueryJobStatuses.size()); - assertEquals(JobStatus.SUCCEEDED, secondQueryJobStatuses.get(0)); - - assertEquals(0, jobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, Instant.ofEpochSecond(maxCreatedAtTimestamp + 1)).size()); - } + @Test + @DisplayName("Should list all job statuses at different stages, filtered by timestamps and connection id") + + public void listJobStatusWithConnection() throws IOException { + jobPersistence = new DefaultJobPersistence(jobDatabase, timeSupplier, 30, 500, 10); + + final long wrongConnectionSyncJobId = jobPersistence.enqueueJob(UUID.randomUUID().toString(), SYNC_JOB_CONFIG).orElseThrow(); + final int wrongSyncJobAttemptNumber0 = jobPersistence.createAttempt(wrongConnectionSyncJobId, LOG_PATH); + jobPersistence.failAttempt(wrongConnectionSyncJobId, wrongSyncJobAttemptNumber0); + assertEquals(0, jobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, NOW).size()); + + final long syncJobId = jobPersistence.enqueueJob(SCOPE, SYNC_JOB_CONFIG).orElseThrow(); + final int syncJobAttemptNumber0 = jobPersistence.createAttempt(syncJobId, LOG_PATH); + jobPersistence.failAttempt(syncJobId, syncJobAttemptNumber0); + final Path syncJobSecondAttemptLogPath = LOG_PATH.resolve("2"); + final int syncJobAttemptNumber1 = jobPersistence.createAttempt(syncJobId, syncJobSecondAttemptLogPath); + jobPersistence.failAttempt(syncJobId, syncJobAttemptNumber1); + + final List jobStatuses = jobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, Instant.EPOCH); + assertEquals(jobStatuses.size(), 1); + assertEquals(JobStatus.INCOMPLETE, jobStatuses.get(0)); + + final Path syncJobThirdAttemptLogPath = LOG_PATH.resolve("3"); + final int syncJobAttemptNumber2 = jobPersistence.createAttempt(syncJobId, syncJobThirdAttemptLogPath); + jobPersistence.failAttempt(syncJobId, syncJobAttemptNumber2); + jobPersistence.failJob(syncJobId); + + final Instant timeAfterFirstJob = NOW.plusSeconds(60); + when(timeSupplier.get()).thenReturn(timeAfterFirstJob); + + final long newSyncJobId = jobPersistence.enqueueJob(SCOPE, SYNC_JOB_CONFIG).orElseThrow(); + final int newSyncJobAttemptNumber0 = jobPersistence.createAttempt(newSyncJobId, LOG_PATH); + jobPersistence.failAttempt(newSyncJobId, newSyncJobAttemptNumber0); + final Path newSyncJobSecondAttemptLogPath = 
LOG_PATH.resolve("2"); + final int newSyncJobAttemptNumber1 = jobPersistence.createAttempt(newSyncJobId, newSyncJobSecondAttemptLogPath); + jobPersistence.succeedAttempt(newSyncJobId, newSyncJobAttemptNumber1); + + final List allQueryJobStatuses = jobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, Instant.EPOCH); + assertEquals(2, allQueryJobStatuses.size()); + // expect list to be in desc order + assertEquals(JobStatus.SUCCEEDED, allQueryJobStatuses.get(0)); + assertEquals(JobStatus.FAILED, allQueryJobStatuses.get(1)); + + final List secondQueryJobStatuses = jobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, timeAfterFirstJob); + assertEquals(1, secondQueryJobStatuses.size()); + assertEquals(JobStatus.SUCCEEDED, secondQueryJobStatuses.get(0)); + + final Instant timeAfterSecondJob = timeAfterFirstJob.plusSeconds(60); + assertEquals(0, jobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, timeAfterSecondJob).size()); + } } diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/WorkerApp.java b/airbyte-workers/src/main/java/io/airbyte/workers/WorkerApp.java index 964a279a3de1..43517bb685d9 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/WorkerApp.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/WorkerApp.java @@ -180,7 +180,7 @@ private void registerConnectionManager(final WorkerFactory factory) { jobCreator), new ConfigFetchActivityImpl(configRepository, jobPersistence, configs, () -> Instant.now().getEpochSecond()), new ConnectionDeletionActivityImpl(connectionHelper), - new AutoDisableConnectionActivityImpl(configRepository, jobPersistence, featureFlags)); + new AutoDisableConnectionActivityImpl(configRepository, jobPersistence, featureFlags, configs)); } private void registerSync(final WorkerFactory factory) { diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityImpl.java index 64b305b21930..eee8cfb0f0d5 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityImpl.java @@ -4,8 +4,8 @@ package io.airbyte.workers.temporal.scheduling.activities; -import com.google.common.annotations.VisibleForTesting; import io.airbyte.commons.features.FeatureFlags; +import io.airbyte.config.Configs; import io.airbyte.config.JobConfig.ConfigType; import io.airbyte.config.StandardSync; import io.airbyte.config.StandardSync.Status; @@ -20,22 +20,17 @@ @AllArgsConstructor public class AutoDisableConnectionActivityImpl implements AutoDisableConnectionActivity { - @VisibleForTesting - public static final int MAX_FAILURE_JOBS_IN_A_ROW = 100; - @VisibleForTesting - public static final int MAX_DAYS_OF_STRAIGHT_FAILURE = 14; - - private final ConfigRepository configRepository; + private ConfigRepository configRepository; private JobPersistence jobPersistence; private FeatureFlags featureFlags; + private Configs configs; @Override public void autoDisableFailingConnection(final AutoDisableConnectionActivityInput input) { if (featureFlags.disablesFailingConnections()) { - try { final List jobStatuses = jobPersistence.listJobStatusWithConnection(input.getConnectionId(), ConfigType.SYNC, - 
input.getCurrTimestamp().minus(MAX_DAYS_OF_STRAIGHT_FAILURE, ChronoUnit.DAYS)); + input.getCurrTimestamp().minus(configs.getMaxDaysOfOnlyFailedJobsBeforeConnectionDisable(), ChronoUnit.DAYS)); if (jobStatuses.size() == 0) return; @@ -47,7 +42,7 @@ public void autoDisableFailingConnection(final AutoDisableConnectionActivityInpu for (final JobStatus jobStatus : jobStatuses) { if (jobStatus == JobStatus.FAILED) { numFailures++; - if (numFailures == MAX_FAILURE_JOBS_IN_A_ROW) + if (numFailures == configs.getMaxFailedJobsInARowBeforeConnectionDisable()) break; } else if (jobStatus == JobStatus.SUCCEEDED) { shouldDisable = false; diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityTest.java index 56f377605836..2087768a5629 100644 --- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityTest.java +++ b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityTest.java @@ -4,10 +4,10 @@ package io.airbyte.workers.temporal.scheduling.activities; -import static io.airbyte.workers.temporal.scheduling.activities.AutoDisableConnectionActivityImpl.MAX_DAYS_OF_STRAIGHT_FAILURE; -import static io.airbyte.workers.temporal.scheduling.activities.AutoDisableConnectionActivityImpl.MAX_FAILURE_JOBS_IN_A_ROW; +import static io.airbyte.config.EnvConfigs.DEFAULT_FAILED_JOBS_IN_A_ROW_BEFORE_CONNECTION_DISABLE; import io.airbyte.commons.features.FeatureFlags; +import io.airbyte.config.Configs; import io.airbyte.config.JobConfig.ConfigType; import io.airbyte.config.StandardSync; import io.airbyte.config.StandardSync.Status; @@ -47,18 +47,23 @@ class AutoDisableConnectionActivityTest { @Mock private JobPersistence mJobPersistence; + @Mock + private Configs mConfigs; + @InjectMocks private AutoDisableConnectionActivityImpl autoDisableActivity; private static final UUID CONNECTION_ID = UUID.randomUUID(); - private static final Instant currInstant = Instant.now(); - private static final AutoDisableConnectionActivityInput ACTIVITY_INPUT = new AutoDisableConnectionActivityInput(CONNECTION_ID, currInstant); + private static final Instant CURR_INSTANT = Instant.now(); + private static final AutoDisableConnectionActivityInput ACTIVITY_INPUT = new AutoDisableConnectionActivityInput(CONNECTION_ID, CURR_INSTANT); + private static final int MAX_FAILURE_JOBS_IN_A_ROW = DEFAULT_FAILED_JOBS_IN_A_ROW_BEFORE_CONNECTION_DISABLE; private final StandardSync standardSync = new StandardSync(); @BeforeEach void setUp() { standardSync.setStatus(Status.ACTIVE); + Mockito.when(mFeatureFlags.disablesFailingConnections()).thenReturn(true); } @Nested @@ -67,16 +72,14 @@ class AutoDisableConnectionTest { @Test @DisplayName("Test that the connection is disabled after MAX_FAILURE_JOBS_IN_A_ROW straight failures") public void testMaxFailuresInARow() throws IOException, JsonValidationException, ConfigNotFoundException { - Mockito.when(mFeatureFlags.disablesFailingConnections()).thenReturn(true); - // from most recent to least recent: MAX_FAILURE_JOBS_IN_A_ROW and 1 success final List jobStatuses = new ArrayList<>(Collections.nCopies(MAX_FAILURE_JOBS_IN_A_ROW, JobStatus.FAILED)); jobStatuses.add(JobStatus.SUCCEEDED); Mockito.when(mJobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, - currInstant.minus(MAX_DAYS_OF_STRAIGHT_FAILURE, 
ChronoUnit.DAYS))).thenReturn(jobStatuses); - Mockito.when(mConfigRepository.getStandardSync(CONNECTION_ID)) - .thenReturn(standardSync); + CURR_INSTANT.minus(mConfigs.getMaxDaysOfOnlyFailedJobsBeforeConnectionDisable(), ChronoUnit.DAYS))).thenReturn(jobStatuses); + Mockito.when(mConfigs.getMaxFailedJobsInARowBeforeConnectionDisable()).thenReturn(MAX_FAILURE_JOBS_IN_A_ROW); + Mockito.when(mConfigRepository.getStandardSync(CONNECTION_ID)).thenReturn(standardSync); autoDisableActivity.autoDisableFailingConnection(ACTIVITY_INPUT); Assertions.assertThat(standardSync.getStatus()).isEqualTo(Status.INACTIVE); @@ -89,9 +92,9 @@ public void testLessThanMaxFailuresInARow() throws IOException { final List jobStatuses = new ArrayList<>(Collections.nCopies(MAX_FAILURE_JOBS_IN_A_ROW - 1, JobStatus.FAILED)); jobStatuses.add(JobStatus.SUCCEEDED); - Mockito.when(mFeatureFlags.disablesFailingConnections()).thenReturn(true); Mockito.when(mJobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, - currInstant.minus(MAX_DAYS_OF_STRAIGHT_FAILURE, ChronoUnit.DAYS))).thenReturn(jobStatuses); + CURR_INSTANT.minus(mConfigs.getMaxDaysOfOnlyFailedJobsBeforeConnectionDisable(), ChronoUnit.DAYS))).thenReturn(jobStatuses); + Mockito.when(mConfigs.getMaxFailedJobsInARowBeforeConnectionDisable()).thenReturn(MAX_FAILURE_JOBS_IN_A_ROW); autoDisableActivity.autoDisableFailingConnection(ACTIVITY_INPUT); Assertions.assertThat(standardSync.getStatus()).isEqualTo(Status.ACTIVE); @@ -100,9 +103,8 @@ public void testLessThanMaxFailuresInARow() throws IOException { @Test @DisplayName("Test that the connection is _not_ disabled after 0 jobs in last MAX_DAYS_OF_STRAIGHT_FAILURE days") public void testNoRuns() throws IOException { - Mockito.when(mFeatureFlags.disablesFailingConnections()).thenReturn(true); Mockito.when(mJobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, - currInstant.minus(MAX_DAYS_OF_STRAIGHT_FAILURE, ChronoUnit.DAYS))).thenReturn(Collections.emptyList()); + CURR_INSTANT.minus(mConfigs.getMaxDaysOfOnlyFailedJobsBeforeConnectionDisable(), ChronoUnit.DAYS))).thenReturn(Collections.emptyList()); autoDisableActivity.autoDisableFailingConnection(ACTIVITY_INPUT); Assertions.assertThat(standardSync.getStatus()).isEqualTo(Status.ACTIVE); @@ -111,11 +113,11 @@ public void testNoRuns() throws IOException { @Test @DisplayName("Test that the connection is disabled after only failed jobs in last MAX_DAYS_OF_STRAIGHT_FAILURE days") public void testOnlyFailuresInMaxDays() throws IOException, JsonValidationException, ConfigNotFoundException { - Mockito.when(mFeatureFlags.disablesFailingConnections()).thenReturn(true); Mockito.when(mJobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, - currInstant.minus(MAX_DAYS_OF_STRAIGHT_FAILURE, ChronoUnit.DAYS))).thenReturn(Collections.singletonList(JobStatus.FAILED)); - Mockito.when(mConfigRepository.getStandardSync(CONNECTION_ID)) - .thenReturn(standardSync); + CURR_INSTANT.minus(mConfigs.getMaxDaysOfOnlyFailedJobsBeforeConnectionDisable(), ChronoUnit.DAYS))) + .thenReturn(Collections.singletonList(JobStatus.FAILED)); + Mockito.when(mConfigRepository.getStandardSync(CONNECTION_ID)).thenReturn(standardSync); + Mockito.when(mConfigs.getMaxFailedJobsInARowBeforeConnectionDisable()).thenReturn(MAX_FAILURE_JOBS_IN_A_ROW); autoDisableActivity.autoDisableFailingConnection(ACTIVITY_INPUT); Assertions.assertThat(standardSync.getStatus()).isEqualTo(Status.INACTIVE); From 2ae29b480fcd27ba48fe5538bf12e19823d44cb7 Mon Sep 17 00:00:00 2001 
From: terencecho Date: Thu, 10 Mar 2022 14:21:06 -0800 Subject: [PATCH 05/38] Fix env flag name --- .../airbyte/commons/features/EnvVariableFeatureFlags.java | 6 +++--- .../main/java/io/airbyte/commons/features/FeatureFlags.java | 2 +- .../activities/AutoDisableConnectionActivityImpl.java | 2 +- .../activities/AutoDisableConnectionActivityTest.java | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/airbyte-commons/src/main/java/io/airbyte/commons/features/EnvVariableFeatureFlags.java b/airbyte-commons/src/main/java/io/airbyte/commons/features/EnvVariableFeatureFlags.java index f7214fde1d8e..c352c8d991ea 100644 --- a/airbyte-commons/src/main/java/io/airbyte/commons/features/EnvVariableFeatureFlags.java +++ b/airbyte-commons/src/main/java/io/airbyte/commons/features/EnvVariableFeatureFlags.java @@ -17,10 +17,10 @@ public boolean usesNewScheduler() { } @Override - public boolean disablesFailingConnections() { - log.info("Disable Failing Connections: " + Boolean.parseBoolean(System.getenv("DISABLE_FAILING_CONNECTIONS"))); + public boolean autoDisablesFailingConnections() { + log.info("Auto Disable Failing Connections: " + Boolean.parseBoolean(System.getenv("AUTO_DISABLE_FAILING_CONNECTIONS"))); - return Boolean.parseBoolean(System.getenv("DISABLE_FAILING_CONNECTIONS")); + return Boolean.parseBoolean(System.getenv("AUTO_DISABLE_FAILING_CONNECTIONS")); } } diff --git a/airbyte-commons/src/main/java/io/airbyte/commons/features/FeatureFlags.java b/airbyte-commons/src/main/java/io/airbyte/commons/features/FeatureFlags.java index 3a4f1c0ca11f..83e9baa1540d 100644 --- a/airbyte-commons/src/main/java/io/airbyte/commons/features/FeatureFlags.java +++ b/airbyte-commons/src/main/java/io/airbyte/commons/features/FeatureFlags.java @@ -12,6 +12,6 @@ public interface FeatureFlags { boolean usesNewScheduler(); - boolean disablesFailingConnections(); + boolean autoDisablesFailingConnections(); } diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityImpl.java index eee8cfb0f0d5..302f544ac21f 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityImpl.java @@ -27,7 +27,7 @@ public class AutoDisableConnectionActivityImpl implements AutoDisableConnectionA @Override public void autoDisableFailingConnection(final AutoDisableConnectionActivityInput input) { - if (featureFlags.disablesFailingConnections()) { + if (featureFlags.autoDisablesFailingConnections()) { try { final List jobStatuses = jobPersistence.listJobStatusWithConnection(input.getConnectionId(), ConfigType.SYNC, input.getCurrTimestamp().minus(configs.getMaxDaysOfOnlyFailedJobsBeforeConnectionDisable(), ChronoUnit.DAYS)); diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityTest.java index 2087768a5629..670903766cc7 100644 --- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityTest.java +++ b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityTest.java @@ -63,7 
+63,7 @@ class AutoDisableConnectionActivityTest { @BeforeEach void setUp() { standardSync.setStatus(Status.ACTIVE); - Mockito.when(mFeatureFlags.disablesFailingConnections()).thenReturn(true); + Mockito.when(mFeatureFlags.autoDisablesFailingConnections()).thenReturn(true); } @Nested From f209000d3515f43ac042b2fa0bf4370e88a75608 Mon Sep 17 00:00:00 2001 From: terencecho Date: Thu, 10 Mar 2022 14:41:22 -0800 Subject: [PATCH 06/38] Fix missing name changes --- .../models/src/main/java/io/airbyte/config/EnvConfigs.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/airbyte-config/models/src/main/java/io/airbyte/config/EnvConfigs.java b/airbyte-config/models/src/main/java/io/airbyte/config/EnvConfigs.java index c1be01f86e3b..90ff7d68b1e1 100644 --- a/airbyte-config/models/src/main/java/io/airbyte/config/EnvConfigs.java +++ b/airbyte-config/models/src/main/java/io/airbyte/config/EnvConfigs.java @@ -122,7 +122,7 @@ public class EnvConfigs implements Configs { private static final String SHOULD_RUN_CONNECTION_MANAGER_WORKFLOWS = "SHOULD_RUN_CONNECTION_MANAGER_WORKFLOWS"; private static final String MAX_FAILED_JOBS_IN_A_ROW_BEFORE_CONNECTION_DISABLE = "MAX_FAILED_JOBS_IN_A_ROW_BEFORE_CONNECTION_DISABLE"; - private static final String MAX_DAYS_OF_ONLY_FAILED_JOBS_BEFORE_CONNECTION_DISABLE = "MAX_DAYS_OF_STRAIGHT_FAILURE_BEFORE_CONNECTION_DISABLE"; + private static final String MAX_DAYS_OF_ONLY_FAILED_JOBS_BEFORE_CONNECTION_DISABLE = "MAX_DAYS_OF_ONLY_FAILED_JOBS_BEFORE_CONNECTION_DISABLE"; // job-type-specific overrides public static final String SPEC_JOB_KUBE_NODE_SELECTORS = "SPEC_JOB_KUBE_NODE_SELECTORS"; From 207863f87da725f8733e282713d630ee0dee449d Mon Sep 17 00:00:00 2001 From: terencecho Date: Thu, 10 Mar 2022 14:46:53 -0800 Subject: [PATCH 07/38] Add comments to unit test --- .../scheduler/persistence/DefaultJobPersistenceTest.java | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/airbyte-scheduler/persistence/src/test/java/io/airbyte/scheduler/persistence/DefaultJobPersistenceTest.java b/airbyte-scheduler/persistence/src/test/java/io/airbyte/scheduler/persistence/DefaultJobPersistenceTest.java index 0c6dcb4fe956..20464826a5f9 100644 --- a/airbyte-scheduler/persistence/src/test/java/io/airbyte/scheduler/persistence/DefaultJobPersistenceTest.java +++ b/airbyte-scheduler/persistence/src/test/java/io/airbyte/scheduler/persistence/DefaultJobPersistenceTest.java @@ -1317,15 +1317,16 @@ private Job addStateToJob(final Job job) throws IOException, SQLException { @Test @DisplayName("Should list all job statuses at different stages, filtered by timestamps and connection id") - public void listJobStatusWithConnection() throws IOException { jobPersistence = new DefaultJobPersistence(jobDatabase, timeSupplier, 30, 500, 10); + // create a connection with a non-relevant connection id that should be ignored for the duration of the test final long wrongConnectionSyncJobId = jobPersistence.enqueueJob(UUID.randomUUID().toString(), SYNC_JOB_CONFIG).orElseThrow(); final int wrongSyncJobAttemptNumber0 = jobPersistence.createAttempt(wrongConnectionSyncJobId, LOG_PATH); jobPersistence.failAttempt(wrongConnectionSyncJobId, wrongSyncJobAttemptNumber0); assertEquals(0, jobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, NOW).size()); + // create initial job final long syncJobId = jobPersistence.enqueueJob(SCOPE, SYNC_JOB_CONFIG).orElseThrow(); final int syncJobAttemptNumber0 = jobPersistence.createAttempt(syncJobId, LOG_PATH); 
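// [Editor's note -- not part of the patch: the DefaultJobPersistence status
// transitions that the commented steps below rely on, as asserted throughout
// this test class.]
//
//   jobPersistence.failAttempt(jobId, attemptNo);    // job reads as INCOMPLETE
//   jobPersistence.failJob(jobId);                   // job reads as FAILED
//   jobPersistence.succeedAttempt(jobId, attemptNo); // job reads as SUCCEEDED
//   jobPersistence.cancelJob(jobId);                 // job reads as CANCELLED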
jobPersistence.failAttempt(syncJobId, syncJobAttemptNumber0); @@ -1333,6 +1334,7 @@ public void listJobStatusWithConnection() throws IOException { final int syncJobAttemptNumber1 = jobPersistence.createAttempt(syncJobId, syncJobSecondAttemptLogPath); jobPersistence.failAttempt(syncJobId, syncJobAttemptNumber1); + // check to see current status of all jobs final List jobStatuses = jobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, Instant.EPOCH); assertEquals(jobStatuses.size(), 1); assertEquals(JobStatus.INCOMPLETE, jobStatuses.get(0)); @@ -1345,6 +1347,7 @@ public void listJobStatusWithConnection() throws IOException { final Instant timeAfterFirstJob = NOW.plusSeconds(60); when(timeSupplier.get()).thenReturn(timeAfterFirstJob); + // fail first job and succeed second job final long newSyncJobId = jobPersistence.enqueueJob(SCOPE, SYNC_JOB_CONFIG).orElseThrow(); final int newSyncJobAttemptNumber0 = jobPersistence.createAttempt(newSyncJobId, LOG_PATH); jobPersistence.failAttempt(newSyncJobId, newSyncJobAttemptNumber0); @@ -1352,16 +1355,18 @@ public void listJobStatusWithConnection() throws IOException { final int newSyncJobAttemptNumber1 = jobPersistence.createAttempt(newSyncJobId, newSyncJobSecondAttemptLogPath); jobPersistence.succeedAttempt(newSyncJobId, newSyncJobAttemptNumber1); + // check to list status of both jobs, expect the list to be in desc order, meaning latest job's status first final List allQueryJobStatuses = jobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, Instant.EPOCH); assertEquals(2, allQueryJobStatuses.size()); - // expect list to be in desc order assertEquals(JobStatus.SUCCEEDED, allQueryJobStatuses.get(0)); assertEquals(JobStatus.FAILED, allQueryJobStatuses.get(1)); + // check to see if timestamp filtering is working, by only looking up jobs with a timestamp after the first job final List secondQueryJobStatuses = jobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, timeAfterFirstJob); assertEquals(1, secondQueryJobStatuses.size()); assertEquals(JobStatus.SUCCEEDED, secondQueryJobStatuses.get(0)); + // check to see if timestamp filtering is working by only looking up jobs with timestamp after second job final Instant timeAfterSecondJob = timeAfterFirstJob.plusSeconds(60); assertEquals(0, jobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, timeAfterSecondJob).size()); } From 26f7b3a4d744ec3f0807924da87947300c502a16 Mon Sep 17 00:00:00 2001 From: terencecho Date: Thu, 10 Mar 2022 16:28:13 -0800 Subject: [PATCH 08/38] Address PR comments --- .../persistence/DefaultJobPersistence.java | 4 +- .../DefaultJobPersistenceTest.java | 166 ++++++++++++------ .../ConnectionManagerWorkflowImpl.java | 2 +- .../AutoDisableConnectionActivityTest.java | 108 ++++++------ 4 files changed, 166 insertions(+), 114 deletions(-) diff --git a/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/DefaultJobPersistence.java b/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/DefaultJobPersistence.java index 0113ee6bf555..0e0a9ea62b58 100644 --- a/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/DefaultJobPersistence.java +++ b/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/DefaultJobPersistence.java @@ -398,8 +398,8 @@ public List listJobStatusWithConnection(final UUID connectionId, fina "created_at >= ? 
ORDER BY created_at DESC", connectionId.toString(), Sqls.toSqlName(configType), timeConvertedIntoLocalDateTime)) .stream() - .flatMap(r -> Stream.of(JobStatus.valueOf(r.get("status", String.class).toUpperCase()))) - .collect(Collectors.toList()); + .map(r -> JobStatus.valueOf(r.get("status", String.class).toUpperCase())) + .toList(); } @Override diff --git a/airbyte-scheduler/persistence/src/test/java/io/airbyte/scheduler/persistence/DefaultJobPersistenceTest.java b/airbyte-scheduler/persistence/src/test/java/io/airbyte/scheduler/persistence/DefaultJobPersistenceTest.java index 20464826a5f9..b2b72a197f42 100644 --- a/airbyte-scheduler/persistence/src/test/java/io/airbyte/scheduler/persistence/DefaultJobPersistenceTest.java +++ b/airbyte-scheduler/persistence/src/test/java/io/airbyte/scheduler/persistence/DefaultJobPersistenceTest.java @@ -89,8 +89,12 @@ class DefaultJobPersistenceTest { private static final JobConfig SYNC_JOB_CONFIG = new JobConfig() .withConfigType(ConfigType.SYNC) .withSync(new JobSyncConfig()); - private static PostgreSQLContainer container; + private static final int DEFAULT_MINIMUM_AGE_IN_DAYS = 30; + private static final int DEFAULT_EXCESSIVE_NUMBER_OF_JOBS = 500; + private static final int DEFAULT_MINIMUM_RECENCY_COUNT = 10; + + private static PostgreSQLContainer container; private Database jobDatabase; private Database configDatabase; private Supplier timeSupplier; @@ -169,7 +173,8 @@ public void setup() throws Exception { timeSupplier = mock(Supplier.class); when(timeSupplier.get()).thenReturn(NOW); - jobPersistence = new DefaultJobPersistence(jobDatabase, timeSupplier, 30, 500, 10); + jobPersistence = new DefaultJobPersistence(jobDatabase, timeSupplier, DEFAULT_MINIMUM_AGE_IN_DAYS, DEFAULT_EXCESSIVE_NUMBER_OF_JOBS, + DEFAULT_MINIMUM_RECENCY_COUNT); } @AfterEach @@ -337,7 +342,8 @@ void testListJobsWithTimestamp() throws IOException { final Instant now = Instant.parse("2021-01-01T00:00:00Z"); final Supplier timeSupplier = incrementingSecondSupplier(now); - jobPersistence = new DefaultJobPersistence(jobDatabase, timeSupplier, 30, 500, 10); + jobPersistence = new DefaultJobPersistence(jobDatabase, timeSupplier, DEFAULT_MINIMUM_AGE_IN_DAYS, DEFAULT_EXCESSIVE_NUMBER_OF_JOBS, + DEFAULT_MINIMUM_RECENCY_COUNT); final long syncJobId = jobPersistence.enqueueJob(SCOPE, SYNC_JOB_CONFIG).orElseThrow(); final int syncJobAttemptNumber0 = jobPersistence.createAttempt(syncJobId, LOG_PATH); jobPersistence.failAttempt(syncJobId, syncJobAttemptNumber0); @@ -402,7 +408,8 @@ void testListJobsWithTimestamp() throws IOException { void testListAttemptsWithJobInfo() throws IOException { final Instant now = Instant.parse("2021-01-01T00:00:00Z"); final Supplier timeSupplier = incrementingSecondSupplier(now); - jobPersistence = new DefaultJobPersistence(jobDatabase, timeSupplier, 30, 500, 10); + jobPersistence = new DefaultJobPersistence(jobDatabase, timeSupplier, DEFAULT_MINIMUM_AGE_IN_DAYS, DEFAULT_EXCESSIVE_NUMBER_OF_JOBS, + DEFAULT_MINIMUM_RECENCY_COUNT); final long job1 = jobPersistence.enqueueJob(SCOPE + "-1", SYNC_JOB_CONFIG).orElseThrow(); final long job2 = jobPersistence.enqueueJob(SCOPE + "-2", SYNC_JOB_CONFIG).orElseThrow(); @@ -1315,60 +1322,111 @@ private Job addStateToJob(final Job job) throws IOException, SQLException { } - @Test - @DisplayName("Should list all job statuses at different stages, filtered by timestamps and connection id") - public void listJobStatusWithConnection() throws IOException { - jobPersistence = new DefaultJobPersistence(jobDatabase, timeSupplier, 30, 
500, 10); - - // create a connection with a non-relevant connection id that should be ignored for the duration of the test - final long wrongConnectionSyncJobId = jobPersistence.enqueueJob(UUID.randomUUID().toString(), SYNC_JOB_CONFIG).orElseThrow(); - final int wrongSyncJobAttemptNumber0 = jobPersistence.createAttempt(wrongConnectionSyncJobId, LOG_PATH); - jobPersistence.failAttempt(wrongConnectionSyncJobId, wrongSyncJobAttemptNumber0); - assertEquals(0, jobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, NOW).size()); - - // create initial job - final long syncJobId = jobPersistence.enqueueJob(SCOPE, SYNC_JOB_CONFIG).orElseThrow(); - final int syncJobAttemptNumber0 = jobPersistence.createAttempt(syncJobId, LOG_PATH); - jobPersistence.failAttempt(syncJobId, syncJobAttemptNumber0); - final Path syncJobSecondAttemptLogPath = LOG_PATH.resolve("2"); - final int syncJobAttemptNumber1 = jobPersistence.createAttempt(syncJobId, syncJobSecondAttemptLogPath); - jobPersistence.failAttempt(syncJobId, syncJobAttemptNumber1); - - // check to see current status of all jobs - final List jobStatuses = jobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, Instant.EPOCH); - assertEquals(jobStatuses.size(), 1); - assertEquals(JobStatus.INCOMPLETE, jobStatuses.get(0)); - - final Path syncJobThirdAttemptLogPath = LOG_PATH.resolve("3"); - final int syncJobAttemptNumber2 = jobPersistence.createAttempt(syncJobId, syncJobThirdAttemptLogPath); - jobPersistence.failAttempt(syncJobId, syncJobAttemptNumber2); - jobPersistence.failJob(syncJobId); - - final Instant timeAfterFirstJob = NOW.plusSeconds(60); - when(timeSupplier.get()).thenReturn(timeAfterFirstJob); + @Nested + @DisplayName("When listing job statuses with specified connection id and timestamp") + class ListJobStatusWithConnection { - // fail first job and succeed second job - final long newSyncJobId = jobPersistence.enqueueJob(SCOPE, SYNC_JOB_CONFIG).orElseThrow(); - final int newSyncJobAttemptNumber0 = jobPersistence.createAttempt(newSyncJobId, LOG_PATH); - jobPersistence.failAttempt(newSyncJobId, newSyncJobAttemptNumber0); - final Path newSyncJobSecondAttemptLogPath = LOG_PATH.resolve("2"); - final int newSyncJobAttemptNumber1 = jobPersistence.createAttempt(newSyncJobId, newSyncJobSecondAttemptLogPath); - jobPersistence.succeedAttempt(newSyncJobId, newSyncJobAttemptNumber1); + @Test + @DisplayName("Should list only job statuses of specified connection id") + public void testConnectionIdFiltering() throws IOException { + jobPersistence = new DefaultJobPersistence(jobDatabase, timeSupplier, DEFAULT_MINIMUM_AGE_IN_DAYS, DEFAULT_EXCESSIVE_NUMBER_OF_JOBS, + DEFAULT_MINIMUM_RECENCY_COUNT); + + // create a connection with a non-relevant connection id that should be ignored for the duration of + // the test + final long wrongConnectionSyncJobId = jobPersistence.enqueueJob(UUID.randomUUID().toString(), SYNC_JOB_CONFIG).orElseThrow(); + final int wrongSyncJobAttemptNumber0 = jobPersistence.createAttempt(wrongConnectionSyncJobId, LOG_PATH); + jobPersistence.failAttempt(wrongConnectionSyncJobId, wrongSyncJobAttemptNumber0); + assertEquals(0, jobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, Instant.EPOCH).size()); + + // create a connection with relevant connection id + final long syncJobId = jobPersistence.enqueueJob(SCOPE, SYNC_JOB_CONFIG).orElseThrow(); + final int syncJobAttemptNumber0 = jobPersistence.createAttempt(syncJobId, LOG_PATH); + jobPersistence.failAttempt(syncJobId, 
syncJobAttemptNumber0); + + // check to see current status of only relevantly scoped job + final List jobStatuses = jobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, Instant.EPOCH); + assertEquals(jobStatuses.size(), 1); + assertEquals(JobStatus.INCOMPLETE, jobStatuses.get(0)); + } - // check to list status of both jobs, expect the list to be in desc order, meaning latest job's status first - final List allQueryJobStatuses = jobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, Instant.EPOCH); - assertEquals(2, allQueryJobStatuses.size()); - assertEquals(JobStatus.SUCCEEDED, allQueryJobStatuses.get(0)); - assertEquals(JobStatus.FAILED, allQueryJobStatuses.get(1)); + @Test + @DisplayName("Should list jobs statuses filtered by different timestamps") + public void testTimestampFiltering() throws IOException { + jobPersistence = new DefaultJobPersistence(jobDatabase, timeSupplier, DEFAULT_MINIMUM_AGE_IN_DAYS, DEFAULT_EXCESSIVE_NUMBER_OF_JOBS, + DEFAULT_MINIMUM_RECENCY_COUNT); + + // Create and fail initial job + final long syncJobId = jobPersistence.enqueueJob(SCOPE, SYNC_JOB_CONFIG).orElseThrow(); + final int syncJobAttemptNumber0 = jobPersistence.createAttempt(syncJobId, LOG_PATH); + jobPersistence.failAttempt(syncJobId, syncJobAttemptNumber0); + jobPersistence.failJob(syncJobId); + + // Check to see current status of all jobs from beginning of time, expecting only 1 job + final List jobStatuses = jobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, Instant.EPOCH); + assertEquals(jobStatuses.size(), 1); + assertEquals(JobStatus.FAILED, jobStatuses.get(0)); + + // Edit time supplier to return later time + final Instant timeAfterFirstJob = NOW.plusSeconds(60); + when(timeSupplier.get()).thenReturn(timeAfterFirstJob); + + // Create and succeed second job + final long newSyncJobId = jobPersistence.enqueueJob(SCOPE, SYNC_JOB_CONFIG).orElseThrow(); + final int newSyncJobAttemptNumber = jobPersistence.createAttempt(newSyncJobId, LOG_PATH); + jobPersistence.succeedAttempt(newSyncJobId, newSyncJobAttemptNumber); + + // Check to see current status of all jobs from beginning of time, expecting both jobs in createAt + // descending order (most recent first) + final List allQueryJobStatuses = jobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, Instant.EPOCH); + assertEquals(2, allQueryJobStatuses.size()); + assertEquals(JobStatus.SUCCEEDED, allQueryJobStatuses.get(0)); + assertEquals(JobStatus.FAILED, allQueryJobStatuses.get(1)); + + // Look up jobs with a timestamp after the first job. Expecting only the second job status + final List timestampFilteredJobStatuses = + jobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, timeAfterFirstJob); + assertEquals(1, timestampFilteredJobStatuses.size()); + assertEquals(JobStatus.SUCCEEDED, timestampFilteredJobStatuses.get(0)); + + // Check to see if timestamp filtering is working by only looking up jobs with timestamp after + // second job. 
Expecting no job status output + final Instant timeAfterSecondJob = timeAfterFirstJob.plusSeconds(60); + assertEquals(0, jobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, timeAfterSecondJob).size()); + } - // check to see if timestamp filtering is working, by only looking up jobs with a timestamp after the first job - final List secondQueryJobStatuses = jobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, timeAfterFirstJob); - assertEquals(1, secondQueryJobStatuses.size()); - assertEquals(JobStatus.SUCCEEDED, secondQueryJobStatuses.get(0)); + @Test + @DisplayName("Should list jobs statuses of differing status types") + public void testMultipleJobStatusTypes() throws IOException { + final Supplier timeSupplier = incrementingSecondSupplier(NOW); + jobPersistence = new DefaultJobPersistence(jobDatabase, timeSupplier, DEFAULT_MINIMUM_AGE_IN_DAYS, DEFAULT_EXCESSIVE_NUMBER_OF_JOBS, + DEFAULT_MINIMUM_RECENCY_COUNT); + + // Create and fail initial job + final long syncJobId1 = jobPersistence.enqueueJob(SCOPE, SYNC_JOB_CONFIG).orElseThrow(); + final int syncJobAttemptNumber1 = jobPersistence.createAttempt(syncJobId1, LOG_PATH); + jobPersistence.failAttempt(syncJobId1, syncJobAttemptNumber1); + jobPersistence.failJob(syncJobId1); + + // Create and succeed second job + final long syncJobId2 = jobPersistence.enqueueJob(SCOPE, SYNC_JOB_CONFIG).orElseThrow(); + final int syncJobAttemptNumber2 = jobPersistence.createAttempt(syncJobId2, LOG_PATH); + jobPersistence.succeedAttempt(syncJobId2, syncJobAttemptNumber2); + + // Create and cancel third job + final long syncJobId3 = jobPersistence.enqueueJob(SCOPE, SYNC_JOB_CONFIG).orElseThrow(); + jobPersistence.createAttempt(syncJobId3, LOG_PATH); + jobPersistence.cancelJob(syncJobId3); + + // Check to see current status of all jobs from beginning of time, expecting all jobs in createAt + // descending order (most recent first) + final List allJobStatuses = jobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, Instant.EPOCH); + assertEquals(3, allJobStatuses.size()); + assertEquals(JobStatus.CANCELLED, allJobStatuses.get(0)); + assertEquals(JobStatus.SUCCEEDED, allJobStatuses.get(1)); + assertEquals(JobStatus.FAILED, allJobStatuses.get(2)); + } - // check to see if timestamp filtering is working by only looking up jobs with timestamp after second job - final Instant timeAfterSecondJob = timeAfterFirstJob.plusSeconds(60); - assertEquals(0, jobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, timeAfterSecondJob).size()); } } diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java index b3ed6455fb22..8847ec7cb25e 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java @@ -234,7 +234,7 @@ private void reportFailure(final ConnectionUpdaterInput connectionUpdaterInput, if (attemptCreationVersion >= DISABLE_FAILING_CONNECTION_CHANGE_CURRENT_VERSION) { final AutoDisableConnectionActivityInput autoDisableConnectionActivityInput = - new AutoDisableConnectionActivityInput(connectionId, Instant.now()); + new AutoDisableConnectionActivityInput(connectionId, Instant.ofEpochMilli(Workflow.currentTimeMillis())); 
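// [Editor's note -- not part of the patch: why this hunk swaps Instant.now()
// for the Temporal SDK clock. Workflow code must be deterministic under
// replay; Workflow.currentTimeMillis() returns a value recorded in workflow
// history and is therefore stable across re-executions, whereas Instant.now()
// would produce a different timestamp on every replay. A minimal sketch:]
//
//   // inside workflow code -- replay-safe:
//   final Instant now = Instant.ofEpochMilli(Workflow.currentTimeMillis());
//   // Instant.now() remains fine inside activities, which are not replayed.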
runMandatoryActivity(autoDisableConnectionActivity::autoDisableFailingConnection, autoDisableConnectionActivityInput); } resetNewConnectionInput(connectionUpdaterInput); diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityTest.java index 670903766cc7..68f5b1ccd824 100644 --- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityTest.java +++ b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityTest.java @@ -27,7 +27,6 @@ import org.assertj.core.api.Assertions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Nested; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.InjectMocks; @@ -66,63 +65,58 @@ void setUp() { Mockito.when(mFeatureFlags.autoDisablesFailingConnections()).thenReturn(true); } - @Nested - class AutoDisableConnectionTest { - - @Test - @DisplayName("Test that the connection is disabled after MAX_FAILURE_JOBS_IN_A_ROW straight failures") - public void testMaxFailuresInARow() throws IOException, JsonValidationException, ConfigNotFoundException { - // from most recent to least recent: MAX_FAILURE_JOBS_IN_A_ROW and 1 success - final List jobStatuses = new ArrayList<>(Collections.nCopies(MAX_FAILURE_JOBS_IN_A_ROW, JobStatus.FAILED)); - jobStatuses.add(JobStatus.SUCCEEDED); - - Mockito.when(mJobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, - CURR_INSTANT.minus(mConfigs.getMaxDaysOfOnlyFailedJobsBeforeConnectionDisable(), ChronoUnit.DAYS))).thenReturn(jobStatuses); - Mockito.when(mConfigs.getMaxFailedJobsInARowBeforeConnectionDisable()).thenReturn(MAX_FAILURE_JOBS_IN_A_ROW); - Mockito.when(mConfigRepository.getStandardSync(CONNECTION_ID)).thenReturn(standardSync); - - autoDisableActivity.autoDisableFailingConnection(ACTIVITY_INPUT); - Assertions.assertThat(standardSync.getStatus()).isEqualTo(Status.INACTIVE); - } - - @Test - @DisplayName("Test that the connection is _not_ disabled after MAX_FAILURE_JOBS_IN_A_ROW - 1 straight failures") - public void testLessThanMaxFailuresInARow() throws IOException { - // from most recent to least recent: MAX_FAILURE_JOBS_IN_A_ROW-1 and 1 success - final List jobStatuses = new ArrayList<>(Collections.nCopies(MAX_FAILURE_JOBS_IN_A_ROW - 1, JobStatus.FAILED)); - jobStatuses.add(JobStatus.SUCCEEDED); - - Mockito.when(mJobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, - CURR_INSTANT.minus(mConfigs.getMaxDaysOfOnlyFailedJobsBeforeConnectionDisable(), ChronoUnit.DAYS))).thenReturn(jobStatuses); - Mockito.when(mConfigs.getMaxFailedJobsInARowBeforeConnectionDisable()).thenReturn(MAX_FAILURE_JOBS_IN_A_ROW); - - autoDisableActivity.autoDisableFailingConnection(ACTIVITY_INPUT); - Assertions.assertThat(standardSync.getStatus()).isEqualTo(Status.ACTIVE); - } - - @Test - @DisplayName("Test that the connection is _not_ disabled after 0 jobs in last MAX_DAYS_OF_STRAIGHT_FAILURE days") - public void testNoRuns() throws IOException { - Mockito.when(mJobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, - CURR_INSTANT.minus(mConfigs.getMaxDaysOfOnlyFailedJobsBeforeConnectionDisable(), ChronoUnit.DAYS))).thenReturn(Collections.emptyList()); - - 
autoDisableActivity.autoDisableFailingConnection(ACTIVITY_INPUT); - Assertions.assertThat(standardSync.getStatus()).isEqualTo(Status.ACTIVE); - } - - @Test - @DisplayName("Test that the connection is disabled after only failed jobs in last MAX_DAYS_OF_STRAIGHT_FAILURE days") - public void testOnlyFailuresInMaxDays() throws IOException, JsonValidationException, ConfigNotFoundException { - Mockito.when(mJobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, - CURR_INSTANT.minus(mConfigs.getMaxDaysOfOnlyFailedJobsBeforeConnectionDisable(), ChronoUnit.DAYS))) - .thenReturn(Collections.singletonList(JobStatus.FAILED)); - Mockito.when(mConfigRepository.getStandardSync(CONNECTION_ID)).thenReturn(standardSync); - Mockito.when(mConfigs.getMaxFailedJobsInARowBeforeConnectionDisable()).thenReturn(MAX_FAILURE_JOBS_IN_A_ROW); - - autoDisableActivity.autoDisableFailingConnection(ACTIVITY_INPUT); - Assertions.assertThat(standardSync.getStatus()).isEqualTo(Status.INACTIVE); - } + @Test + @DisplayName("Test that the connection is disabled after MAX_FAILURE_JOBS_IN_A_ROW straight failures") + public void testMaxFailuresInARow() throws IOException, JsonValidationException, ConfigNotFoundException { + // from most recent to least recent: MAX_FAILURE_JOBS_IN_A_ROW and 1 success + final List jobStatuses = new ArrayList<>(Collections.nCopies(MAX_FAILURE_JOBS_IN_A_ROW, JobStatus.FAILED)); + jobStatuses.add(JobStatus.SUCCEEDED); + Mockito.when(mJobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, + CURR_INSTANT.minus(mConfigs.getMaxDaysOfOnlyFailedJobsBeforeConnectionDisable(), ChronoUnit.DAYS))).thenReturn(jobStatuses); + Mockito.when(mConfigs.getMaxFailedJobsInARowBeforeConnectionDisable()).thenReturn(MAX_FAILURE_JOBS_IN_A_ROW); + Mockito.when(mConfigRepository.getStandardSync(CONNECTION_ID)).thenReturn(standardSync); + + autoDisableActivity.autoDisableFailingConnection(ACTIVITY_INPUT); + Assertions.assertThat(standardSync.getStatus()).isEqualTo(Status.INACTIVE); + } + + @Test + @DisplayName("Test that the connection is _not_ disabled after MAX_FAILURE_JOBS_IN_A_ROW - 1 straight failures") + public void testLessThanMaxFailuresInARow() throws IOException { + // from most recent to least recent: MAX_FAILURE_JOBS_IN_A_ROW-1 and 1 success + final List jobStatuses = new ArrayList<>(Collections.nCopies(MAX_FAILURE_JOBS_IN_A_ROW - 1, JobStatus.FAILED)); + jobStatuses.add(JobStatus.SUCCEEDED); + + Mockito.when(mJobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, + CURR_INSTANT.minus(mConfigs.getMaxDaysOfOnlyFailedJobsBeforeConnectionDisable(), ChronoUnit.DAYS))).thenReturn(jobStatuses); + Mockito.when(mConfigs.getMaxFailedJobsInARowBeforeConnectionDisable()).thenReturn(MAX_FAILURE_JOBS_IN_A_ROW); + + autoDisableActivity.autoDisableFailingConnection(ACTIVITY_INPUT); + Assertions.assertThat(standardSync.getStatus()).isEqualTo(Status.ACTIVE); + } + + @Test + @DisplayName("Test that the connection is _not_ disabled after 0 jobs in last MAX_DAYS_OF_STRAIGHT_FAILURE days") + public void testNoRuns() throws IOException { + Mockito.when(mJobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, + CURR_INSTANT.minus(mConfigs.getMaxDaysOfOnlyFailedJobsBeforeConnectionDisable(), ChronoUnit.DAYS))).thenReturn(Collections.emptyList()); + + autoDisableActivity.autoDisableFailingConnection(ACTIVITY_INPUT); + Assertions.assertThat(standardSync.getStatus()).isEqualTo(Status.ACTIVE); + } + + @Test + @DisplayName("Test that the connection is disabled after 
only failed jobs in last MAX_DAYS_OF_STRAIGHT_FAILURE days") + public void testOnlyFailuresInMaxDays() throws IOException, JsonValidationException, ConfigNotFoundException { + Mockito.when(mJobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, + CURR_INSTANT.minus(mConfigs.getMaxDaysOfOnlyFailedJobsBeforeConnectionDisable(), ChronoUnit.DAYS))) + .thenReturn(Collections.singletonList(JobStatus.FAILED)); + Mockito.when(mConfigRepository.getStandardSync(CONNECTION_ID)).thenReturn(standardSync); + Mockito.when(mConfigs.getMaxFailedJobsInARowBeforeConnectionDisable()).thenReturn(MAX_FAILURE_JOBS_IN_A_ROW); + + autoDisableActivity.autoDisableFailingConnection(ACTIVITY_INPUT); + Assertions.assertThat(standardSync.getStatus()).isEqualTo(Status.INACTIVE); } } From dcd5636191b06a827b9f1bf6366f59bb08e0452a Mon Sep 17 00:00:00 2001 From: terencecho Date: Fri, 11 Mar 2022 13:57:04 -0800 Subject: [PATCH 09/38] Support multiple config types --- .../persistence/DefaultJobPersistence.java | 7 ++- .../scheduler/persistence/JobPersistence.java | 5 ++- .../DefaultJobPersistenceTest.java | 43 ++++++++++++++++--- .../AutoDisableConnectionActivityImpl.java | 8 +++- .../AutoDisableConnectionActivityTest.java | 10 ++--- 5 files changed, 54 insertions(+), 19 deletions(-) diff --git a/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/DefaultJobPersistence.java b/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/DefaultJobPersistence.java index 0e0a9ea62b58..826e71f695f8 100644 --- a/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/DefaultJobPersistence.java +++ b/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/DefaultJobPersistence.java @@ -386,7 +386,7 @@ public List listJobsWithStatus(final ConfigType configType, final JobStatus } @Override - public List listJobStatusWithConnection(final UUID connectionId, final ConfigType configType, final Instant jobCreatedAtTimestamp) + public List listJobStatusWithConnection(final UUID connectionId, final Set configTypes, final Instant jobCreatedAtTimestamp) throws IOException { final LocalDateTime timeConvertedIntoLocalDateTime = LocalDateTime.ofInstant(jobCreatedAtTimestamp, ZoneOffset.UTC); @@ -394,9 +394,8 @@ public List listJobStatusWithConnection(final UUID connectionId, fina return jobDatabase.query(ctx -> ctx .fetch(JobStatusSelect + "WHERE " + "CAST(scope AS VARCHAR) = ? AND " + - "CAST(config_type AS VARCHAR) = ? AND " + - "created_at >= ? ORDER BY created_at DESC", connectionId.toString(), - Sqls.toSqlName(configType), timeConvertedIntoLocalDateTime)) + "CAST(config_type AS VARCHAR) in " + Sqls.toSqlInFragment(configTypes) + " AND " + + "created_at >= ? 
ORDER BY created_at DESC", connectionId.toString(), timeConvertedIntoLocalDateTime)) .stream() .map(r -> JobStatus.valueOf(r.get("status", String.class).toUpperCase())) .toList(); diff --git a/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/JobPersistence.java b/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/JobPersistence.java index 887119da2a15..cda5b52f5864 100644 --- a/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/JobPersistence.java +++ b/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/JobPersistence.java @@ -162,13 +162,14 @@ public interface JobPersistence { /** * @param connectionId The ID of the connection - * @param configType The type of job + * @param configTypes The types of jobs * @param jobCreatedAtTimestamp The timestamp after which you want the jobs * @return List of job statuses from a specific connection that have attempts after the provided * timestamp, sorted by jobs' createAt in descending order * @throws IOException */ - List listJobStatusWithConnection(UUID connectionId, ConfigType configType, Instant jobCreatedAtTimestamp) throws IOException; + List listJobStatusWithConnection(UUID connectionId, Set configTypes, Instant jobCreatedAtTimestamp) + throws IOException; Optional getLastReplicationJob(UUID connectionId) throws IOException; diff --git a/airbyte-scheduler/persistence/src/test/java/io/airbyte/scheduler/persistence/DefaultJobPersistenceTest.java b/airbyte-scheduler/persistence/src/test/java/io/airbyte/scheduler/persistence/DefaultJobPersistenceTest.java index b2b72a197f42..a5d50a11fb17 100644 --- a/airbyte-scheduler/persistence/src/test/java/io/airbyte/scheduler/persistence/DefaultJobPersistenceTest.java +++ b/airbyte-scheduler/persistence/src/test/java/io/airbyte/scheduler/persistence/DefaultJobPersistenceTest.java @@ -1337,7 +1337,7 @@ public void testConnectionIdFiltering() throws IOException { final long wrongConnectionSyncJobId = jobPersistence.enqueueJob(UUID.randomUUID().toString(), SYNC_JOB_CONFIG).orElseThrow(); final int wrongSyncJobAttemptNumber0 = jobPersistence.createAttempt(wrongConnectionSyncJobId, LOG_PATH); jobPersistence.failAttempt(wrongConnectionSyncJobId, wrongSyncJobAttemptNumber0); - assertEquals(0, jobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, Instant.EPOCH).size()); + assertEquals(0, jobPersistence.listJobStatusWithConnection(CONNECTION_ID, Sets.newHashSet(ConfigType.SYNC), Instant.EPOCH).size()); // create a connection with relevant connection id final long syncJobId = jobPersistence.enqueueJob(SCOPE, SYNC_JOB_CONFIG).orElseThrow(); @@ -1345,7 +1345,7 @@ public void testConnectionIdFiltering() throws IOException { jobPersistence.failAttempt(syncJobId, syncJobAttemptNumber0); // check to see current status of only relevantly scoped job - final List jobStatuses = jobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, Instant.EPOCH); + final List jobStatuses = jobPersistence.listJobStatusWithConnection(CONNECTION_ID, Sets.newHashSet(ConfigType.SYNC), Instant.EPOCH); assertEquals(jobStatuses.size(), 1); assertEquals(JobStatus.INCOMPLETE, jobStatuses.get(0)); } @@ -1363,7 +1363,7 @@ public void testTimestampFiltering() throws IOException { jobPersistence.failJob(syncJobId); // Check to see current status of all jobs from beginning of time, expecting only 1 job - final List jobStatuses = jobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, 
diff --git a/airbyte-scheduler/persistence/src/test/java/io/airbyte/scheduler/persistence/DefaultJobPersistenceTest.java b/airbyte-scheduler/persistence/src/test/java/io/airbyte/scheduler/persistence/DefaultJobPersistenceTest.java
index b2b72a197f42..a5d50a11fb17 100644
--- a/airbyte-scheduler/persistence/src/test/java/io/airbyte/scheduler/persistence/DefaultJobPersistenceTest.java
+++ b/airbyte-scheduler/persistence/src/test/java/io/airbyte/scheduler/persistence/DefaultJobPersistenceTest.java
@@ -1337,7 +1337,7 @@ public void testConnectionIdFiltering() throws IOException {
     final long wrongConnectionSyncJobId = jobPersistence.enqueueJob(UUID.randomUUID().toString(), SYNC_JOB_CONFIG).orElseThrow();
     final int wrongSyncJobAttemptNumber0 = jobPersistence.createAttempt(wrongConnectionSyncJobId, LOG_PATH);
     jobPersistence.failAttempt(wrongConnectionSyncJobId, wrongSyncJobAttemptNumber0);
-    assertEquals(0, jobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, Instant.EPOCH).size());
+    assertEquals(0, jobPersistence.listJobStatusWithConnection(CONNECTION_ID, Sets.newHashSet(ConfigType.SYNC), Instant.EPOCH).size());
     // create a connection with relevant connection id
     final long syncJobId = jobPersistence.enqueueJob(SCOPE, SYNC_JOB_CONFIG).orElseThrow();
     final int syncJobAttemptNumber0 = jobPersistence.createAttempt(syncJobId, LOG_PATH);
     jobPersistence.failAttempt(syncJobId, syncJobAttemptNumber0);
     // check to see current status of only relevantly scoped job
-    final List<JobStatus> jobStatuses = jobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, Instant.EPOCH);
+    final List<JobStatus> jobStatuses = jobPersistence.listJobStatusWithConnection(CONNECTION_ID, Sets.newHashSet(ConfigType.SYNC), Instant.EPOCH);
     assertEquals(jobStatuses.size(), 1);
     assertEquals(JobStatus.INCOMPLETE, jobStatuses.get(0));
   }
@@ -1363,7 +1363,7 @@ public void testTimestampFiltering() throws IOException {
     jobPersistence.failJob(syncJobId);
     // Check to see current status of all jobs from beginning of time, expecting only 1 job
-    final List<JobStatus> jobStatuses = jobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, Instant.EPOCH);
+    final List<JobStatus> jobStatuses = jobPersistence.listJobStatusWithConnection(CONNECTION_ID, Sets.newHashSet(ConfigType.SYNC), Instant.EPOCH);
     assertEquals(jobStatuses.size(), 1);
     assertEquals(JobStatus.FAILED, jobStatuses.get(0));
@@ -1378,21 +1378,22 @@
     // Check to see current status of all jobs from beginning of time, expecting both jobs in createdAt
     // descending order (most recent first)
-    final List<JobStatus> allQueryJobStatuses = jobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, Instant.EPOCH);
+    final List<JobStatus> allQueryJobStatuses =
+        jobPersistence.listJobStatusWithConnection(CONNECTION_ID, Sets.newHashSet(ConfigType.SYNC), Instant.EPOCH);
     assertEquals(2, allQueryJobStatuses.size());
     assertEquals(JobStatus.SUCCEEDED, allQueryJobStatuses.get(0));
     assertEquals(JobStatus.FAILED, allQueryJobStatuses.get(1));
     // Look up jobs with a timestamp after the first job. Expecting only the second job status
     final List<JobStatus> timestampFilteredJobStatuses =
-        jobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, timeAfterFirstJob);
+        jobPersistence.listJobStatusWithConnection(CONNECTION_ID, Sets.newHashSet(ConfigType.SYNC), timeAfterFirstJob);
     assertEquals(1, timestampFilteredJobStatuses.size());
     assertEquals(JobStatus.SUCCEEDED, timestampFilteredJobStatuses.get(0));
     // Check to see if timestamp filtering is working by only looking up jobs with timestamp after
     // second job. Expecting no job status output
     final Instant timeAfterSecondJob = timeAfterFirstJob.plusSeconds(60);
-    assertEquals(0, jobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, timeAfterSecondJob).size());
+    assertEquals(0, jobPersistence.listJobStatusWithConnection(CONNECTION_ID, Sets.newHashSet(ConfigType.SYNC), timeAfterSecondJob).size());
   }
   @Test
@@ -1420,13 +1421,41 @@ public void testMultipleJobStatusTypes() throws IOException {
     // Check to see current status of all jobs from beginning of time, expecting all jobs in createdAt
     // descending order (most recent first)
-    final List<JobStatus> allJobStatuses = jobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, Instant.EPOCH);
+    final List<JobStatus> allJobStatuses =
+        jobPersistence.listJobStatusWithConnection(CONNECTION_ID, Sets.newHashSet(ConfigType.SYNC), Instant.EPOCH);
     assertEquals(3, allJobStatuses.size());
     assertEquals(JobStatus.CANCELLED, allJobStatuses.get(0));
     assertEquals(JobStatus.SUCCEEDED, allJobStatuses.get(1));
     assertEquals(JobStatus.FAILED, allJobStatuses.get(2));
   }
+  @Test
+  @DisplayName("Should list job statuses of differing job config types")
+  public void testMultipleConfigTypes() throws IOException {
+    final Set<ConfigType> configTypes = Sets.newHashSet(ConfigType.GET_SPEC, ConfigType.CHECK_CONNECTION_DESTINATION);
+    final Supplier<Instant> timeSupplier = incrementingSecondSupplier(NOW);
+    jobPersistence = new DefaultJobPersistence(jobDatabase, timeSupplier, DEFAULT_MINIMUM_AGE_IN_DAYS, DEFAULT_EXCESSIVE_NUMBER_OF_JOBS,
+        DEFAULT_MINIMUM_RECENCY_COUNT);
+    // failed status: the job is enqueued, then failed outright
+    final long failedSpecJobId = jobPersistence.enqueueJob(SCOPE, CHECK_JOB_CONFIG).orElseThrow();
+    jobPersistence.failJob(failedSpecJobId);
+    // incomplete status: the job gets an attempt, and only the attempt fails
+    final long incompleteSpecJobId = jobPersistence.enqueueJob(SCOPE, SPEC_JOB_CONFIG).orElseThrow();
+    final int attemptNumber = jobPersistence.createAttempt(incompleteSpecJobId, LOG_PATH);
+    jobPersistence.failAttempt(incompleteSpecJobId, attemptNumber);
+    // this job should be ignored since it's not in the configTypes we're querying
for + jobPersistence.enqueueJob(SCOPE, SYNC_JOB_CONFIG).orElseThrow(); + + // expect order to be from most recent to least recent + final List allJobStatuses = jobPersistence.listJobStatusWithConnection(CONNECTION_ID, configTypes, Instant.EPOCH); + assertEquals(2, allJobStatuses.size()); + assertEquals(JobStatus.INCOMPLETE, allJobStatuses.get(0)); + assertEquals(JobStatus.FAILED, allJobStatuses.get(1)); + } + } } diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityImpl.java index 302f544ac21f..c8e34e92d6e2 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityImpl.java @@ -4,6 +4,8 @@ package io.airbyte.workers.temporal.scheduling.activities; +import com.google.common.annotations.VisibleForTesting; +import com.google.common.collect.Sets; import io.airbyte.commons.features.FeatureFlags; import io.airbyte.config.Configs; import io.airbyte.config.JobConfig.ConfigType; @@ -15,11 +17,15 @@ import io.airbyte.workers.temporal.exception.RetryableException; import java.time.temporal.ChronoUnit; import java.util.List; +import java.util.Set; import lombok.AllArgsConstructor; @AllArgsConstructor public class AutoDisableConnectionActivityImpl implements AutoDisableConnectionActivity { + @VisibleForTesting + public static final Set AUTO_DISABLE_CONFIG_TYPES = Sets.newHashSet(ConfigType.SYNC, ConfigType.RESET_CONNECTION); + private ConfigRepository configRepository; private JobPersistence jobPersistence; private FeatureFlags featureFlags; @@ -29,7 +35,7 @@ public class AutoDisableConnectionActivityImpl implements AutoDisableConnectionA public void autoDisableFailingConnection(final AutoDisableConnectionActivityInput input) { if (featureFlags.autoDisablesFailingConnections()) { try { - final List jobStatuses = jobPersistence.listJobStatusWithConnection(input.getConnectionId(), ConfigType.SYNC, + final List jobStatuses = jobPersistence.listJobStatusWithConnection(input.getConnectionId(), AUTO_DISABLE_CONFIG_TYPES, input.getCurrTimestamp().minus(configs.getMaxDaysOfOnlyFailedJobsBeforeConnectionDisable(), ChronoUnit.DAYS)); if (jobStatuses.size() == 0) diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityTest.java index 68f5b1ccd824..86ef82ff32f3 100644 --- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityTest.java +++ b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityTest.java @@ -5,10 +5,10 @@ package io.airbyte.workers.temporal.scheduling.activities; import static io.airbyte.config.EnvConfigs.DEFAULT_FAILED_JOBS_IN_A_ROW_BEFORE_CONNECTION_DISABLE; +import static io.airbyte.workers.temporal.scheduling.activities.AutoDisableConnectionActivityImpl.AUTO_DISABLE_CONFIG_TYPES; import io.airbyte.commons.features.FeatureFlags; import io.airbyte.config.Configs; -import io.airbyte.config.JobConfig.ConfigType; import io.airbyte.config.StandardSync; import io.airbyte.config.StandardSync.Status; import 
io.airbyte.config.persistence.ConfigNotFoundException; @@ -72,7 +72,7 @@ public void testMaxFailuresInARow() throws IOException, JsonValidationException, final List jobStatuses = new ArrayList<>(Collections.nCopies(MAX_FAILURE_JOBS_IN_A_ROW, JobStatus.FAILED)); jobStatuses.add(JobStatus.SUCCEEDED); - Mockito.when(mJobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, + Mockito.when(mJobPersistence.listJobStatusWithConnection(CONNECTION_ID, AUTO_DISABLE_CONFIG_TYPES, CURR_INSTANT.minus(mConfigs.getMaxDaysOfOnlyFailedJobsBeforeConnectionDisable(), ChronoUnit.DAYS))).thenReturn(jobStatuses); Mockito.when(mConfigs.getMaxFailedJobsInARowBeforeConnectionDisable()).thenReturn(MAX_FAILURE_JOBS_IN_A_ROW); Mockito.when(mConfigRepository.getStandardSync(CONNECTION_ID)).thenReturn(standardSync); @@ -88,7 +88,7 @@ public void testLessThanMaxFailuresInARow() throws IOException { final List jobStatuses = new ArrayList<>(Collections.nCopies(MAX_FAILURE_JOBS_IN_A_ROW - 1, JobStatus.FAILED)); jobStatuses.add(JobStatus.SUCCEEDED); - Mockito.when(mJobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, + Mockito.when(mJobPersistence.listJobStatusWithConnection(CONNECTION_ID, AUTO_DISABLE_CONFIG_TYPES, CURR_INSTANT.minus(mConfigs.getMaxDaysOfOnlyFailedJobsBeforeConnectionDisable(), ChronoUnit.DAYS))).thenReturn(jobStatuses); Mockito.when(mConfigs.getMaxFailedJobsInARowBeforeConnectionDisable()).thenReturn(MAX_FAILURE_JOBS_IN_A_ROW); @@ -99,7 +99,7 @@ public void testLessThanMaxFailuresInARow() throws IOException { @Test @DisplayName("Test that the connection is _not_ disabled after 0 jobs in last MAX_DAYS_OF_STRAIGHT_FAILURE days") public void testNoRuns() throws IOException { - Mockito.when(mJobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, + Mockito.when(mJobPersistence.listJobStatusWithConnection(CONNECTION_ID, AUTO_DISABLE_CONFIG_TYPES, CURR_INSTANT.minus(mConfigs.getMaxDaysOfOnlyFailedJobsBeforeConnectionDisable(), ChronoUnit.DAYS))).thenReturn(Collections.emptyList()); autoDisableActivity.autoDisableFailingConnection(ACTIVITY_INPUT); @@ -109,7 +109,7 @@ public void testNoRuns() throws IOException { @Test @DisplayName("Test that the connection is disabled after only failed jobs in last MAX_DAYS_OF_STRAIGHT_FAILURE days") public void testOnlyFailuresInMaxDays() throws IOException, JsonValidationException, ConfigNotFoundException { - Mockito.when(mJobPersistence.listJobStatusWithConnection(CONNECTION_ID, ConfigType.SYNC, + Mockito.when(mJobPersistence.listJobStatusWithConnection(CONNECTION_ID, AUTO_DISABLE_CONFIG_TYPES, CURR_INSTANT.minus(mConfigs.getMaxDaysOfOnlyFailedJobsBeforeConnectionDisable(), ChronoUnit.DAYS))) .thenReturn(Collections.singletonList(JobStatus.FAILED)); Mockito.when(mConfigRepository.getStandardSync(CONNECTION_ID)).thenReturn(standardSync); From 95a8ad96e87a85dd77e0874ca78bc0c47574c4bb Mon Sep 17 00:00:00 2001 From: terencecho Date: Sat, 12 Mar 2022 23:27:27 -0800 Subject: [PATCH 10/38] Update unit tests --- .../scheduling/ConnectionManagerWorkflowImpl.java | 8 +++++--- .../scheduling/ConnectionManagerWorkflowTest.java | 8 ++++++-- 2 files changed, 11 insertions(+), 5 deletions(-) diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java index 8847ec7cb25e..c654f034880b 100644 --- 
a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java @@ -58,7 +58,7 @@ public class ConnectionManagerWorkflowImpl implements ConnectionManagerWorkflow public static final int NON_RUNNING_ATTEMPT_ID = -1; private static final int TASK_QUEUE_CHANGE_CURRENT_VERSION = 1; - private static final int DISABLE_FAILING_CONNECTION_CHANGE_CURRENT_VERSION = 1; + private static final int AUTO_DISABLE_FAILING_CONNECTION_CHANGE_CURRENT_VERSION = 1; private WorkflowState workflowState = new WorkflowState(UUID.randomUUID(), new NoopStateListener()); @@ -229,14 +229,16 @@ private void reportFailure(final ConnectionUpdaterInput connectionUpdaterInput, runMandatoryActivity(jobCreationAndStatusUpdateActivity::jobFailure, new JobFailureInput( connectionUpdaterInput.getJobId(), "Job failed after too many retries for connection " + connectionId)); + final int attemptCreationVersion = - Workflow.getVersion("disable_failing_connection", Workflow.DEFAULT_VERSION, DISABLE_FAILING_CONNECTION_CHANGE_CURRENT_VERSION); + Workflow.getVersion("auto_disable_failing_connection", Workflow.DEFAULT_VERSION, AUTO_DISABLE_FAILING_CONNECTION_CHANGE_CURRENT_VERSION); - if (attemptCreationVersion >= DISABLE_FAILING_CONNECTION_CHANGE_CURRENT_VERSION) { + if (attemptCreationVersion != Workflow.DEFAULT_VERSION) { final AutoDisableConnectionActivityInput autoDisableConnectionActivityInput = new AutoDisableConnectionActivityInput(connectionId, Instant.ofEpochMilli(Workflow.currentTimeMillis())); runMandatoryActivity(autoDisableConnectionActivity::autoDisableFailingConnection, autoDisableConnectionActivityInput); } + resetNewConnectionInput(connectionUpdaterInput); if (workflowState.isResetConnection()) { connectionUpdaterInput.setFromJobResetFailure(true); diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowTest.java index 083dfd8eeb88..ca897b5afe80 100644 --- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowTest.java +++ b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowTest.java @@ -10,6 +10,7 @@ import io.airbyte.scheduler.models.IntegrationLauncherConfig; import io.airbyte.scheduler.models.JobRunConfig; import io.airbyte.workers.temporal.TemporalJobType; +import io.airbyte.workers.temporal.scheduling.activities.AutoDisableConnectionActivity; import io.airbyte.workers.temporal.scheduling.activities.ConfigFetchActivity; import io.airbyte.workers.temporal.scheduling.activities.ConfigFetchActivity.GetMaxAttemptOutput; import io.airbyte.workers.temporal.scheduling.activities.ConfigFetchActivity.ScheduleRetrieverOutput; @@ -87,6 +88,8 @@ public class ConnectionManagerWorkflowTest { Mockito.mock(GenerateInputActivityImpl.class, Mockito.withSettings().withoutAnnotations()); private static final JobCreationAndStatusUpdateActivity mJobCreationAndStatusUpdateActivity = Mockito.mock(JobCreationAndStatusUpdateActivity.class, Mockito.withSettings().withoutAnnotations()); + private static final AutoDisableConnectionActivity mAutoDisableConnectionActivity = + Mockito.mock(AutoDisableConnectionActivity.class, Mockito.withSettings().withoutAnnotations()); private TestWorkflowEnvironment testEnv; private WorkflowClient client; @@ -108,6 +111,7 
@@ public void setUp() { Mockito.reset(mConnectionDeletionActivity); Mockito.reset(mGenerateInputActivityImpl); Mockito.reset(mJobCreationAndStatusUpdateActivity); + Mockito.reset(mAutoDisableConnectionActivity); // default is to wait "forever" Mockito.when(mConfigFetchActivity.getTimeToWait(Mockito.any())).thenReturn(new ScheduleRetrieverOutput( @@ -702,7 +706,7 @@ public void setup() { final Worker managerWorker = testEnv.newWorker(TemporalJobType.CONNECTION_UPDATER.name()); managerWorker.registerWorkflowImplementationTypes(ConnectionManagerWorkflowImpl.class); managerWorker.registerActivitiesImplementations(mConfigFetchActivity, mConnectionDeletionActivity, - mGenerateInputActivityImpl, mJobCreationAndStatusUpdateActivity); + mGenerateInputActivityImpl, mJobCreationAndStatusUpdateActivity, mAutoDisableConnectionActivity); client = testEnv.getWorkflowClient(); workflow = client.newWorkflowStub(ConnectionManagerWorkflow.class, @@ -1260,7 +1264,7 @@ private void setupSpecificChildWorkflow(final Class final Worker managerWorker = testEnv.newWorker(TemporalJobType.CONNECTION_UPDATER.name()); managerWorker.registerWorkflowImplementationTypes(ConnectionManagerWorkflowImpl.class); managerWorker.registerActivitiesImplementations(mConfigFetchActivity, mConnectionDeletionActivity, - mGenerateInputActivityImpl, mJobCreationAndStatusUpdateActivity); + mGenerateInputActivityImpl, mJobCreationAndStatusUpdateActivity, mAutoDisableConnectionActivity); client = testEnv.getWorkflowClient(); testEnv.start(); From dc61d124c6c27398854d1bf9600cac66489823dc Mon Sep 17 00:00:00 2001 From: Octavia Squidington III <90398440+octavia-squidington-iii@users.noreply.github.com> Date: Thu, 10 Mar 2022 23:35:34 +0100 Subject: [PATCH 11/38] Bump Airbyte version from 0.35.49-alpha to 0.35.50-alpha (#11050) Co-authored-by: lmossman --- .bumpversion.cfg | 2 +- .env | 2 +- airbyte-bootloader/Dockerfile | 4 ++-- airbyte-container-orchestrator/Dockerfile | 6 +++--- airbyte-metrics/reporter/Dockerfile | 4 ++-- airbyte-scheduler/app/Dockerfile | 4 ++-- airbyte-server/Dockerfile | 4 ++-- airbyte-webapp/package-lock.json | 4 ++-- airbyte-webapp/package.json | 2 +- airbyte-workers/Dockerfile | 4 ++-- charts/airbyte/Chart.yaml | 2 +- charts/airbyte/README.md | 10 +++++----- charts/airbyte/values.yaml | 10 +++++----- docs/operator-guides/upgrading-airbyte.md | 2 +- kube/overlays/stable-with-resource-limits/.env | 2 +- .../stable-with-resource-limits/kustomization.yaml | 12 ++++++------ kube/overlays/stable/.env | 2 +- kube/overlays/stable/kustomization.yaml | 12 ++++++------ 18 files changed, 44 insertions(+), 44 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 14f58da9f624..9d3a7df07143 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.35.49-alpha +current_version = 0.35.50-alpha commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(\-[a-z]+)? 
diff --git a/.env b/.env index c93fd5f8cee6..b79e0cdfd6b3 100644 --- a/.env +++ b/.env @@ -10,7 +10,7 @@ ### SHARED ### -VERSION=0.35.49-alpha +VERSION=0.35.50-alpha # When using the airbyte-db via default docker image CONFIG_ROOT=/data diff --git a/airbyte-bootloader/Dockerfile b/airbyte-bootloader/Dockerfile index 647aaca47810..1c10e18c14ff 100644 --- a/airbyte-bootloader/Dockerfile +++ b/airbyte-bootloader/Dockerfile @@ -5,6 +5,6 @@ ENV APPLICATION airbyte-bootloader WORKDIR /app -ADD bin/${APPLICATION}-0.35.49-alpha.tar /app +ADD bin/${APPLICATION}-0.35.50-alpha.tar /app -ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.49-alpha/bin/${APPLICATION}"] +ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.50-alpha/bin/${APPLICATION}"] diff --git a/airbyte-container-orchestrator/Dockerfile b/airbyte-container-orchestrator/Dockerfile index de594bb1cc72..50091530078f 100644 --- a/airbyte-container-orchestrator/Dockerfile +++ b/airbyte-container-orchestrator/Dockerfile @@ -26,12 +26,12 @@ RUN echo "deb [signed-by=/usr/share/keyrings/kubernetes-archive-keyring.gpg] htt RUN apt-get update && apt-get install -y kubectl ENV APPLICATION airbyte-container-orchestrator -ENV AIRBYTE_ENTRYPOINT "/app/${APPLICATION}-0.35.49-alpha/bin/${APPLICATION}" +ENV AIRBYTE_ENTRYPOINT "/app/${APPLICATION}-0.35.50-alpha/bin/${APPLICATION}" WORKDIR /app # Move orchestrator app -ADD bin/${APPLICATION}-0.35.49-alpha.tar /app +ADD bin/${APPLICATION}-0.35.50-alpha.tar /app # wait for upstream dependencies to become available before starting server -ENTRYPOINT ["/bin/bash", "-c", "/app/${APPLICATION}-0.35.49-alpha/bin/${APPLICATION}"] +ENTRYPOINT ["/bin/bash", "-c", "/app/${APPLICATION}-0.35.50-alpha/bin/${APPLICATION}"] diff --git a/airbyte-metrics/reporter/Dockerfile b/airbyte-metrics/reporter/Dockerfile index 871880780653..2c9475ccc932 100644 --- a/airbyte-metrics/reporter/Dockerfile +++ b/airbyte-metrics/reporter/Dockerfile @@ -5,7 +5,7 @@ ENV APPLICATION airbyte-metrics-reporter WORKDIR /app -ADD bin/${APPLICATION}-0.35.49-alpha.tar /app +ADD bin/${APPLICATION}-0.35.50-alpha.tar /app # wait for upstream dependencies to become available before starting server -ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.49-alpha/bin/${APPLICATION}"] +ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.50-alpha/bin/${APPLICATION}"] diff --git a/airbyte-scheduler/app/Dockerfile b/airbyte-scheduler/app/Dockerfile index ae3adf119415..dadb5c151749 100644 --- a/airbyte-scheduler/app/Dockerfile +++ b/airbyte-scheduler/app/Dockerfile @@ -5,7 +5,7 @@ ENV APPLICATION airbyte-scheduler WORKDIR /app -ADD bin/${APPLICATION}-0.35.49-alpha.tar /app +ADD bin/${APPLICATION}-0.35.50-alpha.tar /app # wait for upstream dependencies to become available before starting server -ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.49-alpha/bin/${APPLICATION}"] +ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.50-alpha/bin/${APPLICATION}"] diff --git a/airbyte-server/Dockerfile b/airbyte-server/Dockerfile index e2dc5eb79d53..eb6a7a171365 100644 --- a/airbyte-server/Dockerfile +++ b/airbyte-server/Dockerfile @@ -7,7 +7,7 @@ ENV APPLICATION airbyte-server WORKDIR /app -ADD bin/${APPLICATION}-0.35.49-alpha.tar /app +ADD bin/${APPLICATION}-0.35.50-alpha.tar /app # wait for upstream dependencies to become available before starting server -ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.49-alpha/bin/${APPLICATION}"] +ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.50-alpha/bin/${APPLICATION}"] diff --git a/airbyte-webapp/package-lock.json 
b/airbyte-webapp/package-lock.json index 7850c4468b0a..da97c2d1aa49 100644 --- a/airbyte-webapp/package-lock.json +++ b/airbyte-webapp/package-lock.json @@ -1,12 +1,12 @@ { "name": "airbyte-webapp", - "version": "0.35.49-alpha", + "version": "0.35.50-alpha", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "airbyte-webapp", - "version": "0.35.49-alpha", + "version": "0.35.50-alpha", "dependencies": { "@fortawesome/fontawesome-svg-core": "^1.2.36", "@fortawesome/free-brands-svg-icons": "^5.15.4", diff --git a/airbyte-webapp/package.json b/airbyte-webapp/package.json index 845dafef44d6..d6d824c38f0b 100644 --- a/airbyte-webapp/package.json +++ b/airbyte-webapp/package.json @@ -1,6 +1,6 @@ { "name": "airbyte-webapp", - "version": "0.35.49-alpha", + "version": "0.35.50-alpha", "private": true, "engines": { "node": ">=16.0.0" diff --git a/airbyte-workers/Dockerfile b/airbyte-workers/Dockerfile index 8afb283a9a33..db067476c928 100644 --- a/airbyte-workers/Dockerfile +++ b/airbyte-workers/Dockerfile @@ -30,7 +30,7 @@ ENV APPLICATION airbyte-workers WORKDIR /app # Move worker app -ADD bin/${APPLICATION}-0.35.49-alpha.tar /app +ADD bin/${APPLICATION}-0.35.50-alpha.tar /app # wait for upstream dependencies to become available before starting server -ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.49-alpha/bin/${APPLICATION}"] +ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.50-alpha/bin/${APPLICATION}"] diff --git a/charts/airbyte/Chart.yaml b/charts/airbyte/Chart.yaml index 83850168df36..1e03fa499221 100644 --- a/charts/airbyte/Chart.yaml +++ b/charts/airbyte/Chart.yaml @@ -21,7 +21,7 @@ version: 0.3.0 # incremented each time you make changes to the application. Versions are not expected to # follow Semantic Versioning. They should reflect the version the application is using. # It is recommended to use it with quotes. -appVersion: "0.35.49-alpha" +appVersion: "0.35.50-alpha" dependencies: - name: common diff --git a/charts/airbyte/README.md b/charts/airbyte/README.md index 5ff1a0a541eb..08fb170665fd 100644 --- a/charts/airbyte/README.md +++ b/charts/airbyte/README.md @@ -29,7 +29,7 @@ | `webapp.replicaCount` | Number of webapp replicas | `1` | | `webapp.image.repository` | The repository to use for the airbyte webapp image. | `airbyte/webapp` | | `webapp.image.pullPolicy` | the pull policy to use for the airbyte webapp image | `IfNotPresent` | -| `webapp.image.tag` | The airbyte webapp image tag. Defaults to the chart's AppVersion | `0.35.49-alpha` | +| `webapp.image.tag` | The airbyte webapp image tag. Defaults to the chart's AppVersion | `0.35.50-alpha` | | `webapp.podAnnotations` | Add extra annotations to the webapp pod(s) | `{}` | | `webapp.containerSecurityContext` | Security context for the container | `{}` | | `webapp.livenessProbe.enabled` | Enable livenessProbe on the webapp | `true` | @@ -71,7 +71,7 @@ | `scheduler.replicaCount` | Number of scheduler replicas | `1` | | `scheduler.image.repository` | The repository to use for the airbyte scheduler image. | `airbyte/scheduler` | | `scheduler.image.pullPolicy` | the pull policy to use for the airbyte scheduler image | `IfNotPresent` | -| `scheduler.image.tag` | The airbyte scheduler image tag. Defaults to the chart's AppVersion | `0.35.49-alpha` | +| `scheduler.image.tag` | The airbyte scheduler image tag. 
Defaults to the chart's AppVersion | `0.35.50-alpha` | | `scheduler.podAnnotations` | Add extra annotations to the scheduler pod | `{}` | | `scheduler.resources.limits` | The resources limits for the scheduler container | `{}` | | `scheduler.resources.requests` | The requested resources for the scheduler container | `{}` | @@ -118,7 +118,7 @@ | `server.replicaCount` | Number of server replicas | `1` | | `server.image.repository` | The repository to use for the airbyte server image. | `airbyte/server` | | `server.image.pullPolicy` | the pull policy to use for the airbyte server image | `IfNotPresent` | -| `server.image.tag` | The airbyte server image tag. Defaults to the chart's AppVersion | `0.35.49-alpha` | +| `server.image.tag` | The airbyte server image tag. Defaults to the chart's AppVersion | `0.35.50-alpha` | | `server.podAnnotations` | Add extra annotations to the server pod | `{}` | | `server.containerSecurityContext` | Security context for the container | `{}` | | `server.livenessProbe.enabled` | Enable livenessProbe on the server | `true` | @@ -156,7 +156,7 @@ | `worker.replicaCount` | Number of worker replicas | `1` | | `worker.image.repository` | The repository to use for the airbyte worker image. | `airbyte/worker` | | `worker.image.pullPolicy` | the pull policy to use for the airbyte worker image | `IfNotPresent` | -| `worker.image.tag` | The airbyte worker image tag. Defaults to the chart's AppVersion | `0.35.49-alpha` | +| `worker.image.tag` | The airbyte worker image tag. Defaults to the chart's AppVersion | `0.35.50-alpha` | | `worker.podAnnotations` | Add extra annotations to the worker pod(s) | `{}` | | `worker.containerSecurityContext` | Security context for the container | `{}` | | `worker.livenessProbe.enabled` | Enable livenessProbe on the worker | `true` | @@ -188,7 +188,7 @@ | ----------------------------- | -------------------------------------------------------------------- | -------------------- | | `bootloader.image.repository` | The repository to use for the airbyte bootloader image. | `airbyte/bootloader` | | `bootloader.image.pullPolicy` | the pull policy to use for the airbyte bootloader image | `IfNotPresent` | -| `bootloader.image.tag` | The airbyte bootloader image tag. Defaults to the chart's AppVersion | `0.35.49-alpha` | +| `bootloader.image.tag` | The airbyte bootloader image tag. 
Defaults to the chart's AppVersion | `0.35.50-alpha` | ### Temporal parameters diff --git a/charts/airbyte/values.yaml b/charts/airbyte/values.yaml index ddbcb6d1a78c..9dc4596790d5 100644 --- a/charts/airbyte/values.yaml +++ b/charts/airbyte/values.yaml @@ -43,7 +43,7 @@ webapp: image: repository: airbyte/webapp pullPolicy: IfNotPresent - tag: 0.35.49-alpha + tag: 0.35.50-alpha ## @param webapp.podAnnotations [object] Add extra annotations to the webapp pod(s) ## @@ -209,7 +209,7 @@ scheduler: image: repository: airbyte/scheduler pullPolicy: IfNotPresent - tag: 0.35.49-alpha + tag: 0.35.50-alpha ## @param scheduler.podAnnotations [object] Add extra annotations to the scheduler pod ## @@ -440,7 +440,7 @@ server: image: repository: airbyte/server pullPolicy: IfNotPresent - tag: 0.35.49-alpha + tag: 0.35.50-alpha ## @param server.podAnnotations [object] Add extra annotations to the server pod ## @@ -581,7 +581,7 @@ worker: image: repository: airbyte/worker pullPolicy: IfNotPresent - tag: 0.35.49-alpha + tag: 0.35.50-alpha ## @param worker.podAnnotations [object] Add extra annotations to the worker pod(s) ## @@ -699,7 +699,7 @@ bootloader: image: repository: airbyte/bootloader pullPolicy: IfNotPresent - tag: 0.35.49-alpha + tag: 0.35.50-alpha ## @param bootloader.podAnnotations [object] Add extra annotations to the bootloader pod ## diff --git a/docs/operator-guides/upgrading-airbyte.md b/docs/operator-guides/upgrading-airbyte.md index bcac2dbd736b..5937fecf675d 100644 --- a/docs/operator-guides/upgrading-airbyte.md +++ b/docs/operator-guides/upgrading-airbyte.md @@ -101,7 +101,7 @@ If you are upgrading from \(i.e. your current version of Airbyte is\) Airbyte ve Here's an example of what it might look like with the values filled in. It assumes that the downloaded `airbyte_archive.tar.gz` is in `/tmp`. 
```bash - docker run --rm -v /tmp:/config airbyte/migration:0.35.49-alpha --\ + docker run --rm -v /tmp:/config airbyte/migration:0.35.50-alpha --\ --input /config/airbyte_archive.tar.gz\ --output /config/airbyte_archive_migrated.tar.gz ``` diff --git a/kube/overlays/stable-with-resource-limits/.env b/kube/overlays/stable-with-resource-limits/.env index 7c679a881f21..80c00753be3c 100644 --- a/kube/overlays/stable-with-resource-limits/.env +++ b/kube/overlays/stable-with-resource-limits/.env @@ -1,4 +1,4 @@ -AIRBYTE_VERSION=0.35.49-alpha +AIRBYTE_VERSION=0.35.50-alpha # Airbyte Internal Database, see https://docs.airbyte.io/operator-guides/configuring-airbyte-db DATABASE_HOST=airbyte-db-svc diff --git a/kube/overlays/stable-with-resource-limits/kustomization.yaml b/kube/overlays/stable-with-resource-limits/kustomization.yaml index c4f458d21860..5a472f4b17d1 100644 --- a/kube/overlays/stable-with-resource-limits/kustomization.yaml +++ b/kube/overlays/stable-with-resource-limits/kustomization.yaml @@ -8,17 +8,17 @@ bases: images: - name: airbyte/db - newTag: 0.35.49-alpha + newTag: 0.35.50-alpha - name: airbyte/bootloader - newTag: 0.35.49-alpha + newTag: 0.35.50-alpha - name: airbyte/scheduler - newTag: 0.35.49-alpha + newTag: 0.35.50-alpha - name: airbyte/server - newTag: 0.35.49-alpha + newTag: 0.35.50-alpha - name: airbyte/webapp - newTag: 0.35.49-alpha + newTag: 0.35.50-alpha - name: airbyte/worker - newTag: 0.35.49-alpha + newTag: 0.35.50-alpha - name: temporalio/auto-setup newTag: 1.7.0 diff --git a/kube/overlays/stable/.env b/kube/overlays/stable/.env index 5e69fb1f2d4f..402bf99c1ed6 100644 --- a/kube/overlays/stable/.env +++ b/kube/overlays/stable/.env @@ -1,4 +1,4 @@ -AIRBYTE_VERSION=0.35.49-alpha +AIRBYTE_VERSION=0.35.50-alpha # Airbyte Internal Database, see https://docs.airbyte.io/operator-guides/configuring-airbyte-db DATABASE_HOST=airbyte-db-svc diff --git a/kube/overlays/stable/kustomization.yaml b/kube/overlays/stable/kustomization.yaml index 1d02adaba47b..493973411812 100644 --- a/kube/overlays/stable/kustomization.yaml +++ b/kube/overlays/stable/kustomization.yaml @@ -8,17 +8,17 @@ bases: images: - name: airbyte/db - newTag: 0.35.49-alpha + newTag: 0.35.50-alpha - name: airbyte/bootloader - newTag: 0.35.49-alpha + newTag: 0.35.50-alpha - name: airbyte/scheduler - newTag: 0.35.49-alpha + newTag: 0.35.50-alpha - name: airbyte/server - newTag: 0.35.49-alpha + newTag: 0.35.50-alpha - name: airbyte/webapp - newTag: 0.35.49-alpha + newTag: 0.35.50-alpha - name: airbyte/worker - newTag: 0.35.49-alpha + newTag: 0.35.50-alpha - name: temporalio/auto-setup newTag: 1.7.0 From ade53bd15520cd1edccc963e1e3667fe5af5cd28 Mon Sep 17 00:00:00 2001 From: Benoit Moriceau Date: Thu, 10 Mar 2022 15:33:14 -0800 Subject: [PATCH 12/38] Remove the attemptId notion in the connectionManagerWorkflow (#10780) This is removing the attemptId from the create attempt activity to replace it with the attemptNumber. This will be modified in the workflow in a later commit. 
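The workflow changes below gate every renamed call site on a Temporal version marker so that histories recorded before the rename still replay deterministically. A minimal sketch of that gate, reusing the tag and version constants introduced in the diff (the standalone class and method are illustrative only, not the patch's verbatim code):

```java
import io.temporal.workflow.Workflow;

class AttemptRenameVersionGateSketch {

  private static final String RENAME_ATTEMPT_ID_TO_NUMBER_TAG = "rename_attempt_id_to_number";
  private static final int RENAME_ATTEMPT_ID_TO_NUMBER_CURRENT_VERSION = 1;

  // Workflow.getVersion writes a version marker into the workflow history the
  // first time this line executes; replays of pre-rename histories see
  // Workflow.DEFAULT_VERSION instead and deterministically keep the old path.
  boolean useAttemptNumber() {
    final int version = Workflow.getVersion(
        RENAME_ATTEMPT_ID_TO_NUMBER_TAG,
        Workflow.DEFAULT_VERSION,
        RENAME_ATTEMPT_ID_TO_NUMBER_CURRENT_VERSION);
    return version >= RENAME_ATTEMPT_ID_TO_NUMBER_CURRENT_VERSION;
  }

}
```

Each branch in the workflow then calls either the legacy attempt-id activity or its new attemptNumber variant, which is why the old activity methods stay in place alongside the new ones.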
--- .../scheduler/persistence/JobPersistence.java | 6 +- airbyte-workers/build.gradle | 2 +- .../ConnectionManagerWorkflowImpl.java | 134 ++- .../activities/GenerateInputActivity.java | 17 + .../activities/GenerateInputActivityImpl.java | 8 + .../JobCreationAndStatusUpdateActivity.java | 70 ++ ...obCreationAndStatusUpdateActivityImpl.java | 43 + .../state/WorkflowInternalState.java | 2 +- .../ConnectionManagerWorkflowTest.java | 74 +- .../scheduling/WorkflowReplayingTest.java | 24 + ...obCreationAndStatusUpdateActivityTest.java | 52 + .../src/test/resources/workflowHistory.json | 939 ++++++++++++++++++ 12 files changed, 1304 insertions(+), 67 deletions(-) create mode 100644 airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/WorkflowReplayingTest.java create mode 100644 airbyte-workers/src/test/resources/workflowHistory.json diff --git a/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/JobPersistence.java b/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/JobPersistence.java index cda5b52f5864..20db344f2fd6 100644 --- a/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/JobPersistence.java +++ b/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/JobPersistence.java @@ -75,12 +75,12 @@ public interface JobPersistence { // /** - * Create a new attempt for a job. Throws {@link IllegalStateException} if the job is already in a - * terminal state. + * Create a new attempt for a job and return its attempt number. Throws + * {@link IllegalStateException} if the job is already in a terminal state. * * @param jobId job for which an attempt will be created * @param logPath path where logs should be written for the attempt - * @return id of the attempt + * @return The attempt number of the created attempt (see {@link DefaultJobPersistence}) * @throws IOException exception due to interaction with persistence */ int createAttempt(long jobId, Path logPath) throws IOException; diff --git a/airbyte-workers/build.gradle b/airbyte-workers/build.gradle index e37a5b9a4482..ea4d418ed7c0 100644 --- a/airbyte-workers/build.gradle +++ b/airbyte-workers/build.gradle @@ -33,7 +33,7 @@ dependencies { implementation project(':airbyte-scheduler:models') testImplementation 'io.temporal:temporal-testing:1.8.1' - testImplementation 'io.temporal:temporal-testing-junit5:1.5.0' // versioned separately from rest of temporal + testImplementation 'com.jayway.jsonpath:json-path:2.7.0' testImplementation "org.flywaydb:flyway-core:7.14.0" testImplementation 'org.mockito:mockito-inline:4.0.0' testImplementation 'org.postgresql:postgresql:42.2.18' diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java index c654f034880b..16ecec71a2fb 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java @@ -21,15 +21,20 @@ import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity; import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity.GeneratedJobInput; import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity.SyncInput; +import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity.SyncInputWithAttemptNumber; 
import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptCreationInput; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptCreationOutput; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptFailureInput; +import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptNumberCreationOutput; +import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptNumberFailureInput; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobCancelledInput; +import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobCancelledInputWithAttemptNumber; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobCreationInput; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobCreationOutput; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobFailureInput; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobSuccessInput; +import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobSuccessInputWithAttemptNumber; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.ReportJobStartInput; import io.airbyte.workers.temporal.scheduling.shared.ActivityConfiguration; import io.airbyte.workers.temporal.scheduling.state.WorkflowInternalState; @@ -60,6 +65,9 @@ public class ConnectionManagerWorkflowImpl implements ConnectionManagerWorkflow private static final int TASK_QUEUE_CHANGE_CURRENT_VERSION = 1; private static final int AUTO_DISABLE_FAILING_CONNECTION_CHANGE_CURRENT_VERSION = 1; + private static final String RENAME_ATTEMPT_ID_TO_NUMBER_TAG = "rename_attempt_id_to_number"; + private static final int RENAME_ATTEMPT_ID_TO_NUMBER_CURRENT_VERSION = 1; + private WorkflowState workflowState = new WorkflowState(UUID.randomUUID(), new NoopStateListener()); private final WorkflowInternalState workflowInternalState = new WorkflowInternalState(); @@ -147,7 +155,7 @@ private CancellationScope generateSyncWorkflowRunnable(final ConnectionUpdaterIn workflowInternalState.setJobId(getOrCreateJobId(connectionUpdaterInput)); - workflowInternalState.setAttemptId(createAttemptId(workflowInternalState.getJobId())); + workflowInternalState.setAttemptNumber(createAttempt(workflowInternalState.getJobId())); final GeneratedJobInput jobInputs = getJobInput(); @@ -183,13 +191,13 @@ private CancellationScope generateSyncWorkflowRunnable(final ConnectionUpdaterIn af.getActivityType(), af.getCause(), workflowInternalState.getJobId(), - workflowInternalState.getAttemptId())); + workflowInternalState.getAttemptNumber())); reportFailure(connectionUpdaterInput, standardSyncOutput); prepareForNextRunAndContinueAsNew(connectionUpdaterInput); } else { workflowInternalState.getFailures().add( FailureHelper.unknownOriginFailure(childWorkflowFailure.getCause(), workflowInternalState.getJobId(), - workflowInternalState.getAttemptId())); + workflowInternalState.getAttemptNumber())); reportFailure(connectionUpdaterInput, standardSyncOutput); prepareForNextRunAndContinueAsNew(connectionUpdaterInput); } @@ -199,20 +207,41 @@ private CancellationScope 
generateSyncWorkflowRunnable(final ConnectionUpdaterIn private void reportSuccess(final ConnectionUpdaterInput connectionUpdaterInput, final StandardSyncOutput standardSyncOutput) { workflowState.setSuccess(true); - runMandatoryActivity(jobCreationAndStatusUpdateActivity::jobSuccess, new JobSuccessInput( - workflowInternalState.getJobId(), - workflowInternalState.getAttemptId(), - standardSyncOutput)); + final int attemptCreationVersion = + Workflow.getVersion(RENAME_ATTEMPT_ID_TO_NUMBER_TAG, Workflow.DEFAULT_VERSION, RENAME_ATTEMPT_ID_TO_NUMBER_CURRENT_VERSION); + + if (attemptCreationVersion < RENAME_ATTEMPT_ID_TO_NUMBER_CURRENT_VERSION) { + runMandatoryActivity(jobCreationAndStatusUpdateActivity::jobSuccess, new JobSuccessInput( + workflowInternalState.getJobId(), + workflowInternalState.getAttemptNumber(), + standardSyncOutput)); + } else { + runMandatoryActivity(jobCreationAndStatusUpdateActivity::jobSuccessWithAttemptNumber, new JobSuccessInputWithAttemptNumber( + workflowInternalState.getJobId(), + workflowInternalState.getAttemptNumber(), + standardSyncOutput)); + } resetNewConnectionInput(connectionUpdaterInput); } private void reportFailure(final ConnectionUpdaterInput connectionUpdaterInput, final StandardSyncOutput standardSyncOutput) { - runMandatoryActivity(jobCreationAndStatusUpdateActivity::attemptFailure, new AttemptFailureInput( - workflowInternalState.getJobId(), - workflowInternalState.getAttemptId(), - standardSyncOutput, - FailureHelper.failureSummary(workflowInternalState.getFailures(), workflowInternalState.getPartialSuccess()))); + final int attemptCreationVersion = + Workflow.getVersion(RENAME_ATTEMPT_ID_TO_NUMBER_TAG, Workflow.DEFAULT_VERSION, RENAME_ATTEMPT_ID_TO_NUMBER_CURRENT_VERSION); + + if (attemptCreationVersion < RENAME_ATTEMPT_ID_TO_NUMBER_CURRENT_VERSION) { + runMandatoryActivity(jobCreationAndStatusUpdateActivity::attemptFailure, new AttemptFailureInput( + workflowInternalState.getJobId(), + workflowInternalState.getAttemptNumber(), + standardSyncOutput, + FailureHelper.failureSummary(workflowInternalState.getFailures(), workflowInternalState.getPartialSuccess()))); + } else { + runMandatoryActivity(jobCreationAndStatusUpdateActivity::attemptFailureWithAttemptNumber, new AttemptNumberFailureInput( + workflowInternalState.getJobId(), + workflowInternalState.getAttemptNumber(), + standardSyncOutput, + FailureHelper.failureSummary(workflowInternalState.getFailures(), workflowInternalState.getPartialSuccess()))); + } final int maxAttempt = configFetchActivity.getMaxAttempt().getMaxAttempt(); final int attemptNumber = connectionUpdaterInput.getAttemptNumber(); @@ -306,20 +335,20 @@ public WorkflowState getState() { @Override public JobInformation getJobInformation() { final Long jobId = workflowInternalState.getJobId(); - final Integer attemptId = workflowInternalState.getAttemptId(); + final Integer attemptNumber = workflowInternalState.getAttemptNumber(); return new JobInformation( jobId == null ? NON_RUNNING_JOB_ID : jobId, - attemptId == null ? NON_RUNNING_ATTEMPT_ID : attemptId); + attemptNumber == null ? NON_RUNNING_ATTEMPT_ID : attemptNumber); } @Override public QuarantinedInformation getQuarantinedInformation() { final Long jobId = workflowInternalState.getJobId(); - final Integer attemptId = workflowInternalState.getAttemptId(); + final Integer attemptNumber = workflowInternalState.getAttemptNumber(); return new QuarantinedInformation( connectionId, jobId == null ? NON_RUNNING_JOB_ID : jobId, - attemptId == null ? 
NON_RUNNING_ATTEMPT_ID : attemptId, + attemptNumber == null ? NON_RUNNING_ATTEMPT_ID : attemptNumber, workflowState.isQuarantined()); } @@ -419,15 +448,31 @@ private Long getOrCreateJobId(final ConnectionUpdaterInput connectionUpdaterInpu /** * Create a new attempt for a given jobId + * + * @param jobId - the jobId associated with the new attempt + * + * @return The attempt number */ - private Integer createAttemptId(final long jobId) { - final AttemptCreationOutput attemptCreationOutput = + private Integer createAttempt(final long jobId) { + final int attemptCreationVersion = + Workflow.getVersion(RENAME_ATTEMPT_ID_TO_NUMBER_TAG, Workflow.DEFAULT_VERSION, RENAME_ATTEMPT_ID_TO_NUMBER_CURRENT_VERSION); + + // Retrieve the attempt number but name it attempt id + if (attemptCreationVersion < RENAME_ATTEMPT_ID_TO_NUMBER_CURRENT_VERSION) { + final AttemptCreationOutput attemptCreationOutput = + runMandatoryActivityWithOutput( + jobCreationAndStatusUpdateActivity::createNewAttempt, + new AttemptCreationInput( + jobId)); + return attemptCreationOutput.getAttemptId(); + } + + final AttemptNumberCreationOutput attemptNumberCreationOutput = runMandatoryActivityWithOutput( - jobCreationAndStatusUpdateActivity::createNewAttempt, + jobCreationAndStatusUpdateActivity::createNewAttemptNumber, new AttemptCreationInput( jobId)); - - return attemptCreationOutput.getAttemptId(); + return attemptNumberCreationOutput.getAttemptNumber(); } /** @@ -436,14 +481,30 @@ private Integer createAttemptId(final long jobId) { */ private GeneratedJobInput getJobInput() { final Long jobId = workflowInternalState.getJobId(); - final Integer attemptId = workflowInternalState.getAttemptId(); - final SyncInput getSyncInputActivitySyncInput = new SyncInput( - attemptId, + final Integer attemptNumber = workflowInternalState.getAttemptNumber(); + final int attemptCreationVersion = + Workflow.getVersion(RENAME_ATTEMPT_ID_TO_NUMBER_TAG, Workflow.DEFAULT_VERSION, RENAME_ATTEMPT_ID_TO_NUMBER_CURRENT_VERSION); + + if (attemptCreationVersion < RENAME_ATTEMPT_ID_TO_NUMBER_CURRENT_VERSION) { + final SyncInput getSyncInputActivitySyncInput = new SyncInput( + attemptNumber, + jobId, + workflowState.isResetConnection()); + + final GeneratedJobInput syncWorkflowInputs = runMandatoryActivityWithOutput( + getSyncInputActivity::getSyncWorkflowInput, + getSyncInputActivitySyncInput); + + return syncWorkflowInputs; + } + + final SyncInputWithAttemptNumber getSyncInputActivitySyncInput = new SyncInputWithAttemptNumber( + attemptNumber, jobId, workflowState.isResetConnection()); final GeneratedJobInput syncWorkflowInputs = runMandatoryActivityWithOutput( - getSyncInputActivity::getSyncWorkflowInput, + getSyncInputActivity::getSyncWorkflowInputWithAttemptNumber, getSyncInputActivitySyncInput); return syncWorkflowInputs; @@ -529,14 +590,25 @@ private void deleteConnectionBeforeTerminatingTheWorkflow() { private void reportCancelledAndContinueWith(final boolean isReset, final ConnectionUpdaterInput connectionUpdaterInput) { workflowState.setContinueAsReset(isReset); final Long jobId = workflowInternalState.getJobId(); - final Integer attemptId = workflowInternalState.getAttemptId(); + final Integer attemptNumber = workflowInternalState.getAttemptNumber(); final Set failures = workflowInternalState.getFailures(); final Boolean partialSuccess = workflowInternalState.getPartialSuccess(); - runMandatoryActivity(jobCreationAndStatusUpdateActivity::jobCancelled, - new JobCancelledInput( - jobId, - attemptId, - 
FailureHelper.failureSummaryForCancellation(jobId, attemptNumber, failures, partialSuccess)));
+    } else {
+      runMandatoryActivity(jobCreationAndStatusUpdateActivity::jobCancelledWithAttemptNumber,
+          new JobCancelledInputWithAttemptNumber(
+              jobId,
+              attemptNumber,
+              FailureHelper.failureSummaryForCancellation(jobId, attemptNumber, failures, partialSuccess)));
+    }
     resetNewConnectionInput(connectionUpdaterInput);
     prepareForNextRunAndContinueAsNew(connectionUpdaterInput);
   }
diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/GenerateInputActivity.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/GenerateInputActivity.java
index 0a1ed70c3008..60191311ef34 100644
--- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/GenerateInputActivity.java
+++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/GenerateInputActivity.java
@@ -27,6 +27,17 @@ class SyncInput {
   }
+  @Data
+  @NoArgsConstructor
+  @AllArgsConstructor
+  class SyncInputWithAttemptNumber {
+
+    private int attemptNumber;
+    private long jobId;
+    private boolean reset;
+
+  }
+
   @Data
   @NoArgsConstructor
   @AllArgsConstructor
@@ -45,4 +56,10 @@ class GeneratedJobInput {
   @ActivityMethod
   GeneratedJobInput getSyncWorkflowInput(SyncInput input);
+  /**
+   * This generates the input needed by the child sync workflow
+   */
+  @ActivityMethod
+  GeneratedJobInput getSyncWorkflowInputWithAttemptNumber(SyncInputWithAttemptNumber input);
+
 }
diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/GenerateInputActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/GenerateInputActivityImpl.java
index 30edaf108adc..bcb939d022c8 100644
--- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/GenerateInputActivityImpl.java
+++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/GenerateInputActivityImpl.java
@@ -76,4 +76,12 @@ public GeneratedJobInput getSyncWorkflowInput(final SyncInput input) {
     }
   }
+  @Override
+  public GeneratedJobInput getSyncWorkflowInputWithAttemptNumber(final SyncInputWithAttemptNumber input) {
+    return getSyncWorkflowInput(new SyncInput(
+        input.getAttemptNumber(),
+        input.getJobId(),
+        input.isReset()));
+  }
+
 }
diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivity.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivity.java
index aa45b53b0e8c..2de587c2a15b 100644
--- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivity.java
+++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivity.java
@@ -72,6 +72,24 @@ class AttemptCreationOutput {
   @ActivityMethod
   AttemptCreationOutput createNewAttempt(AttemptCreationInput input) throws RetryableException;
+  @Data
+ @NoArgsConstructor + @AllArgsConstructor + class AttemptNumberCreationOutput { + + private int attemptNumber; + + } + + /** + * Create a new attempt for a given job ID + * + * @param input POJO containing the jobId + * @return A POJO containing the attemptNumber + */ + @ActivityMethod + AttemptNumberCreationOutput createNewAttemptNumber(AttemptCreationInput input) throws RetryableException; + @Data @NoArgsConstructor @AllArgsConstructor @@ -89,6 +107,23 @@ class JobSuccessInput { @ActivityMethod void jobSuccess(JobSuccessInput input); + @Data + @NoArgsConstructor + @AllArgsConstructor + class JobSuccessInputWithAttemptNumber { + + private long jobId; + private int attemptNumber; + private StandardSyncOutput standardSyncOutput; + + } + + /** + * Set a job status as successful + */ + @ActivityMethod + void jobSuccessWithAttemptNumber(JobSuccessInputWithAttemptNumber input); + @Data @NoArgsConstructor @AllArgsConstructor @@ -123,6 +158,24 @@ class AttemptFailureInput { @ActivityMethod void attemptFailure(AttemptFailureInput input); + @Data + @NoArgsConstructor + @AllArgsConstructor + class AttemptNumberFailureInput { + + private long jobId; + private int attemptNumber; + private StandardSyncOutput standardSyncOutput; + private AttemptFailureSummary attemptFailureSummary; + + } + + /** + * Set an attempt status as failed + */ + @ActivityMethod + void attemptFailureWithAttemptNumber(AttemptNumberFailureInput input); + @Data @NoArgsConstructor @AllArgsConstructor @@ -140,6 +193,23 @@ class JobCancelledInput { @ActivityMethod void jobCancelled(JobCancelledInput input); + @Data + @NoArgsConstructor + @AllArgsConstructor + class JobCancelledInputWithAttemptNumber { + + private long jobId; + private int attemptNumber; + private AttemptFailureSummary attemptFailureSummary; + + } + + /** + * Set a job status as cancelled + */ + @ActivityMethod + void jobCancelledWithAttemptNumber(JobCancelledInputWithAttemptNumber input); + @Data @NoArgsConstructor @AllArgsConstructor diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityImpl.java index e02796675174..b9a76065e776 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityImpl.java @@ -128,6 +128,24 @@ public AttemptCreationOutput createNewAttempt(final AttemptCreationInput input) } } + @Override + public AttemptNumberCreationOutput createNewAttemptNumber(final AttemptCreationInput input) throws RetryableException { + try { + final long jobId = input.getJobId(); + final Job createdJob = jobPersistence.getJob(jobId); + + final WorkerRun workerRun = temporalWorkerRunFactory.create(createdJob); + final Path logFilePath = workerRun.getJobRoot().resolve(LogClientSingleton.LOG_FILENAME); + final int persistedAttemptNumber = jobPersistence.createAttempt(jobId, logFilePath); + emitJobIdToReleaseStagesMetric(MetricsRegistry.ATTEMPT_CREATED_BY_RELEASE_STAGE, jobId); + + LogClientSingleton.getInstance().setJobMdc(workerEnvironment, logConfigs, workerRun.getJobRoot()); + return new AttemptNumberCreationOutput(persistedAttemptNumber); + } catch (final IOException e) { + throw new RetryableException(e); + } + } + @Override public void jobSuccess(final JobSuccessInput input) 
{ try { @@ -152,6 +170,14 @@ public void jobSuccess(final JobSuccessInput input) { } } + @Override + public void jobSuccessWithAttemptNumber(final JobSuccessInputWithAttemptNumber input) { + jobSuccess(new JobSuccessInput( + input.getJobId(), + input.getAttemptNumber(), + input.getStandardSyncOutput())); + } + @Override public void jobFailure(final JobFailureInput input) { try { @@ -191,6 +217,15 @@ public void attemptFailure(final AttemptFailureInput input) { } } + @Override + public void attemptFailureWithAttemptNumber(final AttemptNumberFailureInput input) { + attemptFailure(new AttemptFailureInput( + input.getJobId(), + input.getAttemptNumber(), + input.getStandardSyncOutput(), + input.getAttemptFailureSummary())); + } + @Override public void jobCancelled(final JobCancelledInput input) { try { @@ -209,6 +244,14 @@ public void jobCancelled(final JobCancelledInput input) { } } + @Override + public void jobCancelledWithAttemptNumber(final JobCancelledInputWithAttemptNumber input) { + jobCancelled(new JobCancelledInput( + input.getJobId(), + input.getAttemptNumber(), + input.getAttemptFailureSummary())); + } + @Override public void reportJobStart(final ReportJobStartInput input) { try { diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/state/WorkflowInternalState.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/state/WorkflowInternalState.java index 822bbfe0ba48..d9bdcb2d5800 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/state/WorkflowInternalState.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/state/WorkflowInternalState.java @@ -17,7 +17,7 @@ public class WorkflowInternalState { private Long jobId = null; - private Integer attemptId = null; + private Integer attemptNumber = null; // StandardSyncOutput standardSyncOutput = null; private final Set failures = new HashSet<>(); diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowTest.java index ca897b5afe80..8353b9116940 100644 --- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowTest.java +++ b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowTest.java @@ -16,13 +16,14 @@ import io.airbyte.workers.temporal.scheduling.activities.ConfigFetchActivity.ScheduleRetrieverOutput; import io.airbyte.workers.temporal.scheduling.activities.ConnectionDeletionActivity; import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity.GeneratedJobInput; -import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity.SyncInput; +import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity.SyncInputWithAttemptNumber; import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivityImpl; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity; -import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptCreationOutput; -import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptFailureInput; -import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobCancelledInput; +import 
diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowTest.java
index ca897b5afe80..8353b9116940 100644
--- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowTest.java
+++ b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowTest.java
@@ -16,13 +16,14 @@
 import io.airbyte.workers.temporal.scheduling.activities.ConfigFetchActivity.ScheduleRetrieverOutput;
 import io.airbyte.workers.temporal.scheduling.activities.ConnectionDeletionActivity;
 import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity.GeneratedJobInput;
-import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity.SyncInput;
+import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity.SyncInputWithAttemptNumber;
 import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivityImpl;
 import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity;
-import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptCreationOutput;
-import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptFailureInput;
-import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobCancelledInput;
+import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptNumberCreationOutput;
+import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptNumberFailureInput;
+import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobCancelledInputWithAttemptNumber;
 import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobCreationOutput;
+import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobSuccessInputWithAttemptNumber;
 import io.airbyte.workers.temporal.scheduling.state.WorkflowState;
 import io.airbyte.workers.temporal.scheduling.state.listener.TestStateListener;
 import io.airbyte.workers.temporal.scheduling.state.listener.WorkflowStateChangedListener.ChangedStateEvent;
@@ -121,11 +122,11 @@ public void setUp() {
         .thenReturn(new JobCreationOutput(
             1L));
 
-    Mockito.when(mJobCreationAndStatusUpdateActivity.createNewAttempt(Mockito.any()))
-        .thenReturn(new AttemptCreationOutput(
+    Mockito.when(mJobCreationAndStatusUpdateActivity.createNewAttemptNumber(Mockito.any()))
+        .thenReturn(new AttemptNumberCreationOutput(
             1));
 
-    Mockito.when(mGenerateInputActivityImpl.getSyncWorkflowInput(Mockito.any(SyncInput.class)))
+    Mockito.when(mGenerateInputActivityImpl.getSyncWorkflowInputWithAttemptNumber(Mockito.any(SyncInputWithAttemptNumber.class)))
         .thenReturn(
             new GeneratedJobInput(
                 new JobRunConfig(),
@@ -510,7 +511,8 @@ public void cancelRunning() throws InterruptedException {
           .filteredOn(changedStateEvent -> changedStateEvent.getField() == StateField.CANCELLED && changedStateEvent.isValue())
           .hasSizeGreaterThanOrEqualTo(1);
 
-      Mockito.verify(mJobCreationAndStatusUpdateActivity).jobCancelled(Mockito.argThat(new HasCancellationFailure(JOB_ID, ATTEMPT_ID)));
+      Mockito.verify(mJobCreationAndStatusUpdateActivity)
+          .jobCancelledWithAttemptNumber(Mockito.argThat(new HasCancellationFailure(JOB_ID, ATTEMPT_ID)));
     }
 
     @RepeatedTest(10)
@@ -594,7 +596,7 @@ public void resetCancelRunningWorkflow() throws InterruptedException {
           .filteredOn(changedStateEvent -> changedStateEvent.getField() == StateField.RESET && changedStateEvent.isValue())
           .hasSizeGreaterThanOrEqualTo(1);
 
-      Mockito.verify(mJobCreationAndStatusUpdateActivity).jobCancelled(Mockito.any());
+      Mockito.verify(mJobCreationAndStatusUpdateActivity).jobCancelledWithAttemptNumber(Mockito.any(JobCancelledInputWithAttemptNumber.class));
 
     }
@@ -687,7 +689,7 @@ public void updatedSignalReceivedWhileRunning() throws InterruptedException {
           .filteredOn(changedStateEvent -> changedStateEvent.getField() == StateField.UPDATED && changedStateEvent.isValue())
           .hasSizeGreaterThanOrEqualTo(1);
 
-      Mockito.verify(mJobCreationAndStatusUpdateActivity).jobSuccess(Mockito.any());
+      Mockito.verify(mJobCreationAndStatusUpdateActivity).jobSuccessWithAttemptNumber(Mockito.any(JobSuccessInputWithAttemptNumber.class));
     }
 
   }
@@ -747,8 +749,10 @@ public void testSourceAndDestinationFailuresRecorded() throws InterruptedExcepti
       workflow.submitManualSync();
       testEnv.sleep(Duration.ofMinutes(1L)); // any time after no-waiting manual run
 
-      Mockito.verify(mJobCreationAndStatusUpdateActivity).attemptFailure(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.SOURCE)));
-      Mockito.verify(mJobCreationAndStatusUpdateActivity).attemptFailure(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.DESTINATION)));
+      Mockito.verify(mJobCreationAndStatusUpdateActivity)
+          .attemptFailureWithAttemptNumber(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.SOURCE)));
+      Mockito.verify(mJobCreationAndStatusUpdateActivity)
+          .attemptFailureWithAttemptNumber(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.DESTINATION)));
     }
 
     @RepeatedTest(10)
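Each of these origin-specific verifications leans on the typed HasFailureFromOrigin matcher defined at the bottom of this test file. Since ArgumentMatcher is a functional interface, the same check could be written inline with a lambda; a condensed equivalent, shown only as a sketch of the pattern:

    // Inline equivalent of argThat(new HasFailureFromOrigin(FailureOrigin.SOURCE)); sketch only.
    Mockito.verify(mJobCreationAndStatusUpdateActivity).attemptFailureWithAttemptNumber(
        Mockito.argThat(input -> input.getAttemptFailureSummary().getFailures().stream()
            .anyMatch(f -> f.getFailureOrigin() == FailureOrigin.SOURCE)));

The named matcher class buys reuse across the per-origin tests below at the cost of a little boilerplate.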
@@ -783,7 +787,8 @@ public void testNormalizationFailure() throws InterruptedException {
       workflow.submitManualSync();
       testEnv.sleep(Duration.ofMinutes(1L)); // any time after no-waiting manual run
 
-      Mockito.verify(mJobCreationAndStatusUpdateActivity).attemptFailure(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.NORMALIZATION)));
+      Mockito.verify(mJobCreationAndStatusUpdateActivity)
+          .attemptFailureWithAttemptNumber(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.NORMALIZATION)));
     }
 
     @RepeatedTest(10)
@@ -818,7 +823,8 @@ public void testDbtFailureRecorded() throws InterruptedException {
       workflow.submitManualSync();
       testEnv.sleep(Duration.ofMinutes(1L)); // any time after no-waiting manual run
 
-      Mockito.verify(mJobCreationAndStatusUpdateActivity).attemptFailure(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.DBT)));
+      Mockito.verify(mJobCreationAndStatusUpdateActivity)
+          .attemptFailureWithAttemptNumber(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.DBT)));
     }
 
     @RepeatedTest(10)
@@ -853,7 +859,8 @@ public void testPersistenceFailureRecorded() throws InterruptedException {
       workflow.submitManualSync();
       testEnv.sleep(Duration.ofMinutes(1L)); // any time after no-waiting manual run
 
-      Mockito.verify(mJobCreationAndStatusUpdateActivity).attemptFailure(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.PERSISTENCE)));
+      Mockito.verify(mJobCreationAndStatusUpdateActivity)
+          .attemptFailureWithAttemptNumber(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.PERSISTENCE)));
     }
 
     @RepeatedTest(10)
@@ -888,7 +895,8 @@ public void testReplicationFailureRecorded() throws InterruptedException {
       workflow.submitManualSync();
       testEnv.sleep(Duration.ofMinutes(1L)); // any time after no-waiting manual run
 
-      Mockito.verify(mJobCreationAndStatusUpdateActivity).attemptFailure(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.REPLICATION)));
+      Mockito.verify(mJobCreationAndStatusUpdateActivity)
+          .attemptFailureWithAttemptNumber(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.REPLICATION)));
     }
 
   }
@@ -906,12 +914,13 @@ public static Stream<Arguments> getSetupFailingFailingActivityBeforeRun() {
     return Stream.of(
         Arguments.of(new Thread(() -> Mockito.when(mJobCreationAndStatusUpdateActivity.createNewJob(Mockito.any()))
            .thenThrow(ApplicationFailure.newNonRetryableFailure("", "")))),
-        Arguments.of(new Thread(() -> Mockito.when(mJobCreationAndStatusUpdateActivity.createNewAttempt(Mockito.any()))
+        Arguments.of(new Thread(() -> Mockito.when(mJobCreationAndStatusUpdateActivity.createNewAttemptNumber(Mockito.any()))
            .thenThrow(ApplicationFailure.newNonRetryableFailure("", "")))),
         Arguments.of(new Thread(() -> Mockito.doThrow(ApplicationFailure.newNonRetryableFailure("", ""))
            .when(mJobCreationAndStatusUpdateActivity).reportJobStart(Mockito.any()))),
-        Arguments.of(new Thread(() -> Mockito.when(mGenerateInputActivityImpl.getSyncWorkflowInput(Mockito.any()))
-            .thenThrow(ApplicationFailure.newNonRetryableFailure("", "")))));
+        Arguments.of(new Thread(
+            () -> Mockito.when(mGenerateInputActivityImpl.getSyncWorkflowInputWithAttemptNumber(Mockito.any(SyncInputWithAttemptNumber.class)))
+                .thenThrow(ApplicationFailure.newNonRetryableFailure("", "")))));
   }
 
   @ParameterizedTest
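The Thread-valued arguments above are how each scenario re-stubs the shared static mocks: every Arguments entry carries a runnable that swaps one activity stub for a throwing one before the workflow runs. Roughly, the consuming side of such a @MethodSource looks like this (a simplified JUnit 5 sketch; the body and the assertion are elided and hypothetical, not the literal test):

    @ParameterizedTest
    @MethodSource("getSetupFailingFailingActivityBeforeRun")
    void testWorkflowKeepsRunningAfterFailedActivity(final Thread mockSetup) throws InterruptedException {
      mockSetup.run(); // executes the re-stubbing synchronously; the Thread is just a container for the setup logic
      // ... start the ConnectionManagerWorkflow and assert the scheduler survives the failing activity ...
    }
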
@@ -996,10 +1005,10 @@ public static Stream<Arguments> getSetupFailingFailingActivityAfterRun() {
     return Stream.of(
         Arguments.of((Consumer<ConnectionManagerWorkflow>) ((ConnectionManagerWorkflow workflow) -> System.out.println("do Nothing")),
             new Thread(() -> Mockito.doThrow(ApplicationFailure.newNonRetryableFailure("", ""))
-                .when(mJobCreationAndStatusUpdateActivity).jobSuccess(Mockito.any()))),
+                .when(mJobCreationAndStatusUpdateActivity).jobSuccessWithAttemptNumber(Mockito.any(JobSuccessInputWithAttemptNumber.class)))),
         Arguments.of((Consumer<ConnectionManagerWorkflow>) ((ConnectionManagerWorkflow workflow) -> workflow.cancelJob()),
             new Thread(() -> Mockito.doThrow(ApplicationFailure.newNonRetryableFailure("", ""))
-                .when(mJobCreationAndStatusUpdateActivity).jobCancelled(Mockito.any()))),
+                .when(mJobCreationAndStatusUpdateActivity).jobCancelledWithAttemptNumber(Mockito.any(JobCancelledInputWithAttemptNumber.class)))),
         Arguments.of((Consumer<ConnectionManagerWorkflow>) ((ConnectionManagerWorkflow workflow) -> workflow.deleteConnection()),
             new Thread(() -> Mockito.doThrow(ApplicationFailure.newNonRetryableFailure("", ""))
                 .when(mConnectionDeletionActivity).deleteConnection(Mockito.any()))));
   }
 
@@ -1044,7 +1053,7 @@ void testGetStuckAfterRun(final Consumer<ConnectionManagerWorkflow> signalSender
 
       Assertions.assertThat(events)
           .filteredOn(changedStateEvent -> changedStateEvent.getField() == StateField.QUARANTINED && changedStateEvent.isValue())
-          .hasSize(1);
+          .hasSizeGreaterThanOrEqualTo(1);
     }
 
   }
@@ -1127,7 +1136,10 @@ public void failedResetContinueAsReset() throws InterruptedException {
     @DisplayName("Test that we are getting stuck if the report of a failure happens")
     void testGetStuckAfterRun() throws InterruptedException {
       Mockito.doThrow(ApplicationFailure.newNonRetryableFailure("", ""))
-          .when(mJobCreationAndStatusUpdateActivity).attemptFailure(Mockito.any());
+          .when(mJobCreationAndStatusUpdateActivity).attemptFailureWithAttemptNumber(Mockito.any());
+
+      Mockito.when(mConfigFetchActivity.getMaxAttempt())
+          .thenReturn(new GetMaxAttemptOutput(3));
 
       final UUID testId = UUID.randomUUID();
       final TestStateListener testStateListener = new TestStateListener();
@@ -1206,7 +1218,7 @@ public void failedResetJobWaitsOnRestart() throws InterruptedException {
 
   }
 
-  private class HasFailureFromOrigin implements ArgumentMatcher<AttemptFailureInput> {
+  private class HasFailureFromOrigin implements ArgumentMatcher<AttemptNumberFailureInput> {
 
     private final FailureOrigin expectedFailureOrigin;
 
@@ -1215,26 +1227,26 @@ public HasFailureFromOrigin(final FailureOrigin failureOrigin) {
     }
 
     @Override
-    public boolean matches(final AttemptFailureInput arg) {
+    public boolean matches(final AttemptNumberFailureInput arg) {
       return arg.getAttemptFailureSummary().getFailures().stream().anyMatch(f -> f.getFailureOrigin().equals(expectedFailureOrigin));
     }
 
   }
 
-  private class HasCancellationFailure implements ArgumentMatcher<JobCancelledInput> {
+  private class HasCancellationFailure implements ArgumentMatcher<JobCancelledInputWithAttemptNumber> {
 
     private final long expectedJobId;
-    private final int expectedAttemptId;
+    private final int expectedAttemptNumber;
 
-    public HasCancellationFailure(final long jobId, final int attemptId) {
+    public HasCancellationFailure(final long jobId, final int attemptNumber) {
       this.expectedJobId = jobId;
-      this.expectedAttemptId = attemptId;
+      this.expectedAttemptNumber = attemptNumber;
    }
 
     @Override
-    public boolean matches(final JobCancelledInput arg) {
+    public boolean matches(final JobCancelledInputWithAttemptNumber arg) {
       return arg.getAttemptFailureSummary().getFailures().stream().anyMatch(f -> f.getFailureType().equals(FailureType.MANUAL_CANCELLATION))
-          && arg.getJobId() == expectedJobId && arg.getAttemptId() == expectedAttemptId;
+          && arg.getJobId() == expectedJobId && arg.getAttemptNumber() == expectedAttemptNumber;
     }
 
   }
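The new WorkflowReplayingTest below feeds a previously recorded event history (the workflowHistory.json added at the end of this patch) through the current ConnectionManagerWorkflowImpl, which is how Temporal surfaces non-deterministic changes before they reach a running deployment. Histories like this can typically be exported with Temporal's CLI, for example `tctl workflow show --workflow_id <id> --output_filename workflowHistory.json`. The Java replayer also has a JSON-string overload, as in this sketch (the file path is hypothetical, and the overload is assumed from the Temporal Java SDK's WorkflowReplayer API):

    // Variant of the replay call driven from raw JSON instead of a File; sketch only.
    final String history = Files.readString(Path.of("airbyte-workers/src/test/resources/workflowHistory.json"));
    WorkflowReplayer.replayWorkflowExecution(history, ConnectionManagerWorkflowImpl.class);
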
diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/WorkflowReplayingTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/WorkflowReplayingTest.java
new file mode 100644
index 000000000000..bba04f6cea42
--- /dev/null
+++ b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/WorkflowReplayingTest.java
@@ -0,0 +1,24 @@
+/*
+ * Copyright (c) 2021 Airbyte, Inc., all rights reserved.
+ */
+
+package io.airbyte.workers.temporal.scheduling;
+
+import io.temporal.testing.WorkflowReplayer;
+import java.io.File;
+import java.net.URL;
+import org.junit.jupiter.api.Test;
+
+// TODO: Auto generation of the input and more scenario coverage
+public class WorkflowReplayingTest {
+
+  @Test
+  public void replaySimpleSuccessfulWorkflow() throws Exception {
+    final URL historyPath = getClass().getClassLoader().getResource("workflowHistory.json");
+
+    final File historyFile = new File(historyPath.toURI());
+
+    WorkflowReplayer.replayWorkflowExecution(historyFile, ConnectionManagerWorkflowImpl.class);
+  }
+
+}
diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityTest.java
index 5f480cf4fb80..f643bc4bd07e 100644
--- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityTest.java
+++ b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityTest.java
@@ -29,6 +29,7 @@
 import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptCreationInput;
 import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptCreationOutput;
 import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptFailureInput;
+import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptNumberCreationOutput;
 import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobCancelledInput;
 import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobCreationInput;
 import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobCreationOutput;
@@ -84,6 +85,7 @@ public class JobCreationAndStatusUpdateActivityTest {
   private static final UUID CONNECTION_ID = UUID.randomUUID();
   private static final long JOB_ID = 123L;
   private static final int ATTEMPT_ID = 0;
+  private static final int ATTEMPT_NUMBER = 1;
   private static final StandardSyncOutput standardSyncOutput = new StandardSyncOutput()
       .withStandardSyncSummary(
           new StandardSyncSummary()
@@ -164,6 +166,56 @@ public void createAttemptThrowException() throws IOException {
               .hasCauseInstanceOf(IOException.class);
     }
 
+    @Test
+    @DisplayName("Test attempt creation")
+    public void createAttemptNumber() throws IOException {
+      Mockito.when(mConfigRepository.getDatabase()).thenReturn(Mockito.mock(ExceptionWrappingDatabase.class));
+
+      final Job mJob = Mockito.mock(Job.class);
+
+      Mockito.when(mJobPersistence.getJob(JOB_ID))
+          .thenReturn(mJob);
+
+      final WorkerRun mWorkerRun = Mockito.mock(WorkerRun.class);
+
+      Mockito.when(mTemporalWorkerRunFactory.create(mJob))
+          .thenReturn(mWorkerRun);
+
+      final Path mPath = Mockito.mock(Path.class);
+      final Path path 
= Path.of("test"); + Mockito.when(mPath.resolve(Mockito.anyString())) + .thenReturn(path); + Mockito.when(mWorkerRun.getJobRoot()) + .thenReturn(mPath); + + Mockito.when(mJobPersistence.createAttempt(JOB_ID, path)) + .thenReturn(ATTEMPT_NUMBER); + + final LogClientSingleton mLogClientSingleton = Mockito.mock(LogClientSingleton.class); + try (final MockedStatic utilities = Mockito.mockStatic(LogClientSingleton.class)) { + utilities.when(() -> LogClientSingleton.getInstance()) + .thenReturn(mLogClientSingleton); + + final AttemptNumberCreationOutput output = jobCreationAndStatusUpdateActivity.createNewAttemptNumber(new AttemptCreationInput( + JOB_ID)); + + Mockito.verify(mLogClientSingleton).setJobMdc(mWorkerEnvironment, mLogConfigs, mPath); + Assertions.assertThat(output.getAttemptNumber()).isEqualTo(ATTEMPT_NUMBER); + } + } + + @Test + @DisplayName("Test exception errors are properly wrapped") + public void createAttemptNumberThrowException() throws IOException { + Mockito.when(mJobPersistence.getJob(JOB_ID)) + .thenThrow(new IOException()); + + Assertions.assertThatThrownBy(() -> jobCreationAndStatusUpdateActivity.createNewAttemptNumber(new AttemptCreationInput( + JOB_ID))) + .isInstanceOf(RetryableException.class) + .hasCauseInstanceOf(IOException.class); + } + } @Nested diff --git a/airbyte-workers/src/test/resources/workflowHistory.json b/airbyte-workers/src/test/resources/workflowHistory.json new file mode 100644 index 000000000000..a83619c8ea29 --- /dev/null +++ b/airbyte-workers/src/test/resources/workflowHistory.json @@ -0,0 +1,939 @@ +{ + "events": [ + { + "eventId": "1", + "eventTime": "2022-03-08T22:47:57.534705300Z", + "eventType": "WorkflowExecutionStarted", + "taskId": "1048664", + "workflowExecutionStartedEventAttributes": { + "workflowType": { + "name": "ConnectionManagerWorkflow" + }, + "taskQueue": { + "name": "CONNECTION_UPDATER", + "kind": "Normal" + }, + "input": { + "payloads": [ + { + "metadata": { + "encoding": "anNvbi9wbGFpbg==" + }, + "data": "eyJjb25uZWN0aW9uSWQiOiJiOTE2ZmQ4Ni1hNWE2LTQ4YWMtODg4MC00OTc4NzRjZjUzY2YiLCJqb2JJZCI6bnVsbCwiYXR0ZW1wdElkIjpudWxsLCJmcm9tRmFpbHVyZSI6ZmFsc2UsImF0dGVtcHROdW1iZXIiOjEsIndvcmtmbG93U3RhdGUiOm51bGwsInJlc2V0Q29ubmVjdGlvbiI6ZmFsc2V9" + } + ] + }, + "workflowExecutionTimeout": "0s", + "workflowRunTimeout": "0s", + "workflowTaskTimeout": "10s", + "originalExecutionRunId": "4e6584e1-b030-4e42-a7bc-15dee5d195c0", + "identity": "1@3de809efb2ed", + "firstExecutionRunId": "4e6584e1-b030-4e42-a7bc-15dee5d195c0", + "retryPolicy": { + "initialInterval": "1s", + "backoffCoefficient": 2, + "maximumInterval": "100s", + "maximumAttempts": 1 + }, + "attempt": 1, + "firstWorkflowTaskBackoff": "0s", + "header": {} + } + }, + { + "eventId": "2", + "eventTime": "2022-03-08T22:47:57.534975800Z", + "eventType": "WorkflowTaskScheduled", + "taskId": "1048665", + "workflowTaskScheduledEventAttributes": { + "taskQueue": { + "name": "CONNECTION_UPDATER", + "kind": "Normal" + }, + "startToCloseTimeout": "10s", + "attempt": 1 + } + }, + { + "eventId": "3", + "eventTime": "2022-03-08T22:47:57.563121800Z", + "eventType": "WorkflowTaskStarted", + "taskId": "1048669", + "workflowTaskStartedEventAttributes": { + "scheduledEventId": "2", + "identity": "1@2741f9c3f558", + "requestId": "e9cf205d-de02-4139-b00d-fab56c4b9fd7" + } + }, + { + "eventId": "4", + "eventTime": "2022-03-08T22:47:57.646973200Z", + "eventType": "WorkflowTaskCompleted", + "taskId": "1048672", + "workflowTaskCompletedEventAttributes": { + "scheduledEventId": "2", + "startedEventId": "3", + 
"identity": "1@2741f9c3f558" + } + }, + { + "eventId": "5", + "eventTime": "2022-03-08T22:47:57.647029Z", + "eventType": "ActivityTaskScheduled", + "taskId": "1048673", + "activityTaskScheduledEventAttributes": { + "activityId": "c45be44b-784b-3a0c-9473-e80129b65969", + "activityType": { + "name": "GetTimeToWait" + }, + "taskQueue": { + "name": "CONNECTION_UPDATER", + "kind": "Normal" + }, + "header": {}, + "input": { + "payloads": [ + { + "metadata": { + "encoding": "anNvbi9wbGFpbg==" + }, + "data": "eyJjb25uZWN0aW9uSWQiOiJiOTE2ZmQ4Ni1hNWE2LTQ4YWMtODg4MC00OTc4NzRjZjUzY2YifQ==" + } + ] + }, + "scheduleToCloseTimeout": "0s", + "scheduleToStartTimeout": "0s", + "startToCloseTimeout": "120s", + "heartbeatTimeout": "30s", + "workflowTaskCompletedEventId": "4", + "retryPolicy": { + "initialInterval": "30s", + "backoffCoefficient": 2, + "maximumInterval": "3000s", + "maximumAttempts": 10 + } + } + }, + { + "eventId": "6", + "eventTime": "2022-03-08T22:47:57.668813100Z", + "eventType": "ActivityTaskStarted", + "taskId": "1048677", + "activityTaskStartedEventAttributes": { + "scheduledEventId": "5", + "identity": "1@2741f9c3f558", + "requestId": "df9001bd-0c42-4415-a631-0a37ee3f7698", + "attempt": 1 + } + }, + { + "eventId": "7", + "eventTime": "2022-03-08T22:47:57.856240400Z", + "eventType": "ActivityTaskCompleted", + "taskId": "1048678", + "activityTaskCompletedEventAttributes": { + "result": { + "payloads": [ + { + "metadata": { + "encoding": "anNvbi9wbGFpbg==" + }, + "data": "eyJ0aW1lVG9XYWl0IjowLjB9" + } + ] + }, + "scheduledEventId": "5", + "startedEventId": "6", + "identity": "1@2741f9c3f558" + } + }, + { + "eventId": "8", + "eventTime": "2022-03-08T22:47:57.856293200Z", + "eventType": "WorkflowTaskScheduled", + "taskId": "1048679", + "workflowTaskScheduledEventAttributes": { + "taskQueue": { + "name": "1@2741f9c3f558:e7111d28-2117-4614-9bb1-8a73524b789f", + "kind": "Sticky" + }, + "startToCloseTimeout": "10s", + "attempt": 1 + } + }, + { + "eventId": "9", + "eventTime": "2022-03-08T22:47:57.876328300Z", + "eventType": "WorkflowTaskStarted", + "taskId": "1048683", + "workflowTaskStartedEventAttributes": { + "scheduledEventId": "8", + "identity": "e7111d28-2117-4614-9bb1-8a73524b789f", + "requestId": "80932560-7d5d-4f5f-9982-561857b07f50" + } + }, + { + "eventId": "10", + "eventTime": "2022-03-08T22:47:57.921753Z", + "eventType": "WorkflowTaskCompleted", + "taskId": "1048686", + "workflowTaskCompletedEventAttributes": { + "scheduledEventId": "8", + "startedEventId": "9", + "identity": "1@2741f9c3f558" + } + }, + { + "eventId": "11", + "eventTime": "2022-03-08T22:47:57.921814200Z", + "eventType": "ActivityTaskScheduled", + "taskId": "1048687", + "activityTaskScheduledEventAttributes": { + "activityId": "c7b20a16-db46-3dd1-b8ac-e2a93d3a8e0d", + "activityType": { + "name": "CreateNewJob" + }, + "taskQueue": { + "name": "CONNECTION_UPDATER", + "kind": "Normal" + }, + "header": {}, + "input": { + "payloads": [ + { + "metadata": { + "encoding": "anNvbi9wbGFpbg==" + }, + "data": "eyJjb25uZWN0aW9uSWQiOiJiOTE2ZmQ4Ni1hNWE2LTQ4YWMtODg4MC00OTc4NzRjZjUzY2YiLCJyZXNldCI6ZmFsc2V9" + } + ] + }, + "scheduleToCloseTimeout": "0s", + "scheduleToStartTimeout": "0s", + "startToCloseTimeout": "120s", + "heartbeatTimeout": "30s", + "workflowTaskCompletedEventId": "10", + "retryPolicy": { + "initialInterval": "30s", + "backoffCoefficient": 2, + "maximumInterval": "3000s", + "maximumAttempts": 10 + } + } + }, + { + "eventId": "12", + "eventTime": "2022-03-08T22:47:57.942341Z", + "eventType": "ActivityTaskStarted", + 
"taskId": "1048691", + "activityTaskStartedEventAttributes": { + "scheduledEventId": "11", + "identity": "1@2741f9c3f558", + "requestId": "29e77ce9-f715-4f19-9fe2-b5b94201d0b3", + "attempt": 1 + } + }, + { + "eventId": "13", + "eventTime": "2022-03-08T22:47:58.268669700Z", + "eventType": "ActivityTaskCompleted", + "taskId": "1048692", + "activityTaskCompletedEventAttributes": { + "result": { + "payloads": [ + { + "metadata": { + "encoding": "anNvbi9wbGFpbg==" + }, + "data": "eyJqb2JJZCI6MX0=" + } + ] + }, + "scheduledEventId": "11", + "startedEventId": "12", + "identity": "1@2741f9c3f558" + } + }, + { + "eventId": "14", + "eventTime": "2022-03-08T22:47:58.268723100Z", + "eventType": "WorkflowTaskScheduled", + "taskId": "1048693", + "workflowTaskScheduledEventAttributes": { + "taskQueue": { + "name": "1@2741f9c3f558:e7111d28-2117-4614-9bb1-8a73524b789f", + "kind": "Sticky" + }, + "startToCloseTimeout": "10s", + "attempt": 1 + } + }, + { + "eventId": "15", + "eventTime": "2022-03-08T22:47:58.283301600Z", + "eventType": "WorkflowTaskStarted", + "taskId": "1048697", + "workflowTaskStartedEventAttributes": { + "scheduledEventId": "14", + "identity": "e7111d28-2117-4614-9bb1-8a73524b789f", + "requestId": "18d2f139-0794-4dfb-b36d-1448df3eb350" + } + }, + { + "eventId": "16", + "eventTime": "2022-03-08T22:47:58.302388600Z", + "eventType": "WorkflowTaskCompleted", + "taskId": "1048700", + "workflowTaskCompletedEventAttributes": { + "scheduledEventId": "14", + "startedEventId": "15", + "identity": "1@2741f9c3f558" + } + }, + { + "eventId": "17", + "eventTime": "2022-03-08T22:47:58.302431Z", + "eventType": "ActivityTaskScheduled", + "taskId": "1048701", + "activityTaskScheduledEventAttributes": { + "activityId": "3c1b8fa0-437b-3bc2-a365-352e9a5d765d", + "activityType": { + "name": "CreateNewAttempt" + }, + "taskQueue": { + "name": "CONNECTION_UPDATER", + "kind": "Normal" + }, + "header": {}, + "input": { + "payloads": [ + { + "metadata": { + "encoding": "anNvbi9wbGFpbg==" + }, + "data": "eyJqb2JJZCI6MX0=" + } + ] + }, + "scheduleToCloseTimeout": "0s", + "scheduleToStartTimeout": "0s", + "startToCloseTimeout": "120s", + "heartbeatTimeout": "30s", + "workflowTaskCompletedEventId": "16", + "retryPolicy": { + "initialInterval": "30s", + "backoffCoefficient": 2, + "maximumInterval": "3000s", + "maximumAttempts": 10 + } + } + }, + { + "eventId": "18", + "eventTime": "2022-03-08T22:47:58.314956300Z", + "eventType": "ActivityTaskStarted", + "taskId": "1048705", + "activityTaskStartedEventAttributes": { + "scheduledEventId": "17", + "identity": "1@2741f9c3f558", + "requestId": "0221b660-4f40-4bcf-9e6a-2a9d5898bb91", + "attempt": 1 + } + }, + { + "eventId": "19", + "eventTime": "2022-03-08T22:47:58.400030800Z", + "eventType": "ActivityTaskCompleted", + "taskId": "1048706", + "activityTaskCompletedEventAttributes": { + "result": { + "payloads": [ + { + "metadata": { + "encoding": "anNvbi9wbGFpbg==" + }, + "data": "eyJhdHRlbXB0SWQiOjB9" + } + ] + }, + "scheduledEventId": "17", + "startedEventId": "18", + "identity": "1@2741f9c3f558" + } + }, + { + "eventId": "20", + "eventTime": "2022-03-08T22:47:58.400072800Z", + "eventType": "WorkflowTaskScheduled", + "taskId": "1048707", + "workflowTaskScheduledEventAttributes": { + "taskQueue": { + "name": "1@2741f9c3f558:e7111d28-2117-4614-9bb1-8a73524b789f", + "kind": "Sticky" + }, + "startToCloseTimeout": "10s", + "attempt": 1 + } + }, + { + "eventId": "21", + "eventTime": "2022-03-08T22:47:58.414415400Z", + "eventType": "WorkflowTaskStarted", + "taskId": "1048711", + 
"workflowTaskStartedEventAttributes": { + "scheduledEventId": "20", + "identity": "e7111d28-2117-4614-9bb1-8a73524b789f", + "requestId": "e4110bad-579c-4ac2-a3eb-3836d7d6f841" + } + }, + { + "eventId": "22", + "eventTime": "2022-03-08T22:47:58.431563800Z", + "eventType": "WorkflowTaskCompleted", + "taskId": "1048714", + "workflowTaskCompletedEventAttributes": { + "scheduledEventId": "20", + "startedEventId": "21", + "identity": "1@2741f9c3f558" + } + }, + { + "eventId": "23", + "eventTime": "2022-03-08T22:47:58.431607100Z", + "eventType": "ActivityTaskScheduled", + "taskId": "1048715", + "activityTaskScheduledEventAttributes": { + "activityId": "5aa065bf-5ef2-3e24-b560-c6b3c1f4e2bc", + "activityType": { + "name": "GetSyncWorkflowInput" + }, + "taskQueue": { + "name": "CONNECTION_UPDATER", + "kind": "Normal" + }, + "header": {}, + "input": { + "payloads": [ + { + "metadata": { + "encoding": "anNvbi9wbGFpbg==" + }, + "data": "eyJhdHRlbXB0SWQiOjAsImpvYklkIjoxLCJyZXNldCI6ZmFsc2V9" + } + ] + }, + "scheduleToCloseTimeout": "0s", + "scheduleToStartTimeout": "0s", + "startToCloseTimeout": "120s", + "heartbeatTimeout": "30s", + "workflowTaskCompletedEventId": "22", + "retryPolicy": { + "initialInterval": "30s", + "backoffCoefficient": 2, + "maximumInterval": "3000s", + "maximumAttempts": 10 + } + } + }, + { + "eventId": "24", + "eventTime": "2022-03-08T22:47:58.445218800Z", + "eventType": "ActivityTaskStarted", + "taskId": "1048719", + "activityTaskStartedEventAttributes": { + "scheduledEventId": "23", + "identity": "1@2741f9c3f558", + "requestId": "7165cc86-d137-4b0f-906b-a7e52a1074e4", + "attempt": 1 + } + }, + { + "eventId": "25", + "eventTime": "2022-03-08T22:47:58.471038600Z", + "eventType": "ActivityTaskCompleted", + "taskId": "1048720", + "activityTaskCompletedEventAttributes": { + "result": { + "payloads": [ + { + "metadata": { + "encoding": "anNvbi9wbGFpbg==" + }, + "data": 
"eyJqb2JSdW5Db25maWciOnsiam9iSWQiOiIxIiwiYXR0ZW1wdElkIjowfSwic291cmNlTGF1bmNoZXJDb25maWciOnsiam9iSWQiOiIxIiwiYXR0ZW1wdElkIjowLCJkb2NrZXJJbWFnZSI6ImFpcmJ5dGUvc291cmNlLXBva2VhcGk6MC4xLjQifSwiZGVzdGluYXRpb25MYXVuY2hlckNvbmZpZyI6eyJqb2JJZCI6IjEiLCJhdHRlbXB0SWQiOjAsImRvY2tlckltYWdlIjoiYWlyYnl0ZS9kZXN0aW5hdGlvbi1sb2NhbC1qc29uOjAuMi4xMCJ9LCJzeW5jSW5wdXQiOnsibmFtZXNwYWNlRGVmaW5pdGlvbiI6ImRlc3RpbmF0aW9uIiwibmFtZXNwYWNlRm9ybWF0IjoiJHtTT1VSQ0VfTkFNRVNQQUNFfSIsInByZWZpeCI6IiIsInNvdXJjZUNvbmZpZ3VyYXRpb24iOnsicG9rZW1vbl9uYW1lIjoiZGl0dG8ifSwiZGVzdGluYXRpb25Db25maWd1cmF0aW9uIjp7ImRlc3RpbmF0aW9uX3BhdGgiOiIvdG1wIn0sIm9wZXJhdGlvblNlcXVlbmNlIjpbXSwiY2F0YWxvZyI6eyJzdHJlYW1zIjpbeyJzdHJlYW0iOnsibmFtZSI6InBva2Vtb24iLCJqc29uX3NjaGVtYSI6eyJ0eXBlIjoib2JqZWN0IiwiJHNjaGVtYSI6Imh0dHA6Ly9qc29uLXNjaGVtYS5vcmcvZHJhZnQtMDcvc2NoZW1hIyIsInByb3BlcnRpZXMiOnsiaWQiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sImZvcm1zIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX19LCJtb3ZlcyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJtb3ZlIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX0sInZlcnNpb25fZ3JvdXBfZGV0YWlscyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ2ZXJzaW9uX2dyb3VwIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX0sImxldmVsX2xlYXJuZWRfYXQiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwibW92ZV9sZWFybl9tZXRob2QiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fX19fX19fSwib3JkZXIiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwic3RhdHMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsic3RhdCI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19LCJlZmZvcnQiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwiYmFzZV9zdGF0Ijp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX19fX0sInR5cGVzIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InNsb3QiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwidHlwZSI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19fX19LCJoZWlnaHQiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwid2VpZ2h0Ijp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sInNwZWNpZXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwic3ByaXRlcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJiYWNrX3NoaW55Ijp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiYmFja19mZW1hbGUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJmcm9udF9zaGlueSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sImJhY2tfZGVmYXVsdCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sImZyb250X2ZlbWFsZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sImZyb250X2RlZmF1bHQiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJiYWNrX3NoaW55X2ZlbWFsZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sImZyb250X3NoaW55X2ZlbWFsZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwiYWJpbGl0aWVzIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7In
Nsb3QiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwiYWJpbGl0eSI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19LCJpc19oaWRkZW4iOnsidHlwZSI6WyJudWxsIiwiYm9vbGVhbiJdfX19fSwiaGVsZF9pdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJpdGVtIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX0sInZlcnNpb25fZGV0YWlscyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJyYXJpdHkiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwidmVyc2lvbiI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19fX19fX19LCJpc19kZWZhdWx0ICI6eyJ0eXBlIjpbIm51bGwiLCJib29sZWFuIl19LCJnYW1lX2luZGljZXMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidmVyc2lvbiI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19LCJnYW1lX2luZGV4Ijp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX19fX0sImJhc2VfZXhwZXJpZW5jZSI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJsb2NhdGlvbl9hcmVhX2VuY291bnRlcnMiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX0sInN1cHBvcnRlZF9zeW5jX21vZGVzIjpbImZ1bGxfcmVmcmVzaCJdLCJkZWZhdWx0X2N1cnNvcl9maWVsZCI6W10sInNvdXJjZV9kZWZpbmVkX3ByaW1hcnlfa2V5IjpbXX0sInN5bmNfbW9kZSI6ImZ1bGxfcmVmcmVzaCIsImN1cnNvcl9maWVsZCI6W10sImRlc3RpbmF0aW9uX3N5bmNfbW9kZSI6ImFwcGVuZCIsInByaW1hcnlfa2V5IjpbXX1dfSwicmVzb3VyY2VSZXF1aXJlbWVudHMiOnt9LCJzb3VyY2VSZXNvdXJjZVJlcXVpcmVtZW50cyI6e30sImRlc3RpbmF0aW9uUmVzb3VyY2VSZXF1aXJlbWVudHMiOnt9fX0=" + } + ] + }, + "scheduledEventId": "23", + "startedEventId": "24", + "identity": "1@2741f9c3f558" + } + }, + { + "eventId": "26", + "eventTime": "2022-03-08T22:47:58.471218800Z", + "eventType": "WorkflowTaskScheduled", + "taskId": "1048721", + "workflowTaskScheduledEventAttributes": { + "taskQueue": { + "name": "1@2741f9c3f558:e7111d28-2117-4614-9bb1-8a73524b789f", + "kind": "Sticky" + }, + "startToCloseTimeout": "10s", + "attempt": 1 + } + }, + { + "eventId": "27", + "eventTime": "2022-03-08T22:47:58.485851600Z", + "eventType": "WorkflowTaskStarted", + "taskId": "1048725", + "workflowTaskStartedEventAttributes": { + "scheduledEventId": "26", + "identity": "e7111d28-2117-4614-9bb1-8a73524b789f", + "requestId": "289a7723-efac-4cfa-bad2-f0c022b27421" + } + }, + { + "eventId": "28", + "eventTime": "2022-03-08T22:47:58.513022200Z", + "eventType": "WorkflowTaskCompleted", + "taskId": "1048728", + "workflowTaskCompletedEventAttributes": { + "scheduledEventId": "26", + "startedEventId": "27", + "identity": "1@2741f9c3f558" + } + }, + { + "eventId": "29", + "eventTime": "2022-03-08T22:47:58.513073500Z", + "eventType": "ActivityTaskScheduled", + "taskId": "1048729", + "activityTaskScheduledEventAttributes": { + "activityId": "95686aea-a2ac-3e1e-a512-0790d3a4e95f", + "activityType": { + "name": "ReportJobStart" + }, + "taskQueue": { + "name": "CONNECTION_UPDATER", + "kind": "Normal" + }, + "header": {}, + "input": { + "payloads": [ + { + "metadata": { + "encoding": "anNvbi9wbGFpbg==" + }, + "data": "eyJqb2JJZCI6MX0=" + } + ] + }, + "scheduleToCloseTimeout": "0s", + "scheduleToStartTimeout": "0s", + "startToCloseTimeout": "120s", + "heartbeatTimeout": "30s", + "workflowTaskCompletedEventId": "28", + "retryPolicy": { + "initialInterval": "30s", + 
"backoffCoefficient": 2, + "maximumInterval": "3000s", + "maximumAttempts": 10 + } + } + }, + { + "eventId": "30", + "eventTime": "2022-03-08T22:47:58.528653400Z", + "eventType": "ActivityTaskStarted", + "taskId": "1048733", + "activityTaskStartedEventAttributes": { + "scheduledEventId": "29", + "identity": "1@2741f9c3f558", + "requestId": "0e4b03aa-2493-4f7d-b832-4e98e13551da", + "attempt": 1 + } + }, + { + "eventId": "31", + "eventTime": "2022-03-08T22:47:58.668827900Z", + "eventType": "ActivityTaskCompleted", + "taskId": "1048734", + "activityTaskCompletedEventAttributes": { + "scheduledEventId": "29", + "startedEventId": "30", + "identity": "1@2741f9c3f558" + } + }, + { + "eventId": "32", + "eventTime": "2022-03-08T22:47:58.668874600Z", + "eventType": "WorkflowTaskScheduled", + "taskId": "1048735", + "workflowTaskScheduledEventAttributes": { + "taskQueue": { + "name": "1@2741f9c3f558:e7111d28-2117-4614-9bb1-8a73524b789f", + "kind": "Sticky" + }, + "startToCloseTimeout": "10s", + "attempt": 1 + } + }, + { + "eventId": "33", + "eventTime": "2022-03-08T22:47:58.682929200Z", + "eventType": "WorkflowTaskStarted", + "taskId": "1048739", + "workflowTaskStartedEventAttributes": { + "scheduledEventId": "32", + "identity": "e7111d28-2117-4614-9bb1-8a73524b789f", + "requestId": "caa61b0a-5d34-48a8-ab16-997d3ba9eab5" + } + }, + { + "eventId": "34", + "eventTime": "2022-03-08T22:47:58.735519100Z", + "eventType": "WorkflowTaskCompleted", + "taskId": "1048742", + "workflowTaskCompletedEventAttributes": { + "scheduledEventId": "32", + "startedEventId": "33", + "identity": "1@2741f9c3f558" + } + }, + { + "eventId": "35", + "eventTime": "2022-03-08T22:47:58.735579100Z", + "eventType": "MarkerRecorded", + "taskId": "1048743", + "markerRecordedEventAttributes": { + "markerName": "Version", + "details": { + "changeId": { + "payloads": [ + { + "metadata": { + "encoding": "anNvbi9wbGFpbg==" + }, + "data": "InRhc2tfcXVldWVfY2hhbmdlX2Zyb21fY29ubmVjdGlvbl91cGRhdGVyX3RvX3N5bmMi" + } + ] + }, + "version": { + "payloads": [ + { + "metadata": { + "encoding": "anNvbi9wbGFpbg==" + }, + "data": "MQ==" + } + ] + } + }, + "workflowTaskCompletedEventId": "34" + } + }, + { + "eventId": "36", + "eventTime": "2022-03-08T22:47:58.735800Z", + "eventType": "StartChildWorkflowExecutionInitiated", + "taskId": "1048744", + "startChildWorkflowExecutionInitiatedEventAttributes": { + "namespace": "default", + "workflowId": "sync_1", + "workflowType": { + "name": "SyncWorkflow" + }, + "taskQueue": { + "name": "SYNC" + }, + "input": { + "payloads": [ + { + "metadata": { + "encoding": "anNvbi9wbGFpbg==" + }, + "data": "eyJqb2JJZCI6IjEiLCJhdHRlbXB0SWQiOjB9" + }, + { + "metadata": { + "encoding": "anNvbi9wbGFpbg==" + }, + "data": "eyJqb2JJZCI6IjEiLCJhdHRlbXB0SWQiOjAsImRvY2tlckltYWdlIjoiYWlyYnl0ZS9zb3VyY2UtcG9rZWFwaTowLjEuNCJ9" + }, + { + "metadata": { + "encoding": "anNvbi9wbGFpbg==" + }, + "data": "eyJqb2JJZCI6IjEiLCJhdHRlbXB0SWQiOjAsImRvY2tlckltYWdlIjoiYWlyYnl0ZS9kZXN0aW5hdGlvbi1sb2NhbC1qc29uOjAuMi4xMCJ9" + }, + { + "metadata": { + "encoding": "anNvbi9wbGFpbg==" + }, + "data": 
"eyJuYW1lc3BhY2VEZWZpbml0aW9uIjoiZGVzdGluYXRpb24iLCJuYW1lc3BhY2VGb3JtYXQiOiIke1NPVVJDRV9OQU1FU1BBQ0V9IiwicHJlZml4IjoiIiwic291cmNlQ29uZmlndXJhdGlvbiI6eyJwb2tlbW9uX25hbWUiOiJkaXR0byJ9LCJkZXN0aW5hdGlvbkNvbmZpZ3VyYXRpb24iOnsiZGVzdGluYXRpb25fcGF0aCI6Ii90bXAifSwib3BlcmF0aW9uU2VxdWVuY2UiOltdLCJjYXRhbG9nIjp7InN0cmVhbXMiOlt7InN0cmVhbSI6eyJuYW1lIjoicG9rZW1vbiIsImpzb25fc2NoZW1hIjp7InR5cGUiOiJvYmplY3QiLCIkc2NoZW1hIjoiaHR0cDovL2pzb24tc2NoZW1hLm9yZy9kcmFmdC0wNy9zY2hlbWEjIiwicHJvcGVydGllcyI6eyJpZCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiZm9ybXMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fX0sIm1vdmVzIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7Im1vdmUiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwidmVyc2lvbl9ncm91cF9kZXRhaWxzIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InZlcnNpb25fZ3JvdXAiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwibGV2ZWxfbGVhcm5lZF9hdCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJtb3ZlX2xlYXJuX21ldGhvZCI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19fX19fX19LCJvcmRlciI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJzdGF0cyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJzdGF0Ijp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX0sImVmZm9ydCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJiYXNlX3N0YXQiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfX19fSwidHlwZXMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsic2xvdCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJ0eXBlIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX19fX0sImhlaWdodCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJ3ZWlnaHQiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwic3BlY2llcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19LCJzcHJpdGVzIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7ImJhY2tfc2hpbnkiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJiYWNrX2ZlbWFsZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sImZyb250X3NoaW55Ijp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiYmFja19kZWZhdWx0Ijp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiZnJvbnRfZmVtYWxlIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiZnJvbnRfZGVmYXVsdCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sImJhY2tfc2hpbnlfZmVtYWxlIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiZnJvbnRfc2hpbnlfZmVtYWxlIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19LCJhYmlsaXRpZXMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsic2xvdCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJhYmlsaXR5Ijp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX0sImlzX2hpZGRlbiI6eyJ0eXBlIjpbIm51bGwiLCJib29sZWFuIl19fX19LCJoZWxkX2l0ZW1zIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iam
VjdCJdLCJwcm9wZXJ0aWVzIjp7Iml0ZW0iOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwidmVyc2lvbl9kZXRhaWxzIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InJhcml0eSI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJ2ZXJzaW9uIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX19fX19fX0sImlzX2RlZmF1bHQgIjp7InR5cGUiOlsibnVsbCIsImJvb2xlYW4iXX0sImdhbWVfaW5kaWNlcyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ2ZXJzaW9uIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX0sImdhbWVfaW5kZXgiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfX19fSwiYmFzZV9leHBlcmllbmNlIjp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sImxvY2F0aW9uX2FyZWFfZW5jb3VudGVycyI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwic3VwcG9ydGVkX3N5bmNfbW9kZXMiOlsiZnVsbF9yZWZyZXNoIl0sImRlZmF1bHRfY3Vyc29yX2ZpZWxkIjpbXSwic291cmNlX2RlZmluZWRfcHJpbWFyeV9rZXkiOltdfSwic3luY19tb2RlIjoiZnVsbF9yZWZyZXNoIiwiY3Vyc29yX2ZpZWxkIjpbXSwiZGVzdGluYXRpb25fc3luY19tb2RlIjoiYXBwZW5kIiwicHJpbWFyeV9rZXkiOltdfV19LCJyZXNvdXJjZVJlcXVpcmVtZW50cyI6e30sInNvdXJjZVJlc291cmNlUmVxdWlyZW1lbnRzIjp7fSwiZGVzdGluYXRpb25SZXNvdXJjZVJlcXVpcmVtZW50cyI6e319" + }, + { + "metadata": { + "encoding": "anNvbi9wbGFpbg==" + }, + "data": "ImI5MTZmZDg2LWE1YTYtNDhhYy04ODgwLTQ5Nzg3NGNmNTNjZiI=" + } + ] + }, + "workflowExecutionTimeout": "0s", + "workflowRunTimeout": "0s", + "workflowTaskTimeout": "10s", + "parentClosePolicy": "RequestCancel", + "workflowTaskCompletedEventId": "34", + "workflowIdReusePolicy": "AllowDuplicate", + "header": {} + } + }, + { + "eventId": "37", + "eventTime": "2022-03-08T22:47:58.762930500Z", + "eventType": "ChildWorkflowExecutionStarted", + "taskId": "1048747", + "childWorkflowExecutionStartedEventAttributes": { + "namespace": "default", + "initiatedEventId": "36", + "workflowExecution": { + "workflowId": "sync_1", + "runId": "140640f0-c577-4d9c-8777-4e7e40b64241" + }, + "workflowType": { + "name": "SyncWorkflow" + }, + "header": {} + } + }, + { + "eventId": "38", + "eventTime": "2022-03-08T22:47:58.762971900Z", + "eventType": "WorkflowTaskScheduled", + "taskId": "1048748", + "workflowTaskScheduledEventAttributes": { + "taskQueue": { + "name": "1@2741f9c3f558:e7111d28-2117-4614-9bb1-8a73524b789f", + "kind": "Sticky" + }, + "startToCloseTimeout": "10s", + "attempt": 1 + } + }, + { + "eventId": "39", + "eventTime": "2022-03-08T22:47:58.774051900Z", + "eventType": "WorkflowTaskStarted", + "taskId": "1048752", + "workflowTaskStartedEventAttributes": { + "scheduledEventId": "38", + "identity": "e7111d28-2117-4614-9bb1-8a73524b789f", + "requestId": "ca98144b-47f8-486d-b260-2e1dc42bd68e" + } + }, + { + "eventId": "40", + "eventTime": "2022-03-08T22:47:58.794905100Z", + "eventType": "WorkflowTaskCompleted", + "taskId": "1048755", + "workflowTaskCompletedEventAttributes": { + "scheduledEventId": "38", + "startedEventId": "39", + "identity": "1@2741f9c3f558" + } + }, + { + "eventId": "41", + "eventTime": "2022-03-08T22:48:02.408058300Z", + "eventType": "ChildWorkflowExecutionCompleted", + "taskId": "1048757", + "childWorkflowExecutionCompletedEventAttributes": { + "result": { + "payloads": [ + { + "metadata": { + "encoding": "anNvbi9wbGFpbg==" + }, + "data": 
"eyJzdGFuZGFyZFN5bmNTdW1tYXJ5Ijp7InN0YXR1cyI6ImNvbXBsZXRlZCIsInJlY29yZHNTeW5jZWQiOjEsImJ5dGVzU3luY2VkIjoyMjcxNSwic3RhcnRUaW1lIjoxNjQ2Nzc5Njc4OTM5LCJlbmRUaW1lIjoxNjQ2Nzc5NjgyMjM4LCJ0b3RhbFN0YXRzIjp7InJlY29yZHNFbWl0dGVkIjoxLCJieXRlc0VtaXR0ZWQiOjIyNzE1LCJzdGF0ZU1lc3NhZ2VzRW1pdHRlZCI6MCwicmVjb3Jkc0NvbW1pdHRlZCI6MX0sInN0cmVhbVN0YXRzIjpbeyJzdHJlYW1OYW1lIjoicG9rZW1vbiIsInN0YXRzIjp7InJlY29yZHNFbWl0dGVkIjoxLCJieXRlc0VtaXR0ZWQiOjIyNzE1LCJyZWNvcmRzQ29tbWl0dGVkIjoxfX1dfSwib3V0cHV0X2NhdGFsb2ciOnsic3RyZWFtcyI6W3sic3RyZWFtIjp7Im5hbWUiOiJwb2tlbW9uIiwianNvbl9zY2hlbWEiOnsidHlwZSI6Im9iamVjdCIsIiRzY2hlbWEiOiJodHRwOi8vanNvbi1zY2hlbWEub3JnL2RyYWZ0LTA3L3NjaGVtYSMiLCJwcm9wZXJ0aWVzIjp7ImlkIjp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJmb3JtcyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19fSwibW92ZXMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsibW92ZSI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19LCJ2ZXJzaW9uX2dyb3VwX2RldGFpbHMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidmVyc2lvbl9ncm91cCI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19LCJsZXZlbF9sZWFybmVkX2F0Ijp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sIm1vdmVfbGVhcm5fbWV0aG9kIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX19fX19fX0sIm9yZGVyIjp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sInN0YXRzIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InN0YXQiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwiZWZmb3J0Ijp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sImJhc2Vfc3RhdCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19fX19LCJ0eXBlcyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJzbG90Ijp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sInR5cGUiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fX19fSwiaGVpZ2h0Ijp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sIndlaWdodCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJzcGVjaWVzIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX0sInNwcml0ZXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsiYmFja19zaGlueSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sImJhY2tfZmVtYWxlIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiZnJvbnRfc2hpbnkiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJiYWNrX2RlZmF1bHQiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJmcm9udF9mZW1hbGUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJmcm9udF9kZWZhdWx0Ijp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiYmFja19zaGlueV9mZW1hbGUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJmcm9udF9zaGlueV9mZW1hbGUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX0sImFiaWxpdGllcyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJzbG90Ijp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sImFiaWxpdHkiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZS
I6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwiaXNfaGlkZGVuIjp7InR5cGUiOlsibnVsbCIsImJvb2xlYW4iXX19fX0sImhlbGRfaXRlbXMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsiaXRlbSI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19LCJ2ZXJzaW9uX2RldGFpbHMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsicmFyaXR5Ijp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sInZlcnNpb24iOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fX19fX19fSwiaXNfZGVmYXVsdCAiOnsidHlwZSI6WyJudWxsIiwiYm9vbGVhbiJdfSwiZ2FtZV9pbmRpY2VzIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InZlcnNpb24iOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwiZ2FtZV9pbmRleCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19fX19LCJiYXNlX2V4cGVyaWVuY2UiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwibG9jYXRpb25fYXJlYV9lbmNvdW50ZXJzIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19LCJzdXBwb3J0ZWRfc3luY19tb2RlcyI6WyJmdWxsX3JlZnJlc2giXSwiZGVmYXVsdF9jdXJzb3JfZmllbGQiOltdLCJzb3VyY2VfZGVmaW5lZF9wcmltYXJ5X2tleSI6W119LCJzeW5jX21vZGUiOiJmdWxsX3JlZnJlc2giLCJjdXJzb3JfZmllbGQiOltdLCJkZXN0aW5hdGlvbl9zeW5jX21vZGUiOiJhcHBlbmQiLCJwcmltYXJ5X2tleSI6W119XX0sImZhaWx1cmVzIjpbXX0=" + } + ] + }, + "namespace": "default", + "workflowExecution": { + "workflowId": "sync_1", + "runId": "140640f0-c577-4d9c-8777-4e7e40b64241" + }, + "workflowType": { + "name": "SyncWorkflow" + }, + "initiatedEventId": "36", + "startedEventId": "37" + } + }, + { + "eventId": "42", + "eventTime": "2022-03-08T22:48:02.408127200Z", + "eventType": "WorkflowTaskScheduled", + "taskId": "1048758", + "workflowTaskScheduledEventAttributes": { + "taskQueue": { + "name": "1@2741f9c3f558:e7111d28-2117-4614-9bb1-8a73524b789f", + "kind": "Sticky" + }, + "startToCloseTimeout": "10s", + "attempt": 1 + } + }, + { + "eventId": "43", + "eventTime": "2022-03-08T22:48:02.422112800Z", + "eventType": "WorkflowTaskStarted", + "taskId": "1048762", + "workflowTaskStartedEventAttributes": { + "scheduledEventId": "42", + "identity": "e7111d28-2117-4614-9bb1-8a73524b789f", + "requestId": "c0916d42-c83c-4e9d-805a-29ca5f979624" + } + }, + { + "eventId": "44", + "eventTime": "2022-03-08T22:48:02.454203Z", + "eventType": "WorkflowTaskCompleted", + "taskId": "1048765", + "workflowTaskCompletedEventAttributes": { + "scheduledEventId": "42", + "startedEventId": "43", + "identity": "1@2741f9c3f558" + } + }, + { + "eventId": "45", + "eventTime": "2022-03-08T22:48:02.454256Z", + "eventType": "ActivityTaskScheduled", + "taskId": "1048766", + "activityTaskScheduledEventAttributes": { + "activityId": "b169a729-47bc-38f7-a315-c1a4b6d96466", + "activityType": { + "name": "JobSuccess" + }, + "taskQueue": { + "name": "CONNECTION_UPDATER", + "kind": "Normal" + }, + "header": {}, + "input": { + "payloads": [ + { + "metadata": { + "encoding": "anNvbi9wbGFpbg==" + }, + "data": 
"eyJqb2JJZCI6MSwiYXR0ZW1wdElkIjowLCJzdGFuZGFyZFN5bmNPdXRwdXQiOnsic3RhbmRhcmRTeW5jU3VtbWFyeSI6eyJzdGF0dXMiOiJjb21wbGV0ZWQiLCJyZWNvcmRzU3luY2VkIjoxLCJieXRlc1N5bmNlZCI6MjI3MTUsInN0YXJ0VGltZSI6MTY0Njc3OTY3ODkzOSwiZW5kVGltZSI6MTY0Njc3OTY4MjIzOCwidG90YWxTdGF0cyI6eyJyZWNvcmRzRW1pdHRlZCI6MSwiYnl0ZXNFbWl0dGVkIjoyMjcxNSwic3RhdGVNZXNzYWdlc0VtaXR0ZWQiOjAsInJlY29yZHNDb21taXR0ZWQiOjF9LCJzdHJlYW1TdGF0cyI6W3sic3RyZWFtTmFtZSI6InBva2Vtb24iLCJzdGF0cyI6eyJyZWNvcmRzRW1pdHRlZCI6MSwiYnl0ZXNFbWl0dGVkIjoyMjcxNSwicmVjb3Jkc0NvbW1pdHRlZCI6MX19XX0sIm91dHB1dF9jYXRhbG9nIjp7InN0cmVhbXMiOlt7InN0cmVhbSI6eyJuYW1lIjoicG9rZW1vbiIsImpzb25fc2NoZW1hIjp7InR5cGUiOiJvYmplY3QiLCIkc2NoZW1hIjoiaHR0cDovL2pzb24tc2NoZW1hLm9yZy9kcmFmdC0wNy9zY2hlbWEjIiwicHJvcGVydGllcyI6eyJpZCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiZm9ybXMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fX0sIm1vdmVzIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7Im1vdmUiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwidmVyc2lvbl9ncm91cF9kZXRhaWxzIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InZlcnNpb25fZ3JvdXAiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwibGV2ZWxfbGVhcm5lZF9hdCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJtb3ZlX2xlYXJuX21ldGhvZCI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19fX19fX19LCJvcmRlciI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJzdGF0cyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJzdGF0Ijp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX0sImVmZm9ydCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJiYXNlX3N0YXQiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfX19fSwidHlwZXMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsic2xvdCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJ0eXBlIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX19fX0sImhlaWdodCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJ3ZWlnaHQiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwic3BlY2llcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19LCJzcHJpdGVzIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7ImJhY2tfc2hpbnkiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJiYWNrX2ZlbWFsZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sImZyb250X3NoaW55Ijp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiYmFja19kZWZhdWx0Ijp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiZnJvbnRfZmVtYWxlIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiZnJvbnRfZGVmYXVsdCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sImJhY2tfc2hpbnlfZmVtYWxlIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiZnJvbnRfc2hpbnlfZmVtYWxlIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19LCJhYmlsaXRpZXMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsic2xvdCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJhYmlsaXR5Ijp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZX
J0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX0sImlzX2hpZGRlbiI6eyJ0eXBlIjpbIm51bGwiLCJib29sZWFuIl19fX19LCJoZWxkX2l0ZW1zIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7Iml0ZW0iOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwidmVyc2lvbl9kZXRhaWxzIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InJhcml0eSI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJ2ZXJzaW9uIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX19fX19fX0sImlzX2RlZmF1bHQgIjp7InR5cGUiOlsibnVsbCIsImJvb2xlYW4iXX0sImdhbWVfaW5kaWNlcyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ2ZXJzaW9uIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX0sImdhbWVfaW5kZXgiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfX19fSwiYmFzZV9leHBlcmllbmNlIjp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sImxvY2F0aW9uX2FyZWFfZW5jb3VudGVycyI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwic3VwcG9ydGVkX3N5bmNfbW9kZXMiOlsiZnVsbF9yZWZyZXNoIl0sImRlZmF1bHRfY3Vyc29yX2ZpZWxkIjpbXSwic291cmNlX2RlZmluZWRfcHJpbWFyeV9rZXkiOltdfSwic3luY19tb2RlIjoiZnVsbF9yZWZyZXNoIiwiY3Vyc29yX2ZpZWxkIjpbXSwiZGVzdGluYXRpb25fc3luY19tb2RlIjoiYXBwZW5kIiwicHJpbWFyeV9rZXkiOltdfV19LCJmYWlsdXJlcyI6W119fQ==" + } + ] + }, + "scheduleToCloseTimeout": "0s", + "scheduleToStartTimeout": "0s", + "startToCloseTimeout": "120s", + "heartbeatTimeout": "30s", + "workflowTaskCompletedEventId": "44", + "retryPolicy": { + "initialInterval": "30s", + "backoffCoefficient": 2, + "maximumInterval": "3000s", + "maximumAttempts": 10 + } + } + }, + { + "eventId": "46", + "eventTime": "2022-03-08T22:48:02.437049800Z", + "eventType": "ActivityTaskStarted", + "taskId": "1048770", + "activityTaskStartedEventAttributes": { + "scheduledEventId": "45", + "identity": "1@2741f9c3f558", + "requestId": "9d2fc180-ec33-42a2-a259-d29afb281992", + "attempt": 1 + } + }, + { + "eventId": "47", + "eventTime": "2022-03-08T22:48:02.664164100Z", + "eventType": "ActivityTaskCompleted", + "taskId": "1048771", + "activityTaskCompletedEventAttributes": { + "scheduledEventId": "45", + "startedEventId": "46", + "identity": "1@2741f9c3f558" + } + }, + { + "eventId": "48", + "eventTime": "2022-03-08T22:48:02.664217700Z", + "eventType": "WorkflowTaskScheduled", + "taskId": "1048772", + "workflowTaskScheduledEventAttributes": { + "taskQueue": { + "name": "1@2741f9c3f558:e7111d28-2117-4614-9bb1-8a73524b789f", + "kind": "Sticky" + }, + "startToCloseTimeout": "10s", + "attempt": 1 + } + }, + { + "eventId": "49", + "eventTime": "2022-03-08T22:48:02.676895300Z", + "eventType": "WorkflowTaskStarted", + "taskId": "1048776", + "workflowTaskStartedEventAttributes": { + "scheduledEventId": "48", + "identity": "e7111d28-2117-4614-9bb1-8a73524b789f", + "requestId": "b43c8f30-2500-47d6-a8f6-aa2cd0d99218" + } + }, + { + "eventId": "50", + "eventTime": "2022-03-08T22:48:02.709745Z", + "eventType": "WorkflowTaskCompleted", + "taskId": "1048779", + "workflowTaskCompletedEventAttributes": { + "scheduledEventId": "48", + "startedEventId": "49", + "identity": "1@2741f9c3f558" + } + }, + { + "eventId": "51", + "eventTime": "2022-03-08T22:48:02.709811400Z", + "eventType": "WorkflowExecutionContinuedAsNew", + 
"taskId": "1048780", + "workflowExecutionContinuedAsNewEventAttributes": { + "newExecutionRunId": "e81cf38b-7f11-4eeb-8c85-301778bf2671", + "workflowType": { + "name": "ConnectionManagerWorkflow" + }, + "taskQueue": { + "name": "CONNECTION_UPDATER", + "kind": "Normal" + }, + "input": { + "payloads": [ + { + "metadata": { + "encoding": "anNvbi9wbGFpbg==" + }, + "data": "eyJjb25uZWN0aW9uSWQiOiJiOTE2ZmQ4Ni1hNWE2LTQ4YWMtODg4MC00OTc4NzRjZjUzY2YiLCJqb2JJZCI6bnVsbCwiYXR0ZW1wdElkIjpudWxsLCJmcm9tRmFpbHVyZSI6ZmFsc2UsImF0dGVtcHROdW1iZXIiOjEsIndvcmtmbG93U3RhdGUiOm51bGwsInJlc2V0Q29ubmVjdGlvbiI6ZmFsc2V9" + } + ] + }, + "workflowRunTimeout": "0s", + "workflowTaskTimeout": "10s", + "workflowTaskCompletedEventId": "50", + "header": {} + } + } + ] +} From 0b6456506ec7baf51269a4081a9ec5b28251df16 Mon Sep 17 00:00:00 2001 From: Parker Mossman Date: Thu, 10 Mar 2022 16:45:52 -0800 Subject: [PATCH 13/38] Revert "add /tmp emptyDir volume to connector pods (#10761)" (#11053) This reverts commit 921f4a13c630ac51840cd7bfb7e9077ec630723f. --- .../airbyte/workers/process/KubePodProcess.java | 16 ++-------------- 1 file changed, 2 insertions(+), 14 deletions(-) diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/process/KubePodProcess.java b/airbyte-workers/src/main/java/io/airbyte/workers/process/KubePodProcess.java index 35acfaff01a7..339277295132 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/process/KubePodProcess.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/process/KubePodProcess.java @@ -113,7 +113,6 @@ public class KubePodProcess extends Process implements KubePod { private static final String STDOUT_PIPE_FILE = PIPES_DIR + "/stdout"; private static final String STDERR_PIPE_FILE = PIPES_DIR + "/stderr"; public static final String CONFIG_DIR = "/config"; - public static final String TMP_DIR = "/tmp"; private static final String TERMINATION_DIR = "/termination"; private static final String TERMINATION_FILE_MAIN = TERMINATION_DIR + "/main"; private static final String TERMINATION_FILE_CHECK = TERMINATION_DIR + "/check"; @@ -427,24 +426,13 @@ public KubePodProcess(final boolean isOrchestrator, .withMountPath(TERMINATION_DIR) .build(); - final Volume tmpVolume = new VolumeBuilder() - .withName("tmp") - .withNewEmptyDir() - .endEmptyDir() - .build(); - - final VolumeMount tmpVolumeMount = new VolumeMountBuilder() - .withName("tmp") - .withMountPath(TMP_DIR) - .build(); - final Container init = getInit(usesStdin, List.of(pipeVolumeMount, configVolumeMount), busyboxImage); final Container main = getMain( image, imagePullPolicy, usesStdin, entrypointOverride, - List.of(pipeVolumeMount, configVolumeMount, terminationVolumeMount, tmpVolumeMount), + List.of(pipeVolumeMount, configVolumeMount, terminationVolumeMount), resourceRequirements, internalToExternalPorts, envMap, @@ -512,7 +500,7 @@ public KubePodProcess(final boolean isOrchestrator, .withRestartPolicy("Never") .withInitContainers(init) .withContainers(containers) - .withVolumes(pipeVolume, configVolume, terminationVolume, tmpVolume) + .withVolumes(pipeVolume, configVolume, terminationVolume) .endSpec() .build(); From f9b6b84faae53715310a1b5805138f61b64d11b5 Mon Sep 17 00:00:00 2001 From: Parker Mossman Date: Thu, 10 Mar 2022 17:40:24 -0800 Subject: [PATCH 14/38] Put getState() and isRunning() call within same try/catch block (#11013) * put getState() and isRunning() call within same try/catch block * format --- .../workers/temporal/TemporalClient.java | 53 ++++++++++++------- .../workers/temporal/TemporalClientTest.java | 6 
diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/TemporalClient.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/TemporalClient.java
index b0dabab0a2bc..c843954f4a23 100644
--- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/TemporalClient.java
+++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/TemporalClient.java
@@ -239,7 +239,7 @@ public void submitConnectionUpdaterAsync(final UUID connectionId) {
     try {
       do {
         Thread.sleep(DELAY_BETWEEN_QUERY_MS);
-      } while (!isWorkflowRunning(getConnectionManagerName(connectionId)));
+      } while (!isWorkflowReachable(getConnectionManagerName(connectionId)));
     } catch (final InterruptedException e) {}
 
     return null;
@@ -274,11 +274,11 @@ public static class ManualSyncSubmissionResult {
   public ManualSyncSubmissionResult startNewManualSync(final UUID connectionId) {
     log.info("Manual sync request");
 
-    final boolean isWorflowRunning = isWorkflowRunning(getConnectionManagerName(connectionId));
+    final boolean workflowReachable = isWorkflowReachable(getConnectionManagerName(connectionId));
 
-    if (!isWorflowRunning) {
+    if (!workflowReachable) {
       return new ManualSyncSubmissionResult(
-          Optional.of("No scheduler workflow is running for: " + connectionId),
+          Optional.of("No scheduler workflow is reachable for: " + connectionId),
           Optional.empty());
     }
 
@@ -325,12 +325,12 @@ public class NewCancellationSubmissionResult {
   public ManualSyncSubmissionResult startNewCancelation(final UUID connectionId) {
     log.info("Manual sync request");
 
-    final boolean isWorflowRunning = isWorkflowRunning(getConnectionManagerName(connectionId));
+    final boolean workflowReachable = isWorkflowReachable(getConnectionManagerName(connectionId));
 
-    if (!isWorflowRunning) {
+    if (!workflowReachable) {
       log.error("Can't cancel a non running workflow");
       return new ManualSyncSubmissionResult(
-          Optional.of("No scheduler workflow is running for: " + connectionId),
+          Optional.of("No scheduler workflow is reachable for: " + connectionId),
           Optional.empty());
     }
 
@@ -347,7 +347,7 @@ public ManualSyncSubmissionResult startNewCancelation(final UUID connectionId) {
             Optional.of("Didn't manage cancel a sync for: " + connectionId),
             Optional.empty());
       }
-    } while (connectionManagerWorkflow.getState().isRunning());
+    } while (isWorkflowStateRunning(getConnectionManagerName(connectionId)));
 
     log.info("end of manual cancellation");
 
@@ -361,12 +361,12 @@ public ManualSyncSubmissionResult startNewCancelation(final UUID connectionId) {
   public ManualSyncSubmissionResult resetConnection(final UUID connectionId) {
     log.info("reset sync request");
 
-    final boolean isWorflowRunning = isWorkflowRunning(getConnectionManagerName(connectionId));
+    final boolean workflowReachable = isWorkflowReachable(getConnectionManagerName(connectionId));
 
-    if (!isWorflowRunning) {
-      log.error("Can't reset a non running workflow");
+    if (!workflowReachable) {
+      log.error("Can't reset a non-reachable workflow");
       return new ManualSyncSubmissionResult(
-          Optional.of("No scheduler workflow is running for: " + connectionId),
+          Optional.of("No scheduler workflow is reachable for: " + connectionId),
           Optional.empty());
     }
 
@@ -445,10 +445,10 @@ private T getExistingWorkflow(final Class workflowClass, final String nam
   }
 
   private ConnectionManagerWorkflow getConnectionUpdateWorkflow(final UUID connectionId) {
-    final boolean isWorflowRunning = isWorkflowRunning(getConnectionManagerName(connectionId));
+    final boolean workflowReachable = isWorkflowReachable(getConnectionManagerName(connectionId));
 
-    if (!isWorflowRunning) {
-      throw new IllegalStateException("No running workflow for the connection {} while trying to delete it");
+    if (!workflowReachable) {
+      throw new IllegalStateException("No reachable workflow for the connection {} while trying to delete it");
     }
 
     final ConnectionManagerWorkflow connectionManagerWorkflow =
@@ -476,10 +476,11 @@ TemporalResponse execute(final JobRunConfig jobRunConfig, final Supplier<
   }
 
   /**
-   * Check if a workflow is currently running. Running means that it is query-able, thus we check that
-   * we can properly launch a query
+   * Check if a workflow is reachable for signal calls by attempting to query for current state. If
+   * the query succeeds, the workflow is reachable.
    */
-  public boolean isWorkflowRunning(final String workflowName) {
+  @VisibleForTesting
+  boolean isWorkflowReachable(final String workflowName) {
     try {
       final ConnectionManagerWorkflow connectionManagerWorkflow = getExistingWorkflow(ConnectionManagerWorkflow.class, workflowName);
       connectionManagerWorkflow.getState();
@@ -490,8 +491,22 @@
     }
   }
 
+  /**
+   * Check if a workflow is reachable and has state {@link WorkflowState#isRunning()}
+   */
+  @VisibleForTesting
+  boolean isWorkflowStateRunning(final String workflowName) {
+    try {
+      final ConnectionManagerWorkflow connectionManagerWorkflow = getExistingWorkflow(ConnectionManagerWorkflow.class, workflowName);
+
+      return connectionManagerWorkflow.getState().isRunning();
+    } catch (final Exception e) {
+      return false;
+    }
+  }
+
   @VisibleForTesting
-  public static String getConnectionManagerName(final UUID connectionId) {
+  static String getConnectionManagerName(final UUID connectionId) {
     return "connection_manager_" + connectionId;
   }
 
diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/TemporalClientTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/TemporalClientTest.java
index d675b27d4304..fa9c9e7e3763 100644
--- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/TemporalClientTest.java
+++ b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/TemporalClientTest.java
@@ -203,7 +203,7 @@ void testSubmitSync() {
 
   @Test
   public void testSynchronousResetConnection() {
-    ConnectionManagerWorkflow mConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class);
+    final ConnectionManagerWorkflow mConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class);
     final long jobId1 = 1L;
     final long jobId2 = 2L;
     final long jobId3 = 3L;
@@ -216,11 +216,11 @@ public void testSynchronousResetConnection() {
         new JobInformation(jobId3, 0),
         new JobInformation(jobId3, 0));
 
-    doReturn(true).when(temporalClient).isWorkflowRunning(anyString());
+    doReturn(true).when(temporalClient).isWorkflowReachable(anyString());
 
     when(workflowClient.newWorkflowStub(any(Class.class), anyString())).thenReturn(mConnectionManagerWorkflow);
 
-    ManualSyncSubmissionResult manualSyncSubmissionResult = temporalClient.synchronousResetConnection(CONNECTION_ID);
+    final ManualSyncSubmissionResult manualSyncSubmissionResult = temporalClient.synchronousResetConnection(CONNECTION_ID);
 
     verify(mConnectionManagerWorkflow).resetConnection();

From 5fd0eac7a5c887357b47138fd0677706917e71d2 Mon Sep 17 00:00:00 2001
From: Benoit Moriceau
Date: Thu, 10 Mar 2022 18:04:01 -0800
Subject: [PATCH 15/38] Revert "Remove the attemptId notion in the
 connectionManagerWorkflow (#10780)" (#11057)

This reverts commit 99338c852a33b07d63ca1c5cb53695e3e51bcba4.
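For context on what is being backed out: the reverted commit had used Temporal's workflow versioning so that histories recorded before the attemptId-to-attemptNumber rename would still replay deterministically. The hunks below delete that branching and restore the plain attemptId calls. The deleted gating pattern, reassembled as a sketch from the removed lines (oldInput/newInput are stand-ins for the real activity inputs):

  final int version = Workflow.getVersion(
      RENAME_ATTEMPT_ID_TO_NUMBER_TAG,              // "rename_attempt_id_to_number"
      Workflow.DEFAULT_VERSION,                     // histories recorded before the rename
      RENAME_ATTEMPT_ID_TO_NUMBER_CURRENT_VERSION); // 1

  if (version < RENAME_ATTEMPT_ID_TO_NUMBER_CURRENT_VERSION) {
    // Replaying an old history: keep the original activity call.
    runMandatoryActivity(jobCreationAndStatusUpdateActivity::jobSuccess, oldInput);
  } else {
    // New executions take the attemptNumber variant.
    runMandatoryActivity(jobCreationAndStatusUpdateActivity::jobSuccessWithAttemptNumber, newInput);
  }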
---
 .../scheduler/persistence/JobPersistence.java |   6 +-
 airbyte-workers/build.gradle                  |   2 +-
 .../ConnectionManagerWorkflowImpl.java        | 134 +--
 .../activities/GenerateInputActivity.java     |  17 -
 .../activities/GenerateInputActivityImpl.java |   8 -
 .../JobCreationAndStatusUpdateActivity.java   |  70 --
 ...obCreationAndStatusUpdateActivityImpl.java |  43 -
 .../state/WorkflowInternalState.java          |   2 +-
 .../ConnectionManagerWorkflowTest.java        |  74 +-
 .../scheduling/WorkflowReplayingTest.java     |  24 -
 ...obCreationAndStatusUpdateActivityTest.java |  52 -
 .../src/test/resources/workflowHistory.json   | 939 ------------------
 12 files changed, 67 insertions(+), 1304 deletions(-)
 delete mode 100644 airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/WorkflowReplayingTest.java
 delete mode 100644 airbyte-workers/src/test/resources/workflowHistory.json

diff --git a/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/JobPersistence.java b/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/JobPersistence.java
index 20db344f2fd6..cda5b52f5864 100644
--- a/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/JobPersistence.java
+++ b/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/JobPersistence.java
@@ -75,12 +75,12 @@ public interface JobPersistence {
   //
 
   /**
-   * Create a new attempt for a job and return its attempt number. Throws
-   * {@link IllegalStateException} if the job is already in a terminal state.
+   * Create a new attempt for a job. Throws {@link IllegalStateException} if the job is already in a
+   * terminal state.
    *
    * @param jobId job for which an attempt will be created
   * @param logPath path where logs should be written for the attempt
-   * @return The attempt number of the created attempt (see {@link DefaultJobPersistence})
+   * @return id of the attempt
    * @throws IOException exception due to interaction with persistence
    */
  int createAttempt(long jobId, Path logPath) throws IOException;

diff --git a/airbyte-workers/build.gradle b/airbyte-workers/build.gradle
index ea4d418ed7c0..e37a5b9a4482 100644
--- a/airbyte-workers/build.gradle
+++ b/airbyte-workers/build.gradle
@@ -33,7 +33,7 @@ dependencies {
   implementation project(':airbyte-scheduler:models')
 
   testImplementation 'io.temporal:temporal-testing:1.8.1'
-  testImplementation 'com.jayway.jsonpath:json-path:2.7.0'
+  testImplementation 'io.temporal:temporal-testing-junit5:1.5.0' // versioned separately from rest of temporal
   testImplementation "org.flywaydb:flyway-core:7.14.0"
   testImplementation 'org.mockito:mockito-inline:4.0.0'
   testImplementation 'org.postgresql:postgresql:42.2.18'

diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java
index 16ecec71a2fb..c654f034880b 100644
--- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java
+++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java
@@ -21,20 +21,15 @@
 import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity;
 import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity.GeneratedJobInput;
 import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity.SyncInput;
-import
io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity.SyncInputWithAttemptNumber; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptCreationInput; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptCreationOutput; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptFailureInput; -import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptNumberCreationOutput; -import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptNumberFailureInput; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobCancelledInput; -import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobCancelledInputWithAttemptNumber; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobCreationInput; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobCreationOutput; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobFailureInput; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobSuccessInput; -import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobSuccessInputWithAttemptNumber; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.ReportJobStartInput; import io.airbyte.workers.temporal.scheduling.shared.ActivityConfiguration; import io.airbyte.workers.temporal.scheduling.state.WorkflowInternalState; @@ -65,9 +60,6 @@ public class ConnectionManagerWorkflowImpl implements ConnectionManagerWorkflow private static final int TASK_QUEUE_CHANGE_CURRENT_VERSION = 1; private static final int AUTO_DISABLE_FAILING_CONNECTION_CHANGE_CURRENT_VERSION = 1; - private static final String RENAME_ATTEMPT_ID_TO_NUMBER_TAG = "rename_attempt_id_to_number"; - private static final int RENAME_ATTEMPT_ID_TO_NUMBER_CURRENT_VERSION = 1; - private WorkflowState workflowState = new WorkflowState(UUID.randomUUID(), new NoopStateListener()); private final WorkflowInternalState workflowInternalState = new WorkflowInternalState(); @@ -155,7 +147,7 @@ private CancellationScope generateSyncWorkflowRunnable(final ConnectionUpdaterIn workflowInternalState.setJobId(getOrCreateJobId(connectionUpdaterInput)); - workflowInternalState.setAttemptNumber(createAttempt(workflowInternalState.getJobId())); + workflowInternalState.setAttemptId(createAttemptId(workflowInternalState.getJobId())); final GeneratedJobInput jobInputs = getJobInput(); @@ -191,13 +183,13 @@ private CancellationScope generateSyncWorkflowRunnable(final ConnectionUpdaterIn af.getActivityType(), af.getCause(), workflowInternalState.getJobId(), - workflowInternalState.getAttemptNumber())); + workflowInternalState.getAttemptId())); reportFailure(connectionUpdaterInput, standardSyncOutput); prepareForNextRunAndContinueAsNew(connectionUpdaterInput); } else { workflowInternalState.getFailures().add( FailureHelper.unknownOriginFailure(childWorkflowFailure.getCause(), workflowInternalState.getJobId(), - workflowInternalState.getAttemptNumber())); + workflowInternalState.getAttemptId())); reportFailure(connectionUpdaterInput, standardSyncOutput); 
prepareForNextRunAndContinueAsNew(connectionUpdaterInput); } @@ -207,41 +199,20 @@ private CancellationScope generateSyncWorkflowRunnable(final ConnectionUpdaterIn private void reportSuccess(final ConnectionUpdaterInput connectionUpdaterInput, final StandardSyncOutput standardSyncOutput) { workflowState.setSuccess(true); - final int attemptCreationVersion = - Workflow.getVersion(RENAME_ATTEMPT_ID_TO_NUMBER_TAG, Workflow.DEFAULT_VERSION, RENAME_ATTEMPT_ID_TO_NUMBER_CURRENT_VERSION); - - if (attemptCreationVersion < RENAME_ATTEMPT_ID_TO_NUMBER_CURRENT_VERSION) { - runMandatoryActivity(jobCreationAndStatusUpdateActivity::jobSuccess, new JobSuccessInput( - workflowInternalState.getJobId(), - workflowInternalState.getAttemptNumber(), - standardSyncOutput)); - } else { - runMandatoryActivity(jobCreationAndStatusUpdateActivity::jobSuccessWithAttemptNumber, new JobSuccessInputWithAttemptNumber( - workflowInternalState.getJobId(), - workflowInternalState.getAttemptNumber(), - standardSyncOutput)); - } + runMandatoryActivity(jobCreationAndStatusUpdateActivity::jobSuccess, new JobSuccessInput( + workflowInternalState.getJobId(), + workflowInternalState.getAttemptId(), + standardSyncOutput)); resetNewConnectionInput(connectionUpdaterInput); } private void reportFailure(final ConnectionUpdaterInput connectionUpdaterInput, final StandardSyncOutput standardSyncOutput) { - final int attemptCreationVersion = - Workflow.getVersion(RENAME_ATTEMPT_ID_TO_NUMBER_TAG, Workflow.DEFAULT_VERSION, RENAME_ATTEMPT_ID_TO_NUMBER_CURRENT_VERSION); - - if (attemptCreationVersion < RENAME_ATTEMPT_ID_TO_NUMBER_CURRENT_VERSION) { - runMandatoryActivity(jobCreationAndStatusUpdateActivity::attemptFailure, new AttemptFailureInput( - workflowInternalState.getJobId(), - workflowInternalState.getAttemptNumber(), - standardSyncOutput, - FailureHelper.failureSummary(workflowInternalState.getFailures(), workflowInternalState.getPartialSuccess()))); - } else { - runMandatoryActivity(jobCreationAndStatusUpdateActivity::attemptFailureWithAttemptNumber, new AttemptNumberFailureInput( - workflowInternalState.getJobId(), - workflowInternalState.getAttemptNumber(), - standardSyncOutput, - FailureHelper.failureSummary(workflowInternalState.getFailures(), workflowInternalState.getPartialSuccess()))); - } + runMandatoryActivity(jobCreationAndStatusUpdateActivity::attemptFailure, new AttemptFailureInput( + workflowInternalState.getJobId(), + workflowInternalState.getAttemptId(), + standardSyncOutput, + FailureHelper.failureSummary(workflowInternalState.getFailures(), workflowInternalState.getPartialSuccess()))); final int maxAttempt = configFetchActivity.getMaxAttempt().getMaxAttempt(); final int attemptNumber = connectionUpdaterInput.getAttemptNumber(); @@ -335,20 +306,20 @@ public WorkflowState getState() { @Override public JobInformation getJobInformation() { final Long jobId = workflowInternalState.getJobId(); - final Integer attemptNumber = workflowInternalState.getAttemptNumber(); + final Integer attemptId = workflowInternalState.getAttemptId(); return new JobInformation( jobId == null ? NON_RUNNING_JOB_ID : jobId, - attemptNumber == null ? NON_RUNNING_ATTEMPT_ID : attemptNumber); + attemptId == null ? 
NON_RUNNING_ATTEMPT_ID : attemptId); } @Override public QuarantinedInformation getQuarantinedInformation() { final Long jobId = workflowInternalState.getJobId(); - final Integer attemptNumber = workflowInternalState.getAttemptNumber(); + final Integer attemptId = workflowInternalState.getAttemptId(); return new QuarantinedInformation( connectionId, jobId == null ? NON_RUNNING_JOB_ID : jobId, - attemptNumber == null ? NON_RUNNING_ATTEMPT_ID : attemptNumber, + attemptId == null ? NON_RUNNING_ATTEMPT_ID : attemptId, workflowState.isQuarantined()); } @@ -448,31 +419,15 @@ private Long getOrCreateJobId(final ConnectionUpdaterInput connectionUpdaterInpu /** * Create a new attempt for a given jobId - * - * @param jobId - the jobId associated with the new attempt - * - * @return The attempt number */ - private Integer createAttempt(final long jobId) { - final int attemptCreationVersion = - Workflow.getVersion(RENAME_ATTEMPT_ID_TO_NUMBER_TAG, Workflow.DEFAULT_VERSION, RENAME_ATTEMPT_ID_TO_NUMBER_CURRENT_VERSION); - - // Retrieve the attempt number but name it attempt id - if (attemptCreationVersion < RENAME_ATTEMPT_ID_TO_NUMBER_CURRENT_VERSION) { - final AttemptCreationOutput attemptCreationOutput = - runMandatoryActivityWithOutput( - jobCreationAndStatusUpdateActivity::createNewAttempt, - new AttemptCreationInput( - jobId)); - return attemptCreationOutput.getAttemptId(); - } - - final AttemptNumberCreationOutput attemptNumberCreationOutput = + private Integer createAttemptId(final long jobId) { + final AttemptCreationOutput attemptCreationOutput = runMandatoryActivityWithOutput( - jobCreationAndStatusUpdateActivity::createNewAttemptNumber, + jobCreationAndStatusUpdateActivity::createNewAttempt, new AttemptCreationInput( jobId)); - return attemptNumberCreationOutput.getAttemptNumber(); + + return attemptCreationOutput.getAttemptId(); } /** @@ -481,30 +436,14 @@ private Integer createAttempt(final long jobId) { */ private GeneratedJobInput getJobInput() { final Long jobId = workflowInternalState.getJobId(); - final Integer attemptNumber = workflowInternalState.getAttemptNumber(); - final int attemptCreationVersion = - Workflow.getVersion(RENAME_ATTEMPT_ID_TO_NUMBER_TAG, Workflow.DEFAULT_VERSION, RENAME_ATTEMPT_ID_TO_NUMBER_CURRENT_VERSION); - - if (attemptCreationVersion < RENAME_ATTEMPT_ID_TO_NUMBER_CURRENT_VERSION) { - final SyncInput getSyncInputActivitySyncInput = new SyncInput( - attemptNumber, - jobId, - workflowState.isResetConnection()); - - final GeneratedJobInput syncWorkflowInputs = runMandatoryActivityWithOutput( - getSyncInputActivity::getSyncWorkflowInput, - getSyncInputActivitySyncInput); - - return syncWorkflowInputs; - } - - final SyncInputWithAttemptNumber getSyncInputActivitySyncInput = new SyncInputWithAttemptNumber( - attemptNumber, + final Integer attemptId = workflowInternalState.getAttemptId(); + final SyncInput getSyncInputActivitySyncInput = new SyncInput( + attemptId, jobId, workflowState.isResetConnection()); final GeneratedJobInput syncWorkflowInputs = runMandatoryActivityWithOutput( - getSyncInputActivity::getSyncWorkflowInputWithAttemptNumber, + getSyncInputActivity::getSyncWorkflowInput, getSyncInputActivitySyncInput); return syncWorkflowInputs; @@ -590,25 +529,14 @@ private void deleteConnectionBeforeTerminatingTheWorkflow() { private void reportCancelledAndContinueWith(final boolean isReset, final ConnectionUpdaterInput connectionUpdaterInput) { workflowState.setContinueAsReset(isReset); final Long jobId = workflowInternalState.getJobId(); - final Integer 
attemptNumber = workflowInternalState.getAttemptNumber(); + final Integer attemptId = workflowInternalState.getAttemptId(); final Set failures = workflowInternalState.getFailures(); final Boolean partialSuccess = workflowInternalState.getPartialSuccess(); - final int attemptCreationVersion = - Workflow.getVersion(RENAME_ATTEMPT_ID_TO_NUMBER_TAG, Workflow.DEFAULT_VERSION, RENAME_ATTEMPT_ID_TO_NUMBER_CURRENT_VERSION); - - if (attemptCreationVersion < RENAME_ATTEMPT_ID_TO_NUMBER_CURRENT_VERSION) { - runMandatoryActivity(jobCreationAndStatusUpdateActivity::jobCancelled, - new JobCancelledInput( - jobId, - attemptNumber, - FailureHelper.failureSummaryForCancellation(jobId, attemptNumber, failures, partialSuccess))); - } else { - runMandatoryActivity(jobCreationAndStatusUpdateActivity::jobCancelledWithAttemptNumber, - new JobCancelledInputWithAttemptNumber( - jobId, - attemptNumber, - FailureHelper.failureSummaryForCancellation(jobId, attemptNumber, failures, partialSuccess))); - } + runMandatoryActivity(jobCreationAndStatusUpdateActivity::jobCancelled, + new JobCancelledInput( + jobId, + attemptId, + FailureHelper.failureSummaryForCancellation(jobId, attemptId, failures, partialSuccess))); resetNewConnectionInput(connectionUpdaterInput); prepareForNextRunAndContinueAsNew(connectionUpdaterInput); } diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/GenerateInputActivity.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/GenerateInputActivity.java index 60191311ef34..0a1ed70c3008 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/GenerateInputActivity.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/GenerateInputActivity.java @@ -27,17 +27,6 @@ class SyncInput { } - @Data - @NoArgsConstructor - @AllArgsConstructor - class SyncInputWithAttemptNumber { - - private int attemptNumber; - private long jobId; - private boolean reset; - - } - @Data @NoArgsConstructor @AllArgsConstructor @@ -56,10 +45,4 @@ class GeneratedJobInput { @ActivityMethod GeneratedJobInput getSyncWorkflowInput(SyncInput input); - /** - * This generate the input needed by the child sync workflow - */ - @ActivityMethod - GeneratedJobInput getSyncWorkflowInputWithAttemptNumber(SyncInputWithAttemptNumber input); - } diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/GenerateInputActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/GenerateInputActivityImpl.java index bcb939d022c8..30edaf108adc 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/GenerateInputActivityImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/GenerateInputActivityImpl.java @@ -76,12 +76,4 @@ public GeneratedJobInput getSyncWorkflowInput(final SyncInput input) { } } - @Override - public GeneratedJobInput getSyncWorkflowInputWithAttemptNumber(final SyncInputWithAttemptNumber input) { - return getSyncWorkflowInput(new SyncInput( - input.getAttemptNumber(), - input.getJobId(), - input.isReset())); - } - } diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivity.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivity.java index 2de587c2a15b..aa45b53b0e8c 100644 --- 
a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivity.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivity.java @@ -72,24 +72,6 @@ class AttemptCreationOutput { @ActivityMethod AttemptCreationOutput createNewAttempt(AttemptCreationInput input) throws RetryableException; - @Data - @NoArgsConstructor - @AllArgsConstructor - class AttemptNumberCreationOutput { - - private int attemptNumber; - - } - - /** - * Create a new attempt for a given job ID - * - * @param input POJO containing the jobId - * @return A POJO containing the attemptNumber - */ - @ActivityMethod - AttemptNumberCreationOutput createNewAttemptNumber(AttemptCreationInput input) throws RetryableException; - @Data @NoArgsConstructor @AllArgsConstructor @@ -107,23 +89,6 @@ class JobSuccessInput { @ActivityMethod void jobSuccess(JobSuccessInput input); - @Data - @NoArgsConstructor - @AllArgsConstructor - class JobSuccessInputWithAttemptNumber { - - private long jobId; - private int attemptNumber; - private StandardSyncOutput standardSyncOutput; - - } - - /** - * Set a job status as successful - */ - @ActivityMethod - void jobSuccessWithAttemptNumber(JobSuccessInputWithAttemptNumber input); - @Data @NoArgsConstructor @AllArgsConstructor @@ -158,24 +123,6 @@ class AttemptFailureInput { @ActivityMethod void attemptFailure(AttemptFailureInput input); - @Data - @NoArgsConstructor - @AllArgsConstructor - class AttemptNumberFailureInput { - - private long jobId; - private int attemptNumber; - private StandardSyncOutput standardSyncOutput; - private AttemptFailureSummary attemptFailureSummary; - - } - - /** - * Set an attempt status as failed - */ - @ActivityMethod - void attemptFailureWithAttemptNumber(AttemptNumberFailureInput input); - @Data @NoArgsConstructor @AllArgsConstructor @@ -193,23 +140,6 @@ class JobCancelledInput { @ActivityMethod void jobCancelled(JobCancelledInput input); - @Data - @NoArgsConstructor - @AllArgsConstructor - class JobCancelledInputWithAttemptNumber { - - private long jobId; - private int attemptNumber; - private AttemptFailureSummary attemptFailureSummary; - - } - - /** - * Set a job status as cancelled - */ - @ActivityMethod - void jobCancelledWithAttemptNumber(JobCancelledInputWithAttemptNumber input); - @Data @NoArgsConstructor @AllArgsConstructor diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityImpl.java index b9a76065e776..e02796675174 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityImpl.java @@ -128,24 +128,6 @@ public AttemptCreationOutput createNewAttempt(final AttemptCreationInput input) } } - @Override - public AttemptNumberCreationOutput createNewAttemptNumber(final AttemptCreationInput input) throws RetryableException { - try { - final long jobId = input.getJobId(); - final Job createdJob = jobPersistence.getJob(jobId); - - final WorkerRun workerRun = temporalWorkerRunFactory.create(createdJob); - final Path logFilePath = workerRun.getJobRoot().resolve(LogClientSingleton.LOG_FILENAME); - final int persistedAttemptNumber = jobPersistence.createAttempt(jobId, 
logFilePath); - emitJobIdToReleaseStagesMetric(MetricsRegistry.ATTEMPT_CREATED_BY_RELEASE_STAGE, jobId); - - LogClientSingleton.getInstance().setJobMdc(workerEnvironment, logConfigs, workerRun.getJobRoot()); - return new AttemptNumberCreationOutput(persistedAttemptNumber); - } catch (final IOException e) { - throw new RetryableException(e); - } - } - @Override public void jobSuccess(final JobSuccessInput input) { try { @@ -170,14 +152,6 @@ public void jobSuccess(final JobSuccessInput input) { } } - @Override - public void jobSuccessWithAttemptNumber(final JobSuccessInputWithAttemptNumber input) { - jobSuccess(new JobSuccessInput( - input.getJobId(), - input.getAttemptNumber(), - input.getStandardSyncOutput())); - } - @Override public void jobFailure(final JobFailureInput input) { try { @@ -217,15 +191,6 @@ public void attemptFailure(final AttemptFailureInput input) { } } - @Override - public void attemptFailureWithAttemptNumber(final AttemptNumberFailureInput input) { - attemptFailure(new AttemptFailureInput( - input.getJobId(), - input.getAttemptNumber(), - input.getStandardSyncOutput(), - input.getAttemptFailureSummary())); - } - @Override public void jobCancelled(final JobCancelledInput input) { try { @@ -244,14 +209,6 @@ public void jobCancelled(final JobCancelledInput input) { } } - @Override - public void jobCancelledWithAttemptNumber(final JobCancelledInputWithAttemptNumber input) { - jobCancelled(new JobCancelledInput( - input.getJobId(), - input.getAttemptNumber(), - input.getAttemptFailureSummary())); - } - @Override public void reportJobStart(final ReportJobStartInput input) { try { diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/state/WorkflowInternalState.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/state/WorkflowInternalState.java index d9bdcb2d5800..822bbfe0ba48 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/state/WorkflowInternalState.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/state/WorkflowInternalState.java @@ -17,7 +17,7 @@ public class WorkflowInternalState { private Long jobId = null; - private Integer attemptNumber = null; + private Integer attemptId = null; // StandardSyncOutput standardSyncOutput = null; private final Set failures = new HashSet<>(); diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowTest.java index 8353b9116940..ca897b5afe80 100644 --- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowTest.java +++ b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowTest.java @@ -16,14 +16,13 @@ import io.airbyte.workers.temporal.scheduling.activities.ConfigFetchActivity.ScheduleRetrieverOutput; import io.airbyte.workers.temporal.scheduling.activities.ConnectionDeletionActivity; import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity.GeneratedJobInput; -import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity.SyncInputWithAttemptNumber; +import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity.SyncInput; import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivityImpl; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity; -import 
io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptNumberCreationOutput; -import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptNumberFailureInput; -import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobCancelledInputWithAttemptNumber; +import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptCreationOutput; +import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptFailureInput; +import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobCancelledInput; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobCreationOutput; -import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobSuccessInputWithAttemptNumber; import io.airbyte.workers.temporal.scheduling.state.WorkflowState; import io.airbyte.workers.temporal.scheduling.state.listener.TestStateListener; import io.airbyte.workers.temporal.scheduling.state.listener.WorkflowStateChangedListener.ChangedStateEvent; @@ -122,11 +121,11 @@ public void setUp() { .thenReturn(new JobCreationOutput( 1L)); - Mockito.when(mJobCreationAndStatusUpdateActivity.createNewAttemptNumber(Mockito.any())) - .thenReturn(new AttemptNumberCreationOutput( + Mockito.when(mJobCreationAndStatusUpdateActivity.createNewAttempt(Mockito.any())) + .thenReturn(new AttemptCreationOutput( 1)); - Mockito.when(mGenerateInputActivityImpl.getSyncWorkflowInputWithAttemptNumber(Mockito.any(SyncInputWithAttemptNumber.class))) + Mockito.when(mGenerateInputActivityImpl.getSyncWorkflowInput(Mockito.any(SyncInput.class))) .thenReturn( new GeneratedJobInput( new JobRunConfig(), @@ -511,8 +510,7 @@ public void cancelRunning() throws InterruptedException { .filteredOn(changedStateEvent -> changedStateEvent.getField() == StateField.CANCELLED && changedStateEvent.isValue()) .hasSizeGreaterThanOrEqualTo(1); - Mockito.verify(mJobCreationAndStatusUpdateActivity) - .jobCancelledWithAttemptNumber(Mockito.argThat(new HasCancellationFailure(JOB_ID, ATTEMPT_ID))); + Mockito.verify(mJobCreationAndStatusUpdateActivity).jobCancelled(Mockito.argThat(new HasCancellationFailure(JOB_ID, ATTEMPT_ID))); } @RepeatedTest(10) @@ -596,7 +594,7 @@ public void resetCancelRunningWorkflow() throws InterruptedException { .filteredOn(changedStateEvent -> changedStateEvent.getField() == StateField.RESET && changedStateEvent.isValue()) .hasSizeGreaterThanOrEqualTo(1); - Mockito.verify(mJobCreationAndStatusUpdateActivity).jobCancelledWithAttemptNumber(Mockito.any(JobCancelledInputWithAttemptNumber.class)); + Mockito.verify(mJobCreationAndStatusUpdateActivity).jobCancelled(Mockito.any()); } @@ -689,7 +687,7 @@ public void updatedSignalReceivedWhileRunning() throws InterruptedException { .filteredOn(changedStateEvent -> changedStateEvent.getField() == StateField.UPDATED && changedStateEvent.isValue()) .hasSizeGreaterThanOrEqualTo(1); - Mockito.verify(mJobCreationAndStatusUpdateActivity).jobSuccessWithAttemptNumber(Mockito.any(JobSuccessInputWithAttemptNumber.class)); + Mockito.verify(mJobCreationAndStatusUpdateActivity).jobSuccess(Mockito.any()); } } @@ -749,10 +747,8 @@ public void testSourceAndDestinationFailuresRecorded() throws InterruptedExcepti workflow.submitManualSync(); testEnv.sleep(Duration.ofMinutes(1L)); // any time after no-waiting manual run - 
Mockito.verify(mJobCreationAndStatusUpdateActivity) - .attemptFailureWithAttemptNumber(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.SOURCE))); - Mockito.verify(mJobCreationAndStatusUpdateActivity) - .attemptFailureWithAttemptNumber(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.DESTINATION))); + Mockito.verify(mJobCreationAndStatusUpdateActivity).attemptFailure(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.SOURCE))); + Mockito.verify(mJobCreationAndStatusUpdateActivity).attemptFailure(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.DESTINATION))); } @RepeatedTest(10) @@ -787,8 +783,7 @@ public void testNormalizationFailure() throws InterruptedException { workflow.submitManualSync(); testEnv.sleep(Duration.ofMinutes(1L)); // any time after no-waiting manual run - Mockito.verify(mJobCreationAndStatusUpdateActivity) - .attemptFailureWithAttemptNumber(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.NORMALIZATION))); + Mockito.verify(mJobCreationAndStatusUpdateActivity).attemptFailure(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.NORMALIZATION))); } @RepeatedTest(10) @@ -823,8 +818,7 @@ public void testDbtFailureRecorded() throws InterruptedException { workflow.submitManualSync(); testEnv.sleep(Duration.ofMinutes(1L)); // any time after no-waiting manual run - Mockito.verify(mJobCreationAndStatusUpdateActivity) - .attemptFailureWithAttemptNumber(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.DBT))); + Mockito.verify(mJobCreationAndStatusUpdateActivity).attemptFailure(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.DBT))); } @RepeatedTest(10) @@ -859,8 +853,7 @@ public void testPersistenceFailureRecorded() throws InterruptedException { workflow.submitManualSync(); testEnv.sleep(Duration.ofMinutes(1L)); // any time after no-waiting manual run - Mockito.verify(mJobCreationAndStatusUpdateActivity) - .attemptFailureWithAttemptNumber(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.PERSISTENCE))); + Mockito.verify(mJobCreationAndStatusUpdateActivity).attemptFailure(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.PERSISTENCE))); } @RepeatedTest(10) @@ -895,8 +888,7 @@ public void testReplicationFailureRecorded() throws InterruptedException { workflow.submitManualSync(); testEnv.sleep(Duration.ofMinutes(1L)); // any time after no-waiting manual run - Mockito.verify(mJobCreationAndStatusUpdateActivity) - .attemptFailureWithAttemptNumber(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.REPLICATION))); + Mockito.verify(mJobCreationAndStatusUpdateActivity).attemptFailure(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.REPLICATION))); } } @@ -914,13 +906,12 @@ public static Stream getSetupFailingFailingActivityBeforeRun() { return Stream.of( Arguments.of(new Thread(() -> Mockito.when(mJobCreationAndStatusUpdateActivity.createNewJob(Mockito.any())) .thenThrow(ApplicationFailure.newNonRetryableFailure("", "")))), - Arguments.of(new Thread(() -> Mockito.when(mJobCreationAndStatusUpdateActivity.createNewAttemptNumber(Mockito.any())) + Arguments.of(new Thread(() -> Mockito.when(mJobCreationAndStatusUpdateActivity.createNewAttempt(Mockito.any())) .thenThrow(ApplicationFailure.newNonRetryableFailure("", "")))), Arguments.of(new Thread(() -> Mockito.doThrow(ApplicationFailure.newNonRetryableFailure("", "")) .when(mJobCreationAndStatusUpdateActivity).reportJobStart(Mockito.any()))), - Arguments.of(new Thread( - () -> 
Mockito.when(mGenerateInputActivityImpl.getSyncWorkflowInputWithAttemptNumber(Mockito.any(SyncInputWithAttemptNumber.class))) - .thenThrow(ApplicationFailure.newNonRetryableFailure("", ""))))); + Arguments.of(new Thread(() -> Mockito.when(mGenerateInputActivityImpl.getSyncWorkflowInput(Mockito.any())) + .thenThrow(ApplicationFailure.newNonRetryableFailure("", ""))))); } @ParameterizedTest @@ -1005,10 +996,10 @@ public static Stream getSetupFailingFailingActivityAfterRun() { return Stream.of( Arguments.of((Consumer) ((ConnectionManagerWorkflow workflow) -> System.out.println("do Nothing")), new Thread(() -> Mockito.doThrow(ApplicationFailure.newNonRetryableFailure("", "")) - .when(mJobCreationAndStatusUpdateActivity).jobSuccessWithAttemptNumber(Mockito.any(JobSuccessInputWithAttemptNumber.class)))), + .when(mJobCreationAndStatusUpdateActivity).jobSuccess(Mockito.any()))), Arguments.of((Consumer) ((ConnectionManagerWorkflow workflow) -> workflow.cancelJob()), new Thread(() -> Mockito.doThrow(ApplicationFailure.newNonRetryableFailure("", "")) - .when(mJobCreationAndStatusUpdateActivity).jobCancelledWithAttemptNumber(Mockito.any(JobCancelledInputWithAttemptNumber.class)))), + .when(mJobCreationAndStatusUpdateActivity).jobCancelled(Mockito.any()))), Arguments.of((Consumer) ((ConnectionManagerWorkflow workflow) -> workflow.deleteConnection()), new Thread(() -> Mockito.doThrow(ApplicationFailure.newNonRetryableFailure("", "")) .when(mConnectionDeletionActivity).deleteConnection(Mockito.any())))); @@ -1053,7 +1044,7 @@ void testGetStuckAfterRun(final Consumer signalSender Assertions.assertThat(events) .filteredOn(changedStateEvent -> changedStateEvent.getField() == StateField.QUARANTINED && changedStateEvent.isValue()) - .hasSizeGreaterThanOrEqualTo(1); + .hasSize(1); } } @@ -1136,10 +1127,7 @@ public void failedResetContinueAsReset() throws InterruptedException { @DisplayName("Test that we are getting stuck if the report of a failure happen") void testGetStuckAfterRun() throws InterruptedException { Mockito.doThrow(ApplicationFailure.newNonRetryableFailure("", "")) - .when(mJobCreationAndStatusUpdateActivity).attemptFailureWithAttemptNumber(Mockito.any()); - - Mockito.when(mConfigFetchActivity.getMaxAttempt()) - .thenReturn(new GetMaxAttemptOutput(3)); + .when(mJobCreationAndStatusUpdateActivity).attemptFailure(Mockito.any()); final UUID testId = UUID.randomUUID(); final TestStateListener testStateListener = new TestStateListener(); @@ -1218,7 +1206,7 @@ public void failedResetJobWaitsOnRestart() throws InterruptedException { } - private class HasFailureFromOrigin implements ArgumentMatcher { + private class HasFailureFromOrigin implements ArgumentMatcher { private final FailureOrigin expectedFailureOrigin; @@ -1227,26 +1215,26 @@ public HasFailureFromOrigin(final FailureOrigin failureOrigin) { } @Override - public boolean matches(final AttemptNumberFailureInput arg) { + public boolean matches(final AttemptFailureInput arg) { return arg.getAttemptFailureSummary().getFailures().stream().anyMatch(f -> f.getFailureOrigin().equals(expectedFailureOrigin)); } } - private class HasCancellationFailure implements ArgumentMatcher { + private class HasCancellationFailure implements ArgumentMatcher { private final long expectedJobId; - private final int expectedAttemptNumber; + private final int expectedAttemptId; - public HasCancellationFailure(final long jobId, final int attemptNumber) { + public HasCancellationFailure(final long jobId, final int attemptId) { this.expectedJobId = jobId; - 
this.expectedAttemptNumber = attemptNumber; + this.expectedAttemptId = attemptId; } @Override - public boolean matches(final JobCancelledInputWithAttemptNumber arg) { + public boolean matches(final JobCancelledInput arg) { return arg.getAttemptFailureSummary().getFailures().stream().anyMatch(f -> f.getFailureType().equals(FailureType.MANUAL_CANCELLATION)) - && arg.getJobId() == expectedJobId && arg.getAttemptNumber() == expectedAttemptNumber; + && arg.getJobId() == expectedJobId && arg.getAttemptId() == expectedAttemptId; } } diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/WorkflowReplayingTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/WorkflowReplayingTest.java deleted file mode 100644 index bba04f6cea42..000000000000 --- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/WorkflowReplayingTest.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright (c) 2021 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.temporal.scheduling; - -import io.temporal.testing.WorkflowReplayer; -import java.io.File; -import java.net.URL; -import org.junit.jupiter.api.Test; - -// TODO: Auto generation of the input and more scenario coverage -public class WorkflowReplayingTest { - - @Test - public void replaySimpleSuccessfulWorkflow() throws Exception { - final URL historyPath = getClass().getClassLoader().getResource("workflowHistory.json"); - - final File historyFile = new File(historyPath.toURI()); - - WorkflowReplayer.replayWorkflowExecution(historyFile, ConnectionManagerWorkflowImpl.class); - } - -} diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityTest.java index f643bc4bd07e..5f480cf4fb80 100644 --- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityTest.java +++ b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityTest.java @@ -29,7 +29,6 @@ import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptCreationInput; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptCreationOutput; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptFailureInput; -import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptNumberCreationOutput; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobCancelledInput; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobCreationInput; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobCreationOutput; @@ -85,7 +84,6 @@ public class JobCreationAndStatusUpdateActivityTest { private static final UUID CONNECTION_ID = UUID.randomUUID(); private static final long JOB_ID = 123L; private static final int ATTEMPT_ID = 0; - private static final int ATTEMPT_NUMBER = 1; private static final StandardSyncOutput standardSyncOutput = new StandardSyncOutput() .withStandardSyncSummary( new StandardSyncSummary() @@ -166,56 +164,6 @@ public void createAttemptThrowException() throws IOException { .hasCauseInstanceOf(IOException.class); } - @Test - @DisplayName("Test attempt 
creation") - public void createAttemptNumber() throws IOException { - Mockito.when(mConfigRepository.getDatabase()).thenReturn(Mockito.mock(ExceptionWrappingDatabase.class)); - - final Job mJob = Mockito.mock(Job.class); - - Mockito.when(mJobPersistence.getJob(JOB_ID)) - .thenReturn(mJob); - - final WorkerRun mWorkerRun = Mockito.mock(WorkerRun.class); - - Mockito.when(mTemporalWorkerRunFactory.create(mJob)) - .thenReturn(mWorkerRun); - - final Path mPath = Mockito.mock(Path.class); - final Path path = Path.of("test"); - Mockito.when(mPath.resolve(Mockito.anyString())) - .thenReturn(path); - Mockito.when(mWorkerRun.getJobRoot()) - .thenReturn(mPath); - - Mockito.when(mJobPersistence.createAttempt(JOB_ID, path)) - .thenReturn(ATTEMPT_NUMBER); - - final LogClientSingleton mLogClientSingleton = Mockito.mock(LogClientSingleton.class); - try (final MockedStatic utilities = Mockito.mockStatic(LogClientSingleton.class)) { - utilities.when(() -> LogClientSingleton.getInstance()) - .thenReturn(mLogClientSingleton); - - final AttemptNumberCreationOutput output = jobCreationAndStatusUpdateActivity.createNewAttemptNumber(new AttemptCreationInput( - JOB_ID)); - - Mockito.verify(mLogClientSingleton).setJobMdc(mWorkerEnvironment, mLogConfigs, mPath); - Assertions.assertThat(output.getAttemptNumber()).isEqualTo(ATTEMPT_NUMBER); - } - } - - @Test - @DisplayName("Test exception errors are properly wrapped") - public void createAttemptNumberThrowException() throws IOException { - Mockito.when(mJobPersistence.getJob(JOB_ID)) - .thenThrow(new IOException()); - - Assertions.assertThatThrownBy(() -> jobCreationAndStatusUpdateActivity.createNewAttemptNumber(new AttemptCreationInput( - JOB_ID))) - .isInstanceOf(RetryableException.class) - .hasCauseInstanceOf(IOException.class); - } - } @Nested diff --git a/airbyte-workers/src/test/resources/workflowHistory.json b/airbyte-workers/src/test/resources/workflowHistory.json deleted file mode 100644 index a83619c8ea29..000000000000 --- a/airbyte-workers/src/test/resources/workflowHistory.json +++ /dev/null @@ -1,939 +0,0 @@ -{ - "events": [ - { - "eventId": "1", - "eventTime": "2022-03-08T22:47:57.534705300Z", - "eventType": "WorkflowExecutionStarted", - "taskId": "1048664", - "workflowExecutionStartedEventAttributes": { - "workflowType": { - "name": "ConnectionManagerWorkflow" - }, - "taskQueue": { - "name": "CONNECTION_UPDATER", - "kind": "Normal" - }, - "input": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJjb25uZWN0aW9uSWQiOiJiOTE2ZmQ4Ni1hNWE2LTQ4YWMtODg4MC00OTc4NzRjZjUzY2YiLCJqb2JJZCI6bnVsbCwiYXR0ZW1wdElkIjpudWxsLCJmcm9tRmFpbHVyZSI6ZmFsc2UsImF0dGVtcHROdW1iZXIiOjEsIndvcmtmbG93U3RhdGUiOm51bGwsInJlc2V0Q29ubmVjdGlvbiI6ZmFsc2V9" - } - ] - }, - "workflowExecutionTimeout": "0s", - "workflowRunTimeout": "0s", - "workflowTaskTimeout": "10s", - "originalExecutionRunId": "4e6584e1-b030-4e42-a7bc-15dee5d195c0", - "identity": "1@3de809efb2ed", - "firstExecutionRunId": "4e6584e1-b030-4e42-a7bc-15dee5d195c0", - "retryPolicy": { - "initialInterval": "1s", - "backoffCoefficient": 2, - "maximumInterval": "100s", - "maximumAttempts": 1 - }, - "attempt": 1, - "firstWorkflowTaskBackoff": "0s", - "header": {} - } - }, - { - "eventId": "2", - "eventTime": "2022-03-08T22:47:57.534975800Z", - "eventType": "WorkflowTaskScheduled", - "taskId": "1048665", - "workflowTaskScheduledEventAttributes": { - "taskQueue": { - "name": "CONNECTION_UPDATER", - "kind": "Normal" - }, - "startToCloseTimeout": "10s", - "attempt": 1 - } - }, - { - "eventId": "3", - 
"eventTime": "2022-03-08T22:47:57.563121800Z", - "eventType": "WorkflowTaskStarted", - "taskId": "1048669", - "workflowTaskStartedEventAttributes": { - "scheduledEventId": "2", - "identity": "1@2741f9c3f558", - "requestId": "e9cf205d-de02-4139-b00d-fab56c4b9fd7" - } - }, - { - "eventId": "4", - "eventTime": "2022-03-08T22:47:57.646973200Z", - "eventType": "WorkflowTaskCompleted", - "taskId": "1048672", - "workflowTaskCompletedEventAttributes": { - "scheduledEventId": "2", - "startedEventId": "3", - "identity": "1@2741f9c3f558" - } - }, - { - "eventId": "5", - "eventTime": "2022-03-08T22:47:57.647029Z", - "eventType": "ActivityTaskScheduled", - "taskId": "1048673", - "activityTaskScheduledEventAttributes": { - "activityId": "c45be44b-784b-3a0c-9473-e80129b65969", - "activityType": { - "name": "GetTimeToWait" - }, - "taskQueue": { - "name": "CONNECTION_UPDATER", - "kind": "Normal" - }, - "header": {}, - "input": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJjb25uZWN0aW9uSWQiOiJiOTE2ZmQ4Ni1hNWE2LTQ4YWMtODg4MC00OTc4NzRjZjUzY2YifQ==" - } - ] - }, - "scheduleToCloseTimeout": "0s", - "scheduleToStartTimeout": "0s", - "startToCloseTimeout": "120s", - "heartbeatTimeout": "30s", - "workflowTaskCompletedEventId": "4", - "retryPolicy": { - "initialInterval": "30s", - "backoffCoefficient": 2, - "maximumInterval": "3000s", - "maximumAttempts": 10 - } - } - }, - { - "eventId": "6", - "eventTime": "2022-03-08T22:47:57.668813100Z", - "eventType": "ActivityTaskStarted", - "taskId": "1048677", - "activityTaskStartedEventAttributes": { - "scheduledEventId": "5", - "identity": "1@2741f9c3f558", - "requestId": "df9001bd-0c42-4415-a631-0a37ee3f7698", - "attempt": 1 - } - }, - { - "eventId": "7", - "eventTime": "2022-03-08T22:47:57.856240400Z", - "eventType": "ActivityTaskCompleted", - "taskId": "1048678", - "activityTaskCompletedEventAttributes": { - "result": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJ0aW1lVG9XYWl0IjowLjB9" - } - ] - }, - "scheduledEventId": "5", - "startedEventId": "6", - "identity": "1@2741f9c3f558" - } - }, - { - "eventId": "8", - "eventTime": "2022-03-08T22:47:57.856293200Z", - "eventType": "WorkflowTaskScheduled", - "taskId": "1048679", - "workflowTaskScheduledEventAttributes": { - "taskQueue": { - "name": "1@2741f9c3f558:e7111d28-2117-4614-9bb1-8a73524b789f", - "kind": "Sticky" - }, - "startToCloseTimeout": "10s", - "attempt": 1 - } - }, - { - "eventId": "9", - "eventTime": "2022-03-08T22:47:57.876328300Z", - "eventType": "WorkflowTaskStarted", - "taskId": "1048683", - "workflowTaskStartedEventAttributes": { - "scheduledEventId": "8", - "identity": "e7111d28-2117-4614-9bb1-8a73524b789f", - "requestId": "80932560-7d5d-4f5f-9982-561857b07f50" - } - }, - { - "eventId": "10", - "eventTime": "2022-03-08T22:47:57.921753Z", - "eventType": "WorkflowTaskCompleted", - "taskId": "1048686", - "workflowTaskCompletedEventAttributes": { - "scheduledEventId": "8", - "startedEventId": "9", - "identity": "1@2741f9c3f558" - } - }, - { - "eventId": "11", - "eventTime": "2022-03-08T22:47:57.921814200Z", - "eventType": "ActivityTaskScheduled", - "taskId": "1048687", - "activityTaskScheduledEventAttributes": { - "activityId": "c7b20a16-db46-3dd1-b8ac-e2a93d3a8e0d", - "activityType": { - "name": "CreateNewJob" - }, - "taskQueue": { - "name": "CONNECTION_UPDATER", - "kind": "Normal" - }, - "header": {}, - "input": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": 
"eyJjb25uZWN0aW9uSWQiOiJiOTE2ZmQ4Ni1hNWE2LTQ4YWMtODg4MC00OTc4NzRjZjUzY2YiLCJyZXNldCI6ZmFsc2V9" - } - ] - }, - "scheduleToCloseTimeout": "0s", - "scheduleToStartTimeout": "0s", - "startToCloseTimeout": "120s", - "heartbeatTimeout": "30s", - "workflowTaskCompletedEventId": "10", - "retryPolicy": { - "initialInterval": "30s", - "backoffCoefficient": 2, - "maximumInterval": "3000s", - "maximumAttempts": 10 - } - } - }, - { - "eventId": "12", - "eventTime": "2022-03-08T22:47:57.942341Z", - "eventType": "ActivityTaskStarted", - "taskId": "1048691", - "activityTaskStartedEventAttributes": { - "scheduledEventId": "11", - "identity": "1@2741f9c3f558", - "requestId": "29e77ce9-f715-4f19-9fe2-b5b94201d0b3", - "attempt": 1 - } - }, - { - "eventId": "13", - "eventTime": "2022-03-08T22:47:58.268669700Z", - "eventType": "ActivityTaskCompleted", - "taskId": "1048692", - "activityTaskCompletedEventAttributes": { - "result": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJqb2JJZCI6MX0=" - } - ] - }, - "scheduledEventId": "11", - "startedEventId": "12", - "identity": "1@2741f9c3f558" - } - }, - { - "eventId": "14", - "eventTime": "2022-03-08T22:47:58.268723100Z", - "eventType": "WorkflowTaskScheduled", - "taskId": "1048693", - "workflowTaskScheduledEventAttributes": { - "taskQueue": { - "name": "1@2741f9c3f558:e7111d28-2117-4614-9bb1-8a73524b789f", - "kind": "Sticky" - }, - "startToCloseTimeout": "10s", - "attempt": 1 - } - }, - { - "eventId": "15", - "eventTime": "2022-03-08T22:47:58.283301600Z", - "eventType": "WorkflowTaskStarted", - "taskId": "1048697", - "workflowTaskStartedEventAttributes": { - "scheduledEventId": "14", - "identity": "e7111d28-2117-4614-9bb1-8a73524b789f", - "requestId": "18d2f139-0794-4dfb-b36d-1448df3eb350" - } - }, - { - "eventId": "16", - "eventTime": "2022-03-08T22:47:58.302388600Z", - "eventType": "WorkflowTaskCompleted", - "taskId": "1048700", - "workflowTaskCompletedEventAttributes": { - "scheduledEventId": "14", - "startedEventId": "15", - "identity": "1@2741f9c3f558" - } - }, - { - "eventId": "17", - "eventTime": "2022-03-08T22:47:58.302431Z", - "eventType": "ActivityTaskScheduled", - "taskId": "1048701", - "activityTaskScheduledEventAttributes": { - "activityId": "3c1b8fa0-437b-3bc2-a365-352e9a5d765d", - "activityType": { - "name": "CreateNewAttempt" - }, - "taskQueue": { - "name": "CONNECTION_UPDATER", - "kind": "Normal" - }, - "header": {}, - "input": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJqb2JJZCI6MX0=" - } - ] - }, - "scheduleToCloseTimeout": "0s", - "scheduleToStartTimeout": "0s", - "startToCloseTimeout": "120s", - "heartbeatTimeout": "30s", - "workflowTaskCompletedEventId": "16", - "retryPolicy": { - "initialInterval": "30s", - "backoffCoefficient": 2, - "maximumInterval": "3000s", - "maximumAttempts": 10 - } - } - }, - { - "eventId": "18", - "eventTime": "2022-03-08T22:47:58.314956300Z", - "eventType": "ActivityTaskStarted", - "taskId": "1048705", - "activityTaskStartedEventAttributes": { - "scheduledEventId": "17", - "identity": "1@2741f9c3f558", - "requestId": "0221b660-4f40-4bcf-9e6a-2a9d5898bb91", - "attempt": 1 - } - }, - { - "eventId": "19", - "eventTime": "2022-03-08T22:47:58.400030800Z", - "eventType": "ActivityTaskCompleted", - "taskId": "1048706", - "activityTaskCompletedEventAttributes": { - "result": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJhdHRlbXB0SWQiOjB9" - } - ] - }, - "scheduledEventId": "17", - 
"startedEventId": "18", - "identity": "1@2741f9c3f558" - } - }, - { - "eventId": "20", - "eventTime": "2022-03-08T22:47:58.400072800Z", - "eventType": "WorkflowTaskScheduled", - "taskId": "1048707", - "workflowTaskScheduledEventAttributes": { - "taskQueue": { - "name": "1@2741f9c3f558:e7111d28-2117-4614-9bb1-8a73524b789f", - "kind": "Sticky" - }, - "startToCloseTimeout": "10s", - "attempt": 1 - } - }, - { - "eventId": "21", - "eventTime": "2022-03-08T22:47:58.414415400Z", - "eventType": "WorkflowTaskStarted", - "taskId": "1048711", - "workflowTaskStartedEventAttributes": { - "scheduledEventId": "20", - "identity": "e7111d28-2117-4614-9bb1-8a73524b789f", - "requestId": "e4110bad-579c-4ac2-a3eb-3836d7d6f841" - } - }, - { - "eventId": "22", - "eventTime": "2022-03-08T22:47:58.431563800Z", - "eventType": "WorkflowTaskCompleted", - "taskId": "1048714", - "workflowTaskCompletedEventAttributes": { - "scheduledEventId": "20", - "startedEventId": "21", - "identity": "1@2741f9c3f558" - } - }, - { - "eventId": "23", - "eventTime": "2022-03-08T22:47:58.431607100Z", - "eventType": "ActivityTaskScheduled", - "taskId": "1048715", - "activityTaskScheduledEventAttributes": { - "activityId": "5aa065bf-5ef2-3e24-b560-c6b3c1f4e2bc", - "activityType": { - "name": "GetSyncWorkflowInput" - }, - "taskQueue": { - "name": "CONNECTION_UPDATER", - "kind": "Normal" - }, - "header": {}, - "input": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJhdHRlbXB0SWQiOjAsImpvYklkIjoxLCJyZXNldCI6ZmFsc2V9" - } - ] - }, - "scheduleToCloseTimeout": "0s", - "scheduleToStartTimeout": "0s", - "startToCloseTimeout": "120s", - "heartbeatTimeout": "30s", - "workflowTaskCompletedEventId": "22", - "retryPolicy": { - "initialInterval": "30s", - "backoffCoefficient": 2, - "maximumInterval": "3000s", - "maximumAttempts": 10 - } - } - }, - { - "eventId": "24", - "eventTime": "2022-03-08T22:47:58.445218800Z", - "eventType": "ActivityTaskStarted", - "taskId": "1048719", - "activityTaskStartedEventAttributes": { - "scheduledEventId": "23", - "identity": "1@2741f9c3f558", - "requestId": "7165cc86-d137-4b0f-906b-a7e52a1074e4", - "attempt": 1 - } - }, - { - "eventId": "25", - "eventTime": "2022-03-08T22:47:58.471038600Z", - "eventType": "ActivityTaskCompleted", - "taskId": "1048720", - "activityTaskCompletedEventAttributes": { - "result": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": 
"eyJqb2JSdW5Db25maWciOnsiam9iSWQiOiIxIiwiYXR0ZW1wdElkIjowfSwic291cmNlTGF1bmNoZXJDb25maWciOnsiam9iSWQiOiIxIiwiYXR0ZW1wdElkIjowLCJkb2NrZXJJbWFnZSI6ImFpcmJ5dGUvc291cmNlLXBva2VhcGk6MC4xLjQifSwiZGVzdGluYXRpb25MYXVuY2hlckNvbmZpZyI6eyJqb2JJZCI6IjEiLCJhdHRlbXB0SWQiOjAsImRvY2tlckltYWdlIjoiYWlyYnl0ZS9kZXN0aW5hdGlvbi1sb2NhbC1qc29uOjAuMi4xMCJ9LCJzeW5jSW5wdXQiOnsibmFtZXNwYWNlRGVmaW5pdGlvbiI6ImRlc3RpbmF0aW9uIiwibmFtZXNwYWNlRm9ybWF0IjoiJHtTT1VSQ0VfTkFNRVNQQUNFfSIsInByZWZpeCI6IiIsInNvdXJjZUNvbmZpZ3VyYXRpb24iOnsicG9rZW1vbl9uYW1lIjoiZGl0dG8ifSwiZGVzdGluYXRpb25Db25maWd1cmF0aW9uIjp7ImRlc3RpbmF0aW9uX3BhdGgiOiIvdG1wIn0sIm9wZXJhdGlvblNlcXVlbmNlIjpbXSwiY2F0YWxvZyI6eyJzdHJlYW1zIjpbeyJzdHJlYW0iOnsibmFtZSI6InBva2Vtb24iLCJqc29uX3NjaGVtYSI6eyJ0eXBlIjoib2JqZWN0IiwiJHNjaGVtYSI6Imh0dHA6Ly9qc29uLXNjaGVtYS5vcmcvZHJhZnQtMDcvc2NoZW1hIyIsInByb3BlcnRpZXMiOnsiaWQiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sImZvcm1zIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX19LCJtb3ZlcyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJtb3ZlIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX0sInZlcnNpb25fZ3JvdXBfZGV0YWlscyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ2ZXJzaW9uX2dyb3VwIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX0sImxldmVsX2xlYXJuZWRfYXQiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwibW92ZV9sZWFybl9tZXRob2QiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fX19fX19fSwib3JkZXIiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwic3RhdHMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsic3RhdCI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19LCJlZmZvcnQiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwiYmFzZV9zdGF0Ijp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX19fX0sInR5cGVzIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InNsb3QiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwidHlwZSI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19fX19LCJoZWlnaHQiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwid2VpZ2h0Ijp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sInNwZWNpZXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwic3ByaXRlcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJiYWNrX3NoaW55Ijp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiYmFja19mZW1hbGUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJmcm9udF9zaGlueSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sImJhY2tfZGVmYXVsdCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sImZyb250X2ZlbWFsZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sImZyb250X2RlZmF1bHQiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJiYWNrX3NoaW55X2ZlbWFsZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sImZyb250X3NoaW55X2ZlbWFsZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwiYWJpbGl0aWVzIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7In
Nsb3QiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwiYWJpbGl0eSI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19LCJpc19oaWRkZW4iOnsidHlwZSI6WyJudWxsIiwiYm9vbGVhbiJdfX19fSwiaGVsZF9pdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJpdGVtIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX0sInZlcnNpb25fZGV0YWlscyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJyYXJpdHkiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwidmVyc2lvbiI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19fX19fX19LCJpc19kZWZhdWx0ICI6eyJ0eXBlIjpbIm51bGwiLCJib29sZWFuIl19LCJnYW1lX2luZGljZXMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidmVyc2lvbiI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19LCJnYW1lX2luZGV4Ijp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX19fX0sImJhc2VfZXhwZXJpZW5jZSI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJsb2NhdGlvbl9hcmVhX2VuY291bnRlcnMiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX0sInN1cHBvcnRlZF9zeW5jX21vZGVzIjpbImZ1bGxfcmVmcmVzaCJdLCJkZWZhdWx0X2N1cnNvcl9maWVsZCI6W10sInNvdXJjZV9kZWZpbmVkX3ByaW1hcnlfa2V5IjpbXX0sInN5bmNfbW9kZSI6ImZ1bGxfcmVmcmVzaCIsImN1cnNvcl9maWVsZCI6W10sImRlc3RpbmF0aW9uX3N5bmNfbW9kZSI6ImFwcGVuZCIsInByaW1hcnlfa2V5IjpbXX1dfSwicmVzb3VyY2VSZXF1aXJlbWVudHMiOnt9LCJzb3VyY2VSZXNvdXJjZVJlcXVpcmVtZW50cyI6e30sImRlc3RpbmF0aW9uUmVzb3VyY2VSZXF1aXJlbWVudHMiOnt9fX0=" - } - ] - }, - "scheduledEventId": "23", - "startedEventId": "24", - "identity": "1@2741f9c3f558" - } - }, - { - "eventId": "26", - "eventTime": "2022-03-08T22:47:58.471218800Z", - "eventType": "WorkflowTaskScheduled", - "taskId": "1048721", - "workflowTaskScheduledEventAttributes": { - "taskQueue": { - "name": "1@2741f9c3f558:e7111d28-2117-4614-9bb1-8a73524b789f", - "kind": "Sticky" - }, - "startToCloseTimeout": "10s", - "attempt": 1 - } - }, - { - "eventId": "27", - "eventTime": "2022-03-08T22:47:58.485851600Z", - "eventType": "WorkflowTaskStarted", - "taskId": "1048725", - "workflowTaskStartedEventAttributes": { - "scheduledEventId": "26", - "identity": "e7111d28-2117-4614-9bb1-8a73524b789f", - "requestId": "289a7723-efac-4cfa-bad2-f0c022b27421" - } - }, - { - "eventId": "28", - "eventTime": "2022-03-08T22:47:58.513022200Z", - "eventType": "WorkflowTaskCompleted", - "taskId": "1048728", - "workflowTaskCompletedEventAttributes": { - "scheduledEventId": "26", - "startedEventId": "27", - "identity": "1@2741f9c3f558" - } - }, - { - "eventId": "29", - "eventTime": "2022-03-08T22:47:58.513073500Z", - "eventType": "ActivityTaskScheduled", - "taskId": "1048729", - "activityTaskScheduledEventAttributes": { - "activityId": "95686aea-a2ac-3e1e-a512-0790d3a4e95f", - "activityType": { - "name": "ReportJobStart" - }, - "taskQueue": { - "name": "CONNECTION_UPDATER", - "kind": "Normal" - }, - "header": {}, - "input": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJqb2JJZCI6MX0=" - } - ] - }, - "scheduleToCloseTimeout": "0s", - "scheduleToStartTimeout": "0s", - "startToCloseTimeout": "120s", - "heartbeatTimeout": "30s", - "workflowTaskCompletedEventId": "28", - "retryPolicy": { - "initialInterval": "30s", - 
"backoffCoefficient": 2, - "maximumInterval": "3000s", - "maximumAttempts": 10 - } - } - }, - { - "eventId": "30", - "eventTime": "2022-03-08T22:47:58.528653400Z", - "eventType": "ActivityTaskStarted", - "taskId": "1048733", - "activityTaskStartedEventAttributes": { - "scheduledEventId": "29", - "identity": "1@2741f9c3f558", - "requestId": "0e4b03aa-2493-4f7d-b832-4e98e13551da", - "attempt": 1 - } - }, - { - "eventId": "31", - "eventTime": "2022-03-08T22:47:58.668827900Z", - "eventType": "ActivityTaskCompleted", - "taskId": "1048734", - "activityTaskCompletedEventAttributes": { - "scheduledEventId": "29", - "startedEventId": "30", - "identity": "1@2741f9c3f558" - } - }, - { - "eventId": "32", - "eventTime": "2022-03-08T22:47:58.668874600Z", - "eventType": "WorkflowTaskScheduled", - "taskId": "1048735", - "workflowTaskScheduledEventAttributes": { - "taskQueue": { - "name": "1@2741f9c3f558:e7111d28-2117-4614-9bb1-8a73524b789f", - "kind": "Sticky" - }, - "startToCloseTimeout": "10s", - "attempt": 1 - } - }, - { - "eventId": "33", - "eventTime": "2022-03-08T22:47:58.682929200Z", - "eventType": "WorkflowTaskStarted", - "taskId": "1048739", - "workflowTaskStartedEventAttributes": { - "scheduledEventId": "32", - "identity": "e7111d28-2117-4614-9bb1-8a73524b789f", - "requestId": "caa61b0a-5d34-48a8-ab16-997d3ba9eab5" - } - }, - { - "eventId": "34", - "eventTime": "2022-03-08T22:47:58.735519100Z", - "eventType": "WorkflowTaskCompleted", - "taskId": "1048742", - "workflowTaskCompletedEventAttributes": { - "scheduledEventId": "32", - "startedEventId": "33", - "identity": "1@2741f9c3f558" - } - }, - { - "eventId": "35", - "eventTime": "2022-03-08T22:47:58.735579100Z", - "eventType": "MarkerRecorded", - "taskId": "1048743", - "markerRecordedEventAttributes": { - "markerName": "Version", - "details": { - "changeId": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "InRhc2tfcXVldWVfY2hhbmdlX2Zyb21fY29ubmVjdGlvbl91cGRhdGVyX3RvX3N5bmMi" - } - ] - }, - "version": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "MQ==" - } - ] - } - }, - "workflowTaskCompletedEventId": "34" - } - }, - { - "eventId": "36", - "eventTime": "2022-03-08T22:47:58.735800Z", - "eventType": "StartChildWorkflowExecutionInitiated", - "taskId": "1048744", - "startChildWorkflowExecutionInitiatedEventAttributes": { - "namespace": "default", - "workflowId": "sync_1", - "workflowType": { - "name": "SyncWorkflow" - }, - "taskQueue": { - "name": "SYNC" - }, - "input": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJqb2JJZCI6IjEiLCJhdHRlbXB0SWQiOjB9" - }, - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJqb2JJZCI6IjEiLCJhdHRlbXB0SWQiOjAsImRvY2tlckltYWdlIjoiYWlyYnl0ZS9zb3VyY2UtcG9rZWFwaTowLjEuNCJ9" - }, - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJqb2JJZCI6IjEiLCJhdHRlbXB0SWQiOjAsImRvY2tlckltYWdlIjoiYWlyYnl0ZS9kZXN0aW5hdGlvbi1sb2NhbC1qc29uOjAuMi4xMCJ9" - }, - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": 
"eyJuYW1lc3BhY2VEZWZpbml0aW9uIjoiZGVzdGluYXRpb24iLCJuYW1lc3BhY2VGb3JtYXQiOiIke1NPVVJDRV9OQU1FU1BBQ0V9IiwicHJlZml4IjoiIiwic291cmNlQ29uZmlndXJhdGlvbiI6eyJwb2tlbW9uX25hbWUiOiJkaXR0byJ9LCJkZXN0aW5hdGlvbkNvbmZpZ3VyYXRpb24iOnsiZGVzdGluYXRpb25fcGF0aCI6Ii90bXAifSwib3BlcmF0aW9uU2VxdWVuY2UiOltdLCJjYXRhbG9nIjp7InN0cmVhbXMiOlt7InN0cmVhbSI6eyJuYW1lIjoicG9rZW1vbiIsImpzb25fc2NoZW1hIjp7InR5cGUiOiJvYmplY3QiLCIkc2NoZW1hIjoiaHR0cDovL2pzb24tc2NoZW1hLm9yZy9kcmFmdC0wNy9zY2hlbWEjIiwicHJvcGVydGllcyI6eyJpZCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiZm9ybXMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fX0sIm1vdmVzIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7Im1vdmUiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwidmVyc2lvbl9ncm91cF9kZXRhaWxzIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InZlcnNpb25fZ3JvdXAiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwibGV2ZWxfbGVhcm5lZF9hdCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJtb3ZlX2xlYXJuX21ldGhvZCI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19fX19fX19LCJvcmRlciI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJzdGF0cyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJzdGF0Ijp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX0sImVmZm9ydCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJiYXNlX3N0YXQiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfX19fSwidHlwZXMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsic2xvdCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJ0eXBlIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX19fX0sImhlaWdodCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJ3ZWlnaHQiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwic3BlY2llcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19LCJzcHJpdGVzIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7ImJhY2tfc2hpbnkiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJiYWNrX2ZlbWFsZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sImZyb250X3NoaW55Ijp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiYmFja19kZWZhdWx0Ijp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiZnJvbnRfZmVtYWxlIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiZnJvbnRfZGVmYXVsdCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sImJhY2tfc2hpbnlfZmVtYWxlIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiZnJvbnRfc2hpbnlfZmVtYWxlIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19LCJhYmlsaXRpZXMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsic2xvdCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJhYmlsaXR5Ijp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX0sImlzX2hpZGRlbiI6eyJ0eXBlIjpbIm51bGwiLCJib29sZWFuIl19fX19LCJoZWxkX2l0ZW1zIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iam
VjdCJdLCJwcm9wZXJ0aWVzIjp7Iml0ZW0iOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwidmVyc2lvbl9kZXRhaWxzIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InJhcml0eSI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJ2ZXJzaW9uIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX19fX19fX0sImlzX2RlZmF1bHQgIjp7InR5cGUiOlsibnVsbCIsImJvb2xlYW4iXX0sImdhbWVfaW5kaWNlcyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ2ZXJzaW9uIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX0sImdhbWVfaW5kZXgiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfX19fSwiYmFzZV9leHBlcmllbmNlIjp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sImxvY2F0aW9uX2FyZWFfZW5jb3VudGVycyI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwic3VwcG9ydGVkX3N5bmNfbW9kZXMiOlsiZnVsbF9yZWZyZXNoIl0sImRlZmF1bHRfY3Vyc29yX2ZpZWxkIjpbXSwic291cmNlX2RlZmluZWRfcHJpbWFyeV9rZXkiOltdfSwic3luY19tb2RlIjoiZnVsbF9yZWZyZXNoIiwiY3Vyc29yX2ZpZWxkIjpbXSwiZGVzdGluYXRpb25fc3luY19tb2RlIjoiYXBwZW5kIiwicHJpbWFyeV9rZXkiOltdfV19LCJyZXNvdXJjZVJlcXVpcmVtZW50cyI6e30sInNvdXJjZVJlc291cmNlUmVxdWlyZW1lbnRzIjp7fSwiZGVzdGluYXRpb25SZXNvdXJjZVJlcXVpcmVtZW50cyI6e319" - }, - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "ImI5MTZmZDg2LWE1YTYtNDhhYy04ODgwLTQ5Nzg3NGNmNTNjZiI=" - } - ] - }, - "workflowExecutionTimeout": "0s", - "workflowRunTimeout": "0s", - "workflowTaskTimeout": "10s", - "parentClosePolicy": "RequestCancel", - "workflowTaskCompletedEventId": "34", - "workflowIdReusePolicy": "AllowDuplicate", - "header": {} - } - }, - { - "eventId": "37", - "eventTime": "2022-03-08T22:47:58.762930500Z", - "eventType": "ChildWorkflowExecutionStarted", - "taskId": "1048747", - "childWorkflowExecutionStartedEventAttributes": { - "namespace": "default", - "initiatedEventId": "36", - "workflowExecution": { - "workflowId": "sync_1", - "runId": "140640f0-c577-4d9c-8777-4e7e40b64241" - }, - "workflowType": { - "name": "SyncWorkflow" - }, - "header": {} - } - }, - { - "eventId": "38", - "eventTime": "2022-03-08T22:47:58.762971900Z", - "eventType": "WorkflowTaskScheduled", - "taskId": "1048748", - "workflowTaskScheduledEventAttributes": { - "taskQueue": { - "name": "1@2741f9c3f558:e7111d28-2117-4614-9bb1-8a73524b789f", - "kind": "Sticky" - }, - "startToCloseTimeout": "10s", - "attempt": 1 - } - }, - { - "eventId": "39", - "eventTime": "2022-03-08T22:47:58.774051900Z", - "eventType": "WorkflowTaskStarted", - "taskId": "1048752", - "workflowTaskStartedEventAttributes": { - "scheduledEventId": "38", - "identity": "e7111d28-2117-4614-9bb1-8a73524b789f", - "requestId": "ca98144b-47f8-486d-b260-2e1dc42bd68e" - } - }, - { - "eventId": "40", - "eventTime": "2022-03-08T22:47:58.794905100Z", - "eventType": "WorkflowTaskCompleted", - "taskId": "1048755", - "workflowTaskCompletedEventAttributes": { - "scheduledEventId": "38", - "startedEventId": "39", - "identity": "1@2741f9c3f558" - } - }, - { - "eventId": "41", - "eventTime": "2022-03-08T22:48:02.408058300Z", - "eventType": "ChildWorkflowExecutionCompleted", - "taskId": "1048757", - "childWorkflowExecutionCompletedEventAttributes": { - "result": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": 
"eyJzdGFuZGFyZFN5bmNTdW1tYXJ5Ijp7InN0YXR1cyI6ImNvbXBsZXRlZCIsInJlY29yZHNTeW5jZWQiOjEsImJ5dGVzU3luY2VkIjoyMjcxNSwic3RhcnRUaW1lIjoxNjQ2Nzc5Njc4OTM5LCJlbmRUaW1lIjoxNjQ2Nzc5NjgyMjM4LCJ0b3RhbFN0YXRzIjp7InJlY29yZHNFbWl0dGVkIjoxLCJieXRlc0VtaXR0ZWQiOjIyNzE1LCJzdGF0ZU1lc3NhZ2VzRW1pdHRlZCI6MCwicmVjb3Jkc0NvbW1pdHRlZCI6MX0sInN0cmVhbVN0YXRzIjpbeyJzdHJlYW1OYW1lIjoicG9rZW1vbiIsInN0YXRzIjp7InJlY29yZHNFbWl0dGVkIjoxLCJieXRlc0VtaXR0ZWQiOjIyNzE1LCJyZWNvcmRzQ29tbWl0dGVkIjoxfX1dfSwib3V0cHV0X2NhdGFsb2ciOnsic3RyZWFtcyI6W3sic3RyZWFtIjp7Im5hbWUiOiJwb2tlbW9uIiwianNvbl9zY2hlbWEiOnsidHlwZSI6Im9iamVjdCIsIiRzY2hlbWEiOiJodHRwOi8vanNvbi1zY2hlbWEub3JnL2RyYWZ0LTA3L3NjaGVtYSMiLCJwcm9wZXJ0aWVzIjp7ImlkIjp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJmb3JtcyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19fSwibW92ZXMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsibW92ZSI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19LCJ2ZXJzaW9uX2dyb3VwX2RldGFpbHMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidmVyc2lvbl9ncm91cCI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19LCJsZXZlbF9sZWFybmVkX2F0Ijp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sIm1vdmVfbGVhcm5fbWV0aG9kIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX19fX19fX0sIm9yZGVyIjp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sInN0YXRzIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InN0YXQiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwiZWZmb3J0Ijp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sImJhc2Vfc3RhdCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19fX19LCJ0eXBlcyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJzbG90Ijp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sInR5cGUiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fX19fSwiaGVpZ2h0Ijp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sIndlaWdodCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJzcGVjaWVzIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX0sInNwcml0ZXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsiYmFja19zaGlueSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sImJhY2tfZmVtYWxlIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiZnJvbnRfc2hpbnkiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJiYWNrX2RlZmF1bHQiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJmcm9udF9mZW1hbGUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJmcm9udF9kZWZhdWx0Ijp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiYmFja19zaGlueV9mZW1hbGUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJmcm9udF9zaGlueV9mZW1hbGUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX0sImFiaWxpdGllcyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJzbG90Ijp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sImFiaWxpdHkiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZS
I6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwiaXNfaGlkZGVuIjp7InR5cGUiOlsibnVsbCIsImJvb2xlYW4iXX19fX0sImhlbGRfaXRlbXMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsiaXRlbSI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19LCJ2ZXJzaW9uX2RldGFpbHMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsicmFyaXR5Ijp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sInZlcnNpb24iOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fX19fX19fSwiaXNfZGVmYXVsdCAiOnsidHlwZSI6WyJudWxsIiwiYm9vbGVhbiJdfSwiZ2FtZV9pbmRpY2VzIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InZlcnNpb24iOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwiZ2FtZV9pbmRleCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19fX19LCJiYXNlX2V4cGVyaWVuY2UiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwibG9jYXRpb25fYXJlYV9lbmNvdW50ZXJzIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19LCJzdXBwb3J0ZWRfc3luY19tb2RlcyI6WyJmdWxsX3JlZnJlc2giXSwiZGVmYXVsdF9jdXJzb3JfZmllbGQiOltdLCJzb3VyY2VfZGVmaW5lZF9wcmltYXJ5X2tleSI6W119LCJzeW5jX21vZGUiOiJmdWxsX3JlZnJlc2giLCJjdXJzb3JfZmllbGQiOltdLCJkZXN0aW5hdGlvbl9zeW5jX21vZGUiOiJhcHBlbmQiLCJwcmltYXJ5X2tleSI6W119XX0sImZhaWx1cmVzIjpbXX0=" - } - ] - }, - "namespace": "default", - "workflowExecution": { - "workflowId": "sync_1", - "runId": "140640f0-c577-4d9c-8777-4e7e40b64241" - }, - "workflowType": { - "name": "SyncWorkflow" - }, - "initiatedEventId": "36", - "startedEventId": "37" - } - }, - { - "eventId": "42", - "eventTime": "2022-03-08T22:48:02.408127200Z", - "eventType": "WorkflowTaskScheduled", - "taskId": "1048758", - "workflowTaskScheduledEventAttributes": { - "taskQueue": { - "name": "1@2741f9c3f558:e7111d28-2117-4614-9bb1-8a73524b789f", - "kind": "Sticky" - }, - "startToCloseTimeout": "10s", - "attempt": 1 - } - }, - { - "eventId": "43", - "eventTime": "2022-03-08T22:48:02.422112800Z", - "eventType": "WorkflowTaskStarted", - "taskId": "1048762", - "workflowTaskStartedEventAttributes": { - "scheduledEventId": "42", - "identity": "e7111d28-2117-4614-9bb1-8a73524b789f", - "requestId": "c0916d42-c83c-4e9d-805a-29ca5f979624" - } - }, - { - "eventId": "44", - "eventTime": "2022-03-08T22:48:02.454203Z", - "eventType": "WorkflowTaskCompleted", - "taskId": "1048765", - "workflowTaskCompletedEventAttributes": { - "scheduledEventId": "42", - "startedEventId": "43", - "identity": "1@2741f9c3f558" - } - }, - { - "eventId": "45", - "eventTime": "2022-03-08T22:48:02.454256Z", - "eventType": "ActivityTaskScheduled", - "taskId": "1048766", - "activityTaskScheduledEventAttributes": { - "activityId": "b169a729-47bc-38f7-a315-c1a4b6d96466", - "activityType": { - "name": "JobSuccess" - }, - "taskQueue": { - "name": "CONNECTION_UPDATER", - "kind": "Normal" - }, - "header": {}, - "input": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": 
"eyJqb2JJZCI6MSwiYXR0ZW1wdElkIjowLCJzdGFuZGFyZFN5bmNPdXRwdXQiOnsic3RhbmRhcmRTeW5jU3VtbWFyeSI6eyJzdGF0dXMiOiJjb21wbGV0ZWQiLCJyZWNvcmRzU3luY2VkIjoxLCJieXRlc1N5bmNlZCI6MjI3MTUsInN0YXJ0VGltZSI6MTY0Njc3OTY3ODkzOSwiZW5kVGltZSI6MTY0Njc3OTY4MjIzOCwidG90YWxTdGF0cyI6eyJyZWNvcmRzRW1pdHRlZCI6MSwiYnl0ZXNFbWl0dGVkIjoyMjcxNSwic3RhdGVNZXNzYWdlc0VtaXR0ZWQiOjAsInJlY29yZHNDb21taXR0ZWQiOjF9LCJzdHJlYW1TdGF0cyI6W3sic3RyZWFtTmFtZSI6InBva2Vtb24iLCJzdGF0cyI6eyJyZWNvcmRzRW1pdHRlZCI6MSwiYnl0ZXNFbWl0dGVkIjoyMjcxNSwicmVjb3Jkc0NvbW1pdHRlZCI6MX19XX0sIm91dHB1dF9jYXRhbG9nIjp7InN0cmVhbXMiOlt7InN0cmVhbSI6eyJuYW1lIjoicG9rZW1vbiIsImpzb25fc2NoZW1hIjp7InR5cGUiOiJvYmplY3QiLCIkc2NoZW1hIjoiaHR0cDovL2pzb24tc2NoZW1hLm9yZy9kcmFmdC0wNy9zY2hlbWEjIiwicHJvcGVydGllcyI6eyJpZCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiZm9ybXMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fX0sIm1vdmVzIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7Im1vdmUiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwidmVyc2lvbl9ncm91cF9kZXRhaWxzIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InZlcnNpb25fZ3JvdXAiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwibGV2ZWxfbGVhcm5lZF9hdCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJtb3ZlX2xlYXJuX21ldGhvZCI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19fX19fX19LCJvcmRlciI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJzdGF0cyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJzdGF0Ijp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX0sImVmZm9ydCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJiYXNlX3N0YXQiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfX19fSwidHlwZXMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsic2xvdCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJ0eXBlIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX19fX0sImhlaWdodCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJ3ZWlnaHQiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwic3BlY2llcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19LCJzcHJpdGVzIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7ImJhY2tfc2hpbnkiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJiYWNrX2ZlbWFsZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sImZyb250X3NoaW55Ijp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiYmFja19kZWZhdWx0Ijp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiZnJvbnRfZmVtYWxlIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiZnJvbnRfZGVmYXVsdCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sImJhY2tfc2hpbnlfZmVtYWxlIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiZnJvbnRfc2hpbnlfZmVtYWxlIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19LCJhYmlsaXRpZXMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsic2xvdCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJhYmlsaXR5Ijp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZX
J0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX0sImlzX2hpZGRlbiI6eyJ0eXBlIjpbIm51bGwiLCJib29sZWFuIl19fX19LCJoZWxkX2l0ZW1zIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7Iml0ZW0iOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwidmVyc2lvbl9kZXRhaWxzIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InJhcml0eSI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJ2ZXJzaW9uIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX19fX19fX0sImlzX2RlZmF1bHQgIjp7InR5cGUiOlsibnVsbCIsImJvb2xlYW4iXX0sImdhbWVfaW5kaWNlcyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ2ZXJzaW9uIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX0sImdhbWVfaW5kZXgiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfX19fSwiYmFzZV9leHBlcmllbmNlIjp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sImxvY2F0aW9uX2FyZWFfZW5jb3VudGVycyI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwic3VwcG9ydGVkX3N5bmNfbW9kZXMiOlsiZnVsbF9yZWZyZXNoIl0sImRlZmF1bHRfY3Vyc29yX2ZpZWxkIjpbXSwic291cmNlX2RlZmluZWRfcHJpbWFyeV9rZXkiOltdfSwic3luY19tb2RlIjoiZnVsbF9yZWZyZXNoIiwiY3Vyc29yX2ZpZWxkIjpbXSwiZGVzdGluYXRpb25fc3luY19tb2RlIjoiYXBwZW5kIiwicHJpbWFyeV9rZXkiOltdfV19LCJmYWlsdXJlcyI6W119fQ==" - } - ] - }, - "scheduleToCloseTimeout": "0s", - "scheduleToStartTimeout": "0s", - "startToCloseTimeout": "120s", - "heartbeatTimeout": "30s", - "workflowTaskCompletedEventId": "44", - "retryPolicy": { - "initialInterval": "30s", - "backoffCoefficient": 2, - "maximumInterval": "3000s", - "maximumAttempts": 10 - } - } - }, - { - "eventId": "46", - "eventTime": "2022-03-08T22:48:02.437049800Z", - "eventType": "ActivityTaskStarted", - "taskId": "1048770", - "activityTaskStartedEventAttributes": { - "scheduledEventId": "45", - "identity": "1@2741f9c3f558", - "requestId": "9d2fc180-ec33-42a2-a259-d29afb281992", - "attempt": 1 - } - }, - { - "eventId": "47", - "eventTime": "2022-03-08T22:48:02.664164100Z", - "eventType": "ActivityTaskCompleted", - "taskId": "1048771", - "activityTaskCompletedEventAttributes": { - "scheduledEventId": "45", - "startedEventId": "46", - "identity": "1@2741f9c3f558" - } - }, - { - "eventId": "48", - "eventTime": "2022-03-08T22:48:02.664217700Z", - "eventType": "WorkflowTaskScheduled", - "taskId": "1048772", - "workflowTaskScheduledEventAttributes": { - "taskQueue": { - "name": "1@2741f9c3f558:e7111d28-2117-4614-9bb1-8a73524b789f", - "kind": "Sticky" - }, - "startToCloseTimeout": "10s", - "attempt": 1 - } - }, - { - "eventId": "49", - "eventTime": "2022-03-08T22:48:02.676895300Z", - "eventType": "WorkflowTaskStarted", - "taskId": "1048776", - "workflowTaskStartedEventAttributes": { - "scheduledEventId": "48", - "identity": "e7111d28-2117-4614-9bb1-8a73524b789f", - "requestId": "b43c8f30-2500-47d6-a8f6-aa2cd0d99218" - } - }, - { - "eventId": "50", - "eventTime": "2022-03-08T22:48:02.709745Z", - "eventType": "WorkflowTaskCompleted", - "taskId": "1048779", - "workflowTaskCompletedEventAttributes": { - "scheduledEventId": "48", - "startedEventId": "49", - "identity": "1@2741f9c3f558" - } - }, - { - "eventId": "51", - "eventTime": "2022-03-08T22:48:02.709811400Z", - "eventType": "WorkflowExecutionContinuedAsNew", - 
"taskId": "1048780", - "workflowExecutionContinuedAsNewEventAttributes": { - "newExecutionRunId": "e81cf38b-7f11-4eeb-8c85-301778bf2671", - "workflowType": { - "name": "ConnectionManagerWorkflow" - }, - "taskQueue": { - "name": "CONNECTION_UPDATER", - "kind": "Normal" - }, - "input": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJjb25uZWN0aW9uSWQiOiJiOTE2ZmQ4Ni1hNWE2LTQ4YWMtODg4MC00OTc4NzRjZjUzY2YiLCJqb2JJZCI6bnVsbCwiYXR0ZW1wdElkIjpudWxsLCJmcm9tRmFpbHVyZSI6ZmFsc2UsImF0dGVtcHROdW1iZXIiOjEsIndvcmtmbG93U3RhdGUiOm51bGwsInJlc2V0Q29ubmVjdGlvbiI6ZmFsc2V9" - } - ] - }, - "workflowRunTimeout": "0s", - "workflowTaskTimeout": "10s", - "workflowTaskCompletedEventId": "50", - "header": {} - } - } - ] -} From 9b3abe8777d6eb0e71e548cf503ef58f77f667ee Mon Sep 17 00:00:00 2001 From: Octavia Squidington III <90398440+octavia-squidington-iii@users.noreply.github.com> Date: Fri, 11 Mar 2022 04:04:16 +0100 Subject: [PATCH 16/38] Bump Airbyte version from 0.35.50-alpha to 0.35.51-alpha (#11059) Co-authored-by: benmoriceau --- .bumpversion.cfg | 2 +- .env | 2 +- airbyte-bootloader/Dockerfile | 4 ++-- airbyte-container-orchestrator/Dockerfile | 6 +++--- airbyte-metrics/reporter/Dockerfile | 4 ++-- airbyte-scheduler/app/Dockerfile | 4 ++-- airbyte-server/Dockerfile | 4 ++-- airbyte-webapp/package-lock.json | 4 ++-- airbyte-webapp/package.json | 2 +- airbyte-workers/Dockerfile | 4 ++-- charts/airbyte/Chart.yaml | 2 +- charts/airbyte/README.md | 10 +++++----- charts/airbyte/values.yaml | 10 +++++----- docs/operator-guides/upgrading-airbyte.md | 2 +- kube/overlays/stable-with-resource-limits/.env | 2 +- .../stable-with-resource-limits/kustomization.yaml | 12 ++++++------ kube/overlays/stable/.env | 2 +- kube/overlays/stable/kustomization.yaml | 12 ++++++------ 18 files changed, 44 insertions(+), 44 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 9d3a7df07143..7fc133cf5a4c 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.35.50-alpha +current_version = 0.35.51-alpha commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(\-[a-z]+)? 
diff --git a/.env b/.env index b79e0cdfd6b3..32e477215504 100644 --- a/.env +++ b/.env @@ -10,7 +10,7 @@ ### SHARED ### -VERSION=0.35.50-alpha +VERSION=0.35.51-alpha # When using the airbyte-db via default docker image CONFIG_ROOT=/data diff --git a/airbyte-bootloader/Dockerfile b/airbyte-bootloader/Dockerfile index 1c10e18c14ff..640d9779d1ae 100644 --- a/airbyte-bootloader/Dockerfile +++ b/airbyte-bootloader/Dockerfile @@ -5,6 +5,6 @@ ENV APPLICATION airbyte-bootloader WORKDIR /app -ADD bin/${APPLICATION}-0.35.50-alpha.tar /app +ADD bin/${APPLICATION}-0.35.51-alpha.tar /app -ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.50-alpha/bin/${APPLICATION}"] +ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.51-alpha/bin/${APPLICATION}"] diff --git a/airbyte-container-orchestrator/Dockerfile b/airbyte-container-orchestrator/Dockerfile index 50091530078f..a47fe4c6757b 100644 --- a/airbyte-container-orchestrator/Dockerfile +++ b/airbyte-container-orchestrator/Dockerfile @@ -26,12 +26,12 @@ RUN echo "deb [signed-by=/usr/share/keyrings/kubernetes-archive-keyring.gpg] htt RUN apt-get update && apt-get install -y kubectl ENV APPLICATION airbyte-container-orchestrator -ENV AIRBYTE_ENTRYPOINT "/app/${APPLICATION}-0.35.50-alpha/bin/${APPLICATION}" +ENV AIRBYTE_ENTRYPOINT "/app/${APPLICATION}-0.35.51-alpha/bin/${APPLICATION}" WORKDIR /app # Move orchestrator app -ADD bin/${APPLICATION}-0.35.50-alpha.tar /app +ADD bin/${APPLICATION}-0.35.51-alpha.tar /app # wait for upstream dependencies to become available before starting server -ENTRYPOINT ["/bin/bash", "-c", "/app/${APPLICATION}-0.35.50-alpha/bin/${APPLICATION}"] +ENTRYPOINT ["/bin/bash", "-c", "/app/${APPLICATION}-0.35.51-alpha/bin/${APPLICATION}"] diff --git a/airbyte-metrics/reporter/Dockerfile b/airbyte-metrics/reporter/Dockerfile index 2c9475ccc932..616a009d13a1 100644 --- a/airbyte-metrics/reporter/Dockerfile +++ b/airbyte-metrics/reporter/Dockerfile @@ -5,7 +5,7 @@ ENV APPLICATION airbyte-metrics-reporter WORKDIR /app -ADD bin/${APPLICATION}-0.35.50-alpha.tar /app +ADD bin/${APPLICATION}-0.35.51-alpha.tar /app # wait for upstream dependencies to become available before starting server -ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.50-alpha/bin/${APPLICATION}"] +ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.51-alpha/bin/${APPLICATION}"] diff --git a/airbyte-scheduler/app/Dockerfile b/airbyte-scheduler/app/Dockerfile index dadb5c151749..4ede9a045cde 100644 --- a/airbyte-scheduler/app/Dockerfile +++ b/airbyte-scheduler/app/Dockerfile @@ -5,7 +5,7 @@ ENV APPLICATION airbyte-scheduler WORKDIR /app -ADD bin/${APPLICATION}-0.35.50-alpha.tar /app +ADD bin/${APPLICATION}-0.35.51-alpha.tar /app # wait for upstream dependencies to become available before starting server -ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.50-alpha/bin/${APPLICATION}"] +ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.51-alpha/bin/${APPLICATION}"] diff --git a/airbyte-server/Dockerfile b/airbyte-server/Dockerfile index eb6a7a171365..d7c48f467f5b 100644 --- a/airbyte-server/Dockerfile +++ b/airbyte-server/Dockerfile @@ -7,7 +7,7 @@ ENV APPLICATION airbyte-server WORKDIR /app -ADD bin/${APPLICATION}-0.35.50-alpha.tar /app +ADD bin/${APPLICATION}-0.35.51-alpha.tar /app # wait for upstream dependencies to become available before starting server -ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.50-alpha/bin/${APPLICATION}"] +ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.51-alpha/bin/${APPLICATION}"] diff --git a/airbyte-webapp/package-lock.json 
b/airbyte-webapp/package-lock.json index da97c2d1aa49..b00b7e17d959 100644 --- a/airbyte-webapp/package-lock.json +++ b/airbyte-webapp/package-lock.json @@ -1,12 +1,12 @@ { "name": "airbyte-webapp", - "version": "0.35.50-alpha", + "version": "0.35.51-alpha", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "airbyte-webapp", - "version": "0.35.50-alpha", + "version": "0.35.51-alpha", "dependencies": { "@fortawesome/fontawesome-svg-core": "^1.2.36", "@fortawesome/free-brands-svg-icons": "^5.15.4", diff --git a/airbyte-webapp/package.json b/airbyte-webapp/package.json index d6d824c38f0b..23bbbd4a63f2 100644 --- a/airbyte-webapp/package.json +++ b/airbyte-webapp/package.json @@ -1,6 +1,6 @@ { "name": "airbyte-webapp", - "version": "0.35.50-alpha", + "version": "0.35.51-alpha", "private": true, "engines": { "node": ">=16.0.0" diff --git a/airbyte-workers/Dockerfile b/airbyte-workers/Dockerfile index db067476c928..59060d50da4b 100644 --- a/airbyte-workers/Dockerfile +++ b/airbyte-workers/Dockerfile @@ -30,7 +30,7 @@ ENV APPLICATION airbyte-workers WORKDIR /app # Move worker app -ADD bin/${APPLICATION}-0.35.50-alpha.tar /app +ADD bin/${APPLICATION}-0.35.51-alpha.tar /app # wait for upstream dependencies to become available before starting server -ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.50-alpha/bin/${APPLICATION}"] +ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.51-alpha/bin/${APPLICATION}"] diff --git a/charts/airbyte/Chart.yaml b/charts/airbyte/Chart.yaml index 1e03fa499221..84ad43a50c88 100644 --- a/charts/airbyte/Chart.yaml +++ b/charts/airbyte/Chart.yaml @@ -21,7 +21,7 @@ version: 0.3.0 # incremented each time you make changes to the application. Versions are not expected to # follow Semantic Versioning. They should reflect the version the application is using. # It is recommended to use it with quotes. -appVersion: "0.35.50-alpha" +appVersion: "0.35.51-alpha" dependencies: - name: common diff --git a/charts/airbyte/README.md b/charts/airbyte/README.md index 08fb170665fd..f1b49197621b 100644 --- a/charts/airbyte/README.md +++ b/charts/airbyte/README.md @@ -29,7 +29,7 @@ | `webapp.replicaCount` | Number of webapp replicas | `1` | | `webapp.image.repository` | The repository to use for the airbyte webapp image. | `airbyte/webapp` | | `webapp.image.pullPolicy` | the pull policy to use for the airbyte webapp image | `IfNotPresent` | -| `webapp.image.tag` | The airbyte webapp image tag. Defaults to the chart's AppVersion | `0.35.50-alpha` | +| `webapp.image.tag` | The airbyte webapp image tag. Defaults to the chart's AppVersion | `0.35.51-alpha` | | `webapp.podAnnotations` | Add extra annotations to the webapp pod(s) | `{}` | | `webapp.containerSecurityContext` | Security context for the container | `{}` | | `webapp.livenessProbe.enabled` | Enable livenessProbe on the webapp | `true` | @@ -71,7 +71,7 @@ | `scheduler.replicaCount` | Number of scheduler replicas | `1` | | `scheduler.image.repository` | The repository to use for the airbyte scheduler image. | `airbyte/scheduler` | | `scheduler.image.pullPolicy` | the pull policy to use for the airbyte scheduler image | `IfNotPresent` | -| `scheduler.image.tag` | The airbyte scheduler image tag. Defaults to the chart's AppVersion | `0.35.50-alpha` | +| `scheduler.image.tag` | The airbyte scheduler image tag. 
Defaults to the chart's AppVersion | `0.35.51-alpha` | | `scheduler.podAnnotations` | Add extra annotations to the scheduler pod | `{}` | | `scheduler.resources.limits` | The resources limits for the scheduler container | `{}` | | `scheduler.resources.requests` | The requested resources for the scheduler container | `{}` | @@ -118,7 +118,7 @@ | `server.replicaCount` | Number of server replicas | `1` | | `server.image.repository` | The repository to use for the airbyte server image. | `airbyte/server` | | `server.image.pullPolicy` | the pull policy to use for the airbyte server image | `IfNotPresent` | -| `server.image.tag` | The airbyte server image tag. Defaults to the chart's AppVersion | `0.35.50-alpha` | +| `server.image.tag` | The airbyte server image tag. Defaults to the chart's AppVersion | `0.35.51-alpha` | | `server.podAnnotations` | Add extra annotations to the server pod | `{}` | | `server.containerSecurityContext` | Security context for the container | `{}` | | `server.livenessProbe.enabled` | Enable livenessProbe on the server | `true` | @@ -156,7 +156,7 @@ | `worker.replicaCount` | Number of worker replicas | `1` | | `worker.image.repository` | The repository to use for the airbyte worker image. | `airbyte/worker` | | `worker.image.pullPolicy` | the pull policy to use for the airbyte worker image | `IfNotPresent` | -| `worker.image.tag` | The airbyte worker image tag. Defaults to the chart's AppVersion | `0.35.50-alpha` | +| `worker.image.tag` | The airbyte worker image tag. Defaults to the chart's AppVersion | `0.35.51-alpha` | | `worker.podAnnotations` | Add extra annotations to the worker pod(s) | `{}` | | `worker.containerSecurityContext` | Security context for the container | `{}` | | `worker.livenessProbe.enabled` | Enable livenessProbe on the worker | `true` | @@ -188,7 +188,7 @@ | ----------------------------- | -------------------------------------------------------------------- | -------------------- | | `bootloader.image.repository` | The repository to use for the airbyte bootloader image. | `airbyte/bootloader` | | `bootloader.image.pullPolicy` | the pull policy to use for the airbyte bootloader image | `IfNotPresent` | -| `bootloader.image.tag` | The airbyte bootloader image tag. Defaults to the chart's AppVersion | `0.35.50-alpha` | +| `bootloader.image.tag` | The airbyte bootloader image tag. 
Defaults to the chart's AppVersion | `0.35.51-alpha` | ### Temporal parameters diff --git a/charts/airbyte/values.yaml b/charts/airbyte/values.yaml index 9dc4596790d5..bb240d21b623 100644 --- a/charts/airbyte/values.yaml +++ b/charts/airbyte/values.yaml @@ -43,7 +43,7 @@ webapp: image: repository: airbyte/webapp pullPolicy: IfNotPresent - tag: 0.35.50-alpha + tag: 0.35.51-alpha ## @param webapp.podAnnotations [object] Add extra annotations to the webapp pod(s) ## @@ -209,7 +209,7 @@ scheduler: image: repository: airbyte/scheduler pullPolicy: IfNotPresent - tag: 0.35.50-alpha + tag: 0.35.51-alpha ## @param scheduler.podAnnotations [object] Add extra annotations to the scheduler pod ## @@ -440,7 +440,7 @@ server: image: repository: airbyte/server pullPolicy: IfNotPresent - tag: 0.35.50-alpha + tag: 0.35.51-alpha ## @param server.podAnnotations [object] Add extra annotations to the server pod ## @@ -581,7 +581,7 @@ worker: image: repository: airbyte/worker pullPolicy: IfNotPresent - tag: 0.35.50-alpha + tag: 0.35.51-alpha ## @param worker.podAnnotations [object] Add extra annotations to the worker pod(s) ## @@ -699,7 +699,7 @@ bootloader: image: repository: airbyte/bootloader pullPolicy: IfNotPresent - tag: 0.35.50-alpha + tag: 0.35.51-alpha ## @param bootloader.podAnnotations [object] Add extra annotations to the bootloader pod ## diff --git a/docs/operator-guides/upgrading-airbyte.md b/docs/operator-guides/upgrading-airbyte.md index 5937fecf675d..2a58b293dc54 100644 --- a/docs/operator-guides/upgrading-airbyte.md +++ b/docs/operator-guides/upgrading-airbyte.md @@ -101,7 +101,7 @@ If you are upgrading from \(i.e. your current version of Airbyte is\) Airbyte ve Here's an example of what it might look like with the values filled in. It assumes that the downloaded `airbyte_archive.tar.gz` is in `/tmp`. 
```bash - docker run --rm -v /tmp:/config airbyte/migration:0.35.50-alpha --\ + docker run --rm -v /tmp:/config airbyte/migration:0.35.51-alpha --\ --input /config/airbyte_archive.tar.gz\ --output /config/airbyte_archive_migrated.tar.gz ``` diff --git a/kube/overlays/stable-with-resource-limits/.env b/kube/overlays/stable-with-resource-limits/.env index 80c00753be3c..392ba4be0c50 100644 --- a/kube/overlays/stable-with-resource-limits/.env +++ b/kube/overlays/stable-with-resource-limits/.env @@ -1,4 +1,4 @@ -AIRBYTE_VERSION=0.35.50-alpha +AIRBYTE_VERSION=0.35.51-alpha # Airbyte Internal Database, see https://docs.airbyte.io/operator-guides/configuring-airbyte-db DATABASE_HOST=airbyte-db-svc diff --git a/kube/overlays/stable-with-resource-limits/kustomization.yaml b/kube/overlays/stable-with-resource-limits/kustomization.yaml index 5a472f4b17d1..f55592079ba4 100644 --- a/kube/overlays/stable-with-resource-limits/kustomization.yaml +++ b/kube/overlays/stable-with-resource-limits/kustomization.yaml @@ -8,17 +8,17 @@ bases: images: - name: airbyte/db - newTag: 0.35.50-alpha + newTag: 0.35.51-alpha - name: airbyte/bootloader - newTag: 0.35.50-alpha + newTag: 0.35.51-alpha - name: airbyte/scheduler - newTag: 0.35.50-alpha + newTag: 0.35.51-alpha - name: airbyte/server - newTag: 0.35.50-alpha + newTag: 0.35.51-alpha - name: airbyte/webapp - newTag: 0.35.50-alpha + newTag: 0.35.51-alpha - name: airbyte/worker - newTag: 0.35.50-alpha + newTag: 0.35.51-alpha - name: temporalio/auto-setup newTag: 1.7.0 diff --git a/kube/overlays/stable/.env b/kube/overlays/stable/.env index 402bf99c1ed6..a052768e4249 100644 --- a/kube/overlays/stable/.env +++ b/kube/overlays/stable/.env @@ -1,4 +1,4 @@ -AIRBYTE_VERSION=0.35.50-alpha +AIRBYTE_VERSION=0.35.51-alpha # Airbyte Internal Database, see https://docs.airbyte.io/operator-guides/configuring-airbyte-db DATABASE_HOST=airbyte-db-svc diff --git a/kube/overlays/stable/kustomization.yaml b/kube/overlays/stable/kustomization.yaml index 493973411812..80461932255b 100644 --- a/kube/overlays/stable/kustomization.yaml +++ b/kube/overlays/stable/kustomization.yaml @@ -8,17 +8,17 @@ bases: images: - name: airbyte/db - newTag: 0.35.50-alpha + newTag: 0.35.51-alpha - name: airbyte/bootloader - newTag: 0.35.50-alpha + newTag: 0.35.51-alpha - name: airbyte/scheduler - newTag: 0.35.50-alpha + newTag: 0.35.51-alpha - name: airbyte/server - newTag: 0.35.50-alpha + newTag: 0.35.51-alpha - name: airbyte/webapp - newTag: 0.35.50-alpha + newTag: 0.35.51-alpha - name: airbyte/worker - newTag: 0.35.50-alpha + newTag: 0.35.51-alpha - name: temporalio/auto-setup newTag: 1.7.0 From b6ad7601b03817d3f1acf7ad9a79ac5b324f3694 Mon Sep 17 00:00:00 2001 From: Davin Chia Date: Fri, 11 Mar 2022 21:20:49 +0800 Subject: [PATCH 17/38] Add more cloud metrics. (#10956) Implement a metric to track the percentile of active connection per workspace. 
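The queries added here also capture overall runtime for jobs that reached a terminal state (succeeded, failed, cancelled) in the last hour; the metrics reporter emits these from a fixed-rate poller, tagging each datapoint with the job's terminal status. A minimal, self-contained sketch of that polling pattern (the `fetchRuntimes` and `emit` helpers below are hypothetical stand-ins for the real query and DogStatsD timing call, not Airbyte APIs):

```java
import java.util.List;
import java.util.Map;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

public class RuntimeMetricPoller {

  // Hypothetical stand-in for the last-hour terminal-job query added in this patch.
  static List<Map.Entry<String, Double>> fetchRuntimes() {
    return List.of(Map.entry("succeeded", 10000.0), Map.entry("failed", 42.0));
  }

  // Hypothetical stand-in for the DogStatsD timing call.
  static void emit(final String metric, final double seconds, final String tag) {
    System.out.printf("%s %.1f [%s]%n", metric, seconds, tag);
  }

  public static void main(final String[] args) {
    final ScheduledExecutorService poller = Executors.newSingleThreadScheduledExecutor();
    // Poll at the same one-hour cadence as the query window, which filters on
    // updated_at >= NOW() - INTERVAL '1 HOUR'.
    poller.scheduleAtFixedRate(() -> {
      for (final Map.Entry<String, Double> pair : fetchRuntimes()) {
        emit("overall_job_runtime_in_last_hour_by_terminal_state_secs",
            pair.getValue(), "job_status:" + pair.getKey());
      }
    }, 0, 1, TimeUnit.HOURS);
  }
}
```

Polling at the same one-hour cadence as the query's window means each terminal job is reported roughly once, modulo boundary drift between poll times.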
--- .../io/airbyte/metrics/lib/MetricQueries.java | 33 +++++- .../io/airbyte/metrics/lib/MetricTags.java | 7 +- .../airbyte/metrics/lib/MetricsRegistry.java | 10 +- .../metrics/lib/MetrisQueriesTest.java | 106 ++++++++++++++++++ airbyte-metrics/reporter/build.gradle | 1 + .../airbyte/metrics/reporter/ReporterApp.java | 14 +++ 6 files changed, 162 insertions(+), 9 deletions(-) diff --git a/airbyte-metrics/lib/src/main/java/io/airbyte/metrics/lib/MetricQueries.java b/airbyte-metrics/lib/src/main/java/io/airbyte/metrics/lib/MetricQueries.java index b80dc3ca462c..784ded4922b7 100644 --- a/airbyte-metrics/lib/src/main/java/io/airbyte/metrics/lib/MetricQueries.java +++ b/airbyte-metrics/lib/src/main/java/io/airbyte/metrics/lib/MetricQueries.java @@ -10,9 +10,12 @@ import io.airbyte.db.instance.configs.jooq.enums.ReleaseStage; import io.airbyte.db.instance.jobs.jooq.enums.JobStatus; +import java.util.ArrayList; import java.util.List; import java.util.UUID; import lombok.extern.slf4j.Slf4j; +import org.apache.commons.lang3.tuple.ImmutablePair; +import org.apache.commons.lang3.tuple.Pair; import org.jooq.DSLContext; /** @@ -71,18 +74,18 @@ private static Long oldestJobAgeSecs(final DSLContext ctx, final JobStatus statu final var readableTimeField = "run_duration"; final var durationSecField = "run_duration_secs"; final var query = String.format(""" - with - oldest_job as ( + WITH + oldest_job AS ( SELECT id, age(current_timestamp, created_at) AS %s FROM jobs WHERE status = '%s' ORDER BY run_duration DESC LIMIT 1) - select id, + SELECT id, run_duration, extract(epoch from run_duration) as %s - from oldest_job""", readableTimeField, status.getLiteral(), durationSecField); + FROM oldest_job""", readableTimeField, status.getLiteral(), durationSecField); final var res = ctx.fetch(query); // unfortunately there are no good Jooq methods for retrieving a single record of a single column // forcing the List cast. 
@@ -101,7 +104,7 @@ oldest_job as ( } public static List numberOfActiveConnPerWorkspace(DSLContext ctx) { - var countField = "num_conn"; + final var countField = "num_conn"; final var query = String.format(""" SELECT workspace_id, count(c.id) as %s FROM actor @@ -114,4 +117,24 @@ SELECT workspace_id, count(c.id) as %s return ctx.fetch(query).getValues(countField, long.class); } + public static List> overallJobRuntimeForTerminalJobsInLastHour(DSLContext ctx) { + final var statusField = "status"; + final var timeField = "sec"; + final var query = + String.format(""" + SELECT %s, extract(epoch from age(updated_at, created_at)) AS %s FROM jobs + WHERE updated_at >= NOW() - INTERVAL '1 HOUR' + AND (jobs.status = 'failed' OR jobs.status = 'succeeded' OR jobs.status = 'cancelled');""", statusField, timeField); + final var statuses = ctx.fetch(query).getValues(statusField, JobStatus.class); + final var times = ctx.fetch(query).getValues(timeField, double.class); + + final var pairedRes = new ArrayList>(); + for (int i = 0; i < statuses.size(); i++) { + final var pair = new ImmutablePair<>(statuses.get(i), times.get(i)); + pairedRes.add(pair); + } + + return pairedRes; + } + } diff --git a/airbyte-metrics/lib/src/main/java/io/airbyte/metrics/lib/MetricTags.java b/airbyte-metrics/lib/src/main/java/io/airbyte/metrics/lib/MetricTags.java index 34728cb93887..ab8b05387a1d 100644 --- a/airbyte-metrics/lib/src/main/java/io/airbyte/metrics/lib/MetricTags.java +++ b/airbyte-metrics/lib/src/main/java/io/airbyte/metrics/lib/MetricTags.java @@ -6,6 +6,7 @@ import io.airbyte.config.FailureReason.FailureOrigin; import io.airbyte.db.instance.configs.jooq.enums.ReleaseStage; +import io.airbyte.db.instance.jobs.jooq.enums.JobStatus; /** * Keep track of all metric tags. @@ -13,8 +14,8 @@ public class MetricTags { private static final String RELEASE_STAGE = "release_stage"; - private static final String FAILURE_ORIGIN = "failure_origin"; + private static final String JOB_STATUS = "job_status"; public static String getReleaseStage(final ReleaseStage stage) { return tagDelimit(RELEASE_STAGE, stage.getLiteral()); @@ -24,6 +25,10 @@ public static String getFailureOrigin(final FailureOrigin origin) { return tagDelimit(FAILURE_ORIGIN, origin.value()); } + public static String getJobStatus(final JobStatus status) { + return tagDelimit(JOB_STATUS, status.getLiteral()); + } + private static String tagDelimit(final String tagName, final String tagVal) { return String.join(":", tagName, tagVal); } diff --git a/airbyte-metrics/lib/src/main/java/io/airbyte/metrics/lib/MetricsRegistry.java b/airbyte-metrics/lib/src/main/java/io/airbyte/metrics/lib/MetricsRegistry.java index ed8f88db612c..936eeb5ac3a1 100644 --- a/airbyte-metrics/lib/src/main/java/io/airbyte/metrics/lib/MetricsRegistry.java +++ b/airbyte-metrics/lib/src/main/java/io/airbyte/metrics/lib/MetricsRegistry.java @@ -25,8 +25,9 @@ *

* - Avoid numbers. This makes the metric confusing to read. Numbers should only be used as a *
- * - Add units at name end if applicable. This is especially relevant for time units. versioning - * tactic and present at the end of the metric. + * - Add units at name end if applicable. This is especially relevant for time units. + *
+ * - Include the time period in the name if the metric is meant to be run at a certain interval. */ public enum MetricsRegistry { @@ -83,7 +84,10 @@ public enum MetricsRegistry { "oldest pending job in seconds"), OLDEST_RUNNING_JOB_AGE_SECS(MetricEmittingApps.METRICS_REPORTER, "oldest_running_job_age_secs", - "oldest running job in seconds"); + "oldest running job in seconds"), + OVERALL_JOB_RUNTIME_IN_LAST_HOUR_BY_TERMINAL_STATE_SECS(MetricEmittingApps.METRICS_REPORTER, + "overall_job_runtime_in_last_hour_by_terminal_state_secs", + "overall job runtime - scheduling and execution for all attempts - for jobs that reach terminal states in the last hour. tagged by terminal states."); public final MetricEmittingApp application; public final String metricName; diff --git a/airbyte-metrics/lib/src/test/java/io/airbyte/metrics/lib/MetrisQueriesTest.java b/airbyte-metrics/lib/src/test/java/io/airbyte/metrics/lib/MetrisQueriesTest.java index f92f2c5bfa39..0dd5496868bd 100644 --- a/airbyte-metrics/lib/src/test/java/io/airbyte/metrics/lib/MetrisQueriesTest.java +++ b/airbyte-metrics/lib/src/test/java/io/airbyte/metrics/lib/MetrisQueriesTest.java @@ -15,6 +15,7 @@ import static io.airbyte.db.instance.configs.jooq.Tables.WORKSPACE; import static io.airbyte.db.instance.jobs.jooq.Tables.*; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import io.airbyte.db.Database; import io.airbyte.db.instance.configs.jooq.enums.ActorType; @@ -29,6 +30,7 @@ import java.time.temporal.ChronoUnit; import java.util.List; import java.util.UUID; +import org.apache.commons.lang3.tuple.ImmutablePair; import org.jooq.JSONB; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeAll; @@ -429,4 +431,108 @@ void shouldReturnNothingIfNotApplicable() throws SQLException { } + @Nested + class overallJobRuntimeForTerminalJobsInLastHour { + + @AfterEach + void tearDown() throws SQLException { + configDb.transaction(ctx -> ctx.truncate(JOBS).cascade().execute()); + } + + @Test + @DisplayName("should ignore non terminal jobs") + void shouldIgnoreNonTerminalJobs() throws SQLException { + configDb.transaction( + ctx -> ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE, JOBS.STATUS).values(1L, "", JobStatus.running).execute()); + configDb.transaction( + ctx -> ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE, JOBS.STATUS).values(2L, "", JobStatus.incomplete).execute()); + configDb.transaction( + ctx -> ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE, JOBS.STATUS).values(3L, "", JobStatus.pending).execute()); + + final var res = configDb.query(MetricQueries::overallJobRuntimeForTerminalJobsInLastHour); + assertEquals(0, res.size()); + } + + @Test + @DisplayName("should ignore jobs older than 1 hour") + void shouldIgnoreJobsOlderThan1Hour() throws SQLException { + final var updateAt = OffsetDateTime.now().minus(2, ChronoUnit.HOURS); + configDb.transaction( + ctx -> ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE, JOBS.STATUS, JOBS.UPDATED_AT).values(1L, "", JobStatus.succeeded, updateAt).execute()); + + final var res = configDb.query(MetricQueries::overallJobRuntimeForTerminalJobsInLastHour); + assertEquals(0, res.size()); + } + + @Test + @DisplayName("should return correct duration for terminal jobs") + void shouldReturnTerminalJobs() throws SQLException { + final var updateAt = OffsetDateTime.now(); + final var expAgeSecs = 10000; + final var createAt = updateAt.minus(expAgeSecs, ChronoUnit.SECONDS); + + configDb.transaction( + ctx -> ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE, 
JOBS.STATUS, JOBS.CREATED_AT, JOBS.UPDATED_AT) + .values(1L, "", JobStatus.succeeded, createAt, updateAt).execute()); + configDb.transaction( + ctx -> ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE, JOBS.STATUS, JOBS.CREATED_AT, JOBS.UPDATED_AT) + .values(2L, "", JobStatus.failed, createAt, updateAt).execute()); + configDb.transaction( + ctx -> ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE, JOBS.STATUS, JOBS.CREATED_AT, JOBS.UPDATED_AT) + .values(3L, "", JobStatus.cancelled, createAt, updateAt).execute()); + + final var res = configDb.query(MetricQueries::overallJobRuntimeForTerminalJobsInLastHour); + assertEquals(3, res.size()); + + final var exp = List.of( + new ImmutablePair<>(JobStatus.succeeded, expAgeSecs * 1.0), + new ImmutablePair<>(JobStatus.cancelled, expAgeSecs * 1.0), + new ImmutablePair<>(JobStatus.failed, expAgeSecs * 1.0)); + assertTrue(res.containsAll(exp) && exp.containsAll(res)); + } + + @Test + @DisplayName("should return correct duration for jobs that terminated in the last hour") + void shouldReturnTerminalJobsComplex() throws SQLException { + final var updateAtNow = OffsetDateTime.now(); + final var expAgeSecs = 10000; + final var createAt = updateAtNow.minus(expAgeSecs, ChronoUnit.SECONDS); + + // terminal jobs in last hour + configDb.transaction( + ctx -> ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE, JOBS.STATUS, JOBS.CREATED_AT, JOBS.UPDATED_AT) + .values(1L, "", JobStatus.succeeded, createAt, updateAtNow).execute()); + configDb.transaction( + ctx -> ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE, JOBS.STATUS, JOBS.CREATED_AT, JOBS.UPDATED_AT) + .values(2L, "", JobStatus.failed, createAt, updateAtNow).execute()); + + // old terminal jobs + final var updateAtOld = OffsetDateTime.now().minus(2, ChronoUnit.HOURS); + configDb.transaction( + ctx -> ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE, JOBS.STATUS, JOBS.CREATED_AT, JOBS.UPDATED_AT) + .values(3L, "", JobStatus.cancelled, createAt, updateAtOld).execute()); + + // non-terminal jobs + configDb.transaction( + ctx -> ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE, JOBS.STATUS, JOBS.CREATED_AT) + .values(4L, "", JobStatus.running, createAt).execute()); + + final var res = configDb.query(MetricQueries::overallJobRuntimeForTerminalJobsInLastHour); + assertEquals(2, res.size()); + + final var exp = List.of( + new ImmutablePair<>(JobStatus.succeeded, expAgeSecs * 1.0), + new ImmutablePair<>(JobStatus.failed, expAgeSecs * 1.0)); + assertTrue(res.containsAll(exp) && exp.containsAll(res)); + } + + @Test + @DisplayName("should not error out or return any result if not applicable") + void shouldReturnNothingIfNotApplicable() throws SQLException { + final var res = configDb.query(MetricQueries::overallJobRuntimeForTerminalJobsInLastHour); + assertEquals(0, res.size()); + } + + } + } diff --git a/airbyte-metrics/reporter/build.gradle b/airbyte-metrics/reporter/build.gradle index cc1655ab6a4e..ec1e02fccff0 100644 --- a/airbyte-metrics/reporter/build.gradle +++ b/airbyte-metrics/reporter/build.gradle @@ -4,6 +4,7 @@ plugins { dependencies { implementation project(':airbyte-config:models') + implementation project(':airbyte-db:jooq') implementation project(':airbyte-db:lib') implementation project(':airbyte-metrics:lib') } diff --git a/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/ReporterApp.java b/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/ReporterApp.java index 1a2e54a3b5fe..afba7778d572 100644 --- a/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/ReporterApp.java +++ 
b/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/ReporterApp.java @@ -8,16 +8,19 @@ import io.airbyte.config.EnvConfigs; import io.airbyte.db.Database; import io.airbyte.db.instance.configs.ConfigsDatabaseInstance; +import io.airbyte.db.instance.jobs.jooq.enums.JobStatus; import io.airbyte.metrics.lib.DatadogClientConfiguration; import io.airbyte.metrics.lib.DogStatsDMetricSingleton; import io.airbyte.metrics.lib.MetricEmittingApps; import io.airbyte.metrics.lib.MetricQueries; +import io.airbyte.metrics.lib.MetricTags; import io.airbyte.metrics.lib.MetricsRegistry; import java.io.IOException; import java.sql.SQLException; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import lombok.extern.slf4j.Slf4j; +import org.apache.commons.lang3.tuple.Pair; @Slf4j public class ReporterApp { @@ -78,6 +81,17 @@ public static void main(final String[] args) throws IOException, InterruptedExce e.printStackTrace(); } }, 0, 15, TimeUnit.SECONDS); + pollers.scheduleAtFixedRate(() -> { + try { + final var times = configDatabase.query(MetricQueries::overallJobRuntimeForTerminalJobsInLastHour); + for (Pair<JobStatus, Double> pair : times) { + DogStatsDMetricSingleton.recordTimeGlobal( + MetricsRegistry.OVERALL_JOB_RUNTIME_IN_LAST_HOUR_BY_TERMINAL_STATE_SECS, pair.getRight(), MetricTags.getJobStatus(pair.getLeft())); + } + } catch (final SQLException e) { + e.printStackTrace(); + } + }, 0, 1, TimeUnit.HOURS); Thread.sleep(1000_000 * 1000); } From 366d1a090079728f4a6e04e97b4640fa6c73a087 Mon Sep 17 00:00:00 2001 From: Christophe Duong Date: Fri, 11 Mar 2022 15:29:12 +0100 Subject: [PATCH 18/38] Refactor Snowflake internal Staging as a base class for other staging classes (#10865) * Refactor Snowflake internal Staging as a model to share staging abilities in jdbc destinations --- .../destination/ExtendedNameTransformer.java | 4 - .../NamingConventionTransformer.java | 4 + .../destination/StandardNameTransformer.java | 8 +- .../destination.py.hbs | 12 +-- .../destination-python/setup.py | 4 +- .../ClickhouseSQLNameTransformer.java | 2 +- .../databricks/DatabricksNameTransformer.java | 2 +- .../destination/jdbc/WriteConfig.java | 34 +++++++ .../staging/StagingConsumerFactory.java} | 92 +++++++++---------- .../staging/StagingOperations.java | 43 +++++++++ .../MariadbColumnstoreNameTransformer.java | 2 +- .../mongodb/MongodbNameTransformer.java | 2 +- .../mssql/MSSQLNameTransformer.java | 2 +- .../mysql/MySQLNameTransformer.java | 2 +- .../oracle/OracleNameTransformer.java | 2 +- .../postgres/PostgresSQLNameTransformer.java | 2 +- .../rockset/RocksetSQLNameTransformer.java | 2 +- .../destination/s3/S3DestinationConfig.java | 30 +++++- .../s3/avro/AvroNameTransformer.java | 2 +- .../destination.py | 12 +-- .../setup.py | 4 +- .../destination-snowflake/README.md | 2 +- .../SnowflakeInternalStagingDestination.java | 28 +++--- ...nowflakeInternalStagingSqlOperations.java} | 40 +++++++- .../SnowflakeSQLNameTransformer.java | 10 +- .../snowflake/SnowflakeDestinationTest.java | 12 ++- .../source-hubspot/source_hubspot/streams.py | 2 +- .../connectors/source-recurly/.python-version | 1 - .../integration_tests/configured_catalog.json | 2 +- .../source_recurly/schemas/accounts.json | 82 ++++++++--------- .../source_recurly/schemas/subscriptions.json | 4 +- .../source_recurly/schemas/transactions.json | 6 +- .../source-recurly/source_recurly/spec.json | 1 - .../integration_tests/state.json | 2 +- .../source-shopify/source_shopify/source.py | 4 +- .../SnowflakeSourceOperations.java | 1 -
.../workers/DefaultReplicationWorker.java | 5 +- 37 files changed, 298 insertions(+), 171 deletions(-) rename airbyte-integrations/connectors/{destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingConsumerFactory.java => destination-jdbc/src/main/java/io/airbyte/integrations/destination/staging/StagingConsumerFactory.java} (71%) create mode 100644 airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/staging/StagingOperations.java rename airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/{SnowflakeStagingSqlOperations.java => SnowflakeInternalStagingSqlOperations.java} (65%) delete mode 100644 airbyte-integrations/connectors/source-recurly/.python-version diff --git a/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/ExtendedNameTransformer.java b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/ExtendedNameTransformer.java index b278af6e5a0e..6af8c0bb97fa 100644 --- a/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/ExtendedNameTransformer.java +++ b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/ExtendedNameTransformer.java @@ -27,10 +27,6 @@ protected String disabled_convertStreamName(final String input) { } } - protected String applyDefaultCase(final String input) { - return input; - } - protected boolean useExtendedIdentifiers(final String input) { boolean result = false; if (input.matches("[^\\p{Alpha}_].*")) { diff --git a/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/NamingConventionTransformer.java b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/NamingConventionTransformer.java index 636bafe86a5c..1069f6f4c2b2 100644 --- a/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/NamingConventionTransformer.java +++ b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/NamingConventionTransformer.java @@ -45,4 +45,8 @@ public interface NamingConventionTransformer { @Deprecated String getTmpTableName(String name); + String convertStreamName(final String input); + + String applyDefaultCase(final String input); + } diff --git a/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/StandardNameTransformer.java b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/StandardNameTransformer.java index 41659b5415c6..9175ee5e11b2 100644 --- a/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/StandardNameTransformer.java +++ b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/StandardNameTransformer.java @@ -32,10 +32,16 @@ public String getTmpTableName(final String streamName) { return convertStreamName(Strings.addRandomSuffix("_airbyte_tmp", "_", 3) + "_" + streamName); } - protected String convertStreamName(final String input) { + @Override + public String convertStreamName(final String input) { return Names.toAlphanumericAndUnderscore(input); } + @Override + public String applyDefaultCase(final String input) { + return input; + } + /** * Rebuild a JsonNode adding sanitized property names (a subset of special characters replaced by * underscores) while keeping original property names too. 
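* For example (illustrative): a property named "user-name" would be kept as-is and a sanitized twin "user_name" added alongside it.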
This is needed by some destinations as diff --git a/airbyte-integrations/connector-templates/destination-python/destination_{{snakeCase name}}/destination.py.hbs b/airbyte-integrations/connector-templates/destination-python/destination_{{snakeCase name}}/destination.py.hbs index 0a12fa316e9e..fc7a7f95566d 100644 --- a/airbyte-integrations/connector-templates/destination-python/destination_{{snakeCase name}}/destination.py.hbs +++ b/airbyte-integrations/connector-templates/destination-python/destination_{{snakeCase name}}/destination.py.hbs @@ -3,19 +3,16 @@ # -from typing import Mapping, Any, Iterable +from typing import Any, Iterable, Mapping from airbyte_cdk import AirbyteLogger from airbyte_cdk.destinations import Destination -from airbyte_cdk.models import AirbyteConnectionStatus, ConfiguredAirbyteCatalog, AirbyteMessage, Status +from airbyte_cdk.models import AirbyteConnectionStatus, AirbyteMessage, ConfiguredAirbyteCatalog, Status class Destination{{properCase name}}(Destination): def write( - self, - config: Mapping[str, Any], - configured_catalog: ConfiguredAirbyteCatalog, - input_messages: Iterable[AirbyteMessage] + self, config: Mapping[str, Any], configured_catalog: ConfiguredAirbyteCatalog, input_messages: Iterable[AirbyteMessage] ) -> Iterable[AirbyteMessage]: """ @@ -54,6 +51,3 @@ class Destination{{properCase name}}(Destination): return AirbyteConnectionStatus(status=Status.SUCCEEDED) except Exception as e: return AirbyteConnectionStatus(status=Status.FAILED, message=f"An exception occurred: {repr(e)}") - - - diff --git a/airbyte-integrations/connector-templates/destination-python/setup.py b/airbyte-integrations/connector-templates/destination-python/setup.py index 95d9de4b70ba..7dbabcc48bcb 100644 --- a/airbyte-integrations/connector-templates/destination-python/setup.py +++ b/airbyte-integrations/connector-templates/destination-python/setup.py @@ -9,9 +9,7 @@ "airbyte-cdk", ] -TEST_REQUIREMENTS = [ - "pytest~=6.1" -] +TEST_REQUIREMENTS = ["pytest~=6.1"] setup( name="destination_{{snakeCase name}}", diff --git a/airbyte-integrations/connectors/destination-clickhouse/src/main/java/io/airbyte/integrations/destination/clickhouse/ClickhouseSQLNameTransformer.java b/airbyte-integrations/connectors/destination-clickhouse/src/main/java/io/airbyte/integrations/destination/clickhouse/ClickhouseSQLNameTransformer.java index fca0dc91e413..c5e277c5e28e 100644 --- a/airbyte-integrations/connectors/destination-clickhouse/src/main/java/io/airbyte/integrations/destination/clickhouse/ClickhouseSQLNameTransformer.java +++ b/airbyte-integrations/connectors/destination-clickhouse/src/main/java/io/airbyte/integrations/destination/clickhouse/ClickhouseSQLNameTransformer.java @@ -9,7 +9,7 @@ public class ClickhouseSQLNameTransformer extends ExtendedNameTransformer { @Override - protected String applyDefaultCase(final String input) { + public String applyDefaultCase(final String input) { return input.toLowerCase(); } diff --git a/airbyte-integrations/connectors/destination-databricks/src/main/java/io/airbyte/integrations/destination/databricks/DatabricksNameTransformer.java b/airbyte-integrations/connectors/destination-databricks/src/main/java/io/airbyte/integrations/destination/databricks/DatabricksNameTransformer.java index dd901f986916..a648c0f77ad9 100644 --- a/airbyte-integrations/connectors/destination-databricks/src/main/java/io/airbyte/integrations/destination/databricks/DatabricksNameTransformer.java +++ 
b/airbyte-integrations/connectors/destination-databricks/src/main/java/io/airbyte/integrations/destination/databricks/DatabricksNameTransformer.java @@ -29,7 +29,7 @@ public String getRawTableName(final String streamName) { } @Override - protected String applyDefaultCase(final String input) { + public String applyDefaultCase(final String input) { return input.toLowerCase(); } diff --git a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/WriteConfig.java b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/WriteConfig.java index 4bbe64bc5132..ef0bbfbf3715 100644 --- a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/WriteConfig.java +++ b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/WriteConfig.java @@ -5,6 +5,10 @@ package io.airbyte.integrations.destination.jdbc; import io.airbyte.protocol.models.DestinationSyncMode; +import java.util.ArrayList; +import java.util.List; +import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; /** * Write configuration POJO for all destinations extending {@link AbstractJdbcDestination}. @@ -19,6 +23,8 @@ public class WriteConfig { private final String tmpTableName; private final String outputTableName; private final DestinationSyncMode syncMode; + private final DateTime writeDatetime; + private final List<String> stagedFiles; public WriteConfig(final String streamName, final String namespace, @@ -26,12 +32,24 @@ public WriteConfig(final String streamName, final String tmpTableName, final String outputTableName, final DestinationSyncMode syncMode) { + this(streamName, namespace, outputSchemaName, tmpTableName, outputTableName, syncMode, DateTime.now(DateTimeZone.UTC)); + } + + public WriteConfig(final String streamName, + final String namespace, + final String outputSchemaName, + final String tmpTableName, + final String outputTableName, + final DestinationSyncMode syncMode, + final DateTime writeDatetime) { this.streamName = streamName; this.namespace = namespace; this.outputSchemaName = outputSchemaName; this.tmpTableName = tmpTableName; this.outputTableName = outputTableName; this.syncMode = syncMode; + this.stagedFiles = new ArrayList<>(); + this.writeDatetime = writeDatetime; } public String getStreamName() { @@ -58,6 +76,22 @@ public DestinationSyncMode getSyncMode() { return syncMode; } + public DateTime getWriteDatetime() { + return writeDatetime; + } + + public List<String> getStagedFiles() { + return stagedFiles; + } + + public void addStagedFile(final String file) { + stagedFiles.add(file); + } + + public void clearStagedFiles() { + stagedFiles.clear(); + } + @Override public String toString() { return "WriteConfig{" + diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingConsumerFactory.java b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/staging/StagingConsumerFactory.java similarity index 71% rename from airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingConsumerFactory.java rename to airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/staging/StagingConsumerFactory.java index 79df935ad712..ebdc0c0d2356 100644 ---
a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingConsumerFactory.java +++ b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/staging/StagingConsumerFactory.java @@ -2,7 +2,7 @@ * Copyright (c) 2021 Airbyte, Inc., all rights reserved. */ -package io.airbyte.integrations.destination.snowflake; +package io.airbyte.integrations.destination.staging; import com.fasterxml.jackson.databind.JsonNode; import com.google.common.base.Preconditions; @@ -16,7 +16,6 @@ import io.airbyte.integrations.destination.buffered_stream_consumer.OnCloseFunction; import io.airbyte.integrations.destination.buffered_stream_consumer.OnStartFunction; import io.airbyte.integrations.destination.buffered_stream_consumer.RecordWriter; -import io.airbyte.integrations.destination.jdbc.SqlOperations; import io.airbyte.integrations.destination.jdbc.WriteConfig; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteStream; @@ -30,38 +29,40 @@ import java.util.function.Consumer; import java.util.function.Function; import java.util.stream.Collectors; +import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** - * Snowflake Internal Staging consists of 4 main parts - * - * CREATE STAGE @TEMP_STAGE_NAME -- Creates a new named internal stage to use for loading data from - * files into Snowflake tables and unloading data from tables into files PUT - * file://local/ @TEMP_STAGE_NAME. --JDBC Driver will upload the files into stage - * COPY FROM @TEMP_STAGE_NAME -- Loads data from staged files to an existing table. - * DROP @TEMP_STAGE_NAME -- Drop temporary stage after sync - */ -public class SnowflakeInternalStagingConsumerFactory { +public class StagingConsumerFactory { - private static final Logger LOGGER = LoggerFactory.getLogger(SnowflakeInternalStagingConsumerFactory.class); + private static final Logger LOGGER = LoggerFactory.getLogger(StagingConsumerFactory.class); private static final long MAX_BATCH_SIZE_BYTES = 128 * 1024 * 1024; // 128mb - private final String CURRENT_SYNC_PATH = UUID.randomUUID().toString(); + private final DateTime CURRENT_SYNC_PATH = DateTime.now(DateTimeZone.UTC); + // Using a random string here as a placeholder for the moment. + // This avoids mixing data in the staging area between different syncs (especially when they + // manipulate streams with similar names). + // If we replaced the random connection id with the actual connection_id, we would instead gain the + // opportunity to leverage data that was uploaded to the stage in a previous attempt but failed to + // load to the warehouse for some reason (interrupted?). + // This would also allow other programs/scripts + // to load (or reload backups?) into the connection's staging area to be loaded at the next sync.
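+ // For example (hypothetical values, following the Snowflake getStagingPath implementation later in this patch): + // a stream "users" in schema "public" synced on 2022-03-11 at 15:00 UTC would be staged under a path like + // PUBLIC_USERS/2022/03/11/15/<RANDOM_CONNECTION_ID>/.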
+ private final String RANDOM_CONNECTION_ID = UUID.randomUUID().toString(); public AirbyteMessageConsumer create(final Consumer<AirbyteMessage> outputRecordCollector, final JdbcDatabase database, - final SnowflakeStagingSqlOperations sqlOperations, - final SnowflakeSQLNameTransformer namingResolver, + final StagingOperations sqlOperations, + final NamingConventionTransformer namingResolver, final JsonNode config, final ConfiguredAirbyteCatalog catalog) { final List<WriteConfig> writeConfigs = createWriteConfigs(namingResolver, config, catalog); return new BufferedStreamConsumer( outputRecordCollector, - onStartFunction(database, sqlOperations, writeConfigs, namingResolver), - recordWriterFunction(database, sqlOperations, writeConfigs, catalog, namingResolver), - onCloseFunction(database, sqlOperations, writeConfigs, namingResolver), + onStartFunction(database, sqlOperations, writeConfigs), + recordWriterFunction(database, sqlOperations, writeConfigs, catalog), + onCloseFunction(database, sqlOperations, writeConfigs), catalog, sqlOperations::isValidData, MAX_BATCH_SIZE_BYTES); @@ -74,8 +75,7 @@ private static List<WriteConfig> createWriteConfigs(final NamingConventionTransf return catalog.getStreams().stream().map(toWriteConfig(namingResolver, config)).collect(Collectors.toList()); } - private static Function<ConfiguredAirbyteStream, WriteConfig> toWriteConfig( - final NamingConventionTransformer namingResolver, + private static Function<ConfiguredAirbyteStream, WriteConfig> toWriteConfig(final NamingConventionTransformer namingResolver, final JsonNode config) { return stream -> { Preconditions.checkNotNull(stream.getDestinationSyncMode(), "Undefined destination sync mode"); @@ -104,9 +104,8 @@ private static String getOutputSchema(final AirbyteStream stream, } private static OnStartFunction onStartFunction(final JdbcDatabase database, - final SnowflakeStagingSqlOperations snowflakeSqlOperations, - final List<WriteConfig> writeConfigs, - final SnowflakeSQLNameTransformer namingResolver) { + final StagingOperations stagingOperations, + final List<WriteConfig> writeConfigs) { return () -> { LOGGER.info("Preparing tmp tables in destination started for {} streams", writeConfigs.size()); @@ -114,16 +113,16 @@ private static OnStartFunction onStartFunction(final JdbcDatabase database, final String schema = writeConfig.getOutputSchemaName(); final String stream = writeConfig.getStreamName(); final String tmpTable = writeConfig.getTmpTableName(); - final String stage = namingResolver.getStageName(schema, writeConfig.getOutputTableName()); + final String stage = stagingOperations.getStageName(schema, writeConfig.getOutputTableName()); LOGGER.info("Preparing stage in destination started for schema {} stream {}: tmp table: {}, stage: {}", schema, stream, tmpTable, stage); AirbyteSentry.executeWithTracing("PrepareStreamStage", () -> { - snowflakeSqlOperations.createSchemaIfNotExists(database, schema); - snowflakeSqlOperations.createTableIfNotExists(database, schema, tmpTable); - snowflakeSqlOperations.createStageIfNotExists(database, stage); + stagingOperations.createSchemaIfNotExists(database, schema); + stagingOperations.createTableIfNotExists(database, schema, tmpTable); + stagingOperations.createStageIfNotExists(database, stage); }, Map.of("schema", schema, "stream", stream, "tmpTable", tmpTable, "stage", stage)); @@ -139,14 +138,13 @@ private static AirbyteStreamNameNamespacePair toNameNamespacePair(final WriteCon } private RecordWriter recordWriterFunction(final JdbcDatabase database, - final SqlOperations snowflakeSqlOperations, + final StagingOperations stagingOperations, final List<WriteConfig> writeConfigs, - final ConfiguredAirbyteCatalog
catalog, - final SnowflakeSQLNameTransformer namingResolver) { + final ConfiguredAirbyteCatalog catalog) { final Map<AirbyteStreamNameNamespacePair, WriteConfig> pairToWriteConfig = writeConfigs.stream() .collect(Collectors.toUnmodifiableMap( - SnowflakeInternalStagingConsumerFactory::toNameNamespacePair, Function.identity())); + StagingConsumerFactory::toNameNamespacePair, Function.identity())); return (pair, records) -> { if (!pairToWriteConfig.containsKey(pair)) { @@ -157,16 +155,14 @@ private RecordWriter recordWriterFunction(final JdbcDatabase database, final WriteConfig writeConfig = pairToWriteConfig.get(pair); final String schemaName = writeConfig.getOutputSchemaName(); final String tableName = writeConfig.getOutputTableName(); - final String path = namingResolver.getStagingPath(schemaName, tableName, CURRENT_SYNC_PATH); - - snowflakeSqlOperations.insertRecords(database, records, schemaName, path); + final String path = stagingOperations.getStagingPath(RANDOM_CONNECTION_ID, schemaName, tableName, CURRENT_SYNC_PATH); + stagingOperations.insertRecords(database, records, schemaName, path); }; } private OnCloseFunction onCloseFunction(final JdbcDatabase database, - final SnowflakeStagingSqlOperations sqlOperations, - final List<WriteConfig> writeConfigs, - final SnowflakeSQLNameTransformer namingResolver) { + final StagingOperations stagingOperations, + final List<WriteConfig> writeConfigs) { return (hasFailed) -> { if (!hasFailed) { final List<String> queryList = new ArrayList<>(); @@ -177,29 +173,29 @@ private OnCloseFunction onCloseFunction(final JdbcDatabase database, final String streamName = writeConfig.getStreamName(); final String srcTableName = writeConfig.getTmpTableName(); final String dstTableName = writeConfig.getOutputTableName(); - final String path = namingResolver.getStagingPath(schemaName, dstTableName, CURRENT_SYNC_PATH); + final String path = stagingOperations.getStagingPath(RANDOM_CONNECTION_ID, schemaName, dstTableName, CURRENT_SYNC_PATH); LOGGER.info("Finalizing stream {}. 
schema {}, tmp table {}, final table {}, stage path {}", streamName, schemaName, srcTableName, dstTableName, path); try { - sqlOperations.copyIntoTmpTableFromStage(database, path, srcTableName, schemaName); + stagingOperations.copyIntoTmpTableFromStage(database, path, srcTableName, schemaName); } catch (final Exception e) { - sqlOperations.cleanUpStage(database, path); + stagingOperations.cleanUpStage(database, path); LOGGER.info("Cleaning stage path {}", path); throw new RuntimeException("Failed to upload data from stage " + path, e); } - sqlOperations.createTableIfNotExists(database, schemaName, dstTableName); + stagingOperations.createTableIfNotExists(database, schemaName, dstTableName); switch (writeConfig.getSyncMode()) { - case OVERWRITE -> queryList.add(sqlOperations.truncateTableQuery(database, schemaName, dstTableName)); + case OVERWRITE -> queryList.add(stagingOperations.truncateTableQuery(database, schemaName, dstTableName)); case APPEND, APPEND_DEDUP -> {} default -> throw new IllegalStateException("Unrecognized sync mode: " + writeConfig.getSyncMode()); } - queryList.add(sqlOperations.copyTableQuery(database, schemaName, srcTableName, dstTableName)); + queryList.add(stagingOperations.copyTableQuery(database, schemaName, srcTableName, dstTableName)); } LOGGER.info("Executing finalization of tables."); - sqlOperations.executeTransaction(database, queryList); + stagingOperations.executeTransaction(database, queryList); LOGGER.info("Finalizing tables in destination completed."); } LOGGER.info("Cleaning tmp tables in destination started for {} streams", writeConfigs.size()); @@ -209,12 +205,12 @@ private OnCloseFunction onCloseFunction(final JdbcDatabase database, LOGGER.info("Cleaning tmp table in destination started for stream {}. schema {}, tmp table name: {}", writeConfig.getStreamName(), schemaName, tmpTableName); - sqlOperations.dropTableIfExists(database, schemaName, tmpTableName); + stagingOperations.dropTableIfExists(database, schemaName, tmpTableName); final String outputTableName = writeConfig.getOutputTableName(); - final String stageName = namingResolver.getStageName(schemaName, outputTableName); + final String stageName = stagingOperations.getStageName(schemaName, outputTableName); LOGGER.info("Cleaning stage in destination started for stream {}. schema {}, stage: {}", writeConfig.getStreamName(), schemaName, stageName); - sqlOperations.dropStageIfExists(database, stageName); + stagingOperations.dropStageIfExists(database, stageName); } LOGGER.info("Cleaning tmp tables and stages in destination completed."); }; diff --git a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/staging/StagingOperations.java b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/staging/StagingOperations.java new file mode 100644 index 000000000000..f0ee658a7a76 --- /dev/null +++ b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/staging/StagingOperations.java @@ -0,0 +1,43 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.destination.staging; + +import io.airbyte.db.jdbc.JdbcDatabase; +import io.airbyte.integrations.destination.jdbc.SqlOperations; +import java.io.File; +import org.joda.time.DateTime; + +public interface StagingOperations extends SqlOperations { + + /** + * Returns the name of the stage (staging area) to use for the given schema and stream + */ + String getStageName(String schemaName, String tableName); + + /** + * Returns the path inside the stage under which files for the given connection, stream and write time are uploaded + */ + String getStagingPath(String connectionId, String schemaName, String tableName, DateTime writeDatetime); + + /** + * Create a staging folder where temporary files are uploaded before being loaded into the final destination + */ + void createStageIfNotExists(JdbcDatabase database, String stage) throws Exception; + + /** + * Upload the data file into the stage area + */ + void uploadRecordsToStage(JdbcDatabase database, File dataFile, String schemaName, String path) throws Exception; + + /** + * Load the data stored in the stage area into a temporary table in the destination + */ + void copyIntoTmpTableFromStage(JdbcDatabase database, String path, String srcTableName, String schemaName) throws Exception; + + /** + * Remove files that were just staged + */ + void cleanUpStage(JdbcDatabase database, String path) throws Exception; + + /** + * Delete the stage area and all staged files that were in it + */ + void dropStageIfExists(JdbcDatabase database, String stageName) throws Exception; + +} diff --git a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/main/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreNameTransformer.java b/airbyte-integrations/connectors/destination-mariadb-columnstore/src/main/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreNameTransformer.java index ca75718e9548..c6f058e4f9ff 100644 --- a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/main/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreNameTransformer.java +++ b/airbyte-integrations/connectors/destination-mariadb-columnstore/src/main/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreNameTransformer.java @@ -14,7 +14,7 @@ public String getIdentifier(final String name) { } @Override - protected String applyDefaultCase(final String input) { + public String applyDefaultCase(final String input) { return input.toLowerCase(); } diff --git a/airbyte-integrations/connectors/destination-mongodb/src/main/java/io/airbyte/integrations/destination/mongodb/MongodbNameTransformer.java b/airbyte-integrations/connectors/destination-mongodb/src/main/java/io/airbyte/integrations/destination/mongodb/MongodbNameTransformer.java index ba9e8e8967df..e2caaa5c6d2c 100644 --- a/airbyte-integrations/connectors/destination-mongodb/src/main/java/io/airbyte/integrations/destination/mongodb/MongodbNameTransformer.java +++ b/airbyte-integrations/connectors/destination-mongodb/src/main/java/io/airbyte/integrations/destination/mongodb/MongodbNameTransformer.java @@ -11,7 +11,7 @@ public class MongodbNameTransformer extends ExtendedNameTransformer { @Override - protected String applyDefaultCase(final String input) { + public String applyDefaultCase(final String input) { return input.toLowerCase(); } diff --git a/airbyte-integrations/connectors/destination-mssql/src/main/java/io/airbyte/integrations/destination/mssql/MSSQLNameTransformer.java b/airbyte-integrations/connectors/destination-mssql/src/main/java/io/airbyte/integrations/destination/mssql/MSSQLNameTransformer.java index e62facf6d07f..2702b7ce057b 100644 ---
a/airbyte-integrations/connectors/destination-mssql/src/main/java/io/airbyte/integrations/destination/mssql/MSSQLNameTransformer.java +++ b/airbyte-integrations/connectors/destination-mssql/src/main/java/io/airbyte/integrations/destination/mssql/MSSQLNameTransformer.java @@ -9,7 +9,7 @@ public class MSSQLNameTransformer extends ExtendedNameTransformer { @Override - protected String applyDefaultCase(final String input) { + public String applyDefaultCase(final String input) { return input.toUpperCase(); } diff --git a/airbyte-integrations/connectors/destination-mysql/src/main/java/io/airbyte/integrations/destination/mysql/MySQLNameTransformer.java b/airbyte-integrations/connectors/destination-mysql/src/main/java/io/airbyte/integrations/destination/mysql/MySQLNameTransformer.java index 5ae0f9a9a9d9..9f9edbbf6043 100644 --- a/airbyte-integrations/connectors/destination-mysql/src/main/java/io/airbyte/integrations/destination/mysql/MySQLNameTransformer.java +++ b/airbyte-integrations/connectors/destination-mysql/src/main/java/io/airbyte/integrations/destination/mysql/MySQLNameTransformer.java @@ -57,7 +57,7 @@ static String truncateName(final String name, final int maxLength) { } @Override - protected String applyDefaultCase(final String input) { + public String applyDefaultCase(final String input) { return input.toLowerCase(); } diff --git a/airbyte-integrations/connectors/destination-oracle/src/main/java/io/airbyte/integrations/destination/oracle/OracleNameTransformer.java b/airbyte-integrations/connectors/destination-oracle/src/main/java/io/airbyte/integrations/destination/oracle/OracleNameTransformer.java index b567548ee5c3..7f43f613c2c4 100644 --- a/airbyte-integrations/connectors/destination-oracle/src/main/java/io/airbyte/integrations/destination/oracle/OracleNameTransformer.java +++ b/airbyte-integrations/connectors/destination-oracle/src/main/java/io/airbyte/integrations/destination/oracle/OracleNameTransformer.java @@ -12,7 +12,7 @@ public class OracleNameTransformer extends ExtendedNameTransformer { @Override - protected String applyDefaultCase(final String input) { + public String applyDefaultCase(final String input) { return input.toUpperCase(); } diff --git a/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/PostgresSQLNameTransformer.java b/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/PostgresSQLNameTransformer.java index 2dad4cad9692..27d71218f152 100644 --- a/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/PostgresSQLNameTransformer.java +++ b/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/PostgresSQLNameTransformer.java @@ -9,7 +9,7 @@ public class PostgresSQLNameTransformer extends ExtendedNameTransformer { @Override - protected String applyDefaultCase(final String input) { + public String applyDefaultCase(final String input) { return input.toLowerCase(); } diff --git a/airbyte-integrations/connectors/destination-rockset/src/main/java/io/airbyte/integrations/destination/rockset/RocksetSQLNameTransformer.java b/airbyte-integrations/connectors/destination-rockset/src/main/java/io/airbyte/integrations/destination/rockset/RocksetSQLNameTransformer.java index 206d14549d96..2edf60143c6a 100644 --- 
a/airbyte-integrations/connectors/destination-rockset/src/main/java/io/airbyte/integrations/destination/rockset/RocksetSQLNameTransformer.java +++ b/airbyte-integrations/connectors/destination-rockset/src/main/java/io/airbyte/integrations/destination/rockset/RocksetSQLNameTransformer.java @@ -14,7 +14,7 @@ public String convertStreamName(String input) { } @Override - protected String applyDefaultCase(String input) { + public String applyDefaultCase(String input) { return input.toLowerCase(); } diff --git a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3DestinationConfig.java b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3DestinationConfig.java index 626aa9eb6386..82d8c9c4a1fa 100644 --- a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3DestinationConfig.java +++ b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3DestinationConfig.java @@ -14,6 +14,8 @@ import com.amazonaws.services.s3.AmazonS3ClientBuilder; import com.fasterxml.jackson.databind.JsonNode; import java.util.Objects; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * An S3 configuration. Typical usage sets at most one of {@code bucketPath} (necessary for more @@ -22,6 +24,8 @@ */ public class S3DestinationConfig { + private static final Logger LOGGER = LoggerFactory.getLogger(S3DestinationConfig.class); + // The smallest part size is 5MB. An S3 upload can be maximally formed of 10,000 parts. This gives // us an upper limit of 10,000 * 10 / 1000 = 100 GB per table with a 10MB part size limit. // WARNING: Too large a part size can cause potential OOM errors. @@ -36,6 +40,9 @@ public class S3DestinationConfig { private final Integer partSize; private final S3FormatConfig formatConfig; + private final Object lock = new Object(); + private AmazonS3 s3Client = null; + /** * The part size should not matter in any use case that depends on this constructor. So the default * 10 MB is used. 
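The hunk below turns getS3Client() into a lazy accessor guarded by the lock field added above, and introduces resetS3Client() to shut down a stale client and build a fresh one. A minimal caller sketch of how the two accessors are meant to work together (hypothetical names, not part of this patch):

import com.amazonaws.SdkClientException;
import com.amazonaws.services.s3.AmazonS3;
import java.io.File;

class S3ClientUsageSketch {

  // Hypothetical caller: the first getS3Client() call creates the client lazily;
  // on a client-side failure we rebuild it via resetS3Client() and retry once.
  static void uploadWithOneRetry(final S3DestinationConfig config, final String bucket, final String key, final File file) {
    try {
      config.getS3Client().putObject(bucket, key, file);
    } catch (final SdkClientException e) {
      config.resetS3Client().putObject(bucket, key, file);
    }
  }

}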
@@ -127,6 +134,27 @@ public S3FormatConfig getFormatConfig() { } public AmazonS3 getS3Client() { + synchronized (lock) { + if (s3Client == null) { + return resetS3Client(); + } + return s3Client; + } + } + + public AmazonS3 resetS3Client() { + synchronized (lock) { + if (s3Client != null) { + s3Client.shutdown(); + } + s3Client = createS3Client(); + return s3Client; + } + } + + protected AmazonS3 createS3Client() { + LOGGER.info("Creating S3 client..."); + final AWSCredentials awsCreds = new BasicAWSCredentials(accessKeyId, secretAccessKey); if (accessKeyId.isEmpty() && !secretAccessKey.isEmpty() @@ -134,7 +162,7 @@ public AmazonS3 getS3Client() { throw new RuntimeException("Either both accessKeyId and secretAccessKey should be provided, or neither"); } - if (accessKeyId.isEmpty() && secretAccessKey.isEmpty()) { + if (accessKeyId.isEmpty()) { return AmazonS3ClientBuilder.standard() .withCredentials(new InstanceProfileCredentialsProvider(false)) .build(); diff --git a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/avro/AvroNameTransformer.java b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/avro/AvroNameTransformer.java index c39152b1a4fb..d356e177eaa3 100644 --- a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/avro/AvroNameTransformer.java +++ b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/avro/AvroNameTransformer.java @@ -9,7 +9,7 @@ public class AvroNameTransformer extends ExtendedNameTransformer { @Override - protected String applyDefaultCase(final String input) { + public String applyDefaultCase(final String input) { return super.convertStreamName(input).toLowerCase(); } diff --git a/airbyte-integrations/connectors/destination-scaffold-destination-python/destination_scaffold_destination_python/destination.py b/airbyte-integrations/connectors/destination-scaffold-destination-python/destination_scaffold_destination_python/destination.py index 276f9861bbd3..0705ec76e081 100644 --- a/airbyte-integrations/connectors/destination-scaffold-destination-python/destination_scaffold_destination_python/destination.py +++ b/airbyte-integrations/connectors/destination-scaffold-destination-python/destination_scaffold_destination_python/destination.py @@ -3,19 +3,16 @@ # -from typing import Mapping, Any, Iterable +from typing import Any, Iterable, Mapping from airbyte_cdk import AirbyteLogger from airbyte_cdk.destinations import Destination -from airbyte_cdk.models import AirbyteConnectionStatus, ConfiguredAirbyteCatalog, AirbyteMessage, Status +from airbyte_cdk.models import AirbyteConnectionStatus, AirbyteMessage, ConfiguredAirbyteCatalog, Status class DestinationScaffoldDestinationPython(Destination): def write( - self, - config: Mapping[str, Any], - configured_catalog: ConfiguredAirbyteCatalog, - input_messages: Iterable[AirbyteMessage] + self, config: Mapping[str, Any], configured_catalog: ConfiguredAirbyteCatalog, input_messages: Iterable[AirbyteMessage] ) -> Iterable[AirbyteMessage]: """ @@ -54,6 +51,3 @@ def check(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> AirbyteConn return AirbyteConnectionStatus(status=Status.SUCCEEDED) except Exception as e: return AirbyteConnectionStatus(status=Status.FAILED, message=f"An exception occurred: {repr(e)}") - - - diff --git a/airbyte-integrations/connectors/destination-scaffold-destination-python/setup.py 
b/airbyte-integrations/connectors/destination-scaffold-destination-python/setup.py index b51d89f7f091..4e2dbe001f95 100644 --- a/airbyte-integrations/connectors/destination-scaffold-destination-python/setup.py +++ b/airbyte-integrations/connectors/destination-scaffold-destination-python/setup.py @@ -9,9 +9,7 @@ "airbyte-cdk", ] -TEST_REQUIREMENTS = [ - "pytest~=6.1" -] +TEST_REQUIREMENTS = ["pytest~=6.1"] setup( name="destination_scaffold_destination_python", diff --git a/airbyte-integrations/connectors/destination-snowflake/README.md b/airbyte-integrations/connectors/destination-snowflake/README.md index e48eaa79fc85..b46d0d13ddee 100644 --- a/airbyte-integrations/connectors/destination-snowflake/README.md +++ b/airbyte-integrations/connectors/destination-snowflake/README.md @@ -19,7 +19,7 @@ ``` ## For Airbyte employees -Put the contents of the `Snowflake Integration Test Config` secret on Rippling under the `Engineering` folder into `secrets/config.json` to be able to run integration tests locally. +Put the contents of the `Snowflake Integration Test Config` secret on LastPass under the `Engineering` folder into `secrets/config.json` to be able to run integration tests locally. 1. Put the contents of the `destination snowflake - insert test creds` LastPass secret into `secrets/insert_config.json`. 1. Put the contents of the `destination snowflake - insert staging test creds` secret into `internal_staging_config.json`. diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingDestination.java b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingDestination.java index 33897c854f36..f0ee2f98978c 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingDestination.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingDestination.java @@ -10,7 +10,9 @@ import io.airbyte.integrations.base.AirbyteMessageConsumer; import io.airbyte.integrations.base.Destination; import io.airbyte.integrations.base.sentry.AirbyteSentry; +import io.airbyte.integrations.destination.NamingConventionTransformer; import io.airbyte.integrations.destination.jdbc.AbstractJdbcDestination; +import io.airbyte.integrations.destination.staging.StagingConsumerFactory; import io.airbyte.protocol.models.AirbyteConnectionStatus; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; @@ -26,19 +28,23 @@ public class SnowflakeInternalStagingDestination extends AbstractJdbcDestination private static final Logger LOGGER = LoggerFactory.getLogger(SnowflakeInternalStagingDestination.class); public SnowflakeInternalStagingDestination() { - super("", new SnowflakeSQLNameTransformer(), new SnowflakeStagingSqlOperations()); + this(new SnowflakeSQLNameTransformer()); + } + + public SnowflakeInternalStagingDestination(final NamingConventionTransformer nameTransformer) { + super("", nameTransformer, new SnowflakeInternalStagingSqlOperations(nameTransformer)); } @Override public AirbyteConnectionStatus check(final JsonNode config) { - final SnowflakeSQLNameTransformer nameTransformer = new SnowflakeSQLNameTransformer(); - final SnowflakeStagingSqlOperations snowflakeStagingSqlOperations = new SnowflakeStagingSqlOperations(); + 
final NamingConventionTransformer nameTransformer = getNamingResolver(); + final SnowflakeInternalStagingSqlOperations snowflakeInternalStagingSqlOperations = new SnowflakeInternalStagingSqlOperations(nameTransformer); try (final JdbcDatabase database = getDatabase(config)) { - final String outputSchema = super.getNamingResolver().getIdentifier(config.get("schema").asText()); + final String outputSchema = nameTransformer.getIdentifier(config.get("schema").asText()); AirbyteSentry.executeWithTracing("CreateAndDropTable", - () -> attemptSQLCreateAndDropTableOperations(outputSchema, database, nameTransformer, snowflakeStagingSqlOperations)); + () -> attemptSQLCreateAndDropTableOperations(outputSchema, database, nameTransformer, snowflakeInternalStagingSqlOperations)); AirbyteSentry.executeWithTracing("CreateAndDropStage", - () -> attemptSQLCreateAndDropStages(outputSchema, database, nameTransformer, snowflakeStagingSqlOperations)); + () -> attemptSQLCreateAndDropStages(outputSchema, database, nameTransformer, snowflakeInternalStagingSqlOperations)); return new AirbyteConnectionStatus().withStatus(AirbyteConnectionStatus.Status.SUCCEEDED); } catch (final Exception e) { LOGGER.error("Exception while checking connection: ", e); @@ -50,13 +56,13 @@ public AirbyteConnectionStatus check(final JsonNode config) { private static void attemptSQLCreateAndDropStages(final String outputSchema, final JdbcDatabase database, - final SnowflakeSQLNameTransformer namingResolver, - final SnowflakeStagingSqlOperations sqlOperations) + final NamingConventionTransformer namingResolver, + final SnowflakeInternalStagingSqlOperations sqlOperations) throws Exception { // verify we have permissions to create/drop stage final String outputTableName = namingResolver.getIdentifier("_airbyte_connection_test_" + UUID.randomUUID().toString().replaceAll("-", "")); - final String stageName = namingResolver.getStageName(outputSchema, outputTableName); + final String stageName = sqlOperations.getStageName(outputSchema, outputTableName); sqlOperations.createStageIfNotExists(database, stageName); sqlOperations.dropStageIfExists(database, stageName); } @@ -81,8 +87,8 @@ public JsonNode toJdbcConfig(final JsonNode config) { public AirbyteMessageConsumer getConsumer(final JsonNode config, final ConfiguredAirbyteCatalog catalog, final Consumer<AirbyteMessage> outputRecordCollector) { - return new SnowflakeInternalStagingConsumerFactory().create(outputRecordCollector, getDatabase(config), - new SnowflakeStagingSqlOperations(), new SnowflakeSQLNameTransformer(), config, catalog); + return new StagingConsumerFactory().create(outputRecordCollector, getDatabase(config), + new SnowflakeInternalStagingSqlOperations(getNamingResolver()), getNamingResolver(), config, catalog); } } diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeStagingSqlOperations.java b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingSqlOperations.java similarity index 65% rename from airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeStagingSqlOperations.java rename to airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingSqlOperations.java index 6ef030b42c5b..b9344077ec5d 100644 ---
a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeStagingSqlOperations.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingSqlOperations.java @@ -6,7 +6,8 @@ import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.integrations.base.sentry.AirbyteSentry; -import io.airbyte.integrations.destination.jdbc.SqlOperations; +import io.airbyte.integrations.destination.NamingConventionTransformer; +import io.airbyte.integrations.destination.staging.StagingOperations; import io.airbyte.protocol.models.AirbyteRecordMessage; import java.io.File; import java.nio.file.Files; @@ -14,12 +15,43 @@ import java.util.List; import java.util.Map; import java.util.UUID; +import org.apache.commons.lang3.NotImplementedException; +import org.joda.time.DateTime; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -public class SnowflakeStagingSqlOperations extends SnowflakeSqlOperations implements SqlOperations { +public class SnowflakeInternalStagingSqlOperations extends SnowflakeSqlOperations implements StagingOperations { private static final Logger LOGGER = LoggerFactory.getLogger(SnowflakeSqlOperations.class); + private final NamingConventionTransformer nameTransformer; + + public SnowflakeInternalStagingSqlOperations(final NamingConventionTransformer nameTransformer) { + this.nameTransformer = nameTransformer; + } + + @Override + public String getStageName(final String namespace, final String streamName) { + return nameTransformer.applyDefaultCase(String.join("_", + nameTransformer.convertStreamName(namespace), + nameTransformer.convertStreamName(streamName))); + } + + @Override + public String getStagingPath(final String connectionId, final String namespace, final String streamName, final DateTime writeDatetime) { + // see https://docs.snowflake.com/en/user-guide/data-load-considerations-stage.html + return nameTransformer.applyDefaultCase(String.format("%s/%s/%02d/%02d/%02d/%s/", + getStageName(namespace, streamName), + writeDatetime.year().get(), + writeDatetime.monthOfYear().get(), + writeDatetime.dayOfMonth().get(), + writeDatetime.hourOfDay().get(), + connectionId)); + } + + @Override + public void uploadRecordsToStage(final JdbcDatabase database, final File dataFile, final String schemaName, final String path) throws Exception { + throw new NotImplementedException("placeholder function is not implemented yet"); + } @Override public void insertRecordsInternal(final JdbcDatabase database, @@ -46,6 +78,7 @@ private void loadDataIntoStage(final JdbcDatabase database, final String stage, Files.delete(tempFile.toPath()); } + @Override public void createStageIfNotExists(final JdbcDatabase database, final String stageName) throws SQLException { final String query = "CREATE STAGE IF NOT EXISTS %s encryption = (type = 'SNOWFLAKE_SSE') copy_options = (on_error='skip_file');"; AirbyteSentry.executeWithTracing("CreateStageIfNotExists", @@ -53,6 +86,7 @@ public void createStageIfNotExists(final JdbcDatabase database, final String sta Map.of("stage", stageName)); } + @Override public void copyIntoTmpTableFromStage(final JdbcDatabase database, final String stageName, final String dstTableName, final String schemaName) throws SQLException { final String query = "COPY INTO %s.%s FROM @%s file_format = " + @@ -62,12 +96,14 @@ public void copyIntoTmpTableFromStage(final JdbcDatabase database, final String Map.of("schema", schemaName, "stage", 
stageName, "table", dstTableName)); } + @Override public void dropStageIfExists(final JdbcDatabase database, final String stageName) throws SQLException { AirbyteSentry.executeWithTracing("DropStageIfExists", () -> database.execute(String.format("DROP STAGE IF EXISTS %s;", stageName)), Map.of("stage", stageName)); } + @Override public void cleanUpStage(final JdbcDatabase database, final String path) throws SQLException { AirbyteSentry.executeWithTracing("CleanStage", () -> database.execute(String.format("REMOVE @%s;", path)), diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeSQLNameTransformer.java b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeSQLNameTransformer.java index 373c3aa09983..cea4bf0b88ea 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeSQLNameTransformer.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeSQLNameTransformer.java @@ -9,16 +9,8 @@ public class SnowflakeSQLNameTransformer extends ExtendedNameTransformer { @Override - protected String applyDefaultCase(final String input) { + public String applyDefaultCase(final String input) { return input.toUpperCase(); } - public String getStageName(String schemaName, String outputTableName) { - return schemaName.concat(outputTableName).replaceAll("-", "_").toUpperCase(); - } - - public String getStagingPath(String schemaName, String tableName, String currentSyncPath) { - return (getStageName(schemaName, tableName) + "/staged/" + currentSyncPath).toUpperCase(); - } - } diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test/java/io/airbyte/integrations/destination/snowflake/SnowflakeDestinationTest.java b/airbyte-integrations/connectors/destination-snowflake/src/test/java/io/airbyte/integrations/destination/snowflake/SnowflakeDestinationTest.java index ecf3b2d688f7..74497672f5a0 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/test/java/io/airbyte/integrations/destination/snowflake/SnowflakeDestinationTest.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/test/java/io/airbyte/integrations/destination/snowflake/SnowflakeDestinationTest.java @@ -13,6 +13,7 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; @@ -23,6 +24,7 @@ import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.integrations.base.AirbyteMessageConsumer; import io.airbyte.integrations.base.Destination; +import io.airbyte.integrations.destination.staging.StagingConsumerFactory; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteRecordMessage; import io.airbyte.protocol.models.CatalogHelpers; @@ -82,17 +84,19 @@ public void useInsertStrategyTest() { @Test public void testCleanupStageOnFailure() throws Exception { - JdbcDatabase mockDb = mock(JdbcDatabase.class); - SnowflakeStagingSqlOperations sqlOperations = mock(SnowflakeStagingSqlOperations.class); + final JdbcDatabase mockDb = mock(JdbcDatabase.class); + final SnowflakeInternalStagingSqlOperations sqlOperations = mock(SnowflakeInternalStagingSqlOperations.class); + 
when(sqlOperations.getStageName(anyString(), anyString())).thenReturn("stage_name"); + when(sqlOperations.getStagingPath(anyString(), anyString(), anyString(), any())).thenReturn("staging_path"); final var testMessages = generateTestMessages(); final JsonNode config = Jsons.deserialize(MoreResources.readResource("insert_config.json"), JsonNode.class); - AirbyteMessageConsumer airbyteMessageConsumer = new SnowflakeInternalStagingConsumerFactory() + final AirbyteMessageConsumer airbyteMessageConsumer = new StagingConsumerFactory() .create(Destination::defaultOutputRecordCollector, mockDb, sqlOperations, new SnowflakeSQLNameTransformer(), config, getCatalog()); doThrow(SQLException.class).when(sqlOperations).copyIntoTmpTableFromStage(any(), anyString(), anyString(), anyString()); airbyteMessageConsumer.start(); - for (AirbyteMessage m : testMessages) { + for (final AirbyteMessage m : testMessages) { airbyteMessageConsumer.accept(m); } assertThrows(RuntimeException.class, airbyteMessageConsumer::close); diff --git a/airbyte-integrations/connectors/source-hubspot/source_hubspot/streams.py b/airbyte-integrations/connectors/source-hubspot/source_hubspot/streams.py index b1a5c2811637..4a0e1ebf1f30 100644 --- a/airbyte-integrations/connectors/source-hubspot/source_hubspot/streams.py +++ b/airbyte-integrations/connectors/source-hubspot/source_hubspot/streams.py @@ -848,7 +848,7 @@ def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, payload["after"] = int(response["paging"]["next"]["after"]) return {"params": params, "payload": payload} - + def stream_slices( self, *, sync_mode: SyncMode, cursor_field: List[str] = None, stream_state: Mapping[str, Any] = None ) -> Iterable[Optional[Mapping[str, Any]]]: diff --git a/airbyte-integrations/connectors/source-recurly/.python-version b/airbyte-integrations/connectors/source-recurly/.python-version deleted file mode 100644 index b13d4f55c681..000000000000 --- a/airbyte-integrations/connectors/source-recurly/.python-version +++ /dev/null @@ -1 +0,0 @@ -3.9.7/envs/airbyte-recurly diff --git a/airbyte-integrations/connectors/source-recurly/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-recurly/integration_tests/configured_catalog.json index aeedc323d898..75bcfeaf58ad 100644 --- a/airbyte-integrations/connectors/source-recurly/integration_tests/configured_catalog.json +++ b/airbyte-integrations/connectors/source-recurly/integration_tests/configured_catalog.json @@ -84,7 +84,7 @@ "sync_mode": "incremental", "destination_sync_mode": "overwrite" }, - { + { "stream": { "name": "export_dates", "json_schema": {}, diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/accounts.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/accounts.json index dec8c3de5f14..013509268cf4 100644 --- a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/accounts.json +++ b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/accounts.json @@ -62,48 +62,48 @@ "maxLength": 30 }, "address": { - "type": "object", - "properties": { - "phone": { - "type": "string", - "title": "Phone number", - "maxLength": 256 - }, - "street1": { - "type": "string", - "title": "Street 1", - "maxLength": 256 - }, - "street2": { - "type": "string", - "title": "Street 2", - "maxLength": 256 - }, - "city": { - "type": "string", - "title": "City", - "maxLength": 256 - }, - "region": { - "type": "string", - "title": "State/Province", - "description": "State or 
province.", - "maxLength": 256 - }, - "postal_code": { - "type": "string", - "title": "Zip/Postal code", - "description": "Zip or postal code.", - "maxLength": 256 - }, - "country": { - "type": "string", - "title": "Country", - "description": "Country, 2-letter ISO 3166-1 alpha-2 code.", - "maxLength": 2 - } - } + "type": "object", + "properties": { + "phone": { + "type": "string", + "title": "Phone number", + "maxLength": 256 }, + "street1": { + "type": "string", + "title": "Street 1", + "maxLength": 256 + }, + "street2": { + "type": "string", + "title": "Street 2", + "maxLength": 256 + }, + "city": { + "type": "string", + "title": "City", + "maxLength": 256 + }, + "region": { + "type": "string", + "title": "State/Province", + "description": "State or province.", + "maxLength": 256 + }, + "postal_code": { + "type": "string", + "title": "Zip/Postal code", + "description": "Zip or postal code.", + "maxLength": 256 + }, + "country": { + "type": "string", + "title": "Country", + "description": "Country, 2-letter ISO 3166-1 alpha-2 code.", + "maxLength": 2 + } + } + }, "custom_fields": { "type": ["null", "array"] }, diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/subscriptions.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/subscriptions.json index 071d937eee11..eeed47064a7c 100644 --- a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/subscriptions.json +++ b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/subscriptions.json @@ -213,13 +213,13 @@ "type": "string", "title": "Type", "description": "Provides the tax type as \"vat\" for EU VAT, \"usst\" for U.S. Sales Tax, or the 2 letter country code for country level tax types like Canada, Australia, New Zealand, Israel, and all non-EU European countries.", - "maxLength": 256 + "maxLength": 256 }, "region": { "type": "string", "title": "Region", "description": "Provides the tax region applied on an invoice. For U.S. Sales Tax, this will be the 2 letter state code. For EU VAT this will be the 2 letter country code. 
For all country level tax types, this will display the regional tax, like VAT, GST, or PST.", - "maxLength": 256 + "maxLength": 256 }, "rate": { "type": "number", diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/transactions.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/transactions.json index ba53386cc2ba..660e28842f2f 100644 --- a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/transactions.json +++ b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/transactions.json @@ -190,15 +190,15 @@ }, "ip_address_v4": { "type": ["null", "string"], - "maxLength": 256 + "maxLength": 256 }, "ip_address_country": { "type": ["null", "string"], - "maxLength": 256 + "maxLength": 256 }, "status_code": { "type": ["null", "string"], - "maxLength": 256 + "maxLength": 256 }, "status_message": { "type": ["null", "string"], diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/spec.json b/airbyte-integrations/connectors/source-recurly/source_recurly/spec.json index 18a30248383e..02a427f7378b 100644 --- a/airbyte-integrations/connectors/source-recurly/source_recurly/spec.json +++ b/airbyte-integrations/connectors/source-recurly/source_recurly/spec.json @@ -31,4 +31,3 @@ } } } - diff --git a/airbyte-integrations/connectors/source-shopify/integration_tests/state.json b/airbyte-integrations/connectors/source-shopify/integration_tests/state.json index 82d854aa6c9d..263c4268de83 100644 --- a/airbyte-integrations/connectors/source-shopify/integration_tests/state.json +++ b/airbyte-integrations/connectors/source-shopify/integration_tests/state.json @@ -78,4 +78,4 @@ "updated_at": "2022-03-03T03:47:46-08:00" } } -} \ No newline at end of file +} diff --git a/airbyte-integrations/connectors/source-shopify/source_shopify/source.py b/airbyte-integrations/connectors/source-shopify/source_shopify/source.py index 0e691ac67fb6..1e804a1dc14d 100644 --- a/airbyte-integrations/connectors/source-shopify/source_shopify/source.py +++ b/airbyte-integrations/connectors/source-shopify/source_shopify/source.py @@ -189,8 +189,8 @@ def stream_slices(self, stream_state: Mapping[str, Any] = None, **kwargs) -> Ite Reading the parent stream for slices with structure: EXAMPLE: for given nested_record as `id` of Orders, - Outputs: - [ + Outputs: + [ {slice_key: 123}, {slice_key: 456}, {...}, diff --git a/airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeSourceOperations.java b/airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeSourceOperations.java index ad9da09a969e..c5be4ffb929e 100644 --- a/airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeSourceOperations.java +++ b/airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeSourceOperations.java @@ -6,7 +6,6 @@ import com.fasterxml.jackson.databind.node.ObjectNode; import io.airbyte.db.jdbc.JdbcSourceOperations; - import java.math.BigDecimal; import java.sql.PreparedStatement; import java.sql.ResultSet; diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/DefaultReplicationWorker.java b/airbyte-workers/src/main/java/io/airbyte/workers/DefaultReplicationWorker.java index 66aee63c62df..8f644d8b6a5f 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/DefaultReplicationWorker.java +++ 
b/airbyte-workers/src/main/java/io/airbyte/workers/DefaultReplicationWorker.java @@ -32,6 +32,7 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; +import org.apache.commons.io.FileUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.slf4j.MDC; @@ -297,11 +298,11 @@ private static Runnable getReplicationRunnable(final AirbyteSource source, recordsRead += 1; if (recordsRead % 1000 == 0) { - LOGGER.info("Records read: {}", recordsRead); + LOGGER.info("Records read: {} ({})", recordsRead, FileUtils.byteCountToDisplaySize(messageTracker.getTotalBytesEmitted())); } } } - LOGGER.info("Total records read: {}", recordsRead); + LOGGER.info("Total records read: {} ({})", recordsRead, FileUtils.byteCountToDisplaySize(messageTracker.getTotalBytesEmitted())); try { destination.notifyEndOfStream(); } catch (final Exception e) { From 274b0bdd885c47923294ccc0efdc8535b80136da Mon Sep 17 00:00:00 2001 From: Davin Chia Date: Sat, 12 Mar 2022 02:03:54 +0800 Subject: [PATCH 19/38] Refactor Reporter App. (#11070) Refactor for readability. Create a separate enum class. This enum is added to when a dev needs to add another metric to the reporter. This helps us isolate the emission logic + scheduling configuration from the actual threads pushing the logic. --- .../io/airbyte/commons/lang/Exceptions.java | 18 +++++ .../metrics/lib/DogStatsDMetricSingleton.java | 14 ++-- .../airbyte/metrics/reporter/ReporterApp.java | 74 +++---------------- .../io/airbyte/metrics/reporter/ToEmit.java | 62 ++++++++++++++++ 4 files changed, 96 insertions(+), 72 deletions(-) create mode 100644 airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/ToEmit.java diff --git a/airbyte-commons/src/main/java/io/airbyte/commons/lang/Exceptions.java b/airbyte-commons/src/main/java/io/airbyte/commons/lang/Exceptions.java index 85fbff1fa8d3..a2417c77d964 100644 --- a/airbyte-commons/src/main/java/io/airbyte/commons/lang/Exceptions.java +++ b/airbyte-commons/src/main/java/io/airbyte/commons/lang/Exceptions.java @@ -6,9 +6,11 @@ import java.util.concurrent.Callable; import java.util.function.Function; +import lombok.extern.slf4j.Slf4j; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +@Slf4j public class Exceptions { private static final Logger LOGGER = LoggerFactory.getLogger(Exceptions.class); @@ -39,6 +41,22 @@ public static void toRuntime(final Procedure voidCallable) { castCheckedToRuntime(voidCallable, RuntimeException::new); } + /** + * Return a Runnable that logs anonymous function exceptions. 
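+ * Useful for tasks handed to a ScheduledExecutorService, where an uncaught exception from one run would otherwise silently suppress all subsequent runs.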
+ * + * @param voidCallable the function to wrap + * @return a Runnable that runs the function and logs, rather than rethrows, any exception + */ + public static Runnable toSwallowExceptionRunnable(final Procedure voidCallable) { + return () -> { + try { + voidCallable.call(); + } catch (final Exception e) { + log.error("Exception: ", e); + } + }; + } + public static void toIllegalState(final Procedure voidCallable) { castCheckedToRuntime(voidCallable, IllegalStateException::new); } diff --git a/airbyte-metrics/lib/src/main/java/io/airbyte/metrics/lib/DogStatsDMetricSingleton.java b/airbyte-metrics/lib/src/main/java/io/airbyte/metrics/lib/DogStatsDMetricSingleton.java index d2a55b6752a8..e6e45e058163 100644 --- a/airbyte-metrics/lib/src/main/java/io/airbyte/metrics/lib/DogStatsDMetricSingleton.java +++ b/airbyte-metrics/lib/src/main/java/io/airbyte/metrics/lib/DogStatsDMetricSingleton.java @@ -66,11 +66,11 @@ public static void count(final MetricsRegistry metric, final double amt, final S if (instancePublish) { if (statsDClient == null) { // do not loudly fail to prevent application disruption - log.warn("singleton not initialized, count {} not emitted", metric.metricName); + log.warn("singleton not initialized, count {} not emitted", metric); return; } - log.info("publishing count, name: {}, value: {}, tags: {}", metric.metricName, amt, tags); + log.info("publishing count, name: {}, value: {}, tags: {}", metric, amt, tags); statsDClient.count(metric.metricName, amt, tags); } } @@ -86,7 +86,7 @@ public static void gauge(final MetricsRegistry metric, final double val, final S if (instancePublish) { if (statsDClient == null) { // do not loudly fail to prevent application disruption - log.warn("singleton not initialized, gauge {} not emitted", metric.metricName); + log.warn("singleton not initialized, gauge {} not emitted", metric); return; } @@ -113,11 +113,11 @@ public static void recordTimeLocal(final MetricsRegistry metric, final double va if (instancePublish) { if (statsDClient == null) { // do not loudly fail to prevent application disruption - log.warn("singleton not initialized, histogram {} not emitted", metric.metricName); + log.warn("singleton not initialized, histogram {} not emitted", metric); return; } - log.info("recording histogram, name: {}, value: {}, tags: {}", metric.metricName, val, tags); + log.info("recording histogram, name: {}, value: {}, tags: {}", metric, val, tags); statsDClient.histogram(metric.metricName, val, tags); } } @@ -134,11 +134,11 @@ public static void recordTimeGlobal(final MetricsRegistry metric, final double v if (instancePublish) { if (statsDClient == null) { // do not loudly fail to prevent application disruption - log.warn("singleton not initialized, distribution {} not emitted", metric.metricName); + log.warn("singleton not initialized, distribution {} not emitted", metric); return; } - log.info("recording distribution, name: {}, value: {}, tags: {}", metric.metricName, val, tags); + log.info("recording distribution, name: {}, value: {}, tags: {}", metric, val, tags); statsDClient.distribution(metric.metricName, val, tags); } } diff --git a/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/ReporterApp.java b/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/ReporterApp.java index afba7778d572..c03d53596f43 100644 --- a/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/ReporterApp.java +++ b/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/ReporterApp.java @@ -8,92 +8,36 @@ import io.airbyte.config.EnvConfigs; import io.airbyte.db.Database; import
io.airbyte.db.instance.configs.ConfigsDatabaseInstance; -import io.airbyte.db.instance.jobs.jooq.enums.JobStatus; import io.airbyte.metrics.lib.DatadogClientConfiguration; import io.airbyte.metrics.lib.DogStatsDMetricSingleton; import io.airbyte.metrics.lib.MetricEmittingApps; -import io.airbyte.metrics.lib.MetricQueries; -import io.airbyte.metrics.lib.MetricTags; -import io.airbyte.metrics.lib.MetricsRegistry; import java.io.IOException; -import java.sql.SQLException; import java.util.concurrent.Executors; -import java.util.concurrent.TimeUnit; import lombok.extern.slf4j.Slf4j; -import org.apache.commons.lang3.tuple.Pair; @Slf4j public class ReporterApp { + public static Database configDatabase; + public static void main(final String[] args) throws IOException, InterruptedException { final Configs configs = new EnvConfigs(); DogStatsDMetricSingleton.initialize(MetricEmittingApps.METRICS_REPORTER, new DatadogClientConfiguration(configs)); - final Database configDatabase = new ConfigsDatabaseInstance( + configDatabase = new ConfigsDatabaseInstance( configs.getConfigDatabaseUser(), configs.getConfigDatabasePassword(), configs.getConfigDatabaseUrl()) .getInitialized(); - final var pollers = Executors.newScheduledThreadPool(5); - - log.info("Starting pollers.."); - pollers.scheduleAtFixedRate(() -> { - try { - final var pendingJobs = configDatabase.query(MetricQueries::numberOfPendingJobs); - DogStatsDMetricSingleton.gauge(MetricsRegistry.NUM_PENDING_JOBS, pendingJobs); - } catch (final SQLException e) { - e.printStackTrace(); - } - }, 0, 15, TimeUnit.SECONDS); - pollers.scheduleAtFixedRate(() -> { - try { - final var runningJobs = configDatabase.query(MetricQueries::numberOfRunningJobs); - DogStatsDMetricSingleton.gauge(MetricsRegistry.NUM_RUNNING_JOBS, runningJobs); - } catch (final SQLException e) { - e.printStackTrace(); - } - }, 0, 15, TimeUnit.SECONDS); - pollers.scheduleAtFixedRate(() -> { - try { - final var age = configDatabase.query(MetricQueries::oldestRunningJobAgeSecs); - DogStatsDMetricSingleton.gauge(MetricsRegistry.OLDEST_RUNNING_JOB_AGE_SECS, age); - } catch (final SQLException e) { - e.printStackTrace(); - } - }, 0, 15, TimeUnit.SECONDS); - pollers.scheduleAtFixedRate(() -> { - try { - final var age = configDatabase.query(MetricQueries::oldestPendingJobAgeSecs); - DogStatsDMetricSingleton.gauge(MetricsRegistry.OLDEST_PENDING_JOB_AGE_SECS, age); - } catch (final SQLException e) { - e.printStackTrace(); - } - }, 0, 15, TimeUnit.SECONDS); - pollers.scheduleAtFixedRate(() -> { - try { - final var age = configDatabase.query(MetricQueries::numberOfActiveConnPerWorkspace); - for (long count : age) { - DogStatsDMetricSingleton.percentile(MetricsRegistry.NUM_ACTIVE_CONN_PER_WORKSPACE, count); - } - } catch (final SQLException e) { - e.printStackTrace(); - } - }, 0, 15, TimeUnit.SECONDS); - pollers.scheduleAtFixedRate(() -> { - try { - final var times = configDatabase.query(MetricQueries::overallJobRuntimeForTerminalJobsInLastHour); - for (Pair pair : times) { - DogStatsDMetricSingleton.recordTimeGlobal( - MetricsRegistry.OVERALL_JOB_RUNTIME_IN_LAST_HOUR_BY_TERMINAL_STATE_SECS, pair.getRight(), MetricTags.getJobStatus(pair.getLeft())); - } - } catch (final SQLException e) { - e.printStackTrace(); - } - }, 0, 1, TimeUnit.HOURS); + final var toEmits = ToEmit.values(); + final var pollers = Executors.newScheduledThreadPool(toEmits.length); - Thread.sleep(1000_000 * 1000); + log.info("Scheduling {} metrics for emission..", toEmits.length); + for (ToEmit toEmit : toEmits) { + 
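// Each ToEmit constant bundles its emission runnable with a period and time unit, so adding a metric is just a new enum value; this scheduling loop stays unchanged. +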
pollers.scheduleAtFixedRate(toEmit.emitRunnable, 0, toEmit.period, toEmit.timeUnit); } } } diff --git a/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/ToEmit.java b/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/ToEmit.java new file mode 100644 index 000000000000..8a746d875359 --- /dev/null +++ b/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/ToEmit.java @@ -0,0 +1,62 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.metrics.reporter; + +import io.airbyte.commons.lang.Exceptions; +import io.airbyte.db.instance.jobs.jooq.enums.JobStatus; +import io.airbyte.metrics.lib.DogStatsDMetricSingleton; +import io.airbyte.metrics.lib.MetricQueries; +import io.airbyte.metrics.lib.MetricTags; +import io.airbyte.metrics.lib.MetricsRegistry; +import java.util.concurrent.TimeUnit; +import lombok.AllArgsConstructor; +import org.apache.commons.lang3.tuple.Pair; + +/** + * This class contains all metrics emitted by the {@link ReporterApp}. + */ +@AllArgsConstructor +public enum ToEmit { + + NUM_PENDING_JOBS(Exceptions.toSwallowExceptionRunnable(() -> { + final var pendingJobs = ReporterApp.configDatabase.query(MetricQueries::numberOfPendingJobs); + DogStatsDMetricSingleton.gauge(MetricsRegistry.NUM_PENDING_JOBS, pendingJobs); + })), + NUM_RUNNING_JOBS(Exceptions.toSwallowExceptionRunnable(() -> { + final var runningJobs = ReporterApp.configDatabase.query(MetricQueries::numberOfRunningJobs); + DogStatsDMetricSingleton.gauge(MetricsRegistry.NUM_RUNNING_JOBS, runningJobs); + })), + OLDEST_RUNNING_JOB_AGE_SECS(Exceptions.toSwallowExceptionRunnable(() -> { + final var age = ReporterApp.configDatabase.query(MetricQueries::oldestRunningJobAgeSecs); + DogStatsDMetricSingleton.gauge(MetricsRegistry.OLDEST_RUNNING_JOB_AGE_SECS, age); + })), + OLDEST_PENDING_JOB_AGE_SECS(Exceptions.toSwallowExceptionRunnable(() -> { + final var age = ReporterApp.configDatabase.query(MetricQueries::oldestPendingJobAgeSecs); + DogStatsDMetricSingleton.gauge(MetricsRegistry.OLDEST_PENDING_JOB_AGE_SECS, age); + })), + NUM_ACTIVE_CONN_PER_WORKSPACE(Exceptions.toSwallowExceptionRunnable(() -> { + final var age = ReporterApp.configDatabase.query(MetricQueries::numberOfActiveConnPerWorkspace); + for (long count : age) { + DogStatsDMetricSingleton.percentile(MetricsRegistry.NUM_ACTIVE_CONN_PER_WORKSPACE, count); + } + })), + OVERALL_JOB_RUNTIME_IN_LAST_HOUR_BY_TERMINAL_STATE_SECS(Exceptions.toSwallowExceptionRunnable(() -> { + final var times = ReporterApp.configDatabase.query(MetricQueries::overallJobRuntimeForTerminalJobsInLastHour); + for (Pair pair : times) { + DogStatsDMetricSingleton.recordTimeGlobal( + MetricsRegistry.OVERALL_JOB_RUNTIME_IN_LAST_HOUR_BY_TERMINAL_STATE_SECS, pair.getRight(), MetricTags.getJobStatus(pair.getLeft())); + } + }), 1, TimeUnit.HOURS); + + // The emission runnable plus its schedule; the fallback constructor below applies the default 15-second period. + public final Runnable emitRunnable; + public final long period; + public final TimeUnit timeUnit; + + ToEmit(final Runnable toEmit) { + this(toEmit, 15, TimeUnit.SECONDS); + } + +} From 5c1066d1715f4caefcb5962a55152ae2ea4bb813 Mon Sep 17 00:00:00 2001 From: oneshcheret <33333155+sashaNeshcheret@users.noreply.github.com> Date: Fri, 11 Mar 2022 21:17:14 +0200 Subject: [PATCH 20/38] Snowflake destination: added unit tests (#10699) * Snowflake destination: added unit tests * Snowflake destination: randomize sensitive data * Snowflake destination: revert s3 unit tests * Snowflake destination: added unit tests * Snowflake destination: added unit tests *
Snowflake destination: added unit tests * Snowflake destination: bump version * Snowflake destination: bump spec version * Snowflake destination: bump version * Snowflake destination: bump spec version --- .../seed/destination_definitions.yaml | 2 +- .../resources/seed/destination_specs.yaml | 7 ++- .../destination-snowflake/Dockerfile | 2 +- .../snowflake/SnowflakeDestination.java | 34 +------------ .../SnowflakeDestinationResolver.java | 39 ++++++++++++++ ...SnowflakeInternalStagingSqlOperations.java | 25 ++++++--- ...flakeGcsCopyDestinationAcceptanceTest.java | 4 +- ...wflakeInsertDestinationAcceptanceTest.java | 4 +- ...ernalStagingDestinatiomAcceptanceTest.java | 4 +- ...wflakeS3CopyDestinationAcceptanceTest.java | 4 +- .../snowflake/SnowflakeDestinationTest.java | 32 ++++++++++-- .../SnowflakeGCSStreamCopierTest.java | 12 ++--- ...flakeInternalStagingSqlOperationsTest.java | 35 +++++++++++++ .../SnowflakeS3StreamCopierTest.java | 9 ++-- .../snowflake/SnowflakeSqlOperationsTest.java | 51 +++++++++++++++++++ .../src/test/resources/copy_gcs_config.json | 15 ++++++ .../src/test/resources/copy_s3_config.json | 17 +++++++ docs/integrations/destinations/snowflake.md | 1 + 18 files changed, 231 insertions(+), 66 deletions(-) create mode 100644 airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeDestinationResolver.java create mode 100644 airbyte-integrations/connectors/destination-snowflake/src/test/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingSqlOperationsTest.java create mode 100644 airbyte-integrations/connectors/destination-snowflake/src/test/java/io/airbyte/integrations/destination/snowflake/SnowflakeSqlOperationsTest.java create mode 100644 airbyte-integrations/connectors/destination-snowflake/src/test/resources/copy_gcs_config.json create mode 100644 airbyte-integrations/connectors/destination-snowflake/src/test/resources/copy_s3_config.json diff --git a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml index b1d40ff730ff..9c046b76f5d9 100644 --- a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml @@ -221,7 +221,7 @@ - name: Snowflake destinationDefinitionId: 424892c4-daac-4491-b35d-c6688ba547ba dockerRepository: airbyte/destination-snowflake - dockerImageTag: 0.4.17 + dockerImageTag: 0.4.19 documentationUrl: https://docs.airbyte.io/integrations/destinations/snowflake icon: snowflake.svg resourceRequirements: diff --git a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml index 00c684bf3060..69d55337b614 100644 --- a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml @@ -3825,7 +3825,7 @@ supported_destination_sync_modes: - "overwrite" - "append" -- dockerImage: "airbyte/destination-snowflake:0.4.17" +- dockerImage: "airbyte/destination-snowflake:0.4.19" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/snowflake" connectionSpecification: @@ -3844,9 +3844,12 @@ properties: host: description: "The host domain of the snowflake instance (must include the\ - \ account, region, cloud environment, and end with snowflakecomputing.com)." 
+ \ account, region, cloud environment, and end with snowflakecomputing.com).\ + \ The account identifier differs depending on your cloud region, be sure\ + \ to verify with Snowflake's documentation." examples: - "accountname.us-east-2.aws.snowflakecomputing.com" + - "accountname.snowflakecomputing.com" type: "string" title: "Host" order: 0 diff --git a/airbyte-integrations/connectors/destination-snowflake/Dockerfile b/airbyte-integrations/connectors/destination-snowflake/Dockerfile index 0b26088896b2..3ccf7eda0bd7 100644 --- a/airbyte-integrations/connectors/destination-snowflake/Dockerfile +++ b/airbyte-integrations/connectors/destination-snowflake/Dockerfile @@ -20,5 +20,5 @@ RUN tar xf ${APPLICATION}.tar --strip-components=1 ENV ENABLE_SENTRY true -LABEL io.airbyte.version=0.4.17 +LABEL io.airbyte.version=0.4.19 LABEL io.airbyte.name=airbyte/destination-snowflake diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeDestination.java b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeDestination.java index e7da160fbe0b..37c8987d555d 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeDestination.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeDestination.java @@ -4,12 +4,9 @@ package io.airbyte.integrations.destination.snowflake; -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; import io.airbyte.integrations.base.Destination; import io.airbyte.integrations.base.IntegrationRunner; import io.airbyte.integrations.destination.jdbc.copy.SwitchingDestination; -import java.util.Map; public class SnowflakeDestination extends SwitchingDestination { @@ -20,36 +17,7 @@ enum DestinationType { } public SnowflakeDestination() { - super(DestinationType.class, SnowflakeDestination::getTypeFromConfig, getTypeToDestination()); - } - - private static DestinationType getTypeFromConfig(final JsonNode config) { - if (isS3Copy(config)) { - return DestinationType.COPY_S3; - } else if (isGcsCopy(config)) { - return DestinationType.COPY_GCS; - } else { - return DestinationType.INTERNAL_STAGING; - } - } - - public static boolean isS3Copy(final JsonNode config) { - return config.has("loading_method") && config.get("loading_method").isObject() && config.get("loading_method").has("s3_bucket_name"); - } - - public static boolean isGcsCopy(final JsonNode config) { - return config.has("loading_method") && config.get("loading_method").isObject() && config.get("loading_method").has("project_id"); - } - - private static Map getTypeToDestination() { - final SnowflakeCopyS3Destination copyS3Destination = new SnowflakeCopyS3Destination(); - final SnowflakeCopyGcsDestination copyGcsDestination = new SnowflakeCopyGcsDestination(); - final SnowflakeInternalStagingDestination internalStagingDestination = new SnowflakeInternalStagingDestination(); - - return ImmutableMap.of( - DestinationType.COPY_S3, copyS3Destination, - DestinationType.COPY_GCS, copyGcsDestination, - DestinationType.INTERNAL_STAGING, internalStagingDestination); + super(DestinationType.class, SnowflakeDestinationResolver::getTypeFromConfig, SnowflakeDestinationResolver.getTypeToDestination()); } public static void main(final String[] args) throws Exception { diff --git 
a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeDestinationResolver.java b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeDestinationResolver.java new file mode 100644 index 000000000000..0ca94299d6a3 --- /dev/null +++ b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeDestinationResolver.java @@ -0,0 +1,39 @@ +package io.airbyte.integrations.destination.snowflake; + +import com.fasterxml.jackson.databind.JsonNode; +import com.google.common.collect.ImmutableMap; +import io.airbyte.integrations.base.Destination; +import io.airbyte.integrations.destination.snowflake.SnowflakeDestination.DestinationType; +import java.util.Map; + +public class SnowflakeDestinationResolver { + + public static DestinationType getTypeFromConfig(final JsonNode config) { + if (isS3Copy(config)) { + return DestinationType.COPY_S3; + } else if (isGcsCopy(config)) { + return DestinationType.COPY_GCS; + } else { + return DestinationType.INTERNAL_STAGING; + } + } + + public static boolean isS3Copy(final JsonNode config) { + return config.has("loading_method") && config.get("loading_method").isObject() && config.get("loading_method").has("s3_bucket_name"); + } + + public static boolean isGcsCopy(final JsonNode config) { + return config.has("loading_method") && config.get("loading_method").isObject() && config.get("loading_method").has("project_id"); + } + + public static Map getTypeToDestination() { + final SnowflakeCopyS3Destination copyS3Destination = new SnowflakeCopyS3Destination(); + final SnowflakeCopyGcsDestination copyGcsDestination = new SnowflakeCopyGcsDestination(); + final SnowflakeInternalStagingDestination internalStagingDestination = new SnowflakeInternalStagingDestination(); + + return ImmutableMap.of( + DestinationType.COPY_S3, copyS3Destination, + DestinationType.COPY_GCS, copyGcsDestination, + DestinationType.INTERNAL_STAGING, internalStagingDestination); + } +} diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingSqlOperations.java b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingSqlOperations.java index b9344077ec5d..b4c513eb017e 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingSqlOperations.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingSqlOperations.java @@ -22,6 +22,10 @@ public class SnowflakeInternalStagingSqlOperations extends SnowflakeSqlOperations implements StagingOperations { + public static final String CREATE_STAGE_QUERY = "CREATE STAGE IF NOT EXISTS %s encryption = (type = 'SNOWFLAKE_SSE') copy_options = (on_error='skip_file');"; + public static final String COPY_QUERY = "COPY INTO %s.%s FROM @%s file_format = " + + "(type = csv field_delimiter = ',' skip_header = 0 FIELD_OPTIONALLY_ENCLOSED_BY = '\"')"; + public static final String DROP_STAGE_QUERY = "DROP STAGE IF EXISTS %s;"; private static final Logger LOGGER = LoggerFactory.getLogger(SnowflakeSqlOperations.class); private final NamingConventionTransformer nameTransformer; @@ -80,29 +84,38 @@ private void 
loadDataIntoStage(final JdbcDatabase database, final String stage, @Override public void createStageIfNotExists(final JdbcDatabase database, final String stageName) throws SQLException { - final String query = "CREATE STAGE IF NOT EXISTS %s encryption = (type = 'SNOWFLAKE_SSE') copy_options = (on_error='skip_file');"; AirbyteSentry.executeWithTracing("CreateStageIfNotExists", - () -> database.execute(String.format(query, stageName)), + () -> database.execute(getCreateStageQuery(stageName)), Map.of("stage", stageName)); } @Override public void copyIntoTmpTableFromStage(final JdbcDatabase database, final String stageName, final String dstTableName, final String schemaName) throws SQLException { - final String query = "COPY INTO %s.%s FROM @%s file_format = " + - "(type = csv field_delimiter = ',' skip_header = 0 FIELD_OPTIONALLY_ENCLOSED_BY = '\"')"; AirbyteSentry.executeWithTracing("CopyIntoTableFromStage", - () -> database.execute(String.format(query, schemaName, dstTableName, stageName)), + () -> database.execute(getCopyQuery(stageName, dstTableName, schemaName)), Map.of("schema", schemaName, "stage", stageName, "table", dstTableName)); } + String getCreateStageQuery(String stageName) { + return String.format(CREATE_STAGE_QUERY, stageName); + } + + String getCopyQuery(String stageName, String dstTableName, String schemaName) { + return String.format(COPY_QUERY, schemaName, dstTableName, stageName); + } + @Override public void dropStageIfExists(final JdbcDatabase database, final String stageName) throws SQLException { AirbyteSentry.executeWithTracing("DropStageIfExists", - () -> database.execute(String.format("DROP STAGE IF EXISTS %s;", stageName)), + () -> database.execute(getDropQuery(stageName)), Map.of("stage", stageName)); } + String getDropQuery(String stageName) { + return String.format(DROP_STAGE_QUERY, stageName); + } + @Override public void cleanUpStage(final JdbcDatabase database, final String path) throws SQLException { AirbyteSentry.executeWithTracing("CleanStage", diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/SnowflakeGcsCopyDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/SnowflakeGcsCopyDestinationAcceptanceTest.java index 9aaaa2a0f88e..c4d8c51b09ce 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/SnowflakeGcsCopyDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/SnowflakeGcsCopyDestinationAcceptanceTest.java @@ -15,8 +15,8 @@ public class SnowflakeGcsCopyDestinationAcceptanceTest extends SnowflakeInsertDe @Override public JsonNode getStaticConfig() { final JsonNode copyConfig = Jsons.deserialize(IOs.readFile(Path.of("secrets/copy_gcs_config.json"))); - Preconditions.checkArgument(SnowflakeDestination.isGcsCopy(copyConfig)); - Preconditions.checkArgument(!SnowflakeDestination.isS3Copy(copyConfig)); + Preconditions.checkArgument(SnowflakeDestinationResolver.isGcsCopy(copyConfig)); + Preconditions.checkArgument(!SnowflakeDestinationResolver.isS3Copy(copyConfig)); return copyConfig; } diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/SnowflakeInsertDestinationAcceptanceTest.java 
b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/SnowflakeInsertDestinationAcceptanceTest.java index 3b8767b95226..e64f9301e114 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/SnowflakeInsertDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/SnowflakeInsertDestinationAcceptanceTest.java @@ -56,8 +56,8 @@ protected JsonNode getConfig() { public JsonNode getStaticConfig() { final JsonNode insertConfig = Jsons.deserialize(IOs.readFile(Path.of("secrets/insert_config.json"))); - Preconditions.checkArgument(!SnowflakeDestination.isS3Copy(insertConfig)); - Preconditions.checkArgument(!SnowflakeDestination.isGcsCopy(insertConfig)); + Preconditions.checkArgument(!SnowflakeDestinationResolver.isS3Copy(insertConfig)); + Preconditions.checkArgument(!SnowflakeDestinationResolver.isGcsCopy(insertConfig)); return insertConfig; } diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingDestinatiomAcceptanceTest.java b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingDestinatiomAcceptanceTest.java index 0e431b819256..53adb9c4eac4 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingDestinatiomAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingDestinatiomAcceptanceTest.java @@ -14,8 +14,8 @@ public class SnowflakeInternalStagingDestinatiomAcceptanceTest extends Snowflake public JsonNode getStaticConfig() { final JsonNode internalStagingConfig = Jsons.deserialize(IOs.readFile(Path.of("secrets/internal_staging_config.json"))); - Preconditions.checkArgument(!SnowflakeDestination.isS3Copy(internalStagingConfig)); - Preconditions.checkArgument(!SnowflakeDestination.isGcsCopy(internalStagingConfig)); + Preconditions.checkArgument(!SnowflakeDestinationResolver.isS3Copy(internalStagingConfig)); + Preconditions.checkArgument(!SnowflakeDestinationResolver.isGcsCopy(internalStagingConfig)); return internalStagingConfig; } diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/SnowflakeS3CopyDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/SnowflakeS3CopyDestinationAcceptanceTest.java index 35cb12184ae6..7c2c3a9cc137 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/SnowflakeS3CopyDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/SnowflakeS3CopyDestinationAcceptanceTest.java @@ -15,8 +15,8 @@ public class SnowflakeS3CopyDestinationAcceptanceTest extends SnowflakeInsertDes @Override public JsonNode getStaticConfig() { final JsonNode copyConfig = Jsons.deserialize(IOs.readFile(Path.of("secrets/copy_s3_config.json"))); - 
Preconditions.checkArgument(SnowflakeDestination.isS3Copy(copyConfig)); - Preconditions.checkArgument(!SnowflakeDestination.isGcsCopy(copyConfig)); + Preconditions.checkArgument(SnowflakeDestinationResolver.isS3Copy(copyConfig)); + Preconditions.checkArgument(!SnowflakeDestinationResolver.isGcsCopy(copyConfig)); return copyConfig; } diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test/java/io/airbyte/integrations/destination/snowflake/SnowflakeDestinationTest.java b/airbyte-integrations/connectors/destination-snowflake/src/test/java/io/airbyte/integrations/destination/snowflake/SnowflakeDestinationTest.java index 74497672f5a0..2f9d2953f515 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/test/java/io/airbyte/integrations/destination/snowflake/SnowflakeDestinationTest.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/test/java/io/airbyte/integrations/destination/snowflake/SnowflakeDestinationTest.java @@ -4,13 +4,17 @@ package io.airbyte.integrations.destination.snowflake; +import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.params.provider.Arguments.arguments; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.any; +import static org.mockito.Mockito.atLeast; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -25,6 +29,7 @@ import io.airbyte.integrations.base.AirbyteMessageConsumer; import io.airbyte.integrations.base.Destination; import io.airbyte.integrations.destination.staging.StagingConsumerFactory; +import io.airbyte.integrations.destination.snowflake.SnowflakeDestination.DestinationType; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteRecordMessage; import io.airbyte.protocol.models.CatalogHelpers; @@ -37,8 +42,12 @@ import java.util.List; import java.util.stream.Collectors; import java.util.stream.IntStream; +import java.util.stream.Stream; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; public class SnowflakeDestinationTest { @@ -55,7 +64,7 @@ public void useS3CopyStrategyTest() { final var stubConfig = mapper.createObjectNode(); stubConfig.set("loading_method", stubLoadingMethod); - assertTrue(SnowflakeDestination.isS3Copy(stubConfig)); + assertTrue(SnowflakeDestinationResolver.isS3Copy(stubConfig)); } @Test @@ -69,7 +78,7 @@ public void useGcsCopyStrategyTest() { final var stubConfig = mapper.createObjectNode(); stubConfig.set("loading_method", stubLoadingMethod); - assertTrue(SnowflakeDestination.isGcsCopy(stubConfig)); + assertTrue(SnowflakeDestinationResolver.isGcsCopy(stubConfig)); } @Test @@ -78,7 +87,7 @@ public void useInsertStrategyTest() { final var stubLoadingMethod = mapper.createObjectNode(); final var stubConfig = mapper.createObjectNode(); stubConfig.set("loading_method", stubLoadingMethod); - assertFalse(SnowflakeDestination.isS3Copy(stubConfig)); + assertFalse(SnowflakeDestinationResolver.isS3Copy(stubConfig)); } @Test @@ 
-104,6 +113,23 @@ public void testCleanupStageOnFailure() throws Exception { verify(sqlOperations, times(1)).cleanUpStage(any(), anyString()); } + @ParameterizedTest + @MethodSource("destinationTypeToConfig") + public void testDestinationTypeFromConfig(final String configFileName, final DestinationType expectedDestinationType) throws Exception { + final JsonNode config = Jsons.deserialize(MoreResources.readResource(configFileName), JsonNode.class); + final DestinationType typeFromConfig = SnowflakeDestinationResolver.getTypeFromConfig(config); + assertEquals(expectedDestinationType, typeFromConfig); + } + + private static Stream<Arguments> destinationTypeToConfig() { + return Stream.of( + arguments("copy_gcs_config.json", DestinationType.COPY_GCS), + arguments("copy_s3_config.json", DestinationType.COPY_S3), + arguments("insert_config.json", DestinationType.INTERNAL_STAGING) + ); + } + + + private List<AirbyteMessage> generateTestMessages() { return IntStream.range(0, 3) .boxed() diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test/java/io/airbyte/integrations/destination/snowflake/SnowflakeGCSStreamCopierTest.java b/airbyte-integrations/connectors/destination-snowflake/src/test/java/io/airbyte/integrations/destination/snowflake/SnowflakeGCSStreamCopierTest.java index d0b3cf3fa36b..b014919448d7 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/test/java/io/airbyte/integrations/destination/snowflake/SnowflakeGCSStreamCopierTest.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/test/java/io/airbyte/integrations/destination/snowflake/SnowflakeGCSStreamCopierTest.java @@ -14,7 +14,6 @@ import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.integrations.destination.ExtendedNameTransformer; import io.airbyte.integrations.destination.jdbc.SqlOperations; -import io.airbyte.integrations.destination.jdbc.StagingFilenameGenerator; import io.airbyte.integrations.destination.jdbc.copy.gcs.GcsConfig; import io.airbyte.protocol.models.DestinationSyncMode; import java.util.ArrayList; @@ -28,12 +27,12 @@ public class SnowflakeGCSStreamCopierTest { private SnowflakeGcsStreamCopier copier; @BeforeEach - public void setup() { + public void setup() throws Exception { Storage storageClient = mock(Storage.class, RETURNS_DEEP_STUBS); db = mock(JdbcDatabase.class); SqlOperations sqlOperations = mock(SqlOperations.class); - copier = new SnowflakeGcsStreamCopier( + copier = (SnowflakeGcsStreamCopier) new SnowflakeGcsStreamCopierFactory().create( "fake-staging-folder", DestinationSyncMode.OVERWRITE, "fake-schema", @@ -42,8 +41,7 @@ public void setup() { db, new GcsConfig("fake-project-id", "fake-bucket-name", "fake-credentials"), new ExtendedNameTransformer(), - sqlOperations, - new StagingFilenameGenerator("fake-stream", 256L)); + sqlOperations); } @Test @@ -53,8 +51,8 @@ public void copiesCorrectFilesToTable() throws Exception { } copier.copyStagingFileToTemporaryTable(); - List<List<String>> partition = Lists.partition(new ArrayList<>(copier.getGcsStagingFiles()), 1000); - for (List<String> files : partition) { + final List<List<String>> partition = Lists.partition(new ArrayList<>(copier.getGcsStagingFiles()), 1000); + for (final List<String> files : partition) { verify(db).execute(String.format( "COPY INTO fake-schema.%s FROM '%s' storage_integration = gcs_airbyte_integration " + " file_format = (type = csv field_delimiter = ',' skip_header = 0 FIELD_OPTIONALLY_ENCLOSED_BY = '\"') " diff --git
a/airbyte-integrations/connectors/destination-snowflake/src/test/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingSqlOperationsTest.java b/airbyte-integrations/connectors/destination-snowflake/src/test/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingSqlOperationsTest.java new file mode 100644 index 000000000000..cfb9cb5aaff6 --- /dev/null +++ b/airbyte-integrations/connectors/destination-snowflake/src/test/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingSqlOperationsTest.java @@ -0,0 +1,35 @@ +package io.airbyte.integrations.destination.snowflake; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import org.junit.jupiter.api.Test; + +class SnowflakeInternalStagingSqlOperationsTest { + + public static final String SCHEMA_NAME = "schemaName"; + public static final String STAGE_NAME = "stageName"; + private final SnowflakeInternalStagingSqlOperations snowflakeStagingSqlOperations = new SnowflakeInternalStagingSqlOperations(new SnowflakeSQLNameTransformer()); + + @Test + void createStageIfNotExists() { + String actualCreateStageQuery = snowflakeStagingSqlOperations.getCreateStageQuery(STAGE_NAME); + String expectedCreateStageQuery = "CREATE STAGE IF NOT EXISTS " + STAGE_NAME + " encryption = (type = 'SNOWFLAKE_SSE') copy_options = (on_error='skip_file');"; + assertEquals(expectedCreateStageQuery, actualCreateStageQuery); + } + + @Test + void copyIntoTmpTableFromStage() { + String expectedQuery = "COPY INTO schemaName.tableName FROM @stageName file_format = " + + "(type = csv field_delimiter = ',' skip_header = 0 FIELD_OPTIONALLY_ENCLOSED_BY = '\"')"; + String actualCopyQuery = snowflakeStagingSqlOperations.getCopyQuery(STAGE_NAME, "tableName", SCHEMA_NAME); + assertEquals(expectedQuery, actualCopyQuery); + } + + @Test + void dropStageIfExists() { + String expectedQuery = "DROP STAGE IF EXISTS " + STAGE_NAME + ";"; + String actualDropQuery = snowflakeStagingSqlOperations.getDropQuery(STAGE_NAME); + assertEquals(expectedQuery, actualDropQuery); + } + +} \ No newline at end of file diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test/java/io/airbyte/integrations/destination/snowflake/SnowflakeS3StreamCopierTest.java b/airbyte-integrations/connectors/destination-snowflake/src/test/java/io/airbyte/integrations/destination/snowflake/SnowflakeS3StreamCopierTest.java index 7c3879edca57..8171f65b5202 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/test/java/io/airbyte/integrations/destination/snowflake/SnowflakeS3StreamCopierTest.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/test/java/io/airbyte/integrations/destination/snowflake/SnowflakeS3StreamCopierTest.java @@ -39,12 +39,12 @@ class SnowflakeS3StreamCopierTest { private SnowflakeS3StreamCopier copier; @BeforeEach - public void setup() { + public void setup() throws Exception { s3Client = mock(AmazonS3Client.class, RETURNS_DEEP_STUBS); db = mock(JdbcDatabase.class); sqlOperations = mock(SqlOperations.class); - copier = new SnowflakeS3StreamCopier( + copier = (SnowflakeS3StreamCopier) new SnowflakeS3StreamCopierFactory().create( // In reality, this is normally a UUID - see CopyConsumerFactory#createWriteConfigs "fake-staging-folder", "fake-schema", @@ -63,7 +63,6 @@ public void setup() { null)), new ExtendedNameTransformer(), sqlOperations, - UPLOAD_TIME, new ConfiguredAirbyteStream() .withDestinationSyncMode(DestinationSyncMode.APPEND) .withStream(new AirbyteStream() @@ -79,8 +78,8 @@ 
public void copiesCorrectFilesToTable() throws Exception { } copier.copyStagingFileToTemporaryTable(); - List> partition = Lists.partition(new ArrayList<>(copier.getStagingWritersByFile().keySet()), 1000); - for (List files : partition) { + final List> partition = Lists.partition(new ArrayList<>(copier.getStagingWritersByFile().keySet()), 1000); + for (final List files : partition) { verify(db).execute(String.format( "COPY INTO fake-schema.%s FROM '%s' " + "CREDENTIALS=(aws_key_id='fake-access-key-id' aws_secret_key='fake-secret-access-key') " diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test/java/io/airbyte/integrations/destination/snowflake/SnowflakeSqlOperationsTest.java b/airbyte-integrations/connectors/destination-snowflake/src/test/java/io/airbyte/integrations/destination/snowflake/SnowflakeSqlOperationsTest.java new file mode 100644 index 000000000000..c54cd05fa788 --- /dev/null +++ b/airbyte-integrations/connectors/destination-snowflake/src/test/java/io/airbyte/integrations/destination/snowflake/SnowflakeSqlOperationsTest.java @@ -0,0 +1,51 @@ +package io.airbyte.integrations.destination.snowflake; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; + +import io.airbyte.commons.functional.CheckedConsumer; +import io.airbyte.db.jdbc.JdbcDatabase; +import io.airbyte.integrations.base.JavaBaseConstants; +import io.airbyte.protocol.models.AirbyteRecordMessage; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; +import org.junit.jupiter.api.Test; + +class SnowflakeSqlOperationsTest { + + SnowflakeSqlOperations snowflakeSqlOperations = new SnowflakeSqlOperations(); + public static String SCHEMA_NAME = "schemaName"; + public static final String TABLE_NAME = "tableName"; + List records = new ArrayList<>(); + JdbcDatabase db = mock(JdbcDatabase.class); + + @Test + void createTableQuery() { + String expectedQuery = String.format( + "CREATE TABLE IF NOT EXISTS %s.%s ( \n" + + "%s VARCHAR PRIMARY KEY,\n" + + "%s VARIANT,\n" + + "%s TIMESTAMP WITH TIME ZONE DEFAULT current_timestamp()\n" + + ") data_retention_time_in_days = 0;", + SCHEMA_NAME, TABLE_NAME, JavaBaseConstants.COLUMN_NAME_AB_ID, JavaBaseConstants.COLUMN_NAME_DATA, JavaBaseConstants.COLUMN_NAME_EMITTED_AT); + String actualQuery = snowflakeSqlOperations.createTableQuery(db, SCHEMA_NAME, TABLE_NAME); + assertEquals(expectedQuery, actualQuery); + } + + @Test + void isSchemaExists() throws Exception { + snowflakeSqlOperations.isSchemaExists(db, SCHEMA_NAME); + verify(db, times(1)).query(anyString()); + } + + @Test + void insertRecordsInternal() throws SQLException { + snowflakeSqlOperations.insertRecordsInternal(db, List.of(new AirbyteRecordMessage()), SCHEMA_NAME, TABLE_NAME); + verify(db, times(1)).execute(any(CheckedConsumer.class)); + } +} \ No newline at end of file diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test/resources/copy_gcs_config.json b/airbyte-integrations/connectors/destination-snowflake/src/test/resources/copy_gcs_config.json new file mode 100644 index 000000000000..b95bd0ede782 --- /dev/null +++ b/airbyte-integrations/connectors/destination-snowflake/src/test/resources/copy_gcs_config.json @@ -0,0 +1,15 @@ +{ + "host": "test.us-east-2.aws", + "role": "AIRBYTE_ROLE", + "warehouse": 
"AIRBYTE_WAREHOUSE", + "database": "AIRBYTE_DATABASE", + "schema": "AIRBYTE_SCHEMA", + "username": "AIRBYTE_USERNAME_TEST", + "password": "test", + "loading_method": { + "method": "GCS Staging", + "project_id": "test", + "bucket_name": "test", + "credentials_json": "{\n\"type\": \"test\"}\n" + } +} diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test/resources/copy_s3_config.json b/airbyte-integrations/connectors/destination-snowflake/src/test/resources/copy_s3_config.json new file mode 100644 index 000000000000..8e41cce8879c --- /dev/null +++ b/airbyte-integrations/connectors/destination-snowflake/src/test/resources/copy_s3_config.json @@ -0,0 +1,17 @@ +{ + "host": "test.us-east-2.aws", + "role": "AIRBYTE_ROLE", + "warehouse": "AIRBYTE_WAREHOUSE", + "database": "AIRBYTE_DATABASE", + "schema": "AIRBYTE_SCHEMA", + "username": "AIRBYTE_USERNAME_TEST", + "password": "test", + "loading_method": { + "method": "S3 Staging", + "s3_bucket_name": "airbyte-snowflake-integration-tests", + "s3_bucket_region": "us-east-2", + "access_key_id": "test", + "secret_access_key": "test", + "part_size": 5 + } +} diff --git a/docs/integrations/destinations/snowflake.md b/docs/integrations/destinations/snowflake.md index dd43f2d85ec1..d199db377388 100644 --- a/docs/integrations/destinations/snowflake.md +++ b/docs/integrations/destinations/snowflake.md @@ -226,6 +226,7 @@ Finally, you need to add read/write permissions to your bucket with that email. | Version | Date | Pull Request | Subject | |:--------|:-----------| :----- | :------ | +| 0.4.19 | 2022-03-11 | [10699](https://github.com/airbytehq/airbyte/pull/10699) | Added unit tests | | 0.4.17 | 2022-02-25 | [10421](https://github.com/airbytehq/airbyte/pull/10421) | Refactor JDBC parameters handling | | 0.4.16 | 2022-02-25 | [\#10627](https://github.com/airbytehq/airbyte/pull/10627) | Add try catch to make sure all handlers are closed | | 0.4.15 | 2022-02-22 | [\#10459](https://github.com/airbytehq/airbyte/pull/10459) | Add FailureTrackingAirbyteMessageConsumer | From f031ec19b3878b312c595c53da177f142fe90e3b Mon Sep 17 00:00:00 2001 From: Benoit Moriceau Date: Fri, 11 Mar 2022 11:23:52 -0800 Subject: [PATCH 21/38] Revert "Revert "Remove the attemptId notion in the connectionManagerWorkflow (#10780)" (#11057)" (#11073) This reverts commit 892dc7ec66c876a71df14b4d9d8557f46df4314b. 
--- .../scheduler/persistence/JobPersistence.java | 6 +- airbyte-workers/build.gradle | 2 +- .../ConnectionManagerWorkflowImpl.java | 134 ++- .../activities/GenerateInputActivity.java | 17 + .../activities/GenerateInputActivityImpl.java | 8 + .../JobCreationAndStatusUpdateActivity.java | 70 ++ ...obCreationAndStatusUpdateActivityImpl.java | 43 + .../state/WorkflowInternalState.java | 2 +- .../ConnectionManagerWorkflowTest.java | 74 +- .../scheduling/WorkflowReplayingTest.java | 24 + ...obCreationAndStatusUpdateActivityTest.java | 52 + .../src/test/resources/workflowHistory.json | 939 ++++++++++++++++++ 12 files changed, 1304 insertions(+), 67 deletions(-) create mode 100644 airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/WorkflowReplayingTest.java create mode 100644 airbyte-workers/src/test/resources/workflowHistory.json diff --git a/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/JobPersistence.java b/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/JobPersistence.java index cda5b52f5864..20db344f2fd6 100644 --- a/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/JobPersistence.java +++ b/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/JobPersistence.java @@ -75,12 +75,12 @@ public interface JobPersistence { // /** - * Create a new attempt for a job. Throws {@link IllegalStateException} if the job is already in a - * terminal state. + * Create a new attempt for a job and return its attempt number. Throws + * {@link IllegalStateException} if the job is already in a terminal state. * * @param jobId job for which an attempt will be created * @param logPath path where logs should be written for the attempt - * @return id of the attempt + * @return The attempt number of the created attempt (see {@link DefaultJobPersistence}) * @throws IOException exception due to interaction with persistence */ int createAttempt(long jobId, Path logPath) throws IOException; diff --git a/airbyte-workers/build.gradle b/airbyte-workers/build.gradle index e37a5b9a4482..ea4d418ed7c0 100644 --- a/airbyte-workers/build.gradle +++ b/airbyte-workers/build.gradle @@ -33,7 +33,7 @@ dependencies { implementation project(':airbyte-scheduler:models') testImplementation 'io.temporal:temporal-testing:1.8.1' - testImplementation 'io.temporal:temporal-testing-junit5:1.5.0' // versioned separately from rest of temporal + testImplementation 'com.jayway.jsonpath:json-path:2.7.0' testImplementation "org.flywaydb:flyway-core:7.14.0" testImplementation 'org.mockito:mockito-inline:4.0.0' testImplementation 'org.postgresql:postgresql:42.2.18' diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java index c654f034880b..16ecec71a2fb 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java @@ -21,15 +21,20 @@ import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity; import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity.GeneratedJobInput; import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity.SyncInput; +import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity.SyncInputWithAttemptNumber; 
diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java
index c654f034880b..16ecec71a2fb 100644
--- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java
+++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java
@@ -21,15 +21,20 @@ import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity;
 import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity.GeneratedJobInput;
 import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity.SyncInput;
+import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity.SyncInputWithAttemptNumber;
 import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity;
 import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptCreationInput;
 import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptCreationOutput;
 import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptFailureInput;
+import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptNumberCreationOutput;
+import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptNumberFailureInput;
 import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobCancelledInput;
+import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobCancelledInputWithAttemptNumber;
 import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobCreationInput;
 import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobCreationOutput;
 import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobFailureInput;
 import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobSuccessInput;
+import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobSuccessInputWithAttemptNumber;
 import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.ReportJobStartInput;
 import io.airbyte.workers.temporal.scheduling.shared.ActivityConfiguration;
 import io.airbyte.workers.temporal.scheduling.state.WorkflowInternalState;
@@ -60,6 +65,9 @@ public class ConnectionManagerWorkflowImpl implements ConnectionManagerWorkflow
   private static final int TASK_QUEUE_CHANGE_CURRENT_VERSION = 1;
   private static final int AUTO_DISABLE_FAILING_CONNECTION_CHANGE_CURRENT_VERSION = 1;
 
+  private static final String RENAME_ATTEMPT_ID_TO_NUMBER_TAG = "rename_attempt_id_to_number";
+  private static final int RENAME_ATTEMPT_ID_TO_NUMBER_CURRENT_VERSION = 1;
+
   private WorkflowState workflowState = new WorkflowState(UUID.randomUUID(), new NoopStateListener());
 
   private final WorkflowInternalState workflowInternalState = new WorkflowInternalState();
@@ -147,7 +155,7 @@ private CancellationScope generateSyncWorkflowRunnable(final ConnectionUpdaterIn
 
         workflowInternalState.setJobId(getOrCreateJobId(connectionUpdaterInput));
 
-        workflowInternalState.setAttemptId(createAttemptId(workflowInternalState.getJobId()));
+        workflowInternalState.setAttemptNumber(createAttempt(workflowInternalState.getJobId()));
 
         final GeneratedJobInput jobInputs = getJobInput();
 
@@ -183,13 +191,13 @@ private CancellationScope generateSyncWorkflowRunnable(final ConnectionUpdaterIn
                 af.getActivityType(),
                 af.getCause(),
                 workflowInternalState.getJobId(),
-                workflowInternalState.getAttemptId()));
+                workflowInternalState.getAttemptNumber()));
             reportFailure(connectionUpdaterInput, standardSyncOutput);
             prepareForNextRunAndContinueAsNew(connectionUpdaterInput);
           } else {
             workflowInternalState.getFailures().add(
                 FailureHelper.unknownOriginFailure(childWorkflowFailure.getCause(), workflowInternalState.getJobId(),
-                    workflowInternalState.getAttemptId()));
+                    workflowInternalState.getAttemptNumber()));
             reportFailure(connectionUpdaterInput, standardSyncOutput);
             prepareForNextRunAndContinueAsNew(connectionUpdaterInput);
           }
@@ -199,20 +207,41 @@ private CancellationScope generateSyncWorkflowRunnable(final ConnectionUpdaterIn
 
   private void reportSuccess(final ConnectionUpdaterInput connectionUpdaterInput, final StandardSyncOutput standardSyncOutput) {
     workflowState.setSuccess(true);
-    runMandatoryActivity(jobCreationAndStatusUpdateActivity::jobSuccess, new JobSuccessInput(
-        workflowInternalState.getJobId(),
-        workflowInternalState.getAttemptId(),
-        standardSyncOutput));
+    final int attemptCreationVersion =
+        Workflow.getVersion(RENAME_ATTEMPT_ID_TO_NUMBER_TAG, Workflow.DEFAULT_VERSION, RENAME_ATTEMPT_ID_TO_NUMBER_CURRENT_VERSION);
+
+    if (attemptCreationVersion < RENAME_ATTEMPT_ID_TO_NUMBER_CURRENT_VERSION) {
+      runMandatoryActivity(jobCreationAndStatusUpdateActivity::jobSuccess, new JobSuccessInput(
+          workflowInternalState.getJobId(),
+          workflowInternalState.getAttemptNumber(),
+          standardSyncOutput));
+    } else {
+      runMandatoryActivity(jobCreationAndStatusUpdateActivity::jobSuccessWithAttemptNumber, new JobSuccessInputWithAttemptNumber(
+          workflowInternalState.getJobId(),
+          workflowInternalState.getAttemptNumber(),
+          standardSyncOutput));
+    }
 
     resetNewConnectionInput(connectionUpdaterInput);
   }
 
   private void reportFailure(final ConnectionUpdaterInput connectionUpdaterInput, final StandardSyncOutput standardSyncOutput) {
-    runMandatoryActivity(jobCreationAndStatusUpdateActivity::attemptFailure, new AttemptFailureInput(
-        workflowInternalState.getJobId(),
-        workflowInternalState.getAttemptId(),
-        standardSyncOutput,
-        FailureHelper.failureSummary(workflowInternalState.getFailures(), workflowInternalState.getPartialSuccess())));
+    final int attemptCreationVersion =
+        Workflow.getVersion(RENAME_ATTEMPT_ID_TO_NUMBER_TAG, Workflow.DEFAULT_VERSION, RENAME_ATTEMPT_ID_TO_NUMBER_CURRENT_VERSION);
+
+    if (attemptCreationVersion < RENAME_ATTEMPT_ID_TO_NUMBER_CURRENT_VERSION) {
+      runMandatoryActivity(jobCreationAndStatusUpdateActivity::attemptFailure, new AttemptFailureInput(
+          workflowInternalState.getJobId(),
+          workflowInternalState.getAttemptNumber(),
+          standardSyncOutput,
+          FailureHelper.failureSummary(workflowInternalState.getFailures(), workflowInternalState.getPartialSuccess())));
+    } else {
+      runMandatoryActivity(jobCreationAndStatusUpdateActivity::attemptFailureWithAttemptNumber, new AttemptNumberFailureInput(
+          workflowInternalState.getJobId(),
+          workflowInternalState.getAttemptNumber(),
+          standardSyncOutput,
+          FailureHelper.failureSummary(workflowInternalState.getFailures(), workflowInternalState.getPartialSuccess())));
+    }
 
     final int maxAttempt = configFetchActivity.getMaxAttempt().getMaxAttempt();
     final int attemptNumber = connectionUpdaterInput.getAttemptNumber();
@@ -306,20 +335,20 @@ public WorkflowState getState() {
   @Override
   public JobInformation getJobInformation() {
     final Long jobId = workflowInternalState.getJobId();
-    final Integer attemptId = workflowInternalState.getAttemptId();
+    final Integer attemptNumber = workflowInternalState.getAttemptNumber();
     return new JobInformation(
         jobId == null ? NON_RUNNING_JOB_ID : jobId,
-        attemptId == null ? NON_RUNNING_ATTEMPT_ID : attemptId);
+        attemptNumber == null ? NON_RUNNING_ATTEMPT_ID : attemptNumber);
   }
 
   @Override
   public QuarantinedInformation getQuarantinedInformation() {
     final Long jobId = workflowInternalState.getJobId();
-    final Integer attemptId = workflowInternalState.getAttemptId();
+    final Integer attemptNumber = workflowInternalState.getAttemptNumber();
     return new QuarantinedInformation(
         connectionId,
         jobId == null ? NON_RUNNING_JOB_ID : jobId,
-        attemptId == null ? NON_RUNNING_ATTEMPT_ID : attemptId,
+        attemptNumber == null ? NON_RUNNING_ATTEMPT_ID : attemptNumber,
         workflowState.isQuarantined());
   }
@@ -419,15 +448,31 @@ private Long getOrCreateJobId(final ConnectionUpdaterInpu
 
   /**
    * Create a new attempt for a given jobId
+   *
+   * @param jobId - the jobId associated with the new attempt
+   *
+   * @return The attempt number
    */
-  private Integer createAttemptId(final long jobId) {
-    final AttemptCreationOutput attemptCreationOutput =
+  private Integer createAttempt(final long jobId) {
+    final int attemptCreationVersion =
+        Workflow.getVersion(RENAME_ATTEMPT_ID_TO_NUMBER_TAG, Workflow.DEFAULT_VERSION, RENAME_ATTEMPT_ID_TO_NUMBER_CURRENT_VERSION);
+
+    // Legacy path: retrieve the attempt number, which the pre-rename activity still exposes as an attempt id
+    if (attemptCreationVersion < RENAME_ATTEMPT_ID_TO_NUMBER_CURRENT_VERSION) {
+      final AttemptCreationOutput attemptCreationOutput =
+          runMandatoryActivityWithOutput(
+              jobCreationAndStatusUpdateActivity::createNewAttempt,
+              new AttemptCreationInput(
+                  jobId));
+      return attemptCreationOutput.getAttemptId();
+    }
+
+    final AttemptNumberCreationOutput attemptNumberCreationOutput =
         runMandatoryActivityWithOutput(
-            jobCreationAndStatusUpdateActivity::createNewAttempt,
+            jobCreationAndStatusUpdateActivity::createNewAttemptNumber,
             new AttemptCreationInput(
                 jobId));
-
-    return attemptCreationOutput.getAttemptId();
+    return attemptNumberCreationOutput.getAttemptNumber();
   }
 
   /**
@@ -436,14 +481,30 @@ private Integer createAttemptId(final long jobId) {
    */
   private GeneratedJobInput getJobInput() {
     final Long jobId = workflowInternalState.getJobId();
-    final Integer attemptId = workflowInternalState.getAttemptId();
-    final SyncInput getSyncInputActivitySyncInput = new SyncInput(
-        attemptId,
+    final Integer attemptNumber = workflowInternalState.getAttemptNumber();
+    final int attemptCreationVersion =
+        Workflow.getVersion(RENAME_ATTEMPT_ID_TO_NUMBER_TAG, Workflow.DEFAULT_VERSION, RENAME_ATTEMPT_ID_TO_NUMBER_CURRENT_VERSION);
+
+    if (attemptCreationVersion < RENAME_ATTEMPT_ID_TO_NUMBER_CURRENT_VERSION) {
+      final SyncInput getSyncInputActivitySyncInput = new SyncInput(
+          attemptNumber,
+          jobId,
+          workflowState.isResetConnection());
+
+      final GeneratedJobInput syncWorkflowInputs = runMandatoryActivityWithOutput(
+          getSyncInputActivity::getSyncWorkflowInput,
+          getSyncInputActivitySyncInput);
+
+      return syncWorkflowInputs;
+    }
+
+    final SyncInputWithAttemptNumber getSyncInputActivitySyncInput = new SyncInputWithAttemptNumber(
+        attemptNumber,
         jobId,
         workflowState.isResetConnection());
 
     final GeneratedJobInput syncWorkflowInputs = runMandatoryActivityWithOutput(
-        getSyncInputActivity::getSyncWorkflowInput,
+        getSyncInputActivity::getSyncWorkflowInputWithAttemptNumber,
         getSyncInputActivitySyncInput);
 
     return syncWorkflowInputs;
@@ -529,14 +590,25 @@ private void deleteConnectionBeforeTerminatingTheWorkflow() {
 
   private void reportCancelledAndContinueWith(final boolean isReset, final ConnectionUpdaterInput connectionUpdaterInput) {
     workflowState.setContinueAsReset(isReset);
     final Long jobId = workflowInternalState.getJobId();
-    final Integer attemptId = workflowInternalState.getAttemptId();
+    final Integer attemptNumber = workflowInternalState.getAttemptNumber();
     final Set<FailureReason> failures = workflowInternalState.getFailures();
     final Boolean partialSuccess = workflowInternalState.getPartialSuccess();
-    runMandatoryActivity(jobCreationAndStatusUpdateActivity::jobCancelled,
-        new JobCancelledInput(
-            jobId,
-            attemptId,
-            FailureHelper.failureSummaryForCancellation(jobId, attemptId, failures, partialSuccess)));
+    final int attemptCreationVersion =
+        Workflow.getVersion(RENAME_ATTEMPT_ID_TO_NUMBER_TAG, Workflow.DEFAULT_VERSION, RENAME_ATTEMPT_ID_TO_NUMBER_CURRENT_VERSION);
+
+    if (attemptCreationVersion < RENAME_ATTEMPT_ID_TO_NUMBER_CURRENT_VERSION) {
+      runMandatoryActivity(jobCreationAndStatusUpdateActivity::jobCancelled,
+          new JobCancelledInput(
+              jobId,
+              attemptNumber,
+              FailureHelper.failureSummaryForCancellation(jobId, attemptNumber, failures, partialSuccess)));
+    } else {
+      runMandatoryActivity(jobCreationAndStatusUpdateActivity::jobCancelledWithAttemptNumber,
+          new JobCancelledInputWithAttemptNumber(
+              jobId,
+              attemptNumber,
+              FailureHelper.failureSummaryForCancellation(jobId, attemptNumber, failures, partialSuccess)));
+    }
     resetNewConnectionInput(connectionUpdaterInput);
     prepareForNextRunAndContinueAsNew(connectionUpdaterInput);
   }
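Every call site in the diff above repeats the same Workflow.getVersion guard. As a minimal standalone sketch of that pattern (hypothetical class, not part of this patch): getVersion writes a version marker into the history the first time the statement runs, so replays of histories recorded before the change keep returning DEFAULT_VERSION and take the legacy branch, while new executions record and return version 1.

import io.temporal.workflow.Workflow;

public class RenameMigrationSketch {

  private static final String TAG = "rename_attempt_id_to_number";
  private static final int CURRENT_VERSION = 1;

  public void step() {
    // Recorded once, deterministic on replay: pre-change histories see
    // Workflow.DEFAULT_VERSION (-1); fresh executions see CURRENT_VERSION.
    final int version = Workflow.getVersion(TAG, Workflow.DEFAULT_VERSION, CURRENT_VERSION);
    if (version < CURRENT_VERSION) {
      legacyActivityCall(); // the path old histories recorded
    } else {
      renamedActivityCall(); // the path new executions record
    }
  }

  private void legacyActivityCall() {}

  private void renamedActivityCall() {}

}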
diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/GenerateInputActivity.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/GenerateInputActivity.java
index 0a1ed70c3008..60191311ef34 100644
--- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/GenerateInputActivity.java
+++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/GenerateInputActivity.java
@@ -27,6 +27,17 @@ class SyncInput {
 
   }
 
+  @Data
+  @NoArgsConstructor
+  @AllArgsConstructor
+  class SyncInputWithAttemptNumber {
+
+    private int attemptNumber;
+    private long jobId;
+    private boolean reset;
+
+  }
+
   @Data
   @NoArgsConstructor
   @AllArgsConstructor
@@ -45,4 +56,10 @@ class GeneratedJobInput {
   @ActivityMethod
   GeneratedJobInput getSyncWorkflowInput(SyncInput input);
 
+  /**
+   * Generates the input needed by the child sync workflow, identifying the attempt by its number.
+   */
+  @ActivityMethod
+  GeneratedJobInput getSyncWorkflowInputWithAttemptNumber(SyncInputWithAttemptNumber input);
+
 }

diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/GenerateInputActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/GenerateInputActivityImpl.java
index 30edaf108adc..bcb939d022c8 100644
--- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/GenerateInputActivityImpl.java
+++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/GenerateInputActivityImpl.java
@@ -76,4 +76,12 @@ public GeneratedJobInput getSyncWorkflowInput(final SyncInput input) {
     }
   }
 
+  @Override
+  public GeneratedJobInput getSyncWorkflowInputWithAttemptNumber(final SyncInputWithAttemptNumber input) {
+    return getSyncWorkflowInput(new SyncInput(
+        input.getAttemptNumber(),
+        input.getJobId(),
+        input.isReset()));
+  }
+
 }
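Because the new activity method above is a pure delegate, a small regression test can pin the two entry points together. A hedged sketch (hypothetical test, not part of this patch; it relies on the Lombok-generated value-based equals of SyncInput, and on mockito-inline from build.gradle for partial mocking):

import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity.SyncInput;
import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity.SyncInputWithAttemptNumber;
import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivityImpl;
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;

class GenerateInputDelegationSketchTest {

  @Test
  void newMethodDelegatesToLegacyMethod() {
    // CALLS_REAL_METHODS executes the real delegating method while the legacy
    // method is stubbed out, so no real dependencies are needed.
    final GenerateInputActivityImpl activity =
        Mockito.mock(GenerateInputActivityImpl.class, Mockito.CALLS_REAL_METHODS);
    Mockito.doReturn(null).when(activity).getSyncWorkflowInput(Mockito.any(SyncInput.class));

    activity.getSyncWorkflowInputWithAttemptNumber(new SyncInputWithAttemptNumber(1, 10L, false));

    // @Data gives SyncInput a value-based equals, so the exact delegated
    // argument can be asserted directly.
    Mockito.verify(activity).getSyncWorkflowInput(new SyncInput(1, 10L, false));
  }

}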
diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivity.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivity.java
index aa45b53b0e8c..2de587c2a15b 100644
--- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivity.java
+++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivity.java
@@ -72,6 +72,24 @@ class AttemptCreationOutput {
 
   @ActivityMethod
   AttemptCreationOutput createNewAttempt(AttemptCreationInput input) throws RetryableException;
 
+  @Data
+  @NoArgsConstructor
+  @AllArgsConstructor
+  class AttemptNumberCreationOutput {
+
+    private int attemptNumber;
+
+  }
+
+  /**
+   * Create a new attempt for a given job ID
+   *
+   * @param input POJO containing the jobId
+   * @return A POJO containing the attemptNumber
+   */
+  @ActivityMethod
+  AttemptNumberCreationOutput createNewAttemptNumber(AttemptCreationInput input) throws RetryableException;
+
   @Data
   @NoArgsConstructor
   @AllArgsConstructor
@@ -89,6 +107,23 @@ class JobSuccessInput {
 
   }
 
   @ActivityMethod
   void jobSuccess(JobSuccessInput input);
 
+  @Data
+  @NoArgsConstructor
+  @AllArgsConstructor
+  class JobSuccessInputWithAttemptNumber {
+
+    private long jobId;
+    private int attemptNumber;
+    private StandardSyncOutput standardSyncOutput;
+
+  }
+
+  /**
+   * Set a job status as successful
+   */
+  @ActivityMethod
+  void jobSuccessWithAttemptNumber(JobSuccessInputWithAttemptNumber input);
+
   @Data
   @NoArgsConstructor
   @AllArgsConstructor
@@ -123,6 +158,24 @@ class AttemptFailureInput {
 
   }
 
   @ActivityMethod
   void attemptFailure(AttemptFailureInput input);
 
+  @Data
+  @NoArgsConstructor
+  @AllArgsConstructor
+  class AttemptNumberFailureInput {
+
+    private long jobId;
+    private int attemptNumber;
+    private StandardSyncOutput standardSyncOutput;
+    private AttemptFailureSummary attemptFailureSummary;
+
+  }
+
+  /**
+   * Set an attempt status as failed
+   */
+  @ActivityMethod
+  void attemptFailureWithAttemptNumber(AttemptNumberFailureInput input);
+
   @Data
   @NoArgsConstructor
   @AllArgsConstructor
@@ -140,6 +193,23 @@ class JobCancelledInput {
 
   }
 
   @ActivityMethod
   void jobCancelled(JobCancelledInput input);
 
+  @Data
+  @NoArgsConstructor
+  @AllArgsConstructor
+  class JobCancelledInputWithAttemptNumber {
+
+    private long jobId;
+    private int attemptNumber;
+    private AttemptFailureSummary attemptFailureSummary;
+
+  }
+
+  /**
+   * Set a job status as cancelled
+   */
+  @ActivityMethod
+  void jobCancelledWithAttemptNumber(JobCancelledInputWithAttemptNumber input);
+
   @Data
   @NoArgsConstructor
   @AllArgsConstructor

diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityImpl.java
index e02796675174..b9a76065e776 100644
--- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityImpl.java
+++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityImpl.java
@@ -128,6 +128,24 @@ public AttemptCreationOutput createNewAttempt(final AttemptCreationInput input)
     }
   }
 
+  @Override
+  public AttemptNumberCreationOutput createNewAttemptNumber(final AttemptCreationInput input) throws RetryableException {
+    try {
+      final long jobId = input.getJobId();
+      final Job createdJob = jobPersistence.getJob(jobId);
+
+      final WorkerRun workerRun = temporalWorkerRunFactory.create(createdJob);
+      final Path logFilePath = workerRun.getJobRoot().resolve(LogClientSingleton.LOG_FILENAME);
+      final int persistedAttemptNumber = jobPersistence.createAttempt(jobId, logFilePath);
+      emitJobIdToReleaseStagesMetric(MetricsRegistry.ATTEMPT_CREATED_BY_RELEASE_STAGE, jobId);
+
+      LogClientSingleton.getInstance().setJobMdc(workerEnvironment, logConfigs, workerRun.getJobRoot());
+      return new AttemptNumberCreationOutput(persistedAttemptNumber);
+    } catch (final IOException e) {
+      throw new RetryableException(e);
+    }
+  }
+
   @Override
   public void jobSuccess(final JobSuccessInput input) {
     try {
@@ -152,6 +170,14 @@ public void jobSuccess(final JobSuccessInput input) {
     }
   }
 
+  @Override
+  public void jobSuccessWithAttemptNumber(final JobSuccessInputWithAttemptNumber input) {
+    jobSuccess(new JobSuccessInput(
+        input.getJobId(),
+        input.getAttemptNumber(),
+        input.getStandardSyncOutput()));
+  }
+
   @Override
   public void jobFailure(final JobFailureInput input) {
     try {
@@ -191,6 +217,15 @@ public void attemptFailure(final AttemptFailureInput input) {
     }
   }
 
+  @Override
+  public void attemptFailureWithAttemptNumber(final AttemptNumberFailureInput input) {
+    attemptFailure(new AttemptFailureInput(
+        input.getJobId(),
+        input.getAttemptNumber(),
+        input.getStandardSyncOutput(),
+        input.getAttemptFailureSummary()));
+  }
+
   @Override
   public void jobCancelled(final JobCancelledInput input) {
     try {
@@ -209,6 +244,14 @@ public void jobCancelled(final JobCancelledInput input) {
     }
   }
 
+  @Override
+  public void jobCancelledWithAttemptNumber(final JobCancelledInputWithAttemptNumber input) {
+    jobCancelled(new JobCancelledInput(
+        input.getJobId(),
+        input.getAttemptNumber(),
+        input.getAttemptFailureSummary()));
+  }
+
   @Override
   public void reportJobStart(final ReportJobStartInput input) {
     try {

diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/state/WorkflowInternalState.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/state/WorkflowInternalState.java
index 822bbfe0ba48..d9bdcb2d5800 100644
--- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/state/WorkflowInternalState.java
+++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/state/WorkflowInternalState.java
@@ -17,7 +17,7 @@ public class WorkflowInternalState {
 
   private Long jobId = null;
-  private Integer attemptId = null;
+  private Integer attemptNumber = null;
 
   // StandardSyncOutput standardSyncOutput = null;
   private final Set<FailureReason> failures = new HashSet<>();
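The one-line field rename above also renames the Lombok-generated accessors, which is why every getAttemptId/setAttemptId call site in ConnectionManagerWorkflowImpl had to move in the same commit. A minimal sketch of the renamed surface (assuming the class keeps its Lombok annotations, which the hunk context does not show):

final WorkflowInternalState state = new WorkflowInternalState();
state.setAttemptNumber(0); // previously state.setAttemptId(0)
final Integer attemptNumber = state.getAttemptNumber(); // previously state.getAttemptId()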
diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowTest.java
index ca897b5afe80..8353b9116940 100644
--- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowTest.java
+++ b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowTest.java
@@ -16,13 +16,14 @@ import io.airbyte.workers.temporal.scheduling.activities.ConfigFetchActivity.ScheduleRetrieverOutput;
 import io.airbyte.workers.temporal.scheduling.activities.ConnectionDeletionActivity;
 import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity.GeneratedJobInput;
-import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity.SyncInput;
+import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity.SyncInputWithAttemptNumber;
 import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivityImpl;
 import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity;
-import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptCreationOutput;
-import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptFailureInput;
-import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobCancelledInput;
+import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptNumberCreationOutput;
+import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptNumberFailureInput;
+import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobCancelledInputWithAttemptNumber;
 import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobCreationOutput;
+import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobSuccessInputWithAttemptNumber;
 import io.airbyte.workers.temporal.scheduling.state.WorkflowState;
 import io.airbyte.workers.temporal.scheduling.state.listener.TestStateListener;
 import io.airbyte.workers.temporal.scheduling.state.listener.WorkflowStateChangedListener.ChangedStateEvent;
@@ -121,11 +122,11 @@ public void setUp() {
         .thenReturn(new JobCreationOutput(
             1L));
 
-    Mockito.when(mJobCreationAndStatusUpdateActivity.createNewAttempt(Mockito.any()))
-        .thenReturn(new AttemptCreationOutput(
+    Mockito.when(mJobCreationAndStatusUpdateActivity.createNewAttemptNumber(Mockito.any()))
+        .thenReturn(new AttemptNumberCreationOutput(
             1));
 
-    Mockito.when(mGenerateInputActivityImpl.getSyncWorkflowInput(Mockito.any(SyncInput.class)))
+    Mockito.when(mGenerateInputActivityImpl.getSyncWorkflowInputWithAttemptNumber(Mockito.any(SyncInputWithAttemptNumber.class)))
         .thenReturn(
             new GeneratedJobInput(
                 new JobRunConfig(),
@@ -510,7 +511,8 @@ public void cancelRunning() throws InterruptedException {
           .filteredOn(changedStateEvent -> changedStateEvent.getField() == StateField.CANCELLED && changedStateEvent.isValue())
           .hasSizeGreaterThanOrEqualTo(1);
 
-      Mockito.verify(mJobCreationAndStatusUpdateActivity).jobCancelled(Mockito.argThat(new HasCancellationFailure(JOB_ID, ATTEMPT_ID)));
+      Mockito.verify(mJobCreationAndStatusUpdateActivity)
+          .jobCancelledWithAttemptNumber(Mockito.argThat(new HasCancellationFailure(JOB_ID, ATTEMPT_ID)));
     }
 
     @RepeatedTest(10)
@@ -594,7 +596,7 @@ public void resetCancelRunningWorkflow() throws InterruptedException {
           .filteredOn(changedStateEvent -> changedStateEvent.getField() == StateField.RESET && changedStateEvent.isValue())
           .hasSizeGreaterThanOrEqualTo(1);
 
-      Mockito.verify(mJobCreationAndStatusUpdateActivity).jobCancelled(Mockito.any());
+      Mockito.verify(mJobCreationAndStatusUpdateActivity).jobCancelledWithAttemptNumber(Mockito.any(JobCancelledInputWithAttemptNumber.class));
 
     }
@@ -687,7 +689,7 @@ public void updatedSignalReceivedWhileRunning() throws InterruptedException {
           .filteredOn(changedStateEvent -> changedStateEvent.getField() == StateField.UPDATED && changedStateEvent.isValue())
           .hasSizeGreaterThanOrEqualTo(1);
 
-      Mockito.verify(mJobCreationAndStatusUpdateActivity).jobSuccess(Mockito.any());
+      Mockito.verify(mJobCreationAndStatusUpdateActivity).jobSuccessWithAttemptNumber(Mockito.any(JobSuccessInputWithAttemptNumber.class));
     }
 
   }
@@ -747,8 +749,10 @@ public void testSourceAndDestinationFailuresRecorded() throws InterruptedExcepti
       workflow.submitManualSync();
       testEnv.sleep(Duration.ofMinutes(1L)); // any time after no-waiting manual run
 
-      Mockito.verify(mJobCreationAndStatusUpdateActivity).attemptFailure(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.SOURCE)));
-      Mockito.verify(mJobCreationAndStatusUpdateActivity).attemptFailure(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.DESTINATION)));
+      Mockito.verify(mJobCreationAndStatusUpdateActivity)
+          .attemptFailureWithAttemptNumber(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.SOURCE)));
+      Mockito.verify(mJobCreationAndStatusUpdateActivity)
+          .attemptFailureWithAttemptNumber(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.DESTINATION)));
     }
 
     @RepeatedTest(10)
@@ -783,7 +787,8 @@ public void testNormalizationFailure() throws InterruptedException {
       workflow.submitManualSync();
       testEnv.sleep(Duration.ofMinutes(1L)); // any time after no-waiting manual run
 
-      Mockito.verify(mJobCreationAndStatusUpdateActivity).attemptFailure(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.NORMALIZATION)));
+      Mockito.verify(mJobCreationAndStatusUpdateActivity)
+          .attemptFailureWithAttemptNumber(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.NORMALIZATION)));
     }
 
     @RepeatedTest(10)
@@ -818,7 +823,8 @@ public void testDbtFailureRecorded() throws InterruptedException {
       workflow.submitManualSync();
      testEnv.sleep(Duration.ofMinutes(1L)); // any time after no-waiting manual run
 
-      Mockito.verify(mJobCreationAndStatusUpdateActivity).attemptFailure(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.DBT)));
+      Mockito.verify(mJobCreationAndStatusUpdateActivity)
+          .attemptFailureWithAttemptNumber(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.DBT)));
     }
 
     @RepeatedTest(10)
@@ -853,7 +859,8 @@ public void testPersistenceFailureRecorded() throws InterruptedException {
       workflow.submitManualSync();
       testEnv.sleep(Duration.ofMinutes(1L)); // any time after no-waiting manual run
 
-      Mockito.verify(mJobCreationAndStatusUpdateActivity).attemptFailure(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.PERSISTENCE)));
+      Mockito.verify(mJobCreationAndStatusUpdateActivity)
+          .attemptFailureWithAttemptNumber(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.PERSISTENCE)));
     }
 
     @RepeatedTest(10)
@@ -888,7 +895,8 @@ public void testReplicationFailureRecorded() throws InterruptedException {
       workflow.submitManualSync();
       testEnv.sleep(Duration.ofMinutes(1L)); // any time after no-waiting manual run
 
-      Mockito.verify(mJobCreationAndStatusUpdateActivity).attemptFailure(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.REPLICATION)));
+      Mockito.verify(mJobCreationAndStatusUpdateActivity)
+          .attemptFailureWithAttemptNumber(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.REPLICATION)));
     }
 
   }
@@ -906,12 +914,13 @@ public static Stream<Arguments> getSetupFailingFailingActivityBeforeRun() {
       return Stream.of(
          Arguments.of(new Thread(() -> Mockito.when(mJobCreationAndStatusUpdateActivity.createNewJob(Mockito.any()))
              .thenThrow(ApplicationFailure.newNonRetryableFailure("", "")))),
-          Arguments.of(new Thread(() -> Mockito.when(mJobCreationAndStatusUpdateActivity.createNewAttempt(Mockito.any()))
+          Arguments.of(new Thread(() -> Mockito.when(mJobCreationAndStatusUpdateActivity.createNewAttemptNumber(Mockito.any()))
              .thenThrow(ApplicationFailure.newNonRetryableFailure("", "")))),
          Arguments.of(new Thread(() -> Mockito.doThrow(ApplicationFailure.newNonRetryableFailure("", ""))
              .when(mJobCreationAndStatusUpdateActivity).reportJobStart(Mockito.any()))),
-          Arguments.of(new Thread(() -> Mockito.when(mGenerateInputActivityImpl.getSyncWorkflowInput(Mockito.any()))
-              .thenThrow(ApplicationFailure.newNonRetryableFailure("", "")))));
+          Arguments.of(new Thread(
+              () -> Mockito.when(mGenerateInputActivityImpl.getSyncWorkflowInputWithAttemptNumber(Mockito.any(SyncInputWithAttemptNumber.class)))
+                  .thenThrow(ApplicationFailure.newNonRetryableFailure("", "")))));
     }
 
     @ParameterizedTest
@@ -996,10 +1005,10 @@ public static Stream<Arguments> getSetupFailingFailingActivityAfterRun() {
      return Stream.of(
          Arguments.of((Consumer<ConnectionManagerWorkflow>) ((ConnectionManagerWorkflow workflow) -> System.out.println("do Nothing")),
              new Thread(() -> Mockito.doThrow(ApplicationFailure.newNonRetryableFailure("", ""))
-                  .when(mJobCreationAndStatusUpdateActivity).jobSuccess(Mockito.any()))),
+                  .when(mJobCreationAndStatusUpdateActivity).jobSuccessWithAttemptNumber(Mockito.any(JobSuccessInputWithAttemptNumber.class)))),
          Arguments.of((Consumer<ConnectionManagerWorkflow>) ((ConnectionManagerWorkflow workflow) -> workflow.cancelJob()),
              new Thread(() -> Mockito.doThrow(ApplicationFailure.newNonRetryableFailure("", ""))
-                  .when(mJobCreationAndStatusUpdateActivity).jobCancelled(Mockito.any()))),
+                  .when(mJobCreationAndStatusUpdateActivity).jobCancelledWithAttemptNumber(Mockito.any(JobCancelledInputWithAttemptNumber.class)))),
          Arguments.of((Consumer<ConnectionManagerWorkflow>) ((ConnectionManagerWorkflow workflow) -> workflow.deleteConnection()),
              new Thread(() -> Mockito.doThrow(ApplicationFailure.newNonRetryableFailure("", ""))
                  .when(mConnectionDeletionActivity).deleteConnection(Mockito.any()))));
@@ -1044,7 +1053,7 @@ void testGetStuckAfterRun(final Consumer<ConnectionManagerWorkflow> signalSender
 
       Assertions.assertThat(events)
          .filteredOn(changedStateEvent -> changedStateEvent.getField() == StateField.QUARANTINED && changedStateEvent.isValue())
-          .hasSize(1);
+          .hasSizeGreaterThanOrEqualTo(1);
     }
 
   }
@@ -1127,7 +1136,10 @@ public void failedResetContinueAsReset() throws InterruptedException {
     @DisplayName("Test that we are getting stuck if the report of a failure happens")
     void testGetStuckAfterRun() throws InterruptedException {
       Mockito.doThrow(ApplicationFailure.newNonRetryableFailure("", ""))
-          .when(mJobCreationAndStatusUpdateActivity).attemptFailure(Mockito.any());
+          .when(mJobCreationAndStatusUpdateActivity).attemptFailureWithAttemptNumber(Mockito.any());
+
+      Mockito.when(mConfigFetchActivity.getMaxAttempt())
+          .thenReturn(new GetMaxAttemptOutput(3));
 
       final UUID testId = UUID.randomUUID();
       final TestStateListener testStateListener = new TestStateListener();
@@ -1206,7 +1218,7 @@ public void failedResetJobWaitsOnRestart() throws InterruptedException {
 
   }
 
-  private class HasFailureFromOrigin implements ArgumentMatcher<AttemptFailureInput> {
+  private class HasFailureFromOrigin implements ArgumentMatcher<AttemptNumberFailureInput> {
 
     private final FailureOrigin expectedFailureOrigin;
 
@@ -1215,26 +1227,26 @@ public HasFailureFromOrigin(final FailureOrigin failureOrigin) {
     }
 
     @Override
-    public boolean matches(final AttemptFailureInput arg) {
+    public boolean matches(final AttemptNumberFailureInput arg) {
       return arg.getAttemptFailureSummary().getFailures().stream().anyMatch(f -> f.getFailureOrigin().equals(expectedFailureOrigin));
     }
 
   }
 
-  private class HasCancellationFailure implements ArgumentMatcher<JobCancelledInput> {
+  private class HasCancellationFailure implements ArgumentMatcher<JobCancelledInputWithAttemptNumber> {
 
     private final long expectedJobId;
-    private final int expectedAttemptId;
+    private final int expectedAttemptNumber;
 
-    public HasCancellationFailure(final long jobId, final int attemptId) {
+    public HasCancellationFailure(final long jobId, final int attemptNumber) {
       this.expectedJobId = jobId;
-      this.expectedAttemptId = attemptId;
+      this.expectedAttemptNumber = attemptNumber;
     }
 
     @Override
-    public boolean matches(final JobCancelledInput arg) {
+    public boolean matches(final JobCancelledInputWithAttemptNumber arg) {
       return arg.getAttemptFailureSummary().getFailures().stream().anyMatch(f -> f.getFailureType().equals(FailureType.MANUAL_CANCELLATION))
-          && arg.getJobId() == expectedJobId && arg.getAttemptId() == expectedAttemptId;
+          && arg.getJobId() == expectedJobId && arg.getAttemptNumber() == expectedAttemptNumber;
     }
 
   }

diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/WorkflowReplayingTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/WorkflowReplayingTest.java
new file mode 100644
index 000000000000..bba04f6cea42
--- /dev/null
+++ b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/WorkflowReplayingTest.java
@@ -0,0 +1,24 @@
+/*
+ * Copyright (c) 2021 Airbyte, Inc., all rights reserved.
+ */
+
+package io.airbyte.workers.temporal.scheduling;
+
+import io.temporal.testing.WorkflowReplayer;
+import java.io.File;
+import java.net.URL;
+import org.junit.jupiter.api.Test;
+
+// TODO: Auto generation of the input and more scenario coverage
+public class WorkflowReplayingTest {
+
+  @Test
+  public void replaySimpleSuccessfulWorkflow() throws Exception {
+    final URL historyPath = getClass().getClassLoader().getResource("workflowHistory.json");
+
+    final File historyFile = new File(historyPath.toURI());
+
+    WorkflowReplayer.replayWorkflowExecution(historyFile, ConnectionManagerWorkflowImpl.class);
+  }
+
+}
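The replay test above is what guards the getVersion branches: it feeds a pre-rename history back through the current workflow code and fails on any non-deterministic change that is not version-gated. A hedged sketch of how such fixtures are typically captured and batch-replayed (the tctl invocation and the histories directory layout are assumptions, not part of this patch):

// Assumed capture step against a running Temporal server (tctl v1 syntax):
//   tctl workflow show --workflow_id <id> --output_filename workflowHistory.json
import io.temporal.testing.WorkflowReplayer;
import java.io.File;

public class ReplayAllHistories {

  // Hypothetical helper (assumes it lives in the same package as
  // ConnectionManagerWorkflowImpl): replays every recorded history against the
  // current workflow code and throws if replay diverges from the history.
  public static void main(final String[] args) throws Exception {
    final File dir = new File("src/test/resources/histories");
    for (final File history : dir.listFiles((d, name) -> name.endsWith(".json"))) {
      WorkflowReplayer.replayWorkflowExecution(history, ConnectionManagerWorkflowImpl.class);
    }
  }

}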
diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityTest.java
index 5f480cf4fb80..f643bc4bd07e 100644
--- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityTest.java
+++ b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityTest.java
@@ -29,6 +29,7 @@ import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptCreationInput;
 import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptCreationOutput;
 import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptFailureInput;
+import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptNumberCreationOutput;
 import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobCancelledInput;
 import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobCreationInput;
 import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobCreationOutput;
@@ -84,6 +85,7 @@ public class JobCreationAndStatusUpdateActivityTest {
   private static final UUID CONNECTION_ID = UUID.randomUUID();
   private static final long JOB_ID = 123L;
   private static final int ATTEMPT_ID = 0;
+  private static final int ATTEMPT_NUMBER = 1;
   private static final StandardSyncOutput standardSyncOutput = new StandardSyncOutput()
       .withStandardSyncSummary(
           new StandardSyncSummary()
@@ -164,6 +166,56 @@ public void createAttemptThrowException() throws IOException {
           .hasCauseInstanceOf(IOException.class);
     }
 
+    @Test
+    @DisplayName("Test attempt creation")
+    public void createAttemptNumber() throws IOException {
+      Mockito.when(mConfigRepository.getDatabase()).thenReturn(Mockito.mock(ExceptionWrappingDatabase.class));
+
+      final Job mJob = Mockito.mock(Job.class);
+
+      Mockito.when(mJobPersistence.getJob(JOB_ID))
+          .thenReturn(mJob);
+
+      final WorkerRun mWorkerRun = Mockito.mock(WorkerRun.class);
+
+      Mockito.when(mTemporalWorkerRunFactory.create(mJob))
+          .thenReturn(mWorkerRun);
+
+      final Path mPath = Mockito.mock(Path.class);
+      final Path path
= Path.of("test"); + Mockito.when(mPath.resolve(Mockito.anyString())) + .thenReturn(path); + Mockito.when(mWorkerRun.getJobRoot()) + .thenReturn(mPath); + + Mockito.when(mJobPersistence.createAttempt(JOB_ID, path)) + .thenReturn(ATTEMPT_NUMBER); + + final LogClientSingleton mLogClientSingleton = Mockito.mock(LogClientSingleton.class); + try (final MockedStatic utilities = Mockito.mockStatic(LogClientSingleton.class)) { + utilities.when(() -> LogClientSingleton.getInstance()) + .thenReturn(mLogClientSingleton); + + final AttemptNumberCreationOutput output = jobCreationAndStatusUpdateActivity.createNewAttemptNumber(new AttemptCreationInput( + JOB_ID)); + + Mockito.verify(mLogClientSingleton).setJobMdc(mWorkerEnvironment, mLogConfigs, mPath); + Assertions.assertThat(output.getAttemptNumber()).isEqualTo(ATTEMPT_NUMBER); + } + } + + @Test + @DisplayName("Test exception errors are properly wrapped") + public void createAttemptNumberThrowException() throws IOException { + Mockito.when(mJobPersistence.getJob(JOB_ID)) + .thenThrow(new IOException()); + + Assertions.assertThatThrownBy(() -> jobCreationAndStatusUpdateActivity.createNewAttemptNumber(new AttemptCreationInput( + JOB_ID))) + .isInstanceOf(RetryableException.class) + .hasCauseInstanceOf(IOException.class); + } + } @Nested diff --git a/airbyte-workers/src/test/resources/workflowHistory.json b/airbyte-workers/src/test/resources/workflowHistory.json new file mode 100644 index 000000000000..a83619c8ea29 --- /dev/null +++ b/airbyte-workers/src/test/resources/workflowHistory.json @@ -0,0 +1,939 @@ +{ + "events": [ + { + "eventId": "1", + "eventTime": "2022-03-08T22:47:57.534705300Z", + "eventType": "WorkflowExecutionStarted", + "taskId": "1048664", + "workflowExecutionStartedEventAttributes": { + "workflowType": { + "name": "ConnectionManagerWorkflow" + }, + "taskQueue": { + "name": "CONNECTION_UPDATER", + "kind": "Normal" + }, + "input": { + "payloads": [ + { + "metadata": { + "encoding": "anNvbi9wbGFpbg==" + }, + "data": "eyJjb25uZWN0aW9uSWQiOiJiOTE2ZmQ4Ni1hNWE2LTQ4YWMtODg4MC00OTc4NzRjZjUzY2YiLCJqb2JJZCI6bnVsbCwiYXR0ZW1wdElkIjpudWxsLCJmcm9tRmFpbHVyZSI6ZmFsc2UsImF0dGVtcHROdW1iZXIiOjEsIndvcmtmbG93U3RhdGUiOm51bGwsInJlc2V0Q29ubmVjdGlvbiI6ZmFsc2V9" + } + ] + }, + "workflowExecutionTimeout": "0s", + "workflowRunTimeout": "0s", + "workflowTaskTimeout": "10s", + "originalExecutionRunId": "4e6584e1-b030-4e42-a7bc-15dee5d195c0", + "identity": "1@3de809efb2ed", + "firstExecutionRunId": "4e6584e1-b030-4e42-a7bc-15dee5d195c0", + "retryPolicy": { + "initialInterval": "1s", + "backoffCoefficient": 2, + "maximumInterval": "100s", + "maximumAttempts": 1 + }, + "attempt": 1, + "firstWorkflowTaskBackoff": "0s", + "header": {} + } + }, + { + "eventId": "2", + "eventTime": "2022-03-08T22:47:57.534975800Z", + "eventType": "WorkflowTaskScheduled", + "taskId": "1048665", + "workflowTaskScheduledEventAttributes": { + "taskQueue": { + "name": "CONNECTION_UPDATER", + "kind": "Normal" + }, + "startToCloseTimeout": "10s", + "attempt": 1 + } + }, + { + "eventId": "3", + "eventTime": "2022-03-08T22:47:57.563121800Z", + "eventType": "WorkflowTaskStarted", + "taskId": "1048669", + "workflowTaskStartedEventAttributes": { + "scheduledEventId": "2", + "identity": "1@2741f9c3f558", + "requestId": "e9cf205d-de02-4139-b00d-fab56c4b9fd7" + } + }, + { + "eventId": "4", + "eventTime": "2022-03-08T22:47:57.646973200Z", + "eventType": "WorkflowTaskCompleted", + "taskId": "1048672", + "workflowTaskCompletedEventAttributes": { + "scheduledEventId": "2", + "startedEventId": "3", + 
"identity": "1@2741f9c3f558" + } + }, + { + "eventId": "5", + "eventTime": "2022-03-08T22:47:57.647029Z", + "eventType": "ActivityTaskScheduled", + "taskId": "1048673", + "activityTaskScheduledEventAttributes": { + "activityId": "c45be44b-784b-3a0c-9473-e80129b65969", + "activityType": { + "name": "GetTimeToWait" + }, + "taskQueue": { + "name": "CONNECTION_UPDATER", + "kind": "Normal" + }, + "header": {}, + "input": { + "payloads": [ + { + "metadata": { + "encoding": "anNvbi9wbGFpbg==" + }, + "data": "eyJjb25uZWN0aW9uSWQiOiJiOTE2ZmQ4Ni1hNWE2LTQ4YWMtODg4MC00OTc4NzRjZjUzY2YifQ==" + } + ] + }, + "scheduleToCloseTimeout": "0s", + "scheduleToStartTimeout": "0s", + "startToCloseTimeout": "120s", + "heartbeatTimeout": "30s", + "workflowTaskCompletedEventId": "4", + "retryPolicy": { + "initialInterval": "30s", + "backoffCoefficient": 2, + "maximumInterval": "3000s", + "maximumAttempts": 10 + } + } + }, + { + "eventId": "6", + "eventTime": "2022-03-08T22:47:57.668813100Z", + "eventType": "ActivityTaskStarted", + "taskId": "1048677", + "activityTaskStartedEventAttributes": { + "scheduledEventId": "5", + "identity": "1@2741f9c3f558", + "requestId": "df9001bd-0c42-4415-a631-0a37ee3f7698", + "attempt": 1 + } + }, + { + "eventId": "7", + "eventTime": "2022-03-08T22:47:57.856240400Z", + "eventType": "ActivityTaskCompleted", + "taskId": "1048678", + "activityTaskCompletedEventAttributes": { + "result": { + "payloads": [ + { + "metadata": { + "encoding": "anNvbi9wbGFpbg==" + }, + "data": "eyJ0aW1lVG9XYWl0IjowLjB9" + } + ] + }, + "scheduledEventId": "5", + "startedEventId": "6", + "identity": "1@2741f9c3f558" + } + }, + { + "eventId": "8", + "eventTime": "2022-03-08T22:47:57.856293200Z", + "eventType": "WorkflowTaskScheduled", + "taskId": "1048679", + "workflowTaskScheduledEventAttributes": { + "taskQueue": { + "name": "1@2741f9c3f558:e7111d28-2117-4614-9bb1-8a73524b789f", + "kind": "Sticky" + }, + "startToCloseTimeout": "10s", + "attempt": 1 + } + }, + { + "eventId": "9", + "eventTime": "2022-03-08T22:47:57.876328300Z", + "eventType": "WorkflowTaskStarted", + "taskId": "1048683", + "workflowTaskStartedEventAttributes": { + "scheduledEventId": "8", + "identity": "e7111d28-2117-4614-9bb1-8a73524b789f", + "requestId": "80932560-7d5d-4f5f-9982-561857b07f50" + } + }, + { + "eventId": "10", + "eventTime": "2022-03-08T22:47:57.921753Z", + "eventType": "WorkflowTaskCompleted", + "taskId": "1048686", + "workflowTaskCompletedEventAttributes": { + "scheduledEventId": "8", + "startedEventId": "9", + "identity": "1@2741f9c3f558" + } + }, + { + "eventId": "11", + "eventTime": "2022-03-08T22:47:57.921814200Z", + "eventType": "ActivityTaskScheduled", + "taskId": "1048687", + "activityTaskScheduledEventAttributes": { + "activityId": "c7b20a16-db46-3dd1-b8ac-e2a93d3a8e0d", + "activityType": { + "name": "CreateNewJob" + }, + "taskQueue": { + "name": "CONNECTION_UPDATER", + "kind": "Normal" + }, + "header": {}, + "input": { + "payloads": [ + { + "metadata": { + "encoding": "anNvbi9wbGFpbg==" + }, + "data": "eyJjb25uZWN0aW9uSWQiOiJiOTE2ZmQ4Ni1hNWE2LTQ4YWMtODg4MC00OTc4NzRjZjUzY2YiLCJyZXNldCI6ZmFsc2V9" + } + ] + }, + "scheduleToCloseTimeout": "0s", + "scheduleToStartTimeout": "0s", + "startToCloseTimeout": "120s", + "heartbeatTimeout": "30s", + "workflowTaskCompletedEventId": "10", + "retryPolicy": { + "initialInterval": "30s", + "backoffCoefficient": 2, + "maximumInterval": "3000s", + "maximumAttempts": 10 + } + } + }, + { + "eventId": "12", + "eventTime": "2022-03-08T22:47:57.942341Z", + "eventType": "ActivityTaskStarted", + 
"taskId": "1048691", + "activityTaskStartedEventAttributes": { + "scheduledEventId": "11", + "identity": "1@2741f9c3f558", + "requestId": "29e77ce9-f715-4f19-9fe2-b5b94201d0b3", + "attempt": 1 + } + }, + { + "eventId": "13", + "eventTime": "2022-03-08T22:47:58.268669700Z", + "eventType": "ActivityTaskCompleted", + "taskId": "1048692", + "activityTaskCompletedEventAttributes": { + "result": { + "payloads": [ + { + "metadata": { + "encoding": "anNvbi9wbGFpbg==" + }, + "data": "eyJqb2JJZCI6MX0=" + } + ] + }, + "scheduledEventId": "11", + "startedEventId": "12", + "identity": "1@2741f9c3f558" + } + }, + { + "eventId": "14", + "eventTime": "2022-03-08T22:47:58.268723100Z", + "eventType": "WorkflowTaskScheduled", + "taskId": "1048693", + "workflowTaskScheduledEventAttributes": { + "taskQueue": { + "name": "1@2741f9c3f558:e7111d28-2117-4614-9bb1-8a73524b789f", + "kind": "Sticky" + }, + "startToCloseTimeout": "10s", + "attempt": 1 + } + }, + { + "eventId": "15", + "eventTime": "2022-03-08T22:47:58.283301600Z", + "eventType": "WorkflowTaskStarted", + "taskId": "1048697", + "workflowTaskStartedEventAttributes": { + "scheduledEventId": "14", + "identity": "e7111d28-2117-4614-9bb1-8a73524b789f", + "requestId": "18d2f139-0794-4dfb-b36d-1448df3eb350" + } + }, + { + "eventId": "16", + "eventTime": "2022-03-08T22:47:58.302388600Z", + "eventType": "WorkflowTaskCompleted", + "taskId": "1048700", + "workflowTaskCompletedEventAttributes": { + "scheduledEventId": "14", + "startedEventId": "15", + "identity": "1@2741f9c3f558" + } + }, + { + "eventId": "17", + "eventTime": "2022-03-08T22:47:58.302431Z", + "eventType": "ActivityTaskScheduled", + "taskId": "1048701", + "activityTaskScheduledEventAttributes": { + "activityId": "3c1b8fa0-437b-3bc2-a365-352e9a5d765d", + "activityType": { + "name": "CreateNewAttempt" + }, + "taskQueue": { + "name": "CONNECTION_UPDATER", + "kind": "Normal" + }, + "header": {}, + "input": { + "payloads": [ + { + "metadata": { + "encoding": "anNvbi9wbGFpbg==" + }, + "data": "eyJqb2JJZCI6MX0=" + } + ] + }, + "scheduleToCloseTimeout": "0s", + "scheduleToStartTimeout": "0s", + "startToCloseTimeout": "120s", + "heartbeatTimeout": "30s", + "workflowTaskCompletedEventId": "16", + "retryPolicy": { + "initialInterval": "30s", + "backoffCoefficient": 2, + "maximumInterval": "3000s", + "maximumAttempts": 10 + } + } + }, + { + "eventId": "18", + "eventTime": "2022-03-08T22:47:58.314956300Z", + "eventType": "ActivityTaskStarted", + "taskId": "1048705", + "activityTaskStartedEventAttributes": { + "scheduledEventId": "17", + "identity": "1@2741f9c3f558", + "requestId": "0221b660-4f40-4bcf-9e6a-2a9d5898bb91", + "attempt": 1 + } + }, + { + "eventId": "19", + "eventTime": "2022-03-08T22:47:58.400030800Z", + "eventType": "ActivityTaskCompleted", + "taskId": "1048706", + "activityTaskCompletedEventAttributes": { + "result": { + "payloads": [ + { + "metadata": { + "encoding": "anNvbi9wbGFpbg==" + }, + "data": "eyJhdHRlbXB0SWQiOjB9" + } + ] + }, + "scheduledEventId": "17", + "startedEventId": "18", + "identity": "1@2741f9c3f558" + } + }, + { + "eventId": "20", + "eventTime": "2022-03-08T22:47:58.400072800Z", + "eventType": "WorkflowTaskScheduled", + "taskId": "1048707", + "workflowTaskScheduledEventAttributes": { + "taskQueue": { + "name": "1@2741f9c3f558:e7111d28-2117-4614-9bb1-8a73524b789f", + "kind": "Sticky" + }, + "startToCloseTimeout": "10s", + "attempt": 1 + } + }, + { + "eventId": "21", + "eventTime": "2022-03-08T22:47:58.414415400Z", + "eventType": "WorkflowTaskStarted", + "taskId": "1048711", + 
"workflowTaskStartedEventAttributes": { + "scheduledEventId": "20", + "identity": "e7111d28-2117-4614-9bb1-8a73524b789f", + "requestId": "e4110bad-579c-4ac2-a3eb-3836d7d6f841" + } + }, + { + "eventId": "22", + "eventTime": "2022-03-08T22:47:58.431563800Z", + "eventType": "WorkflowTaskCompleted", + "taskId": "1048714", + "workflowTaskCompletedEventAttributes": { + "scheduledEventId": "20", + "startedEventId": "21", + "identity": "1@2741f9c3f558" + } + }, + { + "eventId": "23", + "eventTime": "2022-03-08T22:47:58.431607100Z", + "eventType": "ActivityTaskScheduled", + "taskId": "1048715", + "activityTaskScheduledEventAttributes": { + "activityId": "5aa065bf-5ef2-3e24-b560-c6b3c1f4e2bc", + "activityType": { + "name": "GetSyncWorkflowInput" + }, + "taskQueue": { + "name": "CONNECTION_UPDATER", + "kind": "Normal" + }, + "header": {}, + "input": { + "payloads": [ + { + "metadata": { + "encoding": "anNvbi9wbGFpbg==" + }, + "data": "eyJhdHRlbXB0SWQiOjAsImpvYklkIjoxLCJyZXNldCI6ZmFsc2V9" + } + ] + }, + "scheduleToCloseTimeout": "0s", + "scheduleToStartTimeout": "0s", + "startToCloseTimeout": "120s", + "heartbeatTimeout": "30s", + "workflowTaskCompletedEventId": "22", + "retryPolicy": { + "initialInterval": "30s", + "backoffCoefficient": 2, + "maximumInterval": "3000s", + "maximumAttempts": 10 + } + } + }, + { + "eventId": "24", + "eventTime": "2022-03-08T22:47:58.445218800Z", + "eventType": "ActivityTaskStarted", + "taskId": "1048719", + "activityTaskStartedEventAttributes": { + "scheduledEventId": "23", + "identity": "1@2741f9c3f558", + "requestId": "7165cc86-d137-4b0f-906b-a7e52a1074e4", + "attempt": 1 + } + }, + { + "eventId": "25", + "eventTime": "2022-03-08T22:47:58.471038600Z", + "eventType": "ActivityTaskCompleted", + "taskId": "1048720", + "activityTaskCompletedEventAttributes": { + "result": { + "payloads": [ + { + "metadata": { + "encoding": "anNvbi9wbGFpbg==" + }, + "data": 
"eyJqb2JSdW5Db25maWciOnsiam9iSWQiOiIxIiwiYXR0ZW1wdElkIjowfSwic291cmNlTGF1bmNoZXJDb25maWciOnsiam9iSWQiOiIxIiwiYXR0ZW1wdElkIjowLCJkb2NrZXJJbWFnZSI6ImFpcmJ5dGUvc291cmNlLXBva2VhcGk6MC4xLjQifSwiZGVzdGluYXRpb25MYXVuY2hlckNvbmZpZyI6eyJqb2JJZCI6IjEiLCJhdHRlbXB0SWQiOjAsImRvY2tlckltYWdlIjoiYWlyYnl0ZS9kZXN0aW5hdGlvbi1sb2NhbC1qc29uOjAuMi4xMCJ9LCJzeW5jSW5wdXQiOnsibmFtZXNwYWNlRGVmaW5pdGlvbiI6ImRlc3RpbmF0aW9uIiwibmFtZXNwYWNlRm9ybWF0IjoiJHtTT1VSQ0VfTkFNRVNQQUNFfSIsInByZWZpeCI6IiIsInNvdXJjZUNvbmZpZ3VyYXRpb24iOnsicG9rZW1vbl9uYW1lIjoiZGl0dG8ifSwiZGVzdGluYXRpb25Db25maWd1cmF0aW9uIjp7ImRlc3RpbmF0aW9uX3BhdGgiOiIvdG1wIn0sIm9wZXJhdGlvblNlcXVlbmNlIjpbXSwiY2F0YWxvZyI6eyJzdHJlYW1zIjpbeyJzdHJlYW0iOnsibmFtZSI6InBva2Vtb24iLCJqc29uX3NjaGVtYSI6eyJ0eXBlIjoib2JqZWN0IiwiJHNjaGVtYSI6Imh0dHA6Ly9qc29uLXNjaGVtYS5vcmcvZHJhZnQtMDcvc2NoZW1hIyIsInByb3BlcnRpZXMiOnsiaWQiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sImZvcm1zIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX19LCJtb3ZlcyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJtb3ZlIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX0sInZlcnNpb25fZ3JvdXBfZGV0YWlscyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ2ZXJzaW9uX2dyb3VwIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX0sImxldmVsX2xlYXJuZWRfYXQiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwibW92ZV9sZWFybl9tZXRob2QiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fX19fX19fSwib3JkZXIiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwic3RhdHMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsic3RhdCI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19LCJlZmZvcnQiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwiYmFzZV9zdGF0Ijp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX19fX0sInR5cGVzIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InNsb3QiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwidHlwZSI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19fX19LCJoZWlnaHQiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwid2VpZ2h0Ijp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sInNwZWNpZXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwic3ByaXRlcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJiYWNrX3NoaW55Ijp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiYmFja19mZW1hbGUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJmcm9udF9zaGlueSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sImJhY2tfZGVmYXVsdCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sImZyb250X2ZlbWFsZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sImZyb250X2RlZmF1bHQiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJiYWNrX3NoaW55X2ZlbWFsZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sImZyb250X3NoaW55X2ZlbWFsZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwiYWJpbGl0aWVzIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7In
Nsb3QiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwiYWJpbGl0eSI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19LCJpc19oaWRkZW4iOnsidHlwZSI6WyJudWxsIiwiYm9vbGVhbiJdfX19fSwiaGVsZF9pdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJpdGVtIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX0sInZlcnNpb25fZGV0YWlscyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJyYXJpdHkiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwidmVyc2lvbiI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19fX19fX19LCJpc19kZWZhdWx0ICI6eyJ0eXBlIjpbIm51bGwiLCJib29sZWFuIl19LCJnYW1lX2luZGljZXMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidmVyc2lvbiI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19LCJnYW1lX2luZGV4Ijp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX19fX0sImJhc2VfZXhwZXJpZW5jZSI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJsb2NhdGlvbl9hcmVhX2VuY291bnRlcnMiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX0sInN1cHBvcnRlZF9zeW5jX21vZGVzIjpbImZ1bGxfcmVmcmVzaCJdLCJkZWZhdWx0X2N1cnNvcl9maWVsZCI6W10sInNvdXJjZV9kZWZpbmVkX3ByaW1hcnlfa2V5IjpbXX0sInN5bmNfbW9kZSI6ImZ1bGxfcmVmcmVzaCIsImN1cnNvcl9maWVsZCI6W10sImRlc3RpbmF0aW9uX3N5bmNfbW9kZSI6ImFwcGVuZCIsInByaW1hcnlfa2V5IjpbXX1dfSwicmVzb3VyY2VSZXF1aXJlbWVudHMiOnt9LCJzb3VyY2VSZXNvdXJjZVJlcXVpcmVtZW50cyI6e30sImRlc3RpbmF0aW9uUmVzb3VyY2VSZXF1aXJlbWVudHMiOnt9fX0=" + } + ] + }, + "scheduledEventId": "23", + "startedEventId": "24", + "identity": "1@2741f9c3f558" + } + }, + { + "eventId": "26", + "eventTime": "2022-03-08T22:47:58.471218800Z", + "eventType": "WorkflowTaskScheduled", + "taskId": "1048721", + "workflowTaskScheduledEventAttributes": { + "taskQueue": { + "name": "1@2741f9c3f558:e7111d28-2117-4614-9bb1-8a73524b789f", + "kind": "Sticky" + }, + "startToCloseTimeout": "10s", + "attempt": 1 + } + }, + { + "eventId": "27", + "eventTime": "2022-03-08T22:47:58.485851600Z", + "eventType": "WorkflowTaskStarted", + "taskId": "1048725", + "workflowTaskStartedEventAttributes": { + "scheduledEventId": "26", + "identity": "e7111d28-2117-4614-9bb1-8a73524b789f", + "requestId": "289a7723-efac-4cfa-bad2-f0c022b27421" + } + }, + { + "eventId": "28", + "eventTime": "2022-03-08T22:47:58.513022200Z", + "eventType": "WorkflowTaskCompleted", + "taskId": "1048728", + "workflowTaskCompletedEventAttributes": { + "scheduledEventId": "26", + "startedEventId": "27", + "identity": "1@2741f9c3f558" + } + }, + { + "eventId": "29", + "eventTime": "2022-03-08T22:47:58.513073500Z", + "eventType": "ActivityTaskScheduled", + "taskId": "1048729", + "activityTaskScheduledEventAttributes": { + "activityId": "95686aea-a2ac-3e1e-a512-0790d3a4e95f", + "activityType": { + "name": "ReportJobStart" + }, + "taskQueue": { + "name": "CONNECTION_UPDATER", + "kind": "Normal" + }, + "header": {}, + "input": { + "payloads": [ + { + "metadata": { + "encoding": "anNvbi9wbGFpbg==" + }, + "data": "eyJqb2JJZCI6MX0=" + } + ] + }, + "scheduleToCloseTimeout": "0s", + "scheduleToStartTimeout": "0s", + "startToCloseTimeout": "120s", + "heartbeatTimeout": "30s", + "workflowTaskCompletedEventId": "28", + "retryPolicy": { + "initialInterval": "30s", + 
"backoffCoefficient": 2, + "maximumInterval": "3000s", + "maximumAttempts": 10 + } + } + }, + { + "eventId": "30", + "eventTime": "2022-03-08T22:47:58.528653400Z", + "eventType": "ActivityTaskStarted", + "taskId": "1048733", + "activityTaskStartedEventAttributes": { + "scheduledEventId": "29", + "identity": "1@2741f9c3f558", + "requestId": "0e4b03aa-2493-4f7d-b832-4e98e13551da", + "attempt": 1 + } + }, + { + "eventId": "31", + "eventTime": "2022-03-08T22:47:58.668827900Z", + "eventType": "ActivityTaskCompleted", + "taskId": "1048734", + "activityTaskCompletedEventAttributes": { + "scheduledEventId": "29", + "startedEventId": "30", + "identity": "1@2741f9c3f558" + } + }, + { + "eventId": "32", + "eventTime": "2022-03-08T22:47:58.668874600Z", + "eventType": "WorkflowTaskScheduled", + "taskId": "1048735", + "workflowTaskScheduledEventAttributes": { + "taskQueue": { + "name": "1@2741f9c3f558:e7111d28-2117-4614-9bb1-8a73524b789f", + "kind": "Sticky" + }, + "startToCloseTimeout": "10s", + "attempt": 1 + } + }, + { + "eventId": "33", + "eventTime": "2022-03-08T22:47:58.682929200Z", + "eventType": "WorkflowTaskStarted", + "taskId": "1048739", + "workflowTaskStartedEventAttributes": { + "scheduledEventId": "32", + "identity": "e7111d28-2117-4614-9bb1-8a73524b789f", + "requestId": "caa61b0a-5d34-48a8-ab16-997d3ba9eab5" + } + }, + { + "eventId": "34", + "eventTime": "2022-03-08T22:47:58.735519100Z", + "eventType": "WorkflowTaskCompleted", + "taskId": "1048742", + "workflowTaskCompletedEventAttributes": { + "scheduledEventId": "32", + "startedEventId": "33", + "identity": "1@2741f9c3f558" + } + }, + { + "eventId": "35", + "eventTime": "2022-03-08T22:47:58.735579100Z", + "eventType": "MarkerRecorded", + "taskId": "1048743", + "markerRecordedEventAttributes": { + "markerName": "Version", + "details": { + "changeId": { + "payloads": [ + { + "metadata": { + "encoding": "anNvbi9wbGFpbg==" + }, + "data": "InRhc2tfcXVldWVfY2hhbmdlX2Zyb21fY29ubmVjdGlvbl91cGRhdGVyX3RvX3N5bmMi" + } + ] + }, + "version": { + "payloads": [ + { + "metadata": { + "encoding": "anNvbi9wbGFpbg==" + }, + "data": "MQ==" + } + ] + } + }, + "workflowTaskCompletedEventId": "34" + } + }, + { + "eventId": "36", + "eventTime": "2022-03-08T22:47:58.735800Z", + "eventType": "StartChildWorkflowExecutionInitiated", + "taskId": "1048744", + "startChildWorkflowExecutionInitiatedEventAttributes": { + "namespace": "default", + "workflowId": "sync_1", + "workflowType": { + "name": "SyncWorkflow" + }, + "taskQueue": { + "name": "SYNC" + }, + "input": { + "payloads": [ + { + "metadata": { + "encoding": "anNvbi9wbGFpbg==" + }, + "data": "eyJqb2JJZCI6IjEiLCJhdHRlbXB0SWQiOjB9" + }, + { + "metadata": { + "encoding": "anNvbi9wbGFpbg==" + }, + "data": "eyJqb2JJZCI6IjEiLCJhdHRlbXB0SWQiOjAsImRvY2tlckltYWdlIjoiYWlyYnl0ZS9zb3VyY2UtcG9rZWFwaTowLjEuNCJ9" + }, + { + "metadata": { + "encoding": "anNvbi9wbGFpbg==" + }, + "data": "eyJqb2JJZCI6IjEiLCJhdHRlbXB0SWQiOjAsImRvY2tlckltYWdlIjoiYWlyYnl0ZS9kZXN0aW5hdGlvbi1sb2NhbC1qc29uOjAuMi4xMCJ9" + }, + { + "metadata": { + "encoding": "anNvbi9wbGFpbg==" + }, + "data": 
"eyJuYW1lc3BhY2VEZWZpbml0aW9uIjoiZGVzdGluYXRpb24iLCJuYW1lc3BhY2VGb3JtYXQiOiIke1NPVVJDRV9OQU1FU1BBQ0V9IiwicHJlZml4IjoiIiwic291cmNlQ29uZmlndXJhdGlvbiI6eyJwb2tlbW9uX25hbWUiOiJkaXR0byJ9LCJkZXN0aW5hdGlvbkNvbmZpZ3VyYXRpb24iOnsiZGVzdGluYXRpb25fcGF0aCI6Ii90bXAifSwib3BlcmF0aW9uU2VxdWVuY2UiOltdLCJjYXRhbG9nIjp7InN0cmVhbXMiOlt7InN0cmVhbSI6eyJuYW1lIjoicG9rZW1vbiIsImpzb25fc2NoZW1hIjp7InR5cGUiOiJvYmplY3QiLCIkc2NoZW1hIjoiaHR0cDovL2pzb24tc2NoZW1hLm9yZy9kcmFmdC0wNy9zY2hlbWEjIiwicHJvcGVydGllcyI6eyJpZCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiZm9ybXMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fX0sIm1vdmVzIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7Im1vdmUiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwidmVyc2lvbl9ncm91cF9kZXRhaWxzIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InZlcnNpb25fZ3JvdXAiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwibGV2ZWxfbGVhcm5lZF9hdCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJtb3ZlX2xlYXJuX21ldGhvZCI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19fX19fX19LCJvcmRlciI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJzdGF0cyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJzdGF0Ijp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX0sImVmZm9ydCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJiYXNlX3N0YXQiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfX19fSwidHlwZXMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsic2xvdCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJ0eXBlIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX19fX0sImhlaWdodCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJ3ZWlnaHQiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwic3BlY2llcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19LCJzcHJpdGVzIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7ImJhY2tfc2hpbnkiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJiYWNrX2ZlbWFsZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sImZyb250X3NoaW55Ijp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiYmFja19kZWZhdWx0Ijp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiZnJvbnRfZmVtYWxlIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiZnJvbnRfZGVmYXVsdCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sImJhY2tfc2hpbnlfZmVtYWxlIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiZnJvbnRfc2hpbnlfZmVtYWxlIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19LCJhYmlsaXRpZXMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsic2xvdCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJhYmlsaXR5Ijp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX0sImlzX2hpZGRlbiI6eyJ0eXBlIjpbIm51bGwiLCJib29sZWFuIl19fX19LCJoZWxkX2l0ZW1zIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iam
VjdCJdLCJwcm9wZXJ0aWVzIjp7Iml0ZW0iOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwidmVyc2lvbl9kZXRhaWxzIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InJhcml0eSI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJ2ZXJzaW9uIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX19fX19fX0sImlzX2RlZmF1bHQgIjp7InR5cGUiOlsibnVsbCIsImJvb2xlYW4iXX0sImdhbWVfaW5kaWNlcyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ2ZXJzaW9uIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX0sImdhbWVfaW5kZXgiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfX19fSwiYmFzZV9leHBlcmllbmNlIjp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sImxvY2F0aW9uX2FyZWFfZW5jb3VudGVycyI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwic3VwcG9ydGVkX3N5bmNfbW9kZXMiOlsiZnVsbF9yZWZyZXNoIl0sImRlZmF1bHRfY3Vyc29yX2ZpZWxkIjpbXSwic291cmNlX2RlZmluZWRfcHJpbWFyeV9rZXkiOltdfSwic3luY19tb2RlIjoiZnVsbF9yZWZyZXNoIiwiY3Vyc29yX2ZpZWxkIjpbXSwiZGVzdGluYXRpb25fc3luY19tb2RlIjoiYXBwZW5kIiwicHJpbWFyeV9rZXkiOltdfV19LCJyZXNvdXJjZVJlcXVpcmVtZW50cyI6e30sInNvdXJjZVJlc291cmNlUmVxdWlyZW1lbnRzIjp7fSwiZGVzdGluYXRpb25SZXNvdXJjZVJlcXVpcmVtZW50cyI6e319" + }, + { + "metadata": { + "encoding": "anNvbi9wbGFpbg==" + }, + "data": "ImI5MTZmZDg2LWE1YTYtNDhhYy04ODgwLTQ5Nzg3NGNmNTNjZiI=" + } + ] + }, + "workflowExecutionTimeout": "0s", + "workflowRunTimeout": "0s", + "workflowTaskTimeout": "10s", + "parentClosePolicy": "RequestCancel", + "workflowTaskCompletedEventId": "34", + "workflowIdReusePolicy": "AllowDuplicate", + "header": {} + } + }, + { + "eventId": "37", + "eventTime": "2022-03-08T22:47:58.762930500Z", + "eventType": "ChildWorkflowExecutionStarted", + "taskId": "1048747", + "childWorkflowExecutionStartedEventAttributes": { + "namespace": "default", + "initiatedEventId": "36", + "workflowExecution": { + "workflowId": "sync_1", + "runId": "140640f0-c577-4d9c-8777-4e7e40b64241" + }, + "workflowType": { + "name": "SyncWorkflow" + }, + "header": {} + } + }, + { + "eventId": "38", + "eventTime": "2022-03-08T22:47:58.762971900Z", + "eventType": "WorkflowTaskScheduled", + "taskId": "1048748", + "workflowTaskScheduledEventAttributes": { + "taskQueue": { + "name": "1@2741f9c3f558:e7111d28-2117-4614-9bb1-8a73524b789f", + "kind": "Sticky" + }, + "startToCloseTimeout": "10s", + "attempt": 1 + } + }, + { + "eventId": "39", + "eventTime": "2022-03-08T22:47:58.774051900Z", + "eventType": "WorkflowTaskStarted", + "taskId": "1048752", + "workflowTaskStartedEventAttributes": { + "scheduledEventId": "38", + "identity": "e7111d28-2117-4614-9bb1-8a73524b789f", + "requestId": "ca98144b-47f8-486d-b260-2e1dc42bd68e" + } + }, + { + "eventId": "40", + "eventTime": "2022-03-08T22:47:58.794905100Z", + "eventType": "WorkflowTaskCompleted", + "taskId": "1048755", + "workflowTaskCompletedEventAttributes": { + "scheduledEventId": "38", + "startedEventId": "39", + "identity": "1@2741f9c3f558" + } + }, + { + "eventId": "41", + "eventTime": "2022-03-08T22:48:02.408058300Z", + "eventType": "ChildWorkflowExecutionCompleted", + "taskId": "1048757", + "childWorkflowExecutionCompletedEventAttributes": { + "result": { + "payloads": [ + { + "metadata": { + "encoding": "anNvbi9wbGFpbg==" + }, + "data": 
"eyJzdGFuZGFyZFN5bmNTdW1tYXJ5Ijp7InN0YXR1cyI6ImNvbXBsZXRlZCIsInJlY29yZHNTeW5jZWQiOjEsImJ5dGVzU3luY2VkIjoyMjcxNSwic3RhcnRUaW1lIjoxNjQ2Nzc5Njc4OTM5LCJlbmRUaW1lIjoxNjQ2Nzc5NjgyMjM4LCJ0b3RhbFN0YXRzIjp7InJlY29yZHNFbWl0dGVkIjoxLCJieXRlc0VtaXR0ZWQiOjIyNzE1LCJzdGF0ZU1lc3NhZ2VzRW1pdHRlZCI6MCwicmVjb3Jkc0NvbW1pdHRlZCI6MX0sInN0cmVhbVN0YXRzIjpbeyJzdHJlYW1OYW1lIjoicG9rZW1vbiIsInN0YXRzIjp7InJlY29yZHNFbWl0dGVkIjoxLCJieXRlc0VtaXR0ZWQiOjIyNzE1LCJyZWNvcmRzQ29tbWl0dGVkIjoxfX1dfSwib3V0cHV0X2NhdGFsb2ciOnsic3RyZWFtcyI6W3sic3RyZWFtIjp7Im5hbWUiOiJwb2tlbW9uIiwianNvbl9zY2hlbWEiOnsidHlwZSI6Im9iamVjdCIsIiRzY2hlbWEiOiJodHRwOi8vanNvbi1zY2hlbWEub3JnL2RyYWZ0LTA3L3NjaGVtYSMiLCJwcm9wZXJ0aWVzIjp7ImlkIjp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJmb3JtcyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19fSwibW92ZXMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsibW92ZSI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19LCJ2ZXJzaW9uX2dyb3VwX2RldGFpbHMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidmVyc2lvbl9ncm91cCI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19LCJsZXZlbF9sZWFybmVkX2F0Ijp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sIm1vdmVfbGVhcm5fbWV0aG9kIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX19fX19fX0sIm9yZGVyIjp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sInN0YXRzIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InN0YXQiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwiZWZmb3J0Ijp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sImJhc2Vfc3RhdCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19fX19LCJ0eXBlcyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJzbG90Ijp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sInR5cGUiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fX19fSwiaGVpZ2h0Ijp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sIndlaWdodCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJzcGVjaWVzIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX0sInNwcml0ZXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsiYmFja19zaGlueSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sImJhY2tfZmVtYWxlIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiZnJvbnRfc2hpbnkiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJiYWNrX2RlZmF1bHQiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJmcm9udF9mZW1hbGUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJmcm9udF9kZWZhdWx0Ijp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiYmFja19zaGlueV9mZW1hbGUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJmcm9udF9zaGlueV9mZW1hbGUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX0sImFiaWxpdGllcyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJzbG90Ijp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sImFiaWxpdHkiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZS
I6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwiaXNfaGlkZGVuIjp7InR5cGUiOlsibnVsbCIsImJvb2xlYW4iXX19fX0sImhlbGRfaXRlbXMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsiaXRlbSI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19LCJ2ZXJzaW9uX2RldGFpbHMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsicmFyaXR5Ijp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sInZlcnNpb24iOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fX19fX19fSwiaXNfZGVmYXVsdCAiOnsidHlwZSI6WyJudWxsIiwiYm9vbGVhbiJdfSwiZ2FtZV9pbmRpY2VzIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InZlcnNpb24iOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwiZ2FtZV9pbmRleCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19fX19LCJiYXNlX2V4cGVyaWVuY2UiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwibG9jYXRpb25fYXJlYV9lbmNvdW50ZXJzIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19LCJzdXBwb3J0ZWRfc3luY19tb2RlcyI6WyJmdWxsX3JlZnJlc2giXSwiZGVmYXVsdF9jdXJzb3JfZmllbGQiOltdLCJzb3VyY2VfZGVmaW5lZF9wcmltYXJ5X2tleSI6W119LCJzeW5jX21vZGUiOiJmdWxsX3JlZnJlc2giLCJjdXJzb3JfZmllbGQiOltdLCJkZXN0aW5hdGlvbl9zeW5jX21vZGUiOiJhcHBlbmQiLCJwcmltYXJ5X2tleSI6W119XX0sImZhaWx1cmVzIjpbXX0=" + } + ] + }, + "namespace": "default", + "workflowExecution": { + "workflowId": "sync_1", + "runId": "140640f0-c577-4d9c-8777-4e7e40b64241" + }, + "workflowType": { + "name": "SyncWorkflow" + }, + "initiatedEventId": "36", + "startedEventId": "37" + } + }, + { + "eventId": "42", + "eventTime": "2022-03-08T22:48:02.408127200Z", + "eventType": "WorkflowTaskScheduled", + "taskId": "1048758", + "workflowTaskScheduledEventAttributes": { + "taskQueue": { + "name": "1@2741f9c3f558:e7111d28-2117-4614-9bb1-8a73524b789f", + "kind": "Sticky" + }, + "startToCloseTimeout": "10s", + "attempt": 1 + } + }, + { + "eventId": "43", + "eventTime": "2022-03-08T22:48:02.422112800Z", + "eventType": "WorkflowTaskStarted", + "taskId": "1048762", + "workflowTaskStartedEventAttributes": { + "scheduledEventId": "42", + "identity": "e7111d28-2117-4614-9bb1-8a73524b789f", + "requestId": "c0916d42-c83c-4e9d-805a-29ca5f979624" + } + }, + { + "eventId": "44", + "eventTime": "2022-03-08T22:48:02.454203Z", + "eventType": "WorkflowTaskCompleted", + "taskId": "1048765", + "workflowTaskCompletedEventAttributes": { + "scheduledEventId": "42", + "startedEventId": "43", + "identity": "1@2741f9c3f558" + } + }, + { + "eventId": "45", + "eventTime": "2022-03-08T22:48:02.454256Z", + "eventType": "ActivityTaskScheduled", + "taskId": "1048766", + "activityTaskScheduledEventAttributes": { + "activityId": "b169a729-47bc-38f7-a315-c1a4b6d96466", + "activityType": { + "name": "JobSuccess" + }, + "taskQueue": { + "name": "CONNECTION_UPDATER", + "kind": "Normal" + }, + "header": {}, + "input": { + "payloads": [ + { + "metadata": { + "encoding": "anNvbi9wbGFpbg==" + }, + "data": 
"eyJqb2JJZCI6MSwiYXR0ZW1wdElkIjowLCJzdGFuZGFyZFN5bmNPdXRwdXQiOnsic3RhbmRhcmRTeW5jU3VtbWFyeSI6eyJzdGF0dXMiOiJjb21wbGV0ZWQiLCJyZWNvcmRzU3luY2VkIjoxLCJieXRlc1N5bmNlZCI6MjI3MTUsInN0YXJ0VGltZSI6MTY0Njc3OTY3ODkzOSwiZW5kVGltZSI6MTY0Njc3OTY4MjIzOCwidG90YWxTdGF0cyI6eyJyZWNvcmRzRW1pdHRlZCI6MSwiYnl0ZXNFbWl0dGVkIjoyMjcxNSwic3RhdGVNZXNzYWdlc0VtaXR0ZWQiOjAsInJlY29yZHNDb21taXR0ZWQiOjF9LCJzdHJlYW1TdGF0cyI6W3sic3RyZWFtTmFtZSI6InBva2Vtb24iLCJzdGF0cyI6eyJyZWNvcmRzRW1pdHRlZCI6MSwiYnl0ZXNFbWl0dGVkIjoyMjcxNSwicmVjb3Jkc0NvbW1pdHRlZCI6MX19XX0sIm91dHB1dF9jYXRhbG9nIjp7InN0cmVhbXMiOlt7InN0cmVhbSI6eyJuYW1lIjoicG9rZW1vbiIsImpzb25fc2NoZW1hIjp7InR5cGUiOiJvYmplY3QiLCIkc2NoZW1hIjoiaHR0cDovL2pzb24tc2NoZW1hLm9yZy9kcmFmdC0wNy9zY2hlbWEjIiwicHJvcGVydGllcyI6eyJpZCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiZm9ybXMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fX0sIm1vdmVzIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7Im1vdmUiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwidmVyc2lvbl9ncm91cF9kZXRhaWxzIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InZlcnNpb25fZ3JvdXAiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwibGV2ZWxfbGVhcm5lZF9hdCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJtb3ZlX2xlYXJuX21ldGhvZCI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19fX19fX19LCJvcmRlciI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJzdGF0cyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJzdGF0Ijp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX0sImVmZm9ydCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJiYXNlX3N0YXQiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfX19fSwidHlwZXMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsic2xvdCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJ0eXBlIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX19fX0sImhlaWdodCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJ3ZWlnaHQiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwic3BlY2llcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19LCJzcHJpdGVzIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7ImJhY2tfc2hpbnkiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJiYWNrX2ZlbWFsZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sImZyb250X3NoaW55Ijp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiYmFja19kZWZhdWx0Ijp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiZnJvbnRfZmVtYWxlIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiZnJvbnRfZGVmYXVsdCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sImJhY2tfc2hpbnlfZmVtYWxlIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiZnJvbnRfc2hpbnlfZmVtYWxlIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19LCJhYmlsaXRpZXMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsic2xvdCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJhYmlsaXR5Ijp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZX
J0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX0sImlzX2hpZGRlbiI6eyJ0eXBlIjpbIm51bGwiLCJib29sZWFuIl19fX19LCJoZWxkX2l0ZW1zIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7Iml0ZW0iOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwidmVyc2lvbl9kZXRhaWxzIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InJhcml0eSI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJ2ZXJzaW9uIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX19fX19fX0sImlzX2RlZmF1bHQgIjp7InR5cGUiOlsibnVsbCIsImJvb2xlYW4iXX0sImdhbWVfaW5kaWNlcyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ2ZXJzaW9uIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX0sImdhbWVfaW5kZXgiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfX19fSwiYmFzZV9leHBlcmllbmNlIjp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sImxvY2F0aW9uX2FyZWFfZW5jb3VudGVycyI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwic3VwcG9ydGVkX3N5bmNfbW9kZXMiOlsiZnVsbF9yZWZyZXNoIl0sImRlZmF1bHRfY3Vyc29yX2ZpZWxkIjpbXSwic291cmNlX2RlZmluZWRfcHJpbWFyeV9rZXkiOltdfSwic3luY19tb2RlIjoiZnVsbF9yZWZyZXNoIiwiY3Vyc29yX2ZpZWxkIjpbXSwiZGVzdGluYXRpb25fc3luY19tb2RlIjoiYXBwZW5kIiwicHJpbWFyeV9rZXkiOltdfV19LCJmYWlsdXJlcyI6W119fQ==" + } + ] + }, + "scheduleToCloseTimeout": "0s", + "scheduleToStartTimeout": "0s", + "startToCloseTimeout": "120s", + "heartbeatTimeout": "30s", + "workflowTaskCompletedEventId": "44", + "retryPolicy": { + "initialInterval": "30s", + "backoffCoefficient": 2, + "maximumInterval": "3000s", + "maximumAttempts": 10 + } + } + }, + { + "eventId": "46", + "eventTime": "2022-03-08T22:48:02.437049800Z", + "eventType": "ActivityTaskStarted", + "taskId": "1048770", + "activityTaskStartedEventAttributes": { + "scheduledEventId": "45", + "identity": "1@2741f9c3f558", + "requestId": "9d2fc180-ec33-42a2-a259-d29afb281992", + "attempt": 1 + } + }, + { + "eventId": "47", + "eventTime": "2022-03-08T22:48:02.664164100Z", + "eventType": "ActivityTaskCompleted", + "taskId": "1048771", + "activityTaskCompletedEventAttributes": { + "scheduledEventId": "45", + "startedEventId": "46", + "identity": "1@2741f9c3f558" + } + }, + { + "eventId": "48", + "eventTime": "2022-03-08T22:48:02.664217700Z", + "eventType": "WorkflowTaskScheduled", + "taskId": "1048772", + "workflowTaskScheduledEventAttributes": { + "taskQueue": { + "name": "1@2741f9c3f558:e7111d28-2117-4614-9bb1-8a73524b789f", + "kind": "Sticky" + }, + "startToCloseTimeout": "10s", + "attempt": 1 + } + }, + { + "eventId": "49", + "eventTime": "2022-03-08T22:48:02.676895300Z", + "eventType": "WorkflowTaskStarted", + "taskId": "1048776", + "workflowTaskStartedEventAttributes": { + "scheduledEventId": "48", + "identity": "e7111d28-2117-4614-9bb1-8a73524b789f", + "requestId": "b43c8f30-2500-47d6-a8f6-aa2cd0d99218" + } + }, + { + "eventId": "50", + "eventTime": "2022-03-08T22:48:02.709745Z", + "eventType": "WorkflowTaskCompleted", + "taskId": "1048779", + "workflowTaskCompletedEventAttributes": { + "scheduledEventId": "48", + "startedEventId": "49", + "identity": "1@2741f9c3f558" + } + }, + { + "eventId": "51", + "eventTime": "2022-03-08T22:48:02.709811400Z", + "eventType": "WorkflowExecutionContinuedAsNew", + 
"taskId": "1048780", + "workflowExecutionContinuedAsNewEventAttributes": { + "newExecutionRunId": "e81cf38b-7f11-4eeb-8c85-301778bf2671", + "workflowType": { + "name": "ConnectionManagerWorkflow" + }, + "taskQueue": { + "name": "CONNECTION_UPDATER", + "kind": "Normal" + }, + "input": { + "payloads": [ + { + "metadata": { + "encoding": "anNvbi9wbGFpbg==" + }, + "data": "eyJjb25uZWN0aW9uSWQiOiJiOTE2ZmQ4Ni1hNWE2LTQ4YWMtODg4MC00OTc4NzRjZjUzY2YiLCJqb2JJZCI6bnVsbCwiYXR0ZW1wdElkIjpudWxsLCJmcm9tRmFpbHVyZSI6ZmFsc2UsImF0dGVtcHROdW1iZXIiOjEsIndvcmtmbG93U3RhdGUiOm51bGwsInJlc2V0Q29ubmVjdGlvbiI6ZmFsc2V9" + } + ] + }, + "workflowRunTimeout": "0s", + "workflowTaskTimeout": "10s", + "workflowTaskCompletedEventId": "50", + "header": {} + } + } + ] +} From f94a8d3068d4a0b6fb36583dede3b9e83402989a Mon Sep 17 00:00:00 2001 From: Charles Date: Fri, 11 Mar 2022 12:05:17 -0800 Subject: [PATCH 22/38] add spotbugs (#10522) --- .../java/io/airbyte/commons/io/IOsTest.java | 2 +- .../io/airbyte/commons/json/JsonsTest.java | 3 +- .../init/YamlSeedConfigPersistence.java | 2 +- .../io/airbyte/config/helpers/GcsLogs.java | 3 +- .../config/helpers/LogClientSingleton.java | 2 +- .../io/airbyte/config/helpers/S3Logs.java | 3 +- .../split_secrets/JsonSecretsProcessor.java | 8 +-- .../java/io/airbyte/db/DataTypeUtils.java | 30 ++++++---- .../db/bigquery/BigQuerySourceOperations.java | 8 +-- .../development/MigrationDevHelper.java | 7 ++- ...bstractJdbcCompatibleSourceOperations.java | 16 +++--- .../integrations/base/IntegrationRunner.java | 3 +- .../base/IntegrationRunnerTest.java | 5 +- .../AirbyteFileOffsetBackingStore.java | 4 ++ .../FilteredFileDatabaseHistory.java | 4 ++ .../debezium/internals/MySQLConverter.java | 5 +- .../debezium/internals/PostgresConverter.java | 33 +++++------ .../DestinationAcceptanceTest.java | 7 ++- .../standardtest/source/TestRunner.java | 5 +- .../AbstractSourceFillDbWithTestData.java | 31 +++++----- .../AbstractSourcePerformanceTest.java | 57 +++++++++---------- ...DenormalizedDestinationAcceptanceTest.java | 2 +- .../BigQueryDenormalizedDestinationTest.java | 2 +- ...ormalizedGcsDestinationAcceptanceTest.java | 2 +- ...igQueryDenormalizedGcsDestinationTest.java | 2 +- .../BigQueryDestinationAcceptanceTest.java | 7 ++- .../bigquery/BigQueryDestinationTest.java | 25 ++++---- .../BigQueryGcsDestinationAcceptanceTest.java | 2 +- .../bigquery/BigQueryGcsDestinationTest.java | 5 +- .../jdbc/copy/gcs/GcsStreamCopier.java | 4 +- .../jdbc/copy/gcs/GcsStreamCopierFactory.java | 3 +- ...estinationStrictEncryptAcceptanceTest.java | 2 +- .../destination/oracle/OracleDestination.java | 3 +- .../postgres/PostgresSqlOperations.java | 3 +- .../PubsubDestinationAcceptanceTest.java | 2 +- .../redshift/RedshiftSqlOperations.java | 3 +- .../SnowflakeDestinationIntegrationTest.java | 5 +- .../BigQuerySourceAcceptanceTest.java | 2 +- .../bigquery/BigQuerySourceDatatypeTest.java | 2 +- .../source/bigquery/BigQuerySourceTest.java | 2 +- ...godbSourceStrictEncryptAcceptanceTest.java | 2 +- .../MongoDbSourceAtlasAcceptanceTest.java | 2 +- .../mssql/FillMsSqlTestDbScriptTest.java | 9 ++- .../MsSqlRdsSourcePerformanceSecretTest.java | 10 ++-- .../mysql/FillMySqlTestDbScriptTest.java | 9 +-- .../MySqlRdsSourcePerformanceSecretTest.java | 7 +-- .../FillPostgresTestDbScriptTest.java | 10 ++-- .../PostgresRdsSourcePerformanceTest.java | 10 ++-- .../SlackNotificationClientTest.java | 3 +- .../FacebookOAuthFlowIntegrationTest.java | 2 +- .../GithubOAuthFlowIntegrationTest.java | 2 +- .../IntercomOAuthFlowIntegrationTest.java | 
2 +- .../LinkedinAdsOAuthFlowIntegrationTest.java | 2 +- .../PipeDriveOAuthFlowIntegrationTest.java | 4 +- .../QuickbooksOAuthFlowIntegrationTest.java | 2 +- .../SalesforceOAuthFlowIntegrationTest.java | 11 ++-- .../SlackOAuthFlowIntegrationTest.java | 2 +- ...chatMarketingOAuthFlowIntegrationTest.java | 2 +- .../SquareOAuthFlowIntegrationTest.java | 2 +- .../SurveymonkeyOAuthFlowIntegrationTest.java | 2 +- .../TrelloOAuthFlowIntegrationTest.java | 5 +- .../HubspotOAuthFlowIntegrationTest.java | 8 +-- .../oauth/flows/OAuthFlowIntegrationTest.java | 15 ++--- .../GoogleAdsOAuthFlowIntegrationTest.java | 5 +- ...ogleAnalyticsOAuthFlowIntegrationTest.java | 5 +- ...SearchConsoleOAuthFlowIntegrationTest.java | 5 +- .../GoogleSheetsOAuthFlowIntegrationTest.java | 5 +- .../io/airbyte/scheduler/app/JobLogs.java | 2 +- .../airbyte/scheduler/app/SchedulerApp.java | 2 +- .../scheduler/models/AttemptStatus.java | 2 +- .../airbyte/scheduler/models/JobStatus.java | 2 +- .../io/airbyte/server/RequestLoggerTest.java | 7 ++- .../services/AirbyteGithubStoreTest.java | 4 +- .../ImportApi.java | 3 +- .../AirbyteTestContainer.java | 7 ++- .../test_helpers/EntrypointEnvChecker.java | 3 +- .../KubePodProcessIntegrationTest.java | 5 +- .../workers/DefaultGetSpecWorkerTest.java | 2 +- .../DefaultNormalizationRunnerTest.java | 5 +- .../DefaultAirbyteStreamFactoryTest.java | 5 +- ...urceAndDestinationFailureSyncWorkflow.java | 2 +- build.gradle | 9 +++ spotbugs-exclude-filter-file.xml | 20 +++++++ 83 files changed, 303 insertions(+), 236 deletions(-) create mode 100644 spotbugs-exclude-filter-file.xml diff --git a/airbyte-commons/src/test/java/io/airbyte/commons/io/IOsTest.java b/airbyte-commons/src/test/java/io/airbyte/commons/io/IOsTest.java index 062d043ea429..5f72d9ccd02e 100644 --- a/airbyte-commons/src/test/java/io/airbyte/commons/io/IOsTest.java +++ b/airbyte-commons/src/test/java/io/airbyte/commons/io/IOsTest.java @@ -75,7 +75,7 @@ public void testGetTailExists() throws IOException { "line7", "line8"); - final Writer writer = new BufferedWriter(new FileWriter(stdoutFile.toString(), true)); + final Writer writer = new BufferedWriter(new FileWriter(stdoutFile.toString(), StandardCharsets.UTF_8, true)); for (final String line : Iterables.concat(head, expectedTail)) { writer.write(line + "\n"); diff --git a/airbyte-commons/src/test/java/io/airbyte/commons/json/JsonsTest.java b/airbyte-commons/src/test/java/io/airbyte/commons/json/JsonsTest.java index 63a47fc577d6..8ae3b104f58a 100644 --- a/airbyte-commons/src/test/java/io/airbyte/commons/json/JsonsTest.java +++ b/airbyte-commons/src/test/java/io/airbyte/commons/json/JsonsTest.java @@ -18,6 +18,7 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Sets; +import java.nio.charset.StandardCharsets; import java.util.Collections; import java.util.List; import java.util.Objects; @@ -55,7 +56,7 @@ void testSerializeJsonNode() { assertEquals( "{\"test\":\"dGVzdA==\"}", Jsons.serialize(Jsons.jsonNode(ImmutableMap.of( - "test", new BinaryNode("test".getBytes()))))); + "test", new BinaryNode("test".getBytes(StandardCharsets.UTF_8)))))); } @Test diff --git a/airbyte-config/init/src/main/java/io/airbyte/config/init/YamlSeedConfigPersistence.java b/airbyte-config/init/src/main/java/io/airbyte/config/init/YamlSeedConfigPersistence.java index 0603f6f323ac..7dc8fdc66e9d 100644 --- a/airbyte-config/init/src/main/java/io/airbyte/config/init/YamlSeedConfigPersistence.java +++ 
b/airbyte-config/init/src/main/java/io/airbyte/config/init/YamlSeedConfigPersistence.java @@ -35,7 +35,7 @@ */ public class YamlSeedConfigPersistence implements ConfigPersistence { - public static Class DEFAULT_SEED_DEFINITION_RESOURCE_CLASS = SeedType.class; + public static final Class DEFAULT_SEED_DEFINITION_RESOURCE_CLASS = SeedType.class; private static final Map CONFIG_SCHEMA_MAP = Map.of( ConfigSchema.STANDARD_SOURCE_DEFINITION, SeedType.STANDARD_SOURCE_DEFINITION, diff --git a/airbyte-config/models/src/main/java/io/airbyte/config/helpers/GcsLogs.java b/airbyte-config/models/src/main/java/io/airbyte/config/helpers/GcsLogs.java index 7c5b539c6a9b..b922eb7a2738 100644 --- a/airbyte-config/models/src/main/java/io/airbyte/config/helpers/GcsLogs.java +++ b/airbyte-config/models/src/main/java/io/airbyte/config/helpers/GcsLogs.java @@ -15,6 +15,7 @@ import java.io.File; import java.io.FileOutputStream; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; @@ -89,7 +90,7 @@ public List tailCloudLog(final LogConfigs configs, final String logPath, final var poppedBlob = descendingTimestampBlobs.remove(0); try (final var inMemoryData = new ByteArrayOutputStream()) { poppedBlob.downloadTo(inMemoryData); - final var currFileLines = inMemoryData.toString().split("\n"); + final var currFileLines = inMemoryData.toString(StandardCharsets.UTF_8).split("\n"); final List currFileLinesReversed = Lists.reverse(List.of(currFileLines)); for (final var line : currFileLinesReversed) { if (linesRead == numLines) { diff --git a/airbyte-config/models/src/main/java/io/airbyte/config/helpers/LogClientSingleton.java b/airbyte-config/models/src/main/java/io/airbyte/config/helpers/LogClientSingleton.java index 680551fbca8b..2db2966aef03 100644 --- a/airbyte-config/models/src/main/java/io/airbyte/config/helpers/LogClientSingleton.java +++ b/airbyte-config/models/src/main/java/io/airbyte/config/helpers/LogClientSingleton.java @@ -117,7 +117,7 @@ public List getJobLogFile(final WorkerEnvironment workerEnvironment, fin */ @VisibleForTesting public void deleteLogs(final WorkerEnvironment workerEnvironment, final LogConfigs logConfigs, final String logPath) { - if (logPath == null || logPath.equals(Path.of(""))) { + if (logPath == null || logPath.equals("")) { return; } diff --git a/airbyte-config/models/src/main/java/io/airbyte/config/helpers/S3Logs.java b/airbyte-config/models/src/main/java/io/airbyte/config/helpers/S3Logs.java index 2bae243de650..2f3633f37766 100644 --- a/airbyte-config/models/src/main/java/io/airbyte/config/helpers/S3Logs.java +++ b/airbyte-config/models/src/main/java/io/airbyte/config/helpers/S3Logs.java @@ -16,6 +16,7 @@ import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; @@ -172,7 +173,7 @@ private static ArrayList getCurrFile(final S3Client s3Client, final Stri final var data = s3Client.getObjectAsBytes(getObjReq).asByteArray(); final var is = new ByteArrayInputStream(data); final var currentFileLines = new ArrayList(); - try (final var reader = new BufferedReader(new InputStreamReader(is))) { + try (final var reader = new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8))) { String temp; while ((temp = reader.readLine()) != null) { currentFileLines.add(temp); diff --git 
a/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/split_secrets/JsonSecretsProcessor.java b/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/split_secrets/JsonSecretsProcessor.java index 39fda6d2fb54..9face68323ae 100644 --- a/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/split_secrets/JsonSecretsProcessor.java +++ b/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/split_secrets/JsonSecretsProcessor.java @@ -26,11 +26,11 @@ public class JsonSecretsProcessor { private static final Logger LOGGER = LoggerFactory.getLogger(JsonSecretsProcessor.class); - public static String AIRBYTE_SECRET_FIELD = "airbyte_secret"; + public static final String AIRBYTE_SECRET_FIELD = "airbyte_secret"; public static final String PROPERTIES_FIELD = "properties"; - public static String TYPE_FIELD = "type"; - public static String ARRAY_TYPE_FIELD = "array"; - public static String ITEMS_FIELD = "items"; + public static final String TYPE_FIELD = "type"; + public static final String ARRAY_TYPE_FIELD = "array"; + public static final String ITEMS_FIELD = "items"; private static final JsonSchemaValidator VALIDATOR = new JsonSchemaValidator(); diff --git a/airbyte-db/lib/src/main/java/io/airbyte/db/DataTypeUtils.java b/airbyte-db/lib/src/main/java/io/airbyte/db/DataTypeUtils.java index 53396d027425..7cad6a4c4d1b 100644 --- a/airbyte-db/lib/src/main/java/io/airbyte/db/DataTypeUtils.java +++ b/airbyte-db/lib/src/main/java/io/airbyte/db/DataTypeUtils.java @@ -22,10 +22,18 @@ public class DataTypeUtils { private static final Logger LOGGER = LoggerFactory.getLogger(DataTypeUtils.class); public static final String DATE_FORMAT_PATTERN = "yyyy-MM-dd'T'HH:mm:ss'Z'"; - public static final DateFormat DATE_FORMAT = new SimpleDateFormat(DATE_FORMAT_PATTERN); // Quoted "Z" to indicate UTC, no timezone offset public static final String DATE_FORMAT_WITH_MILLISECONDS_PATTERN = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"; - public static final DateFormat DATE_FORMAT_WITH_MILLISECONDS = new SimpleDateFormat(DATE_FORMAT_WITH_MILLISECONDS_PATTERN); + + // wrap SimpleDateFormat in a method because SimpleDateFormat is not thread-safe as a shared static final. + public static DateFormat getDateFormat() { + return new SimpleDateFormat(DATE_FORMAT_PATTERN); // Quoted "Z" to indicate UTC, no timezone offset + } + + // wrap SimpleDateFormat in a method because SimpleDateFormat is not thread-safe as a shared static final.
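Background, not part of the patch: SimpleDateFormat keeps a mutable internal Calendar, so concurrent format() or parse() calls through one shared static instance can silently corrupt each other's results. A minimal standalone sketch of the options, with illustrative names, might look like this:

import java.text.DateFormat;
import java.text.SimpleDateFormat;

// Sketch only; mirrors the accessor pattern the diff introduces.
final class DateFormatSketch {

  static final String PATTERN = "yyyy-MM-dd'T'HH:mm:ss'Z'";

  // Unsafe if shared across threads: format()/parse() mutate internal state.
  // static final DateFormat SHARED = new SimpleDateFormat(PATTERN);

  // Safe: a fresh, unshared instance on every call (the approach taken here).
  static DateFormat newDateFormat() {
    return new SimpleDateFormat(PATTERN);
  }

  // Also safe, and amortizes the allocation: one instance per thread.
  static final ThreadLocal<DateFormat> PER_THREAD =
      ThreadLocal.withInitial(() -> new SimpleDateFormat(PATTERN));
}

Where a larger refactor is acceptable, java.time.format.DateTimeFormatter is the immutable, thread-safe replacement. The milliseconds accessor that follows applies the same per-call pattern.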
+ public static DateFormat getDateFormatMillisPattern() { + return new SimpleDateFormat(DATE_FORMAT_WITH_MILLISECONDS_PATTERN); + } public static T returnNullIfInvalid(final DataTypeSupplier valueProducer) { return returnNullIfInvalid(valueProducer, ignored -> true); @@ -44,15 +52,15 @@ public static T returnNullIfInvalid(final DataTypeSupplier valueProducer, } } - public static String toISO8601StringWithMicroseconds(Instant instant) { + public static String toISO8601StringWithMicroseconds(final Instant instant) { - String dateWithMilliseconds = DATE_FORMAT_WITH_MILLISECONDS.format(Date.from(instant)); + final String dateWithMilliseconds = getDateFormatMillisPattern().format(Date.from(instant)); return dateWithMilliseconds.substring(0, 23) + calculateMicrosecondsString(instant.getNano()) + dateWithMilliseconds.substring(23); } - private static String calculateMicrosecondsString(int nano) { - var microSeconds = (nano / 1000) % 1000; - String result; + private static String calculateMicrosecondsString(final int nano) { + final var microSeconds = (nano / 1000) % 1000; + final String result; if (microSeconds < 10) { result = "00" + microSeconds; } else if (microSeconds < 100) { @@ -64,15 +72,15 @@ private static String calculateMicrosecondsString(int nano) { } public static String toISO8601StringWithMilliseconds(final long epochMillis) { - return DATE_FORMAT_WITH_MILLISECONDS.format(Date.from(Instant.ofEpochMilli(epochMillis))); + return getDateFormatMillisPattern().format(Date.from(Instant.ofEpochMilli(epochMillis))); } public static String toISO8601String(final long epochMillis) { - return DATE_FORMAT.format(Date.from(Instant.ofEpochMilli(epochMillis))); + return getDateFormat().format(Date.from(Instant.ofEpochMilli(epochMillis))); } public static String toISO8601String(final java.util.Date date) { - return DATE_FORMAT.format(date); + return getDateFormat().format(date); } public static String toISOTimeString(final LocalDateTime dateTime) { @@ -88,7 +96,7 @@ public static String toISO8601String(final LocalDateTime date) { } public static String toISO8601String(final Duration duration) { - return DATE_FORMAT.format(Date.from(Instant.ofEpochSecond(Math.abs(duration.getSeconds()), Math.abs(duration.getNano())))); + return getDateFormat().format(Date.from(Instant.ofEpochSecond(Math.abs(duration.getSeconds()), Math.abs(duration.getNano())))); } } diff --git a/airbyte-db/lib/src/main/java/io/airbyte/db/bigquery/BigQuerySourceOperations.java b/airbyte-db/lib/src/main/java/io/airbyte/db/bigquery/BigQuerySourceOperations.java index 0cb65422a3b2..8d4a9604c0a1 100644 --- a/airbyte-db/lib/src/main/java/io/airbyte/db/bigquery/BigQuerySourceOperations.java +++ b/airbyte-db/lib/src/main/java/io/airbyte/db/bigquery/BigQuerySourceOperations.java @@ -115,7 +115,7 @@ public Date getDateValue(final FieldValue fieldValue, final DateFormat dateForma } @Override - public JsonSchemaType getJsonType(StandardSQLTypeName bigQueryType) { + public JsonSchemaType getJsonType(final StandardSQLTypeName bigQueryType) { return switch (bigQueryType) { case BOOL -> JsonSchemaType.BOOLEAN; case INT64, FLOAT64, NUMERIC, BIGNUMERIC -> JsonSchemaType.NUMBER; @@ -129,11 +129,11 @@ public JsonSchemaType getJsonType(StandardSQLTypeName bigQueryType) { private String getFormattedValue(final StandardSQLTypeName paramType, final String paramValue) { try { return switch (paramType) { - case DATE -> BIG_QUERY_DATE_FORMAT.format(DataTypeUtils.DATE_FORMAT.parse(paramValue)); + case DATE -> 
BIG_QUERY_DATE_FORMAT.format(DataTypeUtils.getDateFormat().parse(paramValue)); case DATETIME -> BIG_QUERY_DATETIME_FORMAT - .format(DataTypeUtils.DATE_FORMAT.parse(paramValue)); + .format(DataTypeUtils.getDateFormat().parse(paramValue)); case TIMESTAMP -> BIG_QUERY_TIMESTAMP_FORMAT - .format(DataTypeUtils.DATE_FORMAT.parse(paramValue)); + .format(DataTypeUtils.getDateFormat().parse(paramValue)); default -> paramValue; }; } catch (final ParseException e) { diff --git a/airbyte-db/lib/src/main/java/io/airbyte/db/instance/development/MigrationDevHelper.java b/airbyte-db/lib/src/main/java/io/airbyte/db/instance/development/MigrationDevHelper.java index 6c28fb73cab2..9c11b7aaf788 100644 --- a/airbyte-db/lib/src/main/java/io/airbyte/db/instance/development/MigrationDevHelper.java +++ b/airbyte-db/lib/src/main/java/io/airbyte/db/instance/development/MigrationDevHelper.java @@ -14,6 +14,7 @@ import java.io.FileReader; import java.io.IOException; import java.io.PrintWriter; +import java.nio.charset.StandardCharsets; import java.nio.file.Path; import java.util.Arrays; import java.util.List; @@ -77,7 +78,7 @@ public static void createNextMigrationFile(final String dbIdentifier, final Flyw final File file = new File(Path.of(filePath).toUri()); FileUtils.forceMkdirParent(file); - try (final PrintWriter writer = new PrintWriter(file)) { + try (final PrintWriter writer = new PrintWriter(file, StandardCharsets.UTF_8)) { writer.println(newMigration); } catch (final FileNotFoundException e) { throw new IOException(e); @@ -93,7 +94,7 @@ public static Optional getSecondToLastMigrationVersion(final F } public static void dumpSchema(final String schema, final String schemaDumpFile, final boolean printSchema) throws IOException { - try (final PrintWriter writer = new PrintWriter(new File(Path.of(schemaDumpFile).toUri()))) { + try (final PrintWriter writer = new PrintWriter(new File(Path.of(schemaDumpFile).toUri()), StandardCharsets.UTF_8)) { writer.println(schema); if (printSchema) { System.out.println("\n==== Schema ====\n" + schema); @@ -138,7 +139,7 @@ private static Optional getLastMigrationVersion(final FlywayDa @VisibleForTesting static AirbyteVersion getCurrentAirbyteVersion() { - try (final BufferedReader reader = new BufferedReader(new FileReader("../../.env"))) { + try (final BufferedReader reader = new BufferedReader(new FileReader("../../.env", StandardCharsets.UTF_8))) { String line; while ((line = reader.readLine()) != null) { if (line.startsWith("VERSION")) { diff --git a/airbyte-db/lib/src/main/java/io/airbyte/db/jdbc/AbstractJdbcCompatibleSourceOperations.java b/airbyte-db/lib/src/main/java/io/airbyte/db/jdbc/AbstractJdbcCompatibleSourceOperations.java index 54394668820b..fefa0db06a58 100644 --- a/airbyte-db/lib/src/main/java/io/airbyte/db/jdbc/AbstractJdbcCompatibleSourceOperations.java +++ b/airbyte-db/lib/src/main/java/io/airbyte/db/jdbc/AbstractJdbcCompatibleSourceOperations.java @@ -53,8 +53,8 @@ public JsonNode rowToJson(final ResultSet queryContext) throws SQLException { } protected void putArray(final ObjectNode node, final String columnName, final ResultSet resultSet, final int index) throws SQLException { - ArrayNode arrayNode = new ObjectMapper().createArrayNode(); - ResultSet arrayResultSet = resultSet.getArray(index).getResultSet(); + final ArrayNode arrayNode = new ObjectMapper().createArrayNode(); + final ResultSet arrayResultSet = resultSet.getArray(index).getResultSet(); while (arrayResultSet.next()) { arrayNode.add(arrayResultSet.getString(2)); } @@ -144,10 +144,10 @@ protected 
void setTimestamp(final PreparedStatement preparedStatement, final int // value in the following format try { var valueWithoutMicros = value; - StringBuilder nanos = new StringBuilder(); - var dotIndex = value.indexOf("."); + final StringBuilder nanos = new StringBuilder(); + final var dotIndex = value.indexOf("."); if (dotIndex > 0) { - var micro = value.substring(value.lastIndexOf('.') + 1, value.length() - 1); + final var micro = value.substring(value.lastIndexOf('.') + 1, value.length() - 1); nanos.append(micro); valueWithoutMicros = value.replace("." + micro, ""); } @@ -155,8 +155,8 @@ protected void setTimestamp(final PreparedStatement preparedStatement, final int nanos.append("0"); } - var timestamp = Timestamp - .from(DataTypeUtils.DATE_FORMAT.parse(valueWithoutMicros).toInstant()); + final var timestamp = Timestamp + .from(DataTypeUtils.getDateFormat().parse(valueWithoutMicros).toInstant()); timestamp.setNanos(Integer.parseInt(nanos.toString())); preparedStatement.setTimestamp(parameterIndex, timestamp); } catch (final ParseException e) { @@ -166,7 +166,7 @@ protected void setTimestamp(final PreparedStatement preparedStatement, final int protected void setDate(final PreparedStatement preparedStatement, final int parameterIndex, final String value) throws SQLException { try { - final Timestamp from = Timestamp.from(DataTypeUtils.DATE_FORMAT.parse(value).toInstant()); + final Timestamp from = Timestamp.from(DataTypeUtils.getDateFormat().parse(value).toInstant()); preparedStatement.setDate(parameterIndex, new Date(from.getTime())); } catch (final ParseException e) { throw new RuntimeException(e); diff --git a/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/base/IntegrationRunner.java b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/base/IntegrationRunner.java index 5065823df283..7a777edebe25 100644 --- a/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/base/IntegrationRunner.java +++ b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/base/IntegrationRunner.java @@ -27,6 +27,7 @@ import io.sentry.Sentry; import io.sentry.SentryLevel; import io.sentry.SpanStatus; +import java.nio.charset.StandardCharsets; import java.nio.file.Path; import java.util.List; import java.util.Map; @@ -172,7 +173,7 @@ private void runInternal(final IntegrationConfig parsed) throws Exception { static void consumeWriteStream(final AirbyteMessageConsumer consumer) throws Exception { // use a Scanner that only processes new line characters to strictly abide by the // https://jsonlines.org/ standard - final Scanner input = new Scanner(System.in).useDelimiter("[\r\n]+"); + final Scanner input = new Scanner(System.in, StandardCharsets.UTF_8).useDelimiter("[\r\n]+"); consumer.start(); while (input.hasNext()) { final String inputString = input.next(); diff --git a/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/base/IntegrationRunnerTest.java b/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/base/IntegrationRunnerTest.java index 6102c6ae6054..d090d6e172c7 100644 --- a/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/base/IntegrationRunnerTest.java +++ b/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/base/IntegrationRunnerTest.java @@ -37,6 +37,7 @@ import java.io.ByteArrayInputStream; import java.io.IOException; import java.net.URI; +import java.nio.charset.StandardCharsets; import java.nio.file.Files; 
import java.nio.file.Path; import java.time.Instant; @@ -260,7 +261,7 @@ void testDestinationConsumerLifecycleSuccess() throws Exception { .withData(Jsons.deserialize("{ \"checkpoint\": \"1\" }"))); System.setIn(new ByteArrayInputStream((Jsons.serialize(message1) + "\n" + Jsons.serialize(message2) + "\n" - + Jsons.serialize(stateMessage)).getBytes())); + + Jsons.serialize(stateMessage)).getBytes(StandardCharsets.UTF_8))); try (final AirbyteMessageConsumer airbyteMessageConsumerMock = mock(AirbyteMessageConsumer.class)) { IntegrationRunner.consumeWriteStream(airbyteMessageConsumerMock); @@ -285,7 +286,7 @@ void testDestinationConsumerLifecycleFailure() throws Exception { .withData(Jsons.deserialize("{ \"color\": \"yellow\" }")) .withStream(STREAM_NAME) .withEmittedAt(EMITTED_AT)); - System.setIn(new ByteArrayInputStream((Jsons.serialize(message1) + "\n" + Jsons.serialize(message2)).getBytes())); + System.setIn(new ByteArrayInputStream((Jsons.serialize(message1) + "\n" + Jsons.serialize(message2)).getBytes(StandardCharsets.UTF_8))); try (final AirbyteMessageConsumer airbyteMessageConsumerMock = mock(AirbyteMessageConsumer.class)) { doThrow(new IOException("error")).when(airbyteMessageConsumerMock).accept(message1); diff --git a/airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/internals/AirbyteFileOffsetBackingStore.java b/airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/internals/AirbyteFileOffsetBackingStore.java index 5625c2cc7e37..1933b796eac4 100644 --- a/airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/internals/AirbyteFileOffsetBackingStore.java +++ b/airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/internals/AirbyteFileOffsetBackingStore.java @@ -84,6 +84,10 @@ private static ByteBuffer stringToByteBuffer(final String s) { @SuppressWarnings("unchecked") private Map load() { try (final SafeObjectInputStream is = new SafeObjectInputStream(Files.newInputStream(offsetFilePath))) { + // todo (cgardens) - we currently suppress a security warning for this line. use of readObject from + // untrusted sources is considered unsafe. Since the source is controlled by us in this case it + // should be safe. That said, changing this implementation to not use readObject would remove some + // headache. 
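As a sketch of that readObject-free direction (hypothetical, not part of this change, and simplified to a String map where the real store works with ByteBuffer keys and values): persisting the offsets in an explicit text encoding means load() never deserializes arbitrary objects. This sketch assumes keys contain no '=' and values contain no newlines.

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.HashMap;
import java.util.Map;

// Hypothetical alternative to the ObjectInputStream-based load(): offsets stored as
// one "key=value" pair per line, so no attacker-influenced class is ever instantiated.
final class PlainTextOffsetStore {

  static Map<String, String> load(final Path file) throws IOException {
    final Map<String, String> offsets = new HashMap<>();
    for (final String line : Files.readAllLines(file, StandardCharsets.UTF_8)) {
      final int eq = line.indexOf('=');
      if (eq > 0) { // skip blank or malformed lines
        offsets.put(line.substring(0, eq), line.substring(eq + 1));
      }
    }
    return offsets;
  }

  static void save(final Path file, final Map<String, String> offsets) throws IOException {
    final StringBuilder sb = new StringBuilder();
    offsets.forEach((k, v) -> sb.append(k).append('=').append(v).append('\n'));
    Files.writeString(file, sb.toString(), StandardCharsets.UTF_8);
  }
}

The patch itself keeps readObject and relies on the type guard that follows.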
final Object obj = is.readObject(); if (!(obj instanceof HashMap)) throw new ConnectException("Expected HashMap but found " + obj.getClass()); diff --git a/airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/internals/FilteredFileDatabaseHistory.java b/airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/internals/FilteredFileDatabaseHistory.java index 8cb8edac3af5..3030c35b9b41 100644 --- a/airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/internals/FilteredFileDatabaseHistory.java +++ b/airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/internals/FilteredFileDatabaseHistory.java @@ -102,6 +102,10 @@ public void storeRecord(final HistoryRecord record) throws DatabaseHistoryExcept public void stop() { fileDatabaseHistory.stop(); // this is just for tests + resetDbName(); + } + + public static void resetDbName() { databaseName = null; } diff --git a/airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/internals/MySQLConverter.java b/airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/internals/MySQLConverter.java index 0044caa69be0..eb53e6640eb4 100644 --- a/airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/internals/MySQLConverter.java +++ b/airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/internals/MySQLConverter.java @@ -7,6 +7,7 @@ import io.airbyte.db.DataTypeUtils; import io.debezium.spi.converter.CustomConverter; import io.debezium.spi.converter.RelationalColumn; +import java.nio.charset.StandardCharsets; import java.time.LocalDate; import java.util.Arrays; import java.util.Properties; @@ -50,7 +51,7 @@ private void registerText(final RelationalColumn field, final ConverterRegistrat return DebeziumConverterUtils.convertDefaultValue(field); } if (x instanceof byte[]) { - return new String((byte[]) x); + return new String((byte[]) x, StandardCharsets.UTF_8); } else { return x.toString(); } @@ -62,7 +63,7 @@ private void registerText(final RelationalColumn field, final ConverterRegistrat * the doc, it should be done by driver, but it fails. */ private Object convertDefaultValueNullDate(final RelationalColumn field) { - var defaultValue = DebeziumConverterUtils.convertDefaultValue(field); + final var defaultValue = DebeziumConverterUtils.convertDefaultValue(field); return (defaultValue == null && !field.isOptional() ? 
DataTypeUtils.toISO8601String(LocalDate.EPOCH) : defaultValue); } diff --git a/airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/internals/PostgresConverter.java b/airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/internals/PostgresConverter.java index dc45ee017e47..b763c9f8590f 100644 --- a/airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/internals/PostgresConverter.java +++ b/airbyte-integrations/bases/debezium/src/main/java/io/airbyte/integrations/debezium/internals/PostgresConverter.java @@ -7,6 +7,7 @@ import io.debezium.spi.converter.CustomConverter; import io.debezium.spi.converter.RelationalColumn; import java.math.BigDecimal; +import java.nio.charset.StandardCharsets; import java.util.Arrays; import java.util.Properties; import org.apache.kafka.connect.data.SchemaBuilder; @@ -25,10 +26,10 @@ public class PostgresConverter implements CustomConverter registration) { + public void converterFor(final RelationalColumn field, final ConverterRegistration registration) { if (Arrays.stream(DATE_TYPES).anyMatch(s -> s.equalsIgnoreCase(field.typeName()))) { registerDate(field, registration); } else if (Arrays.stream(TEXT_TYPES).anyMatch(s -> s.equalsIgnoreCase(field.typeName())) @@ -40,21 +41,21 @@ public void converterFor(RelationalColumn field, ConverterRegistration registration) { + private void registerText(final RelationalColumn field, final ConverterRegistration registration) { registration.register(SchemaBuilder.string(), x -> { if (x == null) { return DebeziumConverterUtils.convertDefaultValue(field); } if (x instanceof byte[]) { - return new String((byte[]) x); + return new String((byte[]) x, StandardCharsets.UTF_8); } else { return x.toString(); } }); } - private void registerDate(RelationalColumn field, ConverterRegistration registration) { + private void registerDate(final RelationalColumn field, final ConverterRegistration registration) { registration.register(SchemaBuilder.string(), x -> { if (x == null) { return DebeziumConverterUtils.convertDefaultValue(field); @@ -66,8 +67,8 @@ private void registerDate(RelationalColumn field, ConverterRegistration registration) { + private void registerMoney(final RelationalColumn field, final ConverterRegistration registration) { registration.register(SchemaBuilder.string(), x -> { if (x == null) { return DebeziumConverterUtils.convertDefaultValue(field); } else if (x instanceof Double) { - BigDecimal result = BigDecimal.valueOf((Double) x); + final BigDecimal result = BigDecimal.valueOf((Double) x); if (result.compareTo(new BigDecimal("999999999999999")) == 1 || result.compareTo(new BigDecimal("-999999999999999")) == -1) { return null; @@ -93,7 +94,7 @@ private void registerMoney(RelationalColumn field, ConverterRegistration readMessagesFromFile(String messagesFilename) throws IOException { + private List readMessagesFromFile(final String messagesFilename) throws IOException { return MoreResources.readResource(messagesFilename).lines() .map(record -> Jsons.deserialize(record, AirbyteMessage.class)).collect(Collectors.toList()); } diff --git a/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/TestRunner.java b/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/TestRunner.java index 179b76fa1b4e..436ae0235fc6 100644 --- 
a/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/TestRunner.java +++ b/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/TestRunner.java @@ -7,6 +7,7 @@ import static org.junit.platform.engine.discovery.DiscoverySelectors.selectClass; import java.io.PrintWriter; +import java.nio.charset.StandardCharsets; import org.junit.platform.launcher.Launcher; import org.junit.platform.launcher.LauncherDiscoveryRequest; import org.junit.platform.launcher.TestPlan; @@ -29,8 +30,8 @@ public static void runTestClass(final Class testClass) { launcher.execute(plan, listener); - listener.getSummary().printFailuresTo(new PrintWriter(System.out)); - listener.getSummary().printTo(new PrintWriter(System.out)); + listener.getSummary().printFailuresTo(new PrintWriter(System.out, false, StandardCharsets.UTF_8)); + listener.getSummary().printTo(new PrintWriter(System.out, false, StandardCharsets.UTF_8)); if (listener.getSummary().getTestsFailedCount() > 0) { System.out.println( diff --git a/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/performancetest/AbstractSourceFillDbWithTestData.java b/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/performancetest/AbstractSourceFillDbWithTestData.java index 696a95f02494..9680761dd699 100644 --- a/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/performancetest/AbstractSourceFillDbWithTestData.java +++ b/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/performancetest/AbstractSourceFillDbWithTestData.java @@ -25,7 +25,6 @@ public abstract class AbstractSourceFillDbWithTestData extends AbstractSourceBas protected static final Logger c = LoggerFactory.getLogger(AbstractSourceFillDbWithTestData.class); private static final String TEST_VALUE_TEMPLATE_POSTGRES = "\'Value id_placeholder\'"; - protected static Stream testArgs; /** * Set up the test database. All tables and data described in the registered tests will be put there.
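The hunk above also drops the mutable static testArgs field; the hunks below make provideParameters() an abstract factory that each DB-specific subclass implements. A self-contained sketch of the resulting JUnit 5 pattern, with illustrative class names rather than Airbyte's own:

import java.util.stream.Stream;
import org.junit.jupiter.api.TestInstance;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;

// PER_CLASS lifecycle lets @MethodSource resolve the non-static factory method,
// and @TestInstance is @Inherited, so concrete subclasses pick it up automatically.
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
abstract class AbstractPerDbDataTest {

  // Each DB flavor supplies its own argument groups; JUnit runs the test once per group.
  protected abstract Stream<Arguments> provideParameters();

  @ParameterizedTest
  @MethodSource("provideParameters")
  void runForEachArgumentGroup(final String dbName, final String schemaName, final int numberOfStreams) {
    // A real test would create numberOfStreams tables under dbName.schemaName and assert on them.
    System.out.printf("%s.%s -> %d streams%n", dbName, schemaName, numberOfStreams);
  }
}

class ExamplePostgresDataTest extends AbstractPerDbDataTest {

  @Override
  protected Stream<Arguments> provideParameters() {
    return Stream.of(Arguments.of("your_db_name", "your_schema_name", 1000));
  }
}

With this shape, two test classes can no longer race on a shared static field, and each connector's dataset shape lives next to the test that uses it.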
@@ -42,12 +41,12 @@ public abstract class AbstractSourceFillDbWithTestData extends AbstractSourceBas @Disabled @ParameterizedTest @MethodSource("provideParameters") - public void addTestData(String dbName, - String schemaName, - int numberOfDummyRecords, - int numberOfBatches, - int numberOfColumns, - int numberOfStreams) + public void addTestData(final String dbName, + final String schemaName, + final int numberOfDummyRecords, + final int numberOfBatches, + final int numberOfColumns, + final int numberOfStreams) throws Exception { final Database database = setupDatabase(dbName); @@ -55,11 +54,11 @@ public void addTestData(String dbName, database.query(ctx -> { for (int currentSteamNumber = 0; currentSteamNumber < numberOfStreams; currentSteamNumber++) { - String currentTableName = String.format(getTestStreamNameTemplate(), currentSteamNumber); + final String currentTableName = String.format(getTestStreamNameTemplate(), currentSteamNumber); ctx.fetch(prepareCreateTableQuery(schemaName, numberOfColumns, currentTableName)); for (int i = 0; i < numberOfBatches; i++) { - String insertQueryTemplate = prepareInsertQueryTemplate(schemaName, i, + final String insertQueryTemplate = prepareInsertQueryTemplate(schemaName, i, numberOfColumns, numberOfDummyRecords); ctx.fetch(String.format(insertQueryTemplate, currentTableName)); @@ -86,15 +85,13 @@ public void addTestData(String dbName, * * Stream.of( Arguments.of("your_db_name", "your_schema_name", 100, 2, 240, 1000) ); */ - private static Stream provideParameters() { - return testArgs; - } + protected abstract Stream provideParameters(); protected String prepareCreateTableQuery(final String dbSchemaName, final int numberOfColumns, final String currentTableName) { - StringJoiner sj = new StringJoiner(","); + final StringJoiner sj = new StringJoiner(","); for (int i = 0; i < numberOfColumns; i++) { sj.add(String.format(" %s%s %s", getTestColumnName(), i, TEST_DB_FIELD_TYPE)); } @@ -107,10 +104,10 @@ protected String prepareInsertQueryTemplate(final String dbSchemaName, final int numberOfColumns, final int recordsNumber) { - StringJoiner fieldsNames = new StringJoiner(","); + final StringJoiner fieldsNames = new StringJoiner(","); fieldsNames.add("id"); - StringJoiner baseInsertQuery = new StringJoiner(","); + final StringJoiner baseInsertQuery = new StringJoiner(","); baseInsertQuery.add("id_placeholder"); for (int i = 0; i < numberOfColumns; i++) { @@ -118,9 +115,9 @@ protected String prepareInsertQueryTemplate(final String dbSchemaName, baseInsertQuery.add(TEST_VALUE_TEMPLATE_POSTGRES); } - StringJoiner insertGroupValuesJoiner = new StringJoiner(","); + final StringJoiner insertGroupValuesJoiner = new StringJoiner(","); - int batchMessages = batchNumber * 100; + final int batchMessages = batchNumber * 100; for (int currentRecordNumber = batchMessages; currentRecordNumber < recordsNumber + batchMessages; diff --git a/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/performancetest/AbstractSourcePerformanceTest.java b/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/performancetest/AbstractSourcePerformanceTest.java index adfd1e7e5893..a3b223cde537 100644 --- a/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/performancetest/AbstractSourcePerformanceTest.java +++ 
b/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/performancetest/AbstractSourcePerformanceTest.java @@ -36,7 +36,6 @@ public abstract class AbstractSourcePerformanceTest extends AbstractSourceBasePerformanceTest { protected static final Logger c = LoggerFactory.getLogger(AbstractSourcePerformanceTest.class); - protected static Stream testArgs; private static final String ID_COLUMN_NAME = "id"; protected JsonNode config; @@ -55,27 +54,6 @@ protected JsonNode getConfig() { @Override protected void tearDown(final TestDestinationEnv testEnv) {} - @ParameterizedTest - @MethodSource("provideParameters") - public void testPerformance(String dbName, - String schemaName, - int numberOfDummyRecords, - int numberOfColumns, - int numberOfStreams) - throws Exception { - - setupDatabase(dbName); - - ConfiguredAirbyteCatalog catalog = getConfiguredCatalog(schemaName, numberOfStreams, - numberOfColumns); - Map mapOfExpectedRecordsCount = prepareMapWithExpectedRecords( - numberOfStreams, numberOfDummyRecords); - Map checkStatusMap = runReadVerifyNumberOfReceivedMsgs(catalog, null, - mapOfExpectedRecordsCount); - validateNumberOfReceivedMsgs(checkStatusMap); - - } - /** * This is a data provider for performance tests. Each group of arguments is run as a separate * test. Set "testArgs" in the test class of your DB in the @BeforeTest method. * @@ -91,8 +69,27 @@ public void testPerformance(String dbName, * Arguments.of("newregular25tables50000records", "dbo", 50052, 8, 25), * Arguments.of("newsmall1000tableswith10000rows", "dbo", 10011, 8, 1000) ); */ - private static Stream provideParameters() { - return testArgs; + protected abstract Stream provideParameters(); + + @ParameterizedTest + @MethodSource("provideParameters") + public void testPerformance(final String dbName, + final String schemaName, + final int numberOfDummyRecords, + final int numberOfColumns, + final int numberOfStreams) + throws Exception { + + setupDatabase(dbName); + + final ConfiguredAirbyteCatalog catalog = getConfiguredCatalog(schemaName, numberOfStreams, + numberOfColumns); + final Map mapOfExpectedRecordsCount = prepareMapWithExpectedRecords( + numberOfStreams, numberOfDummyRecords); + final Map checkStatusMap = runReadVerifyNumberOfReceivedMsgs(catalog, null, + mapOfExpectedRecordsCount); + validateNumberOfReceivedMsgs(checkStatusMap); + } /** * @@ -107,7 +104,7 @@ protected String getIdColumnName() { protected void validateNumberOfReceivedMsgs(final Map checkStatusMap) { // Iterate through all streams map and check for streams where - Map failedStreamsMap = checkStatusMap.entrySet().stream() + final Map failedStreamsMap = checkStatusMap.entrySet().stream() .filter(el -> el.getValue() != 0).collect(Collectors.toMap(Entry::getKey, Entry::getValue)); if (!failedStreamsMap.isEmpty()) { @@ -118,7 +115,7 @@ protected void validateNumberOfReceivedMsgs(final Map checkStat protected Map prepareMapWithExpectedRecords(final int streamNumber, final int expectedRecordsNumberInEachStream) { - Map resultMap = new HashMap<>(); // streamName&expected records in stream + final Map resultMap = new HashMap<>(); // streamName&expected records in stream for (int currentStream = 0; currentStream < streamNumber; currentStream++) { final String streamName = String.format(getTestStreamNameTemplate(), currentStream); @@ -135,12 +132,12 @@ protected Map prepareMapWithExpectedRecords(final int streamNum protected ConfiguredAirbyteCatalog getConfiguredCatalog(final String nameSpace, final int 
numberOfStreams, final int numberOfColumns) { - List streams = new ArrayList<>(); + final List streams = new ArrayList<>(); for (int currentStream = 0; currentStream < numberOfStreams; currentStream++) { // CREATE TABLE test.test_1_int(id INTEGER PRIMARY KEY, test_column int) - List fields = new ArrayList<>(); + final List fields = new ArrayList<>(); fields.add(Field.of(getIdColumnName(), JsonSchemaType.NUMBER)); for (int currentColumnNumber = 0; @@ -149,7 +146,7 @@ protected ConfiguredAirbyteCatalog getConfiguredCatalog(final String nameSpace, fields.add(Field.of(getTestColumnName() + currentColumnNumber, JsonSchemaType.STRING)); } - AirbyteStream airbyteStream = CatalogHelpers + final AirbyteStream airbyteStream = CatalogHelpers .createAirbyteStream(String.format(getTestStreamNameTemplate(), currentStream), nameSpace, fields) .withSourceDefinedCursor(true) @@ -157,7 +154,7 @@ protected ConfiguredAirbyteCatalog getConfiguredCatalog(final String nameSpace, .withSupportedSyncModes( Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)); - ConfiguredAirbyteStream configuredAirbyteStream = new ConfiguredAirbyteStream() + final ConfiguredAirbyteStream configuredAirbyteStream = new ConfiguredAirbyteStream() .withSyncMode(SyncMode.INCREMENTAL) .withCursorField(Lists.newArrayList(getIdColumnName())) .withDestinationSyncMode(DestinationSyncMode.APPEND) diff --git a/airbyte-integrations/connectors/destination-bigquery-denormalized/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryDenormalizedDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-bigquery-denormalized/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryDenormalizedDestinationAcceptanceTest.java index 484fd902a9ad..9b25b368edff 100644 --- a/airbyte-integrations/connectors/destination-bigquery-denormalized/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryDenormalizedDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-bigquery-denormalized/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryDenormalizedDestinationAcceptanceTest.java @@ -181,7 +181,7 @@ protected void setup(final TestDestinationEnv testEnv) throws Exception { + ". 
Override by setting setting path with the CREDENTIALS_PATH constant."); } - final String credentialsJsonString = new String(Files.readAllBytes(CREDENTIALS_PATH)); + final String credentialsJsonString = Files.readString(CREDENTIALS_PATH); final JsonNode credentialsJson = Jsons.deserialize(credentialsJsonString).get(BigQueryConsts.BIGQUERY_BASIC_CONFIG); final String projectId = credentialsJson.get(CONFIG_PROJECT_ID).asText(); diff --git a/airbyte-integrations/connectors/destination-bigquery-denormalized/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryDenormalizedDestinationTest.java b/airbyte-integrations/connectors/destination-bigquery-denormalized/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryDenormalizedDestinationTest.java index 77d6b936457a..b097048196ee 100644 --- a/airbyte-integrations/connectors/destination-bigquery-denormalized/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryDenormalizedDestinationTest.java +++ b/airbyte-integrations/connectors/destination-bigquery-denormalized/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryDenormalizedDestinationTest.java @@ -96,7 +96,7 @@ void setup(final TestInfo info) throws IOException { "Must provide path to a big query credentials file. By default {module-root}/" + CREDENTIALS_PATH + ". Override by setting setting path with the CREDENTIALS_PATH constant."); } - final String credentialsJsonString = new String(Files.readAllBytes(CREDENTIALS_PATH)); + final String credentialsJsonString = Files.readString(CREDENTIALS_PATH); final JsonNode credentialsJson = Jsons.deserialize(credentialsJsonString).get(BigQueryConsts.BIGQUERY_BASIC_CONFIG); final String projectId = credentialsJson.get(BigQueryConsts.CONFIG_PROJECT_ID).asText(); diff --git a/airbyte-integrations/connectors/destination-bigquery-denormalized/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryDenormalizedGcsDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-bigquery-denormalized/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryDenormalizedGcsDestinationAcceptanceTest.java index 0301d6635f3c..d4458e157b44 100644 --- a/airbyte-integrations/connectors/destination-bigquery-denormalized/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryDenormalizedGcsDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-bigquery-denormalized/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryDenormalizedGcsDestinationAcceptanceTest.java @@ -180,7 +180,7 @@ protected void setup(final TestDestinationEnv testEnv) throws Exception { + ". 
Override by setting setting path with the CREDENTIALS_PATH constant."); } - final String fullConfigFromSecretFileAsString = new String(Files.readAllBytes(CREDENTIALS_PATH)); + final String fullConfigFromSecretFileAsString = Files.readString(CREDENTIALS_PATH); final JsonNode fullConfigFromSecretFileJson = Jsons.deserialize(fullConfigFromSecretFileAsString); final JsonNode bigqueryConfigFromSecretFile = fullConfigFromSecretFileJson.get(BigQueryConsts.BIGQUERY_BASIC_CONFIG); diff --git a/airbyte-integrations/connectors/destination-bigquery-denormalized/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryDenormalizedGcsDestinationTest.java b/airbyte-integrations/connectors/destination-bigquery-denormalized/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryDenormalizedGcsDestinationTest.java index 11697e942af1..032b32d165c4 100644 --- a/airbyte-integrations/connectors/destination-bigquery-denormalized/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryDenormalizedGcsDestinationTest.java +++ b/airbyte-integrations/connectors/destination-bigquery-denormalized/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryDenormalizedGcsDestinationTest.java @@ -111,7 +111,7 @@ void setup(final TestInfo info) throws IOException { "Must provide path to a big query credentials file. By default {module-root}/" + CREDENTIALS_PATH + ". Override by setting setting path with the CREDENTIALS_PATH constant."); } - final String credentialsJsonString = new String(Files.readAllBytes(CREDENTIALS_PATH)); + final String credentialsJsonString = Files.readString(CREDENTIALS_PATH); final JsonNode credentialsJson = Jsons.deserialize(credentialsJsonString).get(BigQueryConsts.BIGQUERY_BASIC_CONFIG); final JsonNode credentialsGcsJson = Jsons.deserialize(credentialsJsonString).get(BigQueryConsts.GCS_CONFIG); diff --git a/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryDestinationAcceptanceTest.java index d67b634c3f28..e5272c974743 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryDestinationAcceptanceTest.java @@ -29,6 +29,7 @@ import io.airbyte.integrations.standardtest.destination.DestinationAcceptanceTest; import java.io.ByteArrayInputStream; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; @@ -171,7 +172,7 @@ protected void setup(final TestDestinationEnv testEnv) throws Exception { + ". 
Override by setting setting path with the CREDENTIALS_PATH constant."); } - final String fullConfigAsString = new String(Files.readAllBytes(CREDENTIALS_PATH)); + final String fullConfigAsString = Files.readString(CREDENTIALS_PATH); final JsonNode credentialsJson = Jsons.deserialize(fullConfigAsString).get(BigQueryConsts.BIGQUERY_BASIC_CONFIG); final String projectId = credentialsJson.get(CONFIG_PROJECT_ID).asText(); final String datasetLocation = "US"; @@ -188,9 +189,9 @@ protected void setup(final TestDestinationEnv testEnv) throws Exception { setupBigQuery(credentialsJson); } - protected void setupBigQuery(JsonNode credentialsJson) throws IOException { + protected void setupBigQuery(final JsonNode credentialsJson) throws IOException { final ServiceAccountCredentials credentials = ServiceAccountCredentials - .fromStream(new ByteArrayInputStream(credentialsJson.toString().getBytes())); + .fromStream(new ByteArrayInputStream(credentialsJson.toString().getBytes(StandardCharsets.UTF_8))); bigquery = BigQueryOptions.newBuilder() .setProjectId(config.get(CONFIG_PROJECT_ID).asText()) diff --git a/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryDestinationTest.java b/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryDestinationTest.java index 121664fee08e..fb1089ead4e9 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryDestinationTest.java +++ b/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryDestinationTest.java @@ -50,6 +50,7 @@ import io.airbyte.protocol.models.JsonSchemaType; import java.io.ByteArrayInputStream; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.time.Instant; @@ -119,13 +120,13 @@ void setup(final TestInfo info) throws IOException { throw new IllegalStateException( "Must provide path to a big query credentials file. By default {module-root}/config/credentials.json. 
Override by setting setting path with the CREDENTIALS_PATH constant."); } - final String fullConfigAsString = new String(Files.readAllBytes(CREDENTIALS_PATH)); + final String fullConfigAsString = Files.readString(CREDENTIALS_PATH); final JsonNode credentialsJson = Jsons.deserialize(fullConfigAsString).get(BigQueryConsts.BIGQUERY_BASIC_CONFIG); final String projectId = credentialsJson.get(BigQueryConsts.CONFIG_PROJECT_ID).asText(); final ServiceAccountCredentials credentials = ServiceAccountCredentials - .fromStream(new ByteArrayInputStream(credentialsJson.toString().getBytes())); + .fromStream(new ByteArrayInputStream(credentialsJson.toString().getBytes(StandardCharsets.UTF_8))); bigquery = BigQueryOptions.newBuilder() .setProjectId(projectId) .setCredentials(credentials) @@ -204,7 +205,7 @@ void testSpec() throws Exception { @ParameterizedTest @MethodSource("datasetIdResetterProvider") - void testCheckSuccess(DatasetIdResetter resetDatasetId) { + void testCheckSuccess(final DatasetIdResetter resetDatasetId) { resetDatasetId.accept(config); final AirbyteConnectionStatus actual = new BigQueryDestination().check(config); final AirbyteConnectionStatus expected = new AirbyteConnectionStatus().withStatus(Status.SUCCEEDED); @@ -213,7 +214,7 @@ void testCheckSuccess(DatasetIdResetter resetDatasetId) { @ParameterizedTest @MethodSource("datasetIdResetterProvider") - void testCheckFailure(DatasetIdResetter resetDatasetId) { + void testCheckFailure(final DatasetIdResetter resetDatasetId) { ((ObjectNode) config).put(BigQueryConsts.CONFIG_PROJECT_ID, "fake"); resetDatasetId.accept(config); final AirbyteConnectionStatus actual = new BigQueryDestination().check(config); @@ -226,7 +227,7 @@ void testCheckFailure(DatasetIdResetter resetDatasetId) { @ParameterizedTest @MethodSource("datasetIdResetterProvider") - void testWriteSuccess(DatasetIdResetter resetDatasetId) throws Exception { + void testWriteSuccess(final DatasetIdResetter resetDatasetId) throws Exception { resetDatasetId.accept(config); final BigQueryDestination destination = new BigQueryDestination(); final AirbyteMessageConsumer consumer = destination.getConsumer(config, catalog, Destination::defaultOutputRecordCollector); @@ -257,7 +258,7 @@ void testWriteSuccess(DatasetIdResetter resetDatasetId) throws Exception { @ParameterizedTest @MethodSource("datasetIdResetterProvider") - void testWriteFailure(DatasetIdResetter resetDatasetId) throws Exception { + void testWriteFailure(final DatasetIdResetter resetDatasetId) throws Exception { resetDatasetId.accept(config); // hack to force an exception to be thrown from within the consumer. 
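// A hedged sketch of the failure-injection idiom behind this hack. Assumptions,
// since the stubbed accessor is not visible in this hunk: Mockito's spy/doThrow
// are statically imported, and getRecord() is the accessor the consumer reads.
//   final AirbyteMessage spiedMessage = spy(MESSAGE_USERS1);
//   doThrow(new RuntimeException("induced failure")).when(spiedMessage).getRecord();
// A consumer that touches the spied message's record then throws from inside
// accept(), which is exactly the mid-write failure this test needs to observe.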
final AirbyteMessage spiedMessage = spy(MESSAGE_USERS1); @@ -320,7 +321,7 @@ private List retrieveRecords(final String tableName) throws Exception @ParameterizedTest @MethodSource("datasetIdResetterProvider") - void testWritePartitionOverUnpartitioned(DatasetIdResetter resetDatasetId) throws Exception { + void testWritePartitionOverUnpartitioned(final DatasetIdResetter resetDatasetId) throws Exception { resetDatasetId.accept(config); final String raw_table_name = String.format("_airbyte_raw_%s", USERS_STREAM_NAME); createUnpartitionedTable(bigquery, dataset, raw_table_name); @@ -386,13 +387,13 @@ private boolean isTablePartitioned(final BigQuery bigquery, final Dataset datase private static class DatasetIdResetter { - private Consumer consumer; + private final Consumer consumer; - DatasetIdResetter(Consumer consumer) { + DatasetIdResetter(final Consumer consumer) { this.consumer = consumer; } - public void accept(JsonNode config) { + public void accept(final JsonNode config) { consumer.accept(config); } @@ -404,8 +405,8 @@ private static Stream datasetIdResetterProvider() { Arguments.arguments(new DatasetIdResetter(config -> {})), Arguments.arguments(new DatasetIdResetter( config -> { - String projectId = ((ObjectNode) config).get(BigQueryConsts.CONFIG_PROJECT_ID).asText(); - String datasetId = ((ObjectNode) config).get(BigQueryConsts.CONFIG_DATASET_ID).asText(); + final String projectId = ((ObjectNode) config).get(BigQueryConsts.CONFIG_PROJECT_ID).asText(); + final String datasetId = ((ObjectNode) config).get(BigQueryConsts.CONFIG_DATASET_ID).asText(); ((ObjectNode) config).put(BigQueryConsts.CONFIG_DATASET_ID, String.format("%s:%s", projectId, datasetId)); }))); diff --git a/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryGcsDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryGcsDestinationAcceptanceTest.java index 2b966877e6a0..6ca3d3441e2d 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryGcsDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryGcsDestinationAcceptanceTest.java @@ -23,7 +23,7 @@ protected void setup(final TestDestinationEnv testEnv) throws Exception { + ". 
Override by setting setting path with the CREDENTIALS_PATH constant."); } - final String fullConfigFromSecretFileAsString = new String(Files.readAllBytes(CREDENTIALS_PATH)); + final String fullConfigFromSecretFileAsString = Files.readString(CREDENTIALS_PATH); final JsonNode fullConfigFromSecretFileJson = Jsons.deserialize(fullConfigFromSecretFileAsString); final JsonNode bigqueryConfigFromSecretFile = fullConfigFromSecretFileJson.get(BigQueryConsts.BIGQUERY_BASIC_CONFIG); diff --git a/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryGcsDestinationTest.java b/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryGcsDestinationTest.java index d4bb8329cca7..766b0f1f8be8 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryGcsDestinationTest.java +++ b/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryGcsDestinationTest.java @@ -48,6 +48,7 @@ import io.airbyte.protocol.models.JsonSchemaType; import java.io.ByteArrayInputStream; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.time.Instant; @@ -112,14 +113,14 @@ void setup(final TestInfo info) throws IOException { throw new IllegalStateException( "Must provide path to a big query credentials file. By default {module-root}/config/credentials.json. Override by setting setting path with the CREDENTIALS_PATH constant."); } - final String fullConfigAsString = new String(Files.readAllBytes(CREDENTIALS_PATH)); + final String fullConfigAsString = Files.readString(CREDENTIALS_PATH); final JsonNode credentialsJson = Jsons.deserialize(fullConfigAsString).get(BigQueryConsts.BIGQUERY_BASIC_CONFIG); final JsonNode credentialsGcsJson = Jsons.deserialize(fullConfigAsString).get(BigQueryConsts.GCS_CONFIG); final String projectId = credentialsJson.get(BigQueryConsts.CONFIG_PROJECT_ID).asText(); final ServiceAccountCredentials credentials = ServiceAccountCredentials - .fromStream(new ByteArrayInputStream(credentialsJson.toString().getBytes())); + .fromStream(new ByteArrayInputStream(credentialsJson.toString().getBytes(StandardCharsets.UTF_8))); bigquery = BigQueryOptions.newBuilder() .setProjectId(projectId) .setCredentials(credentials) diff --git a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/gcs/GcsStreamCopier.java b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/gcs/GcsStreamCopier.java index 1c1f7d06ce60..248eac9cdfe0 100644 --- a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/gcs/GcsStreamCopier.java +++ b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/gcs/GcsStreamCopier.java @@ -220,12 +220,12 @@ private static void attemptWriteAndDeleteGcsObject(final GcsConfig gcsConfig, fi final var blobId = BlobId.of(gcsConfig.getBucketName(), "check-content/" + outputTableName); final var blobInfo = BlobInfo.newBuilder(blobId).build(); - storage.create(blobInfo, "".getBytes()); + storage.create(blobInfo, "".getBytes(StandardCharsets.UTF_8)); storage.delete(blobId); } public static Storage 
getStorageClient(final GcsConfig gcsConfig) throws IOException { - final InputStream credentialsInputStream = new ByteArrayInputStream(gcsConfig.getCredentialsJson().getBytes()); + final InputStream credentialsInputStream = new ByteArrayInputStream(gcsConfig.getCredentialsJson().getBytes(StandardCharsets.UTF_8)); final GoogleCredentials credentials = GoogleCredentials.fromStream(credentialsInputStream); return StorageOptions.newBuilder() .setCredentials(credentials) diff --git a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/gcs/GcsStreamCopierFactory.java b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/gcs/GcsStreamCopierFactory.java index deec9c67a32b..0a31c4db9cd5 100644 --- a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/gcs/GcsStreamCopierFactory.java +++ b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/gcs/GcsStreamCopierFactory.java @@ -17,6 +17,7 @@ import io.airbyte.protocol.models.DestinationSyncMode; import java.io.ByteArrayInputStream; import java.io.InputStream; +import java.nio.charset.StandardCharsets; public abstract class GcsStreamCopierFactory implements StreamCopierFactory { @@ -36,7 +37,7 @@ public StreamCopier create(final String configuredSchema, final DestinationSyncMode syncMode = configuredStream.getDestinationSyncMode(); final String schema = StreamCopierFactory.getSchema(stream.getNamespace(), configuredSchema, nameTransformer); - final InputStream credentialsInputStream = new ByteArrayInputStream(gcsConfig.getCredentialsJson().getBytes()); + final InputStream credentialsInputStream = new ByteArrayInputStream(gcsConfig.getCredentialsJson().getBytes(StandardCharsets.UTF_8)); final GoogleCredentials credentials = GoogleCredentials.fromStream(credentialsInputStream); final Storage storageClient = StorageOptions.newBuilder() .setCredentials(credentials) diff --git a/airbyte-integrations/connectors/destination-mongodb-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/mongodb/MongodbDestinationStrictEncryptAcceptanceTest.java b/airbyte-integrations/connectors/destination-mongodb-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/mongodb/MongodbDestinationStrictEncryptAcceptanceTest.java index c3e08e854443..24b1344473da 100644 --- a/airbyte-integrations/connectors/destination-mongodb-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/mongodb/MongodbDestinationStrictEncryptAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-mongodb-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/mongodb/MongodbDestinationStrictEncryptAcceptanceTest.java @@ -43,7 +43,7 @@ static void setupConfig() throws IOException { "Must provide path to a MongoDB credentials file. By default {module-root}/" + CREDENTIALS_PATH + ". 
Override by setting setting path with the CREDENTIALS_PATH constant."); } - final String credentialsJsonString = new String(Files.readAllBytes(CREDENTIALS_PATH)); + final String credentialsJsonString = Files.readString(CREDENTIALS_PATH); final JsonNode credentialsJson = Jsons.deserialize(credentialsJsonString); final JsonNode instanceConfig = Jsons.jsonNode(ImmutableMap.builder() diff --git a/airbyte-integrations/connectors/destination-oracle/src/main/java/io/airbyte/integrations/destination/oracle/OracleDestination.java b/airbyte-integrations/connectors/destination-oracle/src/main/java/io/airbyte/integrations/destination/oracle/OracleDestination.java index 795882a30ad9..8259258686d0 100644 --- a/airbyte-integrations/connectors/destination-oracle/src/main/java/io/airbyte/integrations/destination/oracle/OracleDestination.java +++ b/airbyte-integrations/connectors/destination-oracle/src/main/java/io/airbyte/integrations/destination/oracle/OracleDestination.java @@ -14,6 +14,7 @@ import io.airbyte.integrations.destination.jdbc.AbstractJdbcDestination; import java.io.IOException; import java.io.PrintWriter; +import java.nio.charset.StandardCharsets; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -134,7 +135,7 @@ private static void tryConvertAndImportCertificate(final String certificate) { private static void convertAndImportCertificate(final String certificate) throws IOException, InterruptedException { final Runtime run = Runtime.getRuntime(); - try (final PrintWriter out = new PrintWriter("certificate.pem")) { + try (final PrintWriter out = new PrintWriter("certificate.pem", StandardCharsets.UTF_8)) { out.print(certificate); } runProcess("openssl x509 -outform der -in certificate.pem -out certificate.der", run); diff --git a/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/PostgresSqlOperations.java b/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/PostgresSqlOperations.java index 27d1caa5f67c..f779791cc977 100644 --- a/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/PostgresSqlOperations.java +++ b/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/PostgresSqlOperations.java @@ -11,6 +11,7 @@ import java.io.File; import java.io.FileReader; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.sql.SQLException; import java.util.List; @@ -41,7 +42,7 @@ public void insertRecordsInternal(final JdbcDatabase database, final var copyManager = new CopyManager(connection.unwrap(BaseConnection.class)); final var sql = String.format("COPY %s.%s FROM stdin DELIMITER ',' CSV", schemaName, tmpTableName); - final var bufferedReader = new BufferedReader(new FileReader(tmpFile)); + final var bufferedReader = new BufferedReader(new FileReader(tmpFile, StandardCharsets.UTF_8)); copyManager.copyIn(sql, bufferedReader); } catch (final Exception e) { throw new RuntimeException(e); diff --git a/airbyte-integrations/connectors/destination-pubsub/src/test-integration/java/io/airbyte/integrations/destination/pubsub/PubsubDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-pubsub/src/test-integration/java/io/airbyte/integrations/destination/pubsub/PubsubDestinationAcceptanceTest.java index 40083e5b2830..6f15a5499bdc 100644 --- 
a/airbyte-integrations/connectors/destination-pubsub/src/test-integration/java/io/airbyte/integrations/destination/pubsub/PubsubDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-pubsub/src/test-integration/java/io/airbyte/integrations/destination/pubsub/PubsubDestinationAcceptanceTest.java @@ -168,7 +168,7 @@ protected void setup(final TestDestinationEnv testEnv) throws Exception { + ". Override by setting setting path with the CREDENTIALS_PATH constant."); } - final String credentialsJsonString = new String(Files.readAllBytes(CREDENTIALS_PATH)); + final String credentialsJsonString = Files.readString(CREDENTIALS_PATH); final JsonNode credentialsJson = Jsons.deserialize(credentialsJsonString); final String projectId = credentialsJson.get(CONFIG_PROJECT_ID).asText(); diff --git a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftSqlOperations.java b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftSqlOperations.java index ac4ea2cf60a3..5887c0c58824 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftSqlOperations.java +++ b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftSqlOperations.java @@ -12,6 +12,7 @@ import io.airbyte.integrations.destination.jdbc.SqlOperations; import io.airbyte.integrations.destination.jdbc.SqlOperationsUtils; import io.airbyte.protocol.models.AirbyteRecordMessage; +import java.nio.charset.StandardCharsets; import java.sql.SQLException; import java.util.List; import org.slf4j.Logger; @@ -59,7 +60,7 @@ public void insertRecordsInternal(final JdbcDatabase database, @Override public boolean isValidData(final JsonNode data) { final String stringData = Jsons.serialize(data); - final int dataSize = stringData.getBytes().length; + final int dataSize = stringData.getBytes(StandardCharsets.UTF_8).length; return dataSize <= REDSHIFT_VARCHAR_MAX_BYTE_SIZE; } diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/SnowflakeDestinationIntegrationTest.java b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/SnowflakeDestinationIntegrationTest.java index 6d6f50b129fd..4c47280e9a66 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/SnowflakeDestinationIntegrationTest.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/SnowflakeDestinationIntegrationTest.java @@ -15,6 +15,7 @@ import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.protocol.models.AirbyteConnectionStatus; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Paths; import java.sql.SQLException; @@ -30,7 +31,7 @@ void testCheckFailsWithInvalidPermissions() throws Exception { // schema // this connector should be updated with multiple credentials, each with a clear purpose (valid, // invalid: insufficient permissions, invalid: wrong password, etc..) 
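// Why the swap below is more than cosmetic: new String(Files.readAllBytes(path))
// decodes the bytes with the JVM's platform-default charset, which varies by
// machine and locale, whereas Files.readString(path) (Java 11+) always decodes
// UTF-8. A minimal sketch of the two behaviors (Charset and Paths imports
// assumed for illustration):
//   final String utf8 = Files.readString(Paths.get("secrets/config.json"));
//   final String platformDependent = new String(
//       Files.readAllBytes(Paths.get("secrets/config.json")), Charset.defaultCharset());
// The same reasoning drives the getBytes() to getBytes(StandardCharsets.UTF_8)
// changes elsewhere in this patch: the no-argument String.getBytes() also uses
// the platform default.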
- final JsonNode credentialsJsonString = Jsons.deserialize(new String(Files.readAllBytes(Paths.get("secrets/config.json")))); + final JsonNode credentialsJsonString = Jsons.deserialize(Files.readString(Paths.get("secrets/config.json"))); final AirbyteConnectionStatus check = new SnowflakeDestination().check(credentialsJsonString); assertEquals(AirbyteConnectionStatus.Status.FAILED, check.getStatus()); } @@ -63,7 +64,7 @@ private void syncWithoutNamingResolver(final JdbcDatabase database, final String } private JsonNode getConfig() throws IOException { - final JsonNode config = Jsons.deserialize(new String(Files.readAllBytes(Paths.get("secrets/insert_config.json")))); + final JsonNode config = Jsons.deserialize(Files.readString(Paths.get("secrets/insert_config.json"))); final String schemaName = "schemaName with whitespace " + Strings.addRandomSuffix("integration_test", "_", 5); ((ObjectNode) config).put("schema", schemaName); return config; diff --git a/airbyte-integrations/connectors/source-bigquery/src/test-integration/java/io/airbyte/integrations/source/bigquery/BigQuerySourceAcceptanceTest.java b/airbyte-integrations/connectors/source-bigquery/src/test-integration/java/io/airbyte/integrations/source/bigquery/BigQuerySourceAcceptanceTest.java index 7e6fed890777..cfaf256c126a 100644 --- a/airbyte-integrations/connectors/source-bigquery/src/test-integration/java/io/airbyte/integrations/source/bigquery/BigQuerySourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-bigquery/src/test-integration/java/io/airbyte/integrations/source/bigquery/BigQuerySourceAcceptanceTest.java @@ -40,7 +40,7 @@ protected void setupEnvironment(final TestDestinationEnv testEnv) throws IOExcep + ". Override by setting setting path with the CREDENTIALS_PATH constant."); } - final String credentialsJsonString = new String(Files.readAllBytes(CREDENTIALS_PATH)); + final String credentialsJsonString = Files.readString(CREDENTIALS_PATH); final JsonNode credentialsJson = Jsons.deserialize(credentialsJsonString); final String projectId = credentialsJson.get(CONFIG_PROJECT_ID).asText(); diff --git a/airbyte-integrations/connectors/source-bigquery/src/test-integration/java/io/airbyte/integrations/source/bigquery/BigQuerySourceDatatypeTest.java b/airbyte-integrations/connectors/source-bigquery/src/test-integration/java/io/airbyte/integrations/source/bigquery/BigQuerySourceDatatypeTest.java index 48c26ce3fc12..9ed9bf823584 100644 --- a/airbyte-integrations/connectors/source-bigquery/src/test-integration/java/io/airbyte/integrations/source/bigquery/BigQuerySourceDatatypeTest.java +++ b/airbyte-integrations/connectors/source-bigquery/src/test-integration/java/io/airbyte/integrations/source/bigquery/BigQuerySourceDatatypeTest.java @@ -59,7 +59,7 @@ protected Database setupDatabase() throws Exception { + ". 
Override by setting setting path with the CREDENTIALS_PATH constant."); } - final String credentialsJsonString = new String(Files.readAllBytes(CREDENTIALS_PATH)); + final String credentialsJsonString = Files.readString(CREDENTIALS_PATH); final JsonNode credentialsJson = Jsons.deserialize(credentialsJsonString); final String projectId = credentialsJson.get(CONFIG_PROJECT_ID).asText(); diff --git a/airbyte-integrations/connectors/source-bigquery/src/test-integration/java/io/airbyte/integrations/source/bigquery/BigQuerySourceTest.java b/airbyte-integrations/connectors/source-bigquery/src/test-integration/java/io/airbyte/integrations/source/bigquery/BigQuerySourceTest.java index 78f6e306f97a..d2d9cab80220 100644 --- a/airbyte-integrations/connectors/source-bigquery/src/test-integration/java/io/airbyte/integrations/source/bigquery/BigQuerySourceTest.java +++ b/airbyte-integrations/connectors/source-bigquery/src/test-integration/java/io/airbyte/integrations/source/bigquery/BigQuerySourceTest.java @@ -49,7 +49,7 @@ void setUp() throws IOException, SQLException { + ". Override by setting setting path with the CREDENTIALS_PATH constant."); } - final String credentialsJsonString = new String(Files.readAllBytes(CREDENTIALS_PATH)); + final String credentialsJsonString = Files.readString(CREDENTIALS_PATH); final JsonNode credentialsJson = Jsons.deserialize(credentialsJsonString); final String projectId = credentialsJson.get(CONFIG_PROJECT_ID).asText(); diff --git a/airbyte-integrations/connectors/source-mongodb-strict-encrypt/src/test-integration/java/io/airbyte/integrations/source/mongodb/MongodbSourceStrictEncryptAcceptanceTest.java b/airbyte-integrations/connectors/source-mongodb-strict-encrypt/src/test-integration/java/io/airbyte/integrations/source/mongodb/MongodbSourceStrictEncryptAcceptanceTest.java index 14638bda2f3e..0b460ce9da46 100644 --- a/airbyte-integrations/connectors/source-mongodb-strict-encrypt/src/test-integration/java/io/airbyte/integrations/source/mongodb/MongodbSourceStrictEncryptAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-mongodb-strict-encrypt/src/test-integration/java/io/airbyte/integrations/source/mongodb/MongodbSourceStrictEncryptAcceptanceTest.java @@ -61,7 +61,7 @@ protected void setupEnvironment(final TestDestinationEnv environment) throws Exc + ". 
Override by setting setting path with the CREDENTIALS_PATH constant."); } - final String credentialsJsonString = new String(Files.readAllBytes(CREDENTIALS_PATH)); + final String credentialsJsonString = Files.readString(CREDENTIALS_PATH); final JsonNode credentialsJson = Jsons.deserialize(credentialsJsonString); final JsonNode instanceConfig = Jsons.jsonNode(ImmutableMap.builder() diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/MongoDbSourceAtlasAcceptanceTest.java b/airbyte-integrations/connectors/source-mongodb-v2/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/MongoDbSourceAtlasAcceptanceTest.java index 9cb8b5f062b3..3fdd839f9f18 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/MongoDbSourceAtlasAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-mongodb-v2/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/MongoDbSourceAtlasAcceptanceTest.java @@ -31,7 +31,7 @@ protected void setupEnvironment(final TestDestinationEnv environment) throws Exc + ". Override by setting setting path with the CREDENTIALS_PATH constant."); } - final String credentialsJsonString = new String(Files.readAllBytes(CREDENTIALS_PATH)); + final String credentialsJsonString = Files.readString(CREDENTIALS_PATH); final JsonNode credentialsJson = Jsons.deserialize(credentialsJsonString); final JsonNode instanceConfig = Jsons.jsonNode(ImmutableMap.builder() diff --git a/airbyte-integrations/connectors/source-mssql/src/test-performance/java/io/airbyte/integrations/source/mssql/FillMsSqlTestDbScriptTest.java b/airbyte-integrations/connectors/source-mssql/src/test-performance/java/io/airbyte/integrations/source/mssql/FillMsSqlTestDbScriptTest.java index 4182353dc894..ed5ba68a46ce 100644 --- a/airbyte-integrations/connectors/source-mssql/src/test-performance/java/io/airbyte/integrations/source/mssql/FillMsSqlTestDbScriptTest.java +++ b/airbyte-integrations/connectors/source-mssql/src/test-performance/java/io/airbyte/integrations/source/mssql/FillMsSqlTestDbScriptTest.java @@ -33,7 +33,7 @@ protected String getImageName() { } @Override - protected Database setupDatabase(String dbName) { + protected Database setupDatabase(final String dbName) { final JsonNode replicationMethod = Jsons.jsonNode(ImmutableMap.builder() .put("method", "Standard") .build()); @@ -70,10 +70,9 @@ protected Database setupDatabase(String dbName) { * - a number of streams to read in configured airbyte Catalog. Each stream\table in DB should be * names like "test_0", "test_1",..., test_n. 
*/ - @BeforeAll - public static void beforeAll() { - AbstractSourceFillDbWithTestData.testArgs = Stream.of( - Arguments.of("your_db_name", "dbo", 100, 2, 240, 1000) // "dbo" is a default schema name in MsSQL DB + @Override + protected Stream provideParameters() { + return Stream.of(Arguments.of("your_db_name", "dbo", 100, 2, 240, 1000) // "dbo" is a default schema name in MsSQL DB ); } diff --git a/airbyte-integrations/connectors/source-mssql/src/test-performance/java/io/airbyte/integrations/source/mssql/MsSqlRdsSourcePerformanceSecretTest.java b/airbyte-integrations/connectors/source-mssql/src/test-performance/java/io/airbyte/integrations/source/mssql/MsSqlRdsSourcePerformanceSecretTest.java index ae5dc98a4f44..4b88c2c506c4 100644 --- a/airbyte-integrations/connectors/source-mssql/src/test-performance/java/io/airbyte/integrations/source/mssql/MsSqlRdsSourcePerformanceSecretTest.java +++ b/airbyte-integrations/connectors/source-mssql/src/test-performance/java/io/airbyte/integrations/source/mssql/MsSqlRdsSourcePerformanceSecretTest.java @@ -24,8 +24,8 @@ protected String getImageName() { } @Override - protected void setupDatabase(String dbName) { - JsonNode plainConfig = Jsons.deserialize(IOs.readFile(Path.of(PERFORMANCE_SECRET_CREDS))); + protected void setupDatabase(final String dbName) { + final JsonNode plainConfig = Jsons.deserialize(IOs.readFile(Path.of(PERFORMANCE_SECRET_CREDS))); config = Jsons.jsonNode(ImmutableMap.builder() .put("host", plainConfig.get("host")) @@ -44,9 +44,9 @@ protected void setupDatabase(String dbName) { * use for Airbyte Catalog configuration 5th arg - a number of streams to read in configured airbyte * Catalog. Each stream\table in DB should be names like "test_0", "test_1",..., test_n. */ - @BeforeAll - public static void beforeAll() { - AbstractSourcePerformanceTest.testArgs = Stream.of( + @Override + protected Stream provideParameters() { + return Stream.of( Arguments.of("t1000_c240_r200", "dbo", 200, 240, 1000), Arguments.of("t25_c8_r50k_s10kb", "dbo", 50000, 8, 25), Arguments.of("t1000_c8_r10k_s500b", "dbo", 10000, 8, 1000)); diff --git a/airbyte-integrations/connectors/source-mysql/src/test-performance/java/io/airbyte/integrations/source/mysql/FillMySqlTestDbScriptTest.java b/airbyte-integrations/connectors/source-mysql/src/test-performance/java/io/airbyte/integrations/source/mysql/FillMySqlTestDbScriptTest.java index d9fcf5ef3208..68f6de841793 100644 --- a/airbyte-integrations/connectors/source-mysql/src/test-performance/java/io/airbyte/integrations/source/mysql/FillMySqlTestDbScriptTest.java +++ b/airbyte-integrations/connectors/source-mysql/src/test-performance/java/io/airbyte/integrations/source/mysql/FillMySqlTestDbScriptTest.java @@ -74,12 +74,9 @@ protected Database setupDatabase(final String dbName) throws Exception { * - a number of streams to read in configured airbyte Catalog. Each stream\table in DB should be * names like "test_0", "test_1",..., test_n.
*/ - @BeforeAll - public static void beforeAll() { - AbstractSourceFillDbWithTestData.testArgs = Stream.of( - Arguments.of("your_db_name", "your_schema_name", 100, 2, 240, 1000) + @Override + protected Stream provideParameters() { // for MySQL DB name and schema name would be the same - ); + return Stream.of(Arguments.of("your_db_name", "your_schema_name", 100, 2, 240, 1000)); } - } diff --git a/airbyte-integrations/connectors/source-mysql/src/test-performance/java/io/airbyte/integrations/source/mysql/MySqlRdsSourcePerformanceSecretTest.java b/airbyte-integrations/connectors/source-mysql/src/test-performance/java/io/airbyte/integrations/source/mysql/MySqlRdsSourcePerformanceSecretTest.java index 3c41de10b80f..5c7322ed3bb6 100644 --- a/airbyte-integrations/connectors/source-mysql/src/test-performance/java/io/airbyte/integrations/source/mysql/MySqlRdsSourcePerformanceSecretTest.java +++ b/airbyte-integrations/connectors/source-mysql/src/test-performance/java/io/airbyte/integrations/source/mysql/MySqlRdsSourcePerformanceSecretTest.java @@ -65,12 +65,11 @@ protected void setupDatabase(final String dbName) throws Exception { * use for Airbyte Catalog configuration 5th arg - a number of streams to read in configured airbyte * Catalog. Each stream\table in DB should be names like "test_0", "test_1",..., test_n. */ - @BeforeAll - public static void beforeAll() { - AbstractSourcePerformanceTest.testArgs = Stream.of( + @Override + protected Stream provideParameters() { + return Stream.of( Arguments.of("t1000_c240_r200", "t1000_c240_r200", 200, 240, 1000), Arguments.of("t25_c8_r50k_s10kb", "t25_c8_r50k_s10kb", 50000, 8, 25), Arguments.of("t1000_c8_r10k_s500b", "t1000_c8_r10k_s500b", 10000, 8, 1000)); } - } diff --git a/airbyte-integrations/connectors/source-postgres/src/test-performance/java/io/airbyte/integrations/source/postgres/FillPostgresTestDbScriptTest.java b/airbyte-integrations/connectors/source-postgres/src/test-performance/java/io/airbyte/integrations/source/postgres/FillPostgresTestDbScriptTest.java index e72f4328759b..6113c16bb8cb 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test-performance/java/io/airbyte/integrations/source/postgres/FillPostgresTestDbScriptTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test-performance/java/io/airbyte/integrations/source/postgres/FillPostgresTestDbScriptTest.java @@ -34,7 +34,7 @@ protected String getImageName() { } @Override - protected Database setupDatabase(String dbName) throws Exception { + protected Database setupDatabase(final String dbName) throws Exception { final JsonNode replicationMethod = Jsons.jsonNode(ImmutableMap.builder() .put("method", "Standard") .build()); @@ -71,10 +71,8 @@ protected Database setupDatabase(String dbName) throws Exception { * - a number of streams to read in configured airbyte Catalog. Each stream\table in DB should be * names like "test_0", "test_1",..., test_n.
*/ - @BeforeAll - public static void beforeAll() { - AbstractSourceFillDbWithTestData.testArgs = Stream.of( - Arguments.of("postgres", "\"your_schema_name\"", 100, 2, 240, 1000)); + @Override + protected Stream provideParameters() { + return Stream.of(Arguments.of("postgres", "\"your_schema_name\"", 100, 2, 240, 1000)); } - } diff --git a/airbyte-integrations/connectors/source-postgres/src/test-performance/java/io/airbyte/integrations/source/postgres/PostgresRdsSourcePerformanceTest.java b/airbyte-integrations/connectors/source-postgres/src/test-performance/java/io/airbyte/integrations/source/postgres/PostgresRdsSourcePerformanceTest.java index 2798dda94413..912d8a988ce6 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test-performance/java/io/airbyte/integrations/source/postgres/PostgresRdsSourcePerformanceTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test-performance/java/io/airbyte/integrations/source/postgres/PostgresRdsSourcePerformanceTest.java @@ -27,8 +27,8 @@ protected String getImageName() { } @Override - protected void setupDatabase(String dbName) { - JsonNode plainConfig = Jsons.deserialize(IOs.readFile(Path.of(PERFORMANCE_SECRET_CREDS))); + protected void setupDatabase(final String dbName) { + final JsonNode plainConfig = Jsons.deserialize(IOs.readFile(Path.of(PERFORMANCE_SECRET_CREDS))); final JsonNode replicationMethod = Jsons.jsonNode(ImmutableMap.builder() .put("method", "Standard") .build()); @@ -54,9 +54,9 @@ protected void setupDatabase(String dbName) { * use for Airbyte Catalog configuration 5th arg - a number of streams to read in configured airbyte * Catalog. Each stream\table in DB should be names like "test_0", "test_1",..., test_n. */ - @BeforeAll - public static void beforeAll() { - AbstractSourcePerformanceTest.testArgs = Stream.of( + @Override + protected Stream provideParameters() { + return Stream.of( Arguments.of(SCHEMAS.get(0), SCHEMAS.get(0), 200, 240, 1000), Arguments.of(SCHEMAS.get(1), SCHEMAS.get(1), 50000, 8, 25), Arguments.of(SCHEMAS.get(2), SCHEMAS.get(2), 10000, 8, 1000)); diff --git a/airbyte-notification/src/test/java/io/airbyte/notification/SlackNotificationClientTest.java b/airbyte-notification/src/test/java/io/airbyte/notification/SlackNotificationClientTest.java index f7ba698d7900..c9c580a87aaa 100644 --- a/airbyte-notification/src/test/java/io/airbyte/notification/SlackNotificationClientTest.java +++ b/airbyte-notification/src/test/java/io/airbyte/notification/SlackNotificationClientTest.java @@ -21,6 +21,7 @@ import java.io.OutputStream; import java.net.InetSocketAddress; import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; import java.util.UUID; import org.apache.commons.io.IOUtils; import org.junit.jupiter.api.AfterEach; @@ -145,7 +146,7 @@ public void handle(final HttpExchange t) throws IOException { t.sendResponseHeaders(500, response.length()); } final OutputStream os = t.getResponseBody(); - os.write(response.getBytes()); + os.write(response.getBytes(StandardCharsets.UTF_8)); os.close(); } diff --git a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/FacebookOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/FacebookOAuthFlowIntegrationTest.java index be75654c3e1e..2d41d8a951cc 100644 --- a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/FacebookOAuthFlowIntegrationTest.java +++ b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/FacebookOAuthFlowIntegrationTest.java @@ -56,7 +56,7 @@ protected int
getServerListeningPort() { public void testFullFacebookOAuthFlow() throws InterruptedException, ConfigNotFoundException, IOException, JsonValidationException { final UUID workspaceId = UUID.randomUUID(); final UUID definitionId = UUID.randomUUID(); - final String fullConfigAsString = new String(Files.readAllBytes(CREDENTIALS_PATH)); + final String fullConfigAsString = Files.readString(CREDENTIALS_PATH); final JsonNode credentialsJson = Jsons.deserialize(fullConfigAsString); when(configRepository.listSourceOAuthParam()).thenReturn(List.of(new SourceOAuthParameter() .withOauthParameterId(UUID.randomUUID()) diff --git a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/GithubOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/GithubOAuthFlowIntegrationTest.java index 1c99d8566fd3..849b2d25f845 100644 --- a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/GithubOAuthFlowIntegrationTest.java +++ b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/GithubOAuthFlowIntegrationTest.java @@ -57,7 +57,7 @@ public void testFullGithubOAuthFlow() throws InterruptedException, ConfigNotFoun int limit = 20; final UUID workspaceId = UUID.randomUUID(); final UUID definitionId = UUID.randomUUID(); - final String fullConfigAsString = new String(Files.readAllBytes(CREDENTIALS_PATH)); + final String fullConfigAsString = Files.readString(CREDENTIALS_PATH); final JsonNode credentialsJson = Jsons.deserialize(fullConfigAsString); when(configRepository.listSourceOAuthParam()).thenReturn(List.of(new SourceOAuthParameter() .withOauthParameterId(UUID.randomUUID()) diff --git a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/IntercomOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/IntercomOAuthFlowIntegrationTest.java index a0258a7178fa..f34faf548410 100644 --- a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/IntercomOAuthFlowIntegrationTest.java +++ b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/IntercomOAuthFlowIntegrationTest.java @@ -57,7 +57,7 @@ public void testFullIntercomOAuthFlow() throws InterruptedException, ConfigNotFo int limit = 20; final UUID workspaceId = UUID.randomUUID(); final UUID definitionId = UUID.randomUUID(); - final String fullConfigAsString = new String(Files.readAllBytes(CREDENTIALS_PATH)); + final String fullConfigAsString = Files.readString(CREDENTIALS_PATH); final JsonNode credentialsJson = Jsons.deserialize(fullConfigAsString); when(configRepository.listSourceOAuthParam()).thenReturn(List.of(new SourceOAuthParameter() .withOauthParameterId(UUID.randomUUID()) diff --git a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/LinkedinAdsOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/LinkedinAdsOAuthFlowIntegrationTest.java index eb10a1d45bf2..25ec200248f1 100644 --- a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/LinkedinAdsOAuthFlowIntegrationTest.java +++ b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/LinkedinAdsOAuthFlowIntegrationTest.java @@ -51,7 +51,7 @@ public void testFullOAuthFlow() throws InterruptedException, ConfigNotFoundExcep int limit = 20; final UUID workspaceId = UUID.randomUUID(); final UUID definitionId = UUID.randomUUID(); - final String fullConfigAsString = new String(Files.readAllBytes(CREDENTIALS_PATH)); + final String fullConfigAsString = Files.readString(CREDENTIALS_PATH); final JsonNode 
credentialsJson = Jsons.deserialize(fullConfigAsString); when(configRepository.listSourceOAuthParam()).thenReturn(List.of(new SourceOAuthParameter() .withOauthParameterId(UUID.randomUUID()) diff --git a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/PipeDriveOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/PipeDriveOAuthFlowIntegrationTest.java index e8780b9c1bec..ea5183cbcef1 100644 --- a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/PipeDriveOAuthFlowIntegrationTest.java +++ b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/PipeDriveOAuthFlowIntegrationTest.java @@ -42,7 +42,7 @@ protected int getServerListeningPort() { } @Override - protected OAuthFlowImplementation getFlowImplementation(ConfigRepository configRepository, HttpClient httpClient) { + protected OAuthFlowImplementation getFlowImplementation(final ConfigRepository configRepository, final HttpClient httpClient) { return new PipeDriveOAuthFlow(configRepository, httpClient); } @@ -50,7 +50,7 @@ protected OAuthFlowImplementation getFlowImplementation(ConfigRepository configR public void testFullPipeDriveOAuthFlow() throws InterruptedException, ConfigNotFoundException, IOException, JsonValidationException { final UUID workspaceId = UUID.randomUUID(); final UUID definitionId = UUID.randomUUID(); - final String fullConfigAsString = new String(Files.readAllBytes(getCredentialsPath())); + final String fullConfigAsString = Files.readString(getCredentialsPath()); final JsonNode credentialsJson = Jsons.deserialize(fullConfigAsString); when(configRepository.listSourceOAuthParam()).thenReturn(List.of(new SourceOAuthParameter() .withOauthParameterId(UUID.randomUUID()) diff --git a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/QuickbooksOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/QuickbooksOAuthFlowIntegrationTest.java index 658bf7acf27c..b397662489ea 100644 --- a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/QuickbooksOAuthFlowIntegrationTest.java +++ b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/QuickbooksOAuthFlowIntegrationTest.java @@ -51,7 +51,7 @@ public void testFullOAuthFlow() throws InterruptedException, ConfigNotFoundExcep int limit = 20; final UUID workspaceId = UUID.randomUUID(); final UUID definitionId = UUID.randomUUID(); - final String fullConfigAsString = new String(Files.readAllBytes(CREDENTIALS_PATH)); + final String fullConfigAsString = Files.readString(CREDENTIALS_PATH); final JsonNode credentialsJson = Jsons.deserialize(fullConfigAsString); when(configRepository.listSourceOAuthParam()).thenReturn(List.of(new SourceOAuthParameter() .withOauthParameterId(UUID.randomUUID()) diff --git a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SalesforceOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SalesforceOAuthFlowIntegrationTest.java index 8c255c23d920..eb557406e170 100644 --- a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SalesforceOAuthFlowIntegrationTest.java +++ b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SalesforceOAuthFlowIntegrationTest.java @@ -22,6 +22,7 @@ import java.io.OutputStream; import java.net.InetSocketAddress; import java.net.http.HttpClient; +import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.util.HashMap; @@ -73,7 +74,7 @@ public void 
testFullSalesforceOAuthFlow() throws InterruptedException, ConfigNot int limit = 20; final UUID workspaceId = UUID.randomUUID(); final UUID definitionId = UUID.randomUUID(); - final String fullConfigAsString = new String(Files.readAllBytes(CREDENTIALS_PATH)); + final String fullConfigAsString = Files.readString(CREDENTIALS_PATH); final JsonNode credentialsJson = Jsons.deserialize(fullConfigAsString); final String clientId = credentialsJson.get("client_id").asText(); when(configRepository.listSourceOAuthParam()).thenReturn(List.of(new SourceOAuthParameter() @@ -107,7 +108,7 @@ static class ServerHandler implements HttpHandler { private String paramValue; private boolean succeeded; - public ServerHandler(String expectedParam) { + public ServerHandler(final String expectedParam) { this.expectedParam = expectedParam; this.paramValue = ""; this.succeeded = false; @@ -126,7 +127,7 @@ public Map getResponseQuery() { } @Override - public void handle(HttpExchange t) { + public void handle(final HttpExchange t) { final String query = t.getRequestURI().getQuery(); LOGGER.info("Received query: '{}'", query); final Map data; @@ -146,9 +147,9 @@ public void handle(HttpExchange t) { t.sendResponseHeaders(500, response.length()); } final OutputStream os = t.getResponseBody(); - os.write(response.getBytes()); + os.write(response.getBytes(StandardCharsets.UTF_8)); os.close(); - } catch (RuntimeException | IOException e) { + } catch (final RuntimeException | IOException e) { LOGGER.error("Failed to parse from body {}", query, e); } } diff --git a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SlackOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SlackOAuthFlowIntegrationTest.java index 3e69a32df32f..15dcf444c0a6 100644 --- a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SlackOAuthFlowIntegrationTest.java +++ b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SlackOAuthFlowIntegrationTest.java @@ -46,7 +46,7 @@ public void testFullSlackOAuthFlow() throws InterruptedException, ConfigNotFound int limit = 20; final UUID workspaceId = UUID.randomUUID(); final UUID definitionId = UUID.randomUUID(); - final String fullConfigAsString = new String(Files.readAllBytes(getCredentialsPath())); + final String fullConfigAsString = Files.readString(getCredentialsPath()); final JsonNode credentialsJson = Jsons.deserialize(fullConfigAsString).get("credentials"); when(configRepository.listSourceOAuthParam()).thenReturn(List.of(new SourceOAuthParameter() .withOauthParameterId(UUID.randomUUID()) diff --git a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SnapchatMarketingOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SnapchatMarketingOAuthFlowIntegrationTest.java index 21b3d4cc6b59..478aea0dd349 100644 --- a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SnapchatMarketingOAuthFlowIntegrationTest.java +++ b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SnapchatMarketingOAuthFlowIntegrationTest.java @@ -50,7 +50,7 @@ protected OAuthFlowImplementation getFlowImplementation(final ConfigRepository c public void testFullSnapchatMarketingOAuthFlow() throws InterruptedException, ConfigNotFoundException, IOException, JsonValidationException { final UUID workspaceId = UUID.randomUUID(); final UUID definitionId = UUID.randomUUID(); - final String fullConfigAsString = new String(Files.readAllBytes(getCredentialsPath())); + final String fullConfigAsString 
= Files.readString(getCredentialsPath()); final JsonNode credentialsJson = Jsons.deserialize(fullConfigAsString); when(configRepository.listSourceOAuthParam()).thenReturn(List.of(new SourceOAuthParameter() .withOauthParameterId(UUID.randomUUID()) diff --git a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SquareOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SquareOAuthFlowIntegrationTest.java index 03da842f2471..18c385aa9a8b 100644 --- a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SquareOAuthFlowIntegrationTest.java +++ b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SquareOAuthFlowIntegrationTest.java @@ -57,7 +57,7 @@ public void testFullSquareOAuthFlow() throws InterruptedException, ConfigNotFoun int limit = 20; final UUID workspaceId = UUID.randomUUID(); final UUID definitionId = UUID.randomUUID(); - final String fullConfigAsString = new String(Files.readAllBytes(CREDENTIALS_PATH)); + final String fullConfigAsString = Files.readString(CREDENTIALS_PATH); final JsonNode credentialsJson = Jsons.deserialize(fullConfigAsString); when(configRepository.listSourceOAuthParam()).thenReturn(List.of(new SourceOAuthParameter() .withOauthParameterId(UUID.randomUUID()) diff --git a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SurveymonkeyOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SurveymonkeyOAuthFlowIntegrationTest.java index ac97847bcbd7..a70e8b8d0b98 100644 --- a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SurveymonkeyOAuthFlowIntegrationTest.java +++ b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SurveymonkeyOAuthFlowIntegrationTest.java @@ -55,7 +55,7 @@ protected int getServerListeningPort() { public void testFullSurveymonkeyOAuthFlow() throws InterruptedException, ConfigNotFoundException, IOException, JsonValidationException { final UUID workspaceId = UUID.randomUUID(); final UUID definitionId = UUID.randomUUID(); - final String fullConfigAsString = new String(Files.readAllBytes(CREDENTIALS_PATH)); + final String fullConfigAsString = Files.readString(CREDENTIALS_PATH); final JsonNode credentialsJson = Jsons.deserialize(fullConfigAsString); when(configRepository.listSourceOAuthParam()).thenReturn(List.of(new SourceOAuthParameter() .withOauthParameterId(UUID.randomUUID()) diff --git a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/TrelloOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/TrelloOAuthFlowIntegrationTest.java index 39b6876857cf..08922439a43a 100644 --- a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/TrelloOAuthFlowIntegrationTest.java +++ b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/TrelloOAuthFlowIntegrationTest.java @@ -22,6 +22,7 @@ import java.io.OutputStream; import java.net.InetSocketAddress; import java.net.http.HttpClient; +import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.util.HashMap; @@ -73,7 +74,7 @@ public void testFullGoogleOAuthFlow() throws InterruptedException, ConfigNotFoun int limit = 20; final UUID workspaceId = UUID.randomUUID(); final UUID definitionId = UUID.randomUUID(); - final String fullConfigAsString = new String(Files.readAllBytes(CREDENTIALS_PATH)); + final String fullConfigAsString = Files.readString(CREDENTIALS_PATH); final JsonNode credentialsJson = Jsons.deserialize(fullConfigAsString); final 
String clientId = credentialsJson.get("client_id").asText(); when(configRepository.listSourceOAuthParam()).thenReturn(List.of(new SourceOAuthParameter() @@ -147,7 +148,7 @@ public void handle(final HttpExchange t) { t.sendResponseHeaders(500, response.length()); } final OutputStream os = t.getResponseBody(); - os.write(response.getBytes()); + os.write(response.getBytes(StandardCharsets.UTF_8)); os.close(); } catch (final RuntimeException | IOException e) { LOGGER.error("Failed to parse from body {}", query, e); diff --git a/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/HubspotOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/HubspotOAuthFlowIntegrationTest.java index 844a3f142446..ddc3abb65c4f 100644 --- a/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/HubspotOAuthFlowIntegrationTest.java +++ b/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/HubspotOAuthFlowIntegrationTest.java @@ -31,12 +31,12 @@ protected Path getCredentialsPath() { return Path.of("secrets/hubspot.json"); } - protected OAuthFlowImplementation getFlowObject(ConfigRepository configRepository) { + protected OAuthFlowImplementation getFlowObject(final ConfigRepository configRepository) { return new HubspotOAuthFlow(configRepository, httpClient); } @Override - protected OAuthFlowImplementation getFlowImplementation(ConfigRepository configRepository, HttpClient httpClient) { + protected OAuthFlowImplementation getFlowImplementation(final ConfigRepository configRepository, final HttpClient httpClient) { return new HubspotOAuthFlow(configRepository, httpClient); } @@ -45,7 +45,7 @@ public void testFullOAuthFlow() throws InterruptedException, ConfigNotFoundExcep int limit = 100; final UUID workspaceId = UUID.randomUUID(); final UUID definitionId = UUID.randomUUID(); - final String fullConfigAsString = new String(Files.readAllBytes(getCredentialsPath())); + final String fullConfigAsString = Files.readString(getCredentialsPath()); final JsonNode credentialsJson = Jsons.deserialize(fullConfigAsString); when(configRepository.listSourceOAuthParam()).thenReturn(List.of(new SourceOAuthParameter() .withOauthParameterId(UUID.randomUUID()) @@ -55,7 +55,7 @@ public void testFullOAuthFlow() throws InterruptedException, ConfigNotFoundExcep .put("client_id", credentialsJson.get("credentials").get("client_id").asText()) .put("client_secret", credentialsJson.get("credentials").get("client_secret").asText()) .build())))); - var flowObject = getFlowImplementation(configRepository, httpClient); + final var flowObject = getFlowImplementation(configRepository, httpClient); final String url = flowObject.getSourceConsentUrl(workspaceId, definitionId, REDIRECT_URL, Jsons.emptyObject(), null); LOGGER.info("Waiting for user consent at: {}", url); // TODO: To automate, start a selenium job to navigate to the Consent URL and click on allowing diff --git a/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/OAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/OAuthFlowIntegrationTest.java index fc94c924cb47..d83b7ee42fdf 100644 --- a/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/OAuthFlowIntegrationTest.java +++ b/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/OAuthFlowIntegrationTest.java @@ -15,6 +15,7 @@ import java.io.OutputStream; import java.net.InetSocketAddress; import java.net.http.HttpClient; +import java.nio.charset.StandardCharsets; import java.nio.file.Files; import 
java.nio.file.Path;
 import java.util.HashMap;
@@ -97,7 +98,7 @@ public static class ServerHandler implements HttpHandler {
     private String paramValue;
     private boolean succeeded;
 
-    public ServerHandler(String expectedParam) {
+    public ServerHandler(final String expectedParam) {
       this.expectedParam = expectedParam;
       this.paramValue = "";
       this.succeeded = false;
@@ -112,7 +113,7 @@ public String getParamValue() {
     }
 
     @Override
-    public void handle(HttpExchange t) {
+    public void handle(final HttpExchange t) {
       final String query = t.getRequestURI().getQuery();
       LOGGER.info("Received query: '{}'", query);
       final Map<String, String> data;
@@ -131,20 +132,20 @@ public void handle(HttpExchange t) {
           t.sendResponseHeaders(500, response.length());
         }
         final OutputStream os = t.getResponseBody();
-        os.write(response.getBytes());
+        os.write(response.getBytes(StandardCharsets.UTF_8));
         os.close();
-      } catch (RuntimeException | IOException e) {
+      } catch (final RuntimeException | IOException e) {
         LOGGER.error("Failed to parse from body {}", query, e);
       }
     }
 
-    private static Map<String, String> deserialize(String query) {
+    private static Map<String, String> deserialize(final String query) {
       if (query == null) {
         return null;
       }
       final Map<String, String> result = new HashMap<>();
-      for (String param : query.split("&")) {
-        String[] entry = param.split("=", 2);
+      for (final String param : query.split("&")) {
+        final String[] entry = param.split("=", 2);
         if (entry.length > 1) {
           result.put(entry[0], entry[1]);
         } else {
diff --git a/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/google/GoogleAdsOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/google/GoogleAdsOAuthFlowIntegrationTest.java
index d3b925f4dd28..0709e15643f4 100644
--- a/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/google/GoogleAdsOAuthFlowIntegrationTest.java
+++ b/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/google/GoogleAdsOAuthFlowIntegrationTest.java
@@ -22,6 +22,7 @@
 import java.io.OutputStream;
 import java.net.InetSocketAddress;
 import java.net.http.HttpClient;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.util.HashMap;
@@ -73,7 +74,7 @@ public void testFullGoogleOAuthFlow() throws InterruptedException, ConfigNotFoun
     int limit = 20;
     final UUID workspaceId = UUID.randomUUID();
     final UUID definitionId = UUID.randomUUID();
-    final String fullConfigAsString = new String(Files.readAllBytes(CREDENTIALS_PATH));
+    final String fullConfigAsString = Files.readString(CREDENTIALS_PATH, StandardCharsets.UTF_8);
     final JsonNode credentialsJson = Jsons.deserialize(fullConfigAsString);
     when(configRepository.listSourceOAuthParam()).thenReturn(List.of(new SourceOAuthParameter()
         .withOauthParameterId(UUID.randomUUID())
@@ -143,7 +144,7 @@ public void handle(final HttpExchange t) {
           t.sendResponseHeaders(500, response.length());
         }
         final OutputStream os = t.getResponseBody();
-        os.write(response.getBytes());
+        os.write(response.getBytes(StandardCharsets.UTF_8));
         os.close();
       } catch (final RuntimeException | IOException e) {
         LOGGER.error("Failed to parse from body {}", query, e);
diff --git a/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/google/GoogleAnalyticsOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/google/GoogleAnalyticsOAuthFlowIntegrationTest.java
index c043152344e7..bc22de9572fa 100644
--- a/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/google/GoogleAnalyticsOAuthFlowIntegrationTest.java
+++
b/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/google/GoogleAnalyticsOAuthFlowIntegrationTest.java @@ -22,6 +22,7 @@ import java.io.OutputStream; import java.net.InetSocketAddress; import java.net.http.HttpClient; +import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.util.HashMap; @@ -73,7 +74,7 @@ public void testFullGoogleOAuthFlow() throws InterruptedException, ConfigNotFoun int limit = 20; final UUID workspaceId = UUID.randomUUID(); final UUID definitionId = UUID.randomUUID(); - final String fullConfigAsString = new String(Files.readAllBytes(CREDENTIALS_PATH)); + final String fullConfigAsString = Files.readString(CREDENTIALS_PATH); final JsonNode credentialsJson = Jsons.deserialize(fullConfigAsString); when(configRepository.listSourceOAuthParam()).thenReturn(List.of(new SourceOAuthParameter() .withOauthParameterId(UUID.randomUUID()) @@ -143,7 +144,7 @@ public void handle(final HttpExchange t) { t.sendResponseHeaders(500, response.length()); } final OutputStream os = t.getResponseBody(); - os.write(response.getBytes()); + os.write(response.getBytes(StandardCharsets.UTF_8)); os.close(); } catch (final RuntimeException | IOException e) { LOGGER.error("Failed to parse from body {}", query, e); diff --git a/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/google/GoogleSearchConsoleOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/google/GoogleSearchConsoleOAuthFlowIntegrationTest.java index 6d9be552abe8..42fd98721b8b 100644 --- a/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/google/GoogleSearchConsoleOAuthFlowIntegrationTest.java +++ b/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/google/GoogleSearchConsoleOAuthFlowIntegrationTest.java @@ -22,6 +22,7 @@ import java.io.OutputStream; import java.net.InetSocketAddress; import java.net.http.HttpClient; +import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.util.HashMap; @@ -73,7 +74,7 @@ public void testFullGoogleOAuthFlow() throws InterruptedException, ConfigNotFoun int limit = 20; final UUID workspaceId = UUID.randomUUID(); final UUID definitionId = UUID.randomUUID(); - final String fullConfigAsString = new String(Files.readAllBytes(CREDENTIALS_PATH)); + final String fullConfigAsString = Files.readString(CREDENTIALS_PATH); final JsonNode credentialsJson = Jsons.deserialize(fullConfigAsString); when(configRepository.listSourceOAuthParam()).thenReturn(List.of(new SourceOAuthParameter() .withOauthParameterId(UUID.randomUUID()) @@ -143,7 +144,7 @@ public void handle(final HttpExchange t) { t.sendResponseHeaders(500, response.length()); } final OutputStream os = t.getResponseBody(); - os.write(response.getBytes()); + os.write(response.getBytes(StandardCharsets.UTF_8)); os.close(); } catch (final RuntimeException | IOException e) { LOGGER.error("Failed to parse from body {}", query, e); diff --git a/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/google/GoogleSheetsOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/google/GoogleSheetsOAuthFlowIntegrationTest.java index 21a6a83e4acd..80001023dc6f 100644 --- a/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/google/GoogleSheetsOAuthFlowIntegrationTest.java +++ b/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/google/GoogleSheetsOAuthFlowIntegrationTest.java @@ -22,6 +22,7 @@ import 
java.io.OutputStream; import java.net.InetSocketAddress; import java.net.http.HttpClient; +import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.util.HashMap; @@ -73,7 +74,7 @@ public void testFullGoogleOAuthFlow() throws InterruptedException, ConfigNotFoun int limit = 20; final UUID workspaceId = UUID.randomUUID(); final UUID definitionId = UUID.randomUUID(); - final String fullConfigAsString = new String(Files.readAllBytes(CREDENTIALS_PATH)); + final String fullConfigAsString = Files.readString(CREDENTIALS_PATH); final JsonNode credentialsJson = Jsons.deserialize(fullConfigAsString); when(configRepository.listSourceOAuthParam()).thenReturn(List.of(new SourceOAuthParameter() .withOauthParameterId(UUID.randomUUID()) @@ -143,7 +144,7 @@ public void handle(final HttpExchange t) { t.sendResponseHeaders(500, response.length()); } final OutputStream os = t.getResponseBody(); - os.write(response.getBytes()); + os.write(response.getBytes(StandardCharsets.UTF_8)); os.close(); } catch (final RuntimeException | IOException e) { LOGGER.error("Failed to parse from body {}", query, e); diff --git a/airbyte-scheduler/app/src/main/java/io/airbyte/scheduler/app/JobLogs.java b/airbyte-scheduler/app/src/main/java/io/airbyte/scheduler/app/JobLogs.java index 73ead5bfb5b3..7b21eb71a574 100644 --- a/airbyte-scheduler/app/src/main/java/io/airbyte/scheduler/app/JobLogs.java +++ b/airbyte-scheduler/app/src/main/java/io/airbyte/scheduler/app/JobLogs.java @@ -8,7 +8,7 @@ public class JobLogs { - public static String ROOT_PATH = "logs/jobs"; + public static final String ROOT_PATH = "logs/jobs"; public static String getLogDirectory(final String scope) { return Paths.get(ROOT_PATH, scope).toString(); diff --git a/airbyte-scheduler/app/src/main/java/io/airbyte/scheduler/app/SchedulerApp.java b/airbyte-scheduler/app/src/main/java/io/airbyte/scheduler/app/SchedulerApp.java index 84e0ecf87b90..b01064fbf64e 100644 --- a/airbyte-scheduler/app/src/main/java/io/airbyte/scheduler/app/SchedulerApp.java +++ b/airbyte-scheduler/app/src/main/java/io/airbyte/scheduler/app/SchedulerApp.java @@ -70,7 +70,7 @@ */ public class SchedulerApp { - public static AtomicInteger PENDING_JOBS = new AtomicInteger(); + public static final AtomicInteger PENDING_JOBS = new AtomicInteger(); private static final Logger LOGGER = LoggerFactory.getLogger(SchedulerApp.class); diff --git a/airbyte-scheduler/models/src/main/java/io/airbyte/scheduler/models/AttemptStatus.java b/airbyte-scheduler/models/src/main/java/io/airbyte/scheduler/models/AttemptStatus.java index 21416691839b..cb56cd9b87d8 100644 --- a/airbyte-scheduler/models/src/main/java/io/airbyte/scheduler/models/AttemptStatus.java +++ b/airbyte-scheduler/models/src/main/java/io/airbyte/scheduler/models/AttemptStatus.java @@ -13,6 +13,6 @@ public enum AttemptStatus { FAILED, SUCCEEDED; - public static Set TERMINAL_STATUSES = Sets.newHashSet(FAILED, SUCCEEDED); + public static final Set TERMINAL_STATUSES = Sets.newHashSet(FAILED, SUCCEEDED); } diff --git a/airbyte-scheduler/models/src/main/java/io/airbyte/scheduler/models/JobStatus.java b/airbyte-scheduler/models/src/main/java/io/airbyte/scheduler/models/JobStatus.java index c94ff4ca5913..dca3be7b7b92 100644 --- a/airbyte-scheduler/models/src/main/java/io/airbyte/scheduler/models/JobStatus.java +++ b/airbyte-scheduler/models/src/main/java/io/airbyte/scheduler/models/JobStatus.java @@ -16,6 +16,6 @@ public enum JobStatus { SUCCEEDED, CANCELLED; - public static Set TERMINAL_STATUSES = 
Sets.newHashSet(FAILED, SUCCEEDED, CANCELLED); + public static final Set TERMINAL_STATUSES = Sets.newHashSet(FAILED, SUCCEEDED, CANCELLED); } diff --git a/airbyte-server/src/test/java/io/airbyte/server/RequestLoggerTest.java b/airbyte-server/src/test/java/io/airbyte/server/RequestLoggerTest.java index 4d9712a38313..d8a8e7b506b6 100644 --- a/airbyte-server/src/test/java/io/airbyte/server/RequestLoggerTest.java +++ b/airbyte-server/src/test/java/io/airbyte/server/RequestLoggerTest.java @@ -10,6 +10,7 @@ import io.airbyte.config.helpers.LogConfigs; import java.io.ByteArrayInputStream; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; @@ -47,6 +48,8 @@ public class RequestLoggerTest { private static final String URL = "/api/v1/test"; private static final String REQUEST_BODY_PROPERTY = "requestBodyProperty"; + private static final Random RANDOM = new Random(); + @Mock private HttpServletRequest mServletRequest; @@ -211,7 +214,7 @@ public class RequestResponseRunnable implements Runnable { public void run() { try { requestLogger.filter(mRequestContext); - Thread.sleep(new Random().nextInt(1000)); // random sleep to make race more likely + Thread.sleep(RANDOM.nextInt(1000)); // random sleep to make race more likely requestLogger.filter(mRequestContext, mResponseContext); } catch (final IOException | InterruptedException e) { e.printStackTrace(); @@ -232,7 +235,7 @@ private void stubRequestContext(final ContainerRequestContext mockContainerReque .thenReturn(METHOD); Mockito.when(mockContainerRequestContext.getEntityStream()) - .thenReturn(new ByteArrayInputStream(requestBody.getBytes())); + .thenReturn(new ByteArrayInputStream(requestBody.getBytes(StandardCharsets.UTF_8))); Mockito.when(mockContainerRequestContext.getProperty(REQUEST_BODY_PROPERTY)).thenReturn(requestBody); } diff --git a/airbyte-server/src/test/java/io/airbyte/server/services/AirbyteGithubStoreTest.java b/airbyte-server/src/test/java/io/airbyte/server/services/AirbyteGithubStoreTest.java index 23f3b5a2172b..6beedf771fd1 100644 --- a/airbyte-server/src/test/java/io/airbyte/server/services/AirbyteGithubStoreTest.java +++ b/airbyte-server/src/test/java/io/airbyte/server/services/AirbyteGithubStoreTest.java @@ -23,8 +23,8 @@ public class AirbyteGithubStoreTest { private static final Duration TIMEOUT = Duration.ofSeconds(1); - private static MockWebServer webServer; - private static AirbyteGithubStore githubStore; + private MockWebServer webServer; + private AirbyteGithubStore githubStore; @BeforeEach public void setUp() { diff --git a/airbyte-tests/src/automaticMigrationAcceptanceTest/java/io/airbyte/test/automaticMigrationAcceptance/ImportApi.java b/airbyte-tests/src/automaticMigrationAcceptanceTest/java/io/airbyte/test/automaticMigrationAcceptance/ImportApi.java index ad9fa226807d..b4461f330f35 100644 --- a/airbyte-tests/src/automaticMigrationAcceptanceTest/java/io/airbyte/test/automaticMigrationAcceptance/ImportApi.java +++ b/airbyte-tests/src/automaticMigrationAcceptanceTest/java/io/airbyte/test/automaticMigrationAcceptance/ImportApi.java @@ -18,6 +18,7 @@ import java.net.http.HttpRequest; import java.net.http.HttpRequest.Builder; import java.net.http.HttpResponse; +import java.nio.charset.StandardCharsets; import java.time.Duration; import java.util.function.Consumer; @@ -66,7 +67,7 @@ public ApiResponse importArchiveWithHttpInfo(final File body) throws "importArchive call received non-success response", 
localVarResponse.headers(), localVarResponse.body() == null ? null - : new String(localVarResponse.body().readAllBytes())); + : new String(localVarResponse.body().readAllBytes(), StandardCharsets.UTF_8)); } return new ApiResponse( localVarResponse.statusCode(), diff --git a/airbyte-tests/src/main/java/io/airbyte/test/airbyte_test_container/AirbyteTestContainer.java b/airbyte-tests/src/main/java/io/airbyte/test/airbyte_test_container/AirbyteTestContainer.java index 78b11aa2cf8b..1ec7922dfe9f 100644 --- a/airbyte-tests/src/main/java/io/airbyte/test/airbyte_test_container/AirbyteTestContainer.java +++ b/airbyte-tests/src/main/java/io/airbyte/test/airbyte_test_container/AirbyteTestContainer.java @@ -18,6 +18,7 @@ import java.lang.reflect.Field; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; +import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.time.Duration; @@ -97,8 +98,8 @@ private static Map prepareDockerComposeEnvVariables(final File e private static File prepareDockerComposeFile(final File originalDockerComposeFile) throws IOException { final File cleanedDockerComposeFile = Files.createTempFile(Path.of("/tmp"), "docker_compose", "acceptance_test").toFile(); - try (final Scanner scanner = new Scanner(originalDockerComposeFile)) { - try (final FileWriter fileWriter = new FileWriter(cleanedDockerComposeFile)) { + try (final Scanner scanner = new Scanner(originalDockerComposeFile, StandardCharsets.UTF_8)) { + try (final FileWriter fileWriter = new FileWriter(cleanedDockerComposeFile, StandardCharsets.UTF_8)) { while (scanner.hasNextLine()) { final String s = scanner.nextLine(); if (s.contains("container_name")) { @@ -157,7 +158,7 @@ private void serviceLogConsumer(final DockerComposeContainer composeContainer private Consumer logConsumer(final String service, final Consumer customConsumer) { return c -> { if (c != null && c.getBytes() != null) { - final String log = new String(c.getBytes()); + final String log = new String(c.getBytes(), StandardCharsets.UTF_8); if (customConsumer != null) { customConsumer.accept(log); } diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/test_helpers/EntrypointEnvChecker.java b/airbyte-workers/src/main/java/io/airbyte/workers/test_helpers/EntrypointEnvChecker.java index 94763ddb3a6d..0abac23923d2 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/test_helpers/EntrypointEnvChecker.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/test_helpers/EntrypointEnvChecker.java @@ -9,6 +9,7 @@ import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; import java.nio.file.Path; import java.util.Collections; @@ -45,7 +46,7 @@ public static String getEntrypointEnvVariable(final ProcessFactory processFactor Collections.emptyMap(), Collections.emptyMap()); - final BufferedReader stdout = new BufferedReader(new InputStreamReader(process.getInputStream())); + final BufferedReader stdout = new BufferedReader(new InputStreamReader(process.getInputStream(), StandardCharsets.UTF_8)); String outputLine = null; diff --git a/airbyte-workers/src/test-integration/java/io/airbyte/workers/process/KubePodProcessIntegrationTest.java b/airbyte-workers/src/test-integration/java/io/airbyte/workers/process/KubePodProcessIntegrationTest.java index e8b21eed85ec..062a422d6754 100644 --- a/airbyte-workers/src/test-integration/java/io/airbyte/workers/process/KubePodProcessIntegrationTest.java +++ 
b/airbyte-workers/src/test-integration/java/io/airbyte/workers/process/KubePodProcessIntegrationTest.java @@ -23,6 +23,7 @@ import java.net.Inet4Address; import java.net.ServerSocket; import java.net.UnknownHostException; +import java.nio.charset.StandardCharsets; import java.nio.file.Path; import java.util.ArrayList; import java.util.Collections; @@ -197,7 +198,7 @@ public void testSuccessfulSpawningWithQuotes() throws Exception { // start a finite process final var availablePortsBefore = KubePortManagerSingleton.getInstance().getNumAvailablePorts(); final Process process = getProcess("echo \"h\\\"i\"; sleep 1; echo hi2"); - final var output = new String(process.getInputStream().readAllBytes()); + final var output = new String(process.getInputStream().readAllBytes(), StandardCharsets.UTF_8); assertEquals("h\"i\nhi2\n", output); process.waitFor(); @@ -211,7 +212,7 @@ public void testSuccessfulSpawningWithQuotes() throws Exception { public void testEnvMapSet() throws Exception { // start a finite process final Process process = getProcess("echo ENV_VAR_1=$ENV_VAR_1"); - final var output = new String(process.getInputStream().readAllBytes()); + final var output = new String(process.getInputStream().readAllBytes(), StandardCharsets.UTF_8); assertEquals("ENV_VAR_1=ENV_VALUE_1\n", output); process.waitFor(); diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/DefaultGetSpecWorkerTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/DefaultGetSpecWorkerTest.java index 9b1c0828a910..600a17dce587 100644 --- a/airbyte-workers/src/test/java/io/airbyte/workers/DefaultGetSpecWorkerTest.java +++ b/airbyte-workers/src/test/java/io/airbyte/workers/DefaultGetSpecWorkerTest.java @@ -72,7 +72,7 @@ public void testSuccessfulRun() throws IOException, InterruptedException, Worker @Test public void testFailureOnInvalidSpec() throws InterruptedException { final String expectedSpecString = "{\"key\":\"value\"}"; - when(process.getInputStream()).thenReturn(new ByteArrayInputStream(expectedSpecString.getBytes())); + when(process.getInputStream()).thenReturn(new ByteArrayInputStream(expectedSpecString.getBytes(Charsets.UTF_8))); when(process.waitFor(anyLong(), any())).thenReturn(true); when(process.exitValue()).thenReturn(0); diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/normalization/DefaultNormalizationRunnerTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/normalization/DefaultNormalizationRunnerTest.java index a27c47e601d1..bb24d6808dfa 100644 --- a/airbyte-workers/src/test/java/io/airbyte/workers/normalization/DefaultNormalizationRunnerTest.java +++ b/airbyte-workers/src/test/java/io/airbyte/workers/normalization/DefaultNormalizationRunnerTest.java @@ -30,6 +30,7 @@ import io.airbyte.workers.process.ProcessFactory; import java.io.ByteArrayInputStream; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.util.Map; @@ -85,8 +86,8 @@ void setup() throws IOException, WorkerException { "--config", WorkerConstants.DESTINATION_CONFIG_JSON_FILENAME, "--catalog", WorkerConstants.DESTINATION_CATALOG_JSON_FILENAME)) .thenReturn(process); - when(process.getInputStream()).thenReturn(new ByteArrayInputStream("hello".getBytes())); - when(process.getErrorStream()).thenReturn(new ByteArrayInputStream("hello".getBytes())); + when(process.getInputStream()).thenReturn(new ByteArrayInputStream("hello".getBytes(StandardCharsets.UTF_8))); + when(process.getErrorStream()).thenReturn(new 
ByteArrayInputStream("hello".getBytes(StandardCharsets.UTF_8))); } @AfterEach diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/protocols/airbyte/DefaultAirbyteStreamFactoryTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/protocols/airbyte/DefaultAirbyteStreamFactoryTest.java index d990d719778b..aae30ab4a1fc 100644 --- a/airbyte-workers/src/test/java/io/airbyte/workers/protocols/airbyte/DefaultAirbyteStreamFactoryTest.java +++ b/airbyte-workers/src/test/java/io/airbyte/workers/protocols/airbyte/DefaultAirbyteStreamFactoryTest.java @@ -21,6 +21,7 @@ import java.io.ByteArrayInputStream; import java.io.InputStream; import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; import java.util.Collections; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -119,8 +120,8 @@ public void testMissingNewLineBetweenValidRecords() { } private Stream stringToMessageStream(final String inputString) { - final InputStream inputStream = new ByteArrayInputStream(inputString.getBytes()); - final BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(inputStream)); + final InputStream inputStream = new ByteArrayInputStream(inputString.getBytes(StandardCharsets.UTF_8)); + final BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8)); return new DefaultAirbyteStreamFactory(protocolPredicate, logger, new Builder()).create(bufferedReader); } diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/testsyncworkflow/SourceAndDestinationFailureSyncWorkflow.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/testsyncworkflow/SourceAndDestinationFailureSyncWorkflow.java index ceb414f2b6ce..a318885125b8 100644 --- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/testsyncworkflow/SourceAndDestinationFailureSyncWorkflow.java +++ b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/testsyncworkflow/SourceAndDestinationFailureSyncWorkflow.java @@ -22,7 +22,7 @@ public class SourceAndDestinationFailureSyncWorkflow implements SyncWorkflow { @VisibleForTesting - public static Set FAILURE_REASONS = Sets.newLinkedHashSet( + public static final Set FAILURE_REASONS = Sets.newLinkedHashSet( new FailureReason().withFailureOrigin(FailureOrigin.SOURCE).withTimestamp(System.currentTimeMillis()), new FailureReason().withFailureOrigin(FailureOrigin.DESTINATION).withTimestamp(System.currentTimeMillis())); diff --git a/build.gradle b/build.gradle index 54063134e30a..803c5e3a3877 100644 --- a/build.gradle +++ b/build.gradle @@ -16,6 +16,7 @@ plugins { id 'pmd' id 'com.diffplug.spotless' version '6.0.0' id 'com.github.hierynomus.license' version '0.16.1' + id 'com.github.spotbugs' version '5.0.5' } repositories { @@ -177,6 +178,7 @@ subprojects { apply plugin: 'java' apply plugin: 'jacoco' + apply plugin: 'com.github.spotbugs' sourceCompatibility = JavaVersion.VERSION_17 targetCompatibility = JavaVersion.VERSION_17 @@ -247,6 +249,11 @@ subprojects { finalizedBy jacocoTestReport } + spotbugs { + reportLevel = 'high' + excludeFilter = rootProject.file('spotbugs-exclude-filter-file.xml') + } + dependencies { if (project.name != 'airbyte-commons') { implementation project(':airbyte-commons') @@ -303,6 +310,8 @@ subprojects { testImplementation 'org.mockito:mockito-junit-jupiter:4.0.0' testImplementation 'org.assertj:assertj-core:3.21.0' + // adds owasp plugin + spotbugsPlugins 
'com.h3xstream.findsecbugs:findsecbugs-plugin:1.11.0'
   }
 
   tasks.withType(Tar) {
diff --git a/spotbugs-exclude-filter-file.xml b/spotbugs-exclude-filter-file.xml
new file mode 100644
index 000000000000..c5da06291781
--- /dev/null
+++ b/spotbugs-exclude-filter-file.xml
@@ -0,0 +1,20 @@
+[20 added lines of SpotBugs exclude-filter XML; the element markup was stripped during extraction and is not recoverable]

From c34815d222bb5c9aa2fdbdc0def52707ba3c5b86 Mon Sep 17 00:00:00 2001
From: Parker Mossman
Date: Fri, 11 Mar 2022 12:50:16 -0800
Subject: [PATCH 23/38] Remove deprecated FailureReason enum values (#10773)

* Remove deprecated/unused enum values from json schema, migration to update
  records to use corrected values

* make migration-specific classes handle any string, and remove extraneous
  comments/annotations for readability. also test that an unrecognized enum
  value is left alone and doesn't cause deserialization errors

* gradle format

* fix test

* fix jobTrackerTest with new enum values
---
 airbyte-api/src/main/openapi/config.yaml      |   3 -
 .../airbyte/bootloader/BootloaderAppTest.java |   2 +-
 .../main/resources/types/FailureReason.yaml   |   9 +-
 ...0_001__MigrateFailureReasonEnumValues.java | 410 ++++++++++++++++++
 ...1__Add_failureSummary_col_to_Attempts.java |   6 +
 ...1_MigrateFailureReasonEnumValues_Test.java | 180 ++++++++
 .../job_tracker/JobTrackerTest.java           |   4 +-
 7 files changed, 602 insertions(+), 12 deletions(-)
 create mode 100644 airbyte-db/lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_35_40_001__MigrateFailureReasonEnumValues.java
 create mode 100644 airbyte-db/lib/src/test/java/io/airbyte/db/instance/jobs/migrations/V0_35_40_001_MigrateFailureReasonEnumValues_Test.java

diff --git a/airbyte-api/src/main/openapi/config.yaml b/airbyte-api/src/main/openapi/config.yaml
index 831a0f8ec395..57c2d4c64608 100644
--- a/airbyte-api/src/main/openapi/config.yaml
+++ b/airbyte-api/src/main/openapi/config.yaml
@@ -3343,11 +3343,9 @@ components:
         description: Indicates where the error originated. If not set, the origin of error is not well known.
         type: string
         enum:
-          - unknown # todo (parker) remove this in favor of leaving the failureOrigin unset
           - source
           - destination
           - replication
-          - replication_worker # todo (parker) remove this in favor of replication
           - persistence
           - normalization
           - dbt
       failureType:
         description: Categorizes well known errors into types for programmatic handling. If not set, the type of error is not well known.
type: string enum: - - unknown # todo (parker) remove this in favor of leaving the failureType unset - config_error - system_error - manual_cancellation diff --git a/airbyte-bootloader/src/test/java/io/airbyte/bootloader/BootloaderAppTest.java b/airbyte-bootloader/src/test/java/io/airbyte/bootloader/BootloaderAppTest.java index 588262aac8e4..f26b32ffe384 100644 --- a/airbyte-bootloader/src/test/java/io/airbyte/bootloader/BootloaderAppTest.java +++ b/airbyte-bootloader/src/test/java/io/airbyte/bootloader/BootloaderAppTest.java @@ -72,7 +72,7 @@ void testBootloaderAppBlankDb() throws Exception { container.getPassword(), container.getJdbcUrl()).getInitialized(); val jobsMigrator = new JobsDatabaseMigrator(jobDatabase, this.getClass().getName()); - assertEquals("0.35.5.001", jobsMigrator.getLatestMigration().getVersion().getVersion()); + assertEquals("0.35.40.001", jobsMigrator.getLatestMigration().getVersion().getVersion()); val configDatabase = new ConfigsDatabaseInstance( mockedConfigs.getConfigDatabaseUser(), diff --git a/airbyte-config/models/src/main/resources/types/FailureReason.yaml b/airbyte-config/models/src/main/resources/types/FailureReason.yaml index d75b661265f4..bae623d6da64 100644 --- a/airbyte-config/models/src/main/resources/types/FailureReason.yaml +++ b/airbyte-config/models/src/main/resources/types/FailureReason.yaml @@ -11,11 +11,9 @@ properties: description: Indicates where the error originated. If not set, the origin of error is not well known. type: string enum: - - unknown # todo (parker) remove this in favor of leaving the failureOrigin unset - source - destination - replication - - replicationWorker # todo (parker) remove this in favor of replication - persistence - normalization - dbt @@ -23,10 +21,9 @@ properties: description: Categorizes well known errors into types for programmatic handling. If not set, the type of error is not well known. type: string enum: - - unknown # todo (parker) remove this in favor of leaving the failureType unset - - configError - - systemError - - manualCancellation + - config_error + - system_error + - manual_cancellation internalMessage: description: Human readable failure description for consumption by technical system operators, like Airbyte engineers or OSS users. type: string diff --git a/airbyte-db/lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_35_40_001__MigrateFailureReasonEnumValues.java b/airbyte-db/lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_35_40_001__MigrateFailureReasonEnumValues.java new file mode 100644 index 000000000000..0683393a9380 --- /dev/null +++ b/airbyte-db/lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_35_40_001__MigrateFailureReasonEnumValues.java @@ -0,0 +1,410 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+ */
+
+package io.airbyte.db.instance.jobs.migrations;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.collect.ImmutableMap;
+import io.airbyte.commons.json.Jsons;
+import io.airbyte.config.Metadata;
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import org.flywaydb.core.api.migration.BaseJavaMigration;
+import org.flywaydb.core.api.migration.Context;
+import org.jooq.DSLContext;
+import org.jooq.Field;
+import org.jooq.JSONB;
+import org.jooq.Record;
+import org.jooq.Result;
+import org.jooq.impl.DSL;
+import org.jooq.impl.SQLDataType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class V0_35_40_001__MigrateFailureReasonEnumValues extends BaseJavaMigration {
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(V0_35_40_001__MigrateFailureReasonEnumValues.class);
+
+  @VisibleForTesting
+  static final String OLD_MANUAL_CANCELLATION = "manualCancellation";
+  static final String NEW_MANUAL_CANCELLATION = "manual_cancellation";
+  static final String OLD_SYSTEM_ERROR = "systemError";
+  static final String NEW_SYSTEM_ERROR = "system_error";
+  static final String OLD_CONFIG_ERROR = "configError";
+  static final String NEW_CONFIG_ERROR = "config_error";
+  static final String OLD_REPLICATION_ORIGIN = "replicationWorker";
+  static final String NEW_REPLICATION_ORIGIN = "replication";
+  static final String OLD_UNKNOWN = "unknown";
+
+  @Override
+  public void migrate(final Context context) throws Exception {
+    LOGGER.info("Running migration: {}", this.getClass().getSimpleName());
+
+    final DSLContext ctx = DSL.using(context.getConnection());
+    updateRecordsWithNewEnumValues(ctx);
+  }
+
+  /**
+   * Finds all attempt records that have a failure summary containing a deprecated enum value. For
+   * each matching record, calls a method to fix and update it.
+   */
+  static void updateRecordsWithNewEnumValues(final DSLContext ctx) {
+    final Result<Record> results =
+        ctx.fetch(String.format("""
+                                SELECT A.* FROM attempts A, jsonb_array_elements(A.failure_summary->'failures') as f
+                                WHERE f->>'failureOrigin' = '%s'
+                                OR f->>'failureOrigin' = '%s'
+                                OR f->>'failureType' = '%s'
+                                OR f->>'failureType' = '%s'
+                                OR f->>'failureType' = '%s'
+                                OR f->>'failureType' = '%s'
+                                """, OLD_UNKNOWN, OLD_REPLICATION_ORIGIN, OLD_UNKNOWN, OLD_CONFIG_ERROR, OLD_SYSTEM_ERROR, OLD_MANUAL_CANCELLATION));
+    results.forEach(record -> updateAttemptFailureReasons(ctx, record));
+  }
+
+  /**
+   * Takes in a single record from the above query and performs an UPDATE to set the failure summary
+   * to the fixed version.
+   */
+  private static void updateAttemptFailureReasons(final DSLContext ctx, final Record record) {
+    final Field<Long> attemptIdField = DSL.field("id", SQLDataType.BIGINT);
+    final Field<JSONB> failureSummaryField = DSL.field("failure_summary", SQLDataType.JSONB.nullable(true));
+
+    final Long attemptId = record.get(attemptIdField);
+    final AttemptFailureSummaryForMigration oldFailureSummary = Jsons.deserialize(
+        record.get(failureSummaryField).data(),
+        AttemptFailureSummaryForMigration.class);
+
+    final AttemptFailureSummaryForMigration fixedFailureSummary = getFixedAttemptFailureSummary(oldFailureSummary);
+
+    ctx.update(DSL.table("attempts"))
+        .set(failureSummaryField, JSONB.valueOf(Jsons.serialize(fixedFailureSummary)))
+        .where(attemptIdField.eq(attemptId))
+        .execute();
+  }
+
+  /**
+   * Takes in a FailureSummary and replaces deprecated enum values with their updated versions.
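+   * For example (an illustrative sketch built from the constants above, not data taken from a real
+   * record), a stored summary like
+   *   {"failures": [{"failureOrigin": "replicationWorker", "failureType": "systemError"}]}
+   * comes back as
+   *   {"failures": [{"failureOrigin": "replication", "failureType": "system_error"}]}
+   * while a failureOrigin or failureType of "unknown" is cleared to null rather than remapped.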
+ */
+  private static AttemptFailureSummaryForMigration getFixedAttemptFailureSummary(final AttemptFailureSummaryForMigration failureSummary) {
+    final Map<String, String> oldFailureTypeToFixedFailureType = ImmutableMap.of(
+        OLD_MANUAL_CANCELLATION, NEW_MANUAL_CANCELLATION,
+        OLD_SYSTEM_ERROR, NEW_SYSTEM_ERROR,
+        OLD_CONFIG_ERROR, NEW_CONFIG_ERROR);
+
+    final Map<String, String> oldFailureOriginToFixedFailureOrigin = ImmutableMap.of(
+        OLD_REPLICATION_ORIGIN, NEW_REPLICATION_ORIGIN);
+
+    final List<FailureReasonForMigration> fixedFailureReasons = new ArrayList<>();
+
+    failureSummary.getFailures().forEach(failureReason -> {
+      final String failureType = failureReason.getFailureType();
+      final String failureOrigin = failureReason.getFailureOrigin();
+
+      // null failureType is valid and doesn't need correction
+      if (failureType != null) {
+        if (oldFailureTypeToFixedFailureType.containsKey(failureType)) {
+          failureReason.setFailureType(oldFailureTypeToFixedFailureType.get(failureType));
+        } else if (failureType.equals(OLD_UNKNOWN)) {
+          failureReason.setFailureType(null);
+        }
+      }
+
+      // null failureOrigin is valid and doesn't need correction
+      if (failureOrigin != null) {
+        if (oldFailureOriginToFixedFailureOrigin.containsKey(failureOrigin)) {
+          failureReason.setFailureOrigin(oldFailureOriginToFixedFailureOrigin.get(failureOrigin));
+        } else if (failureOrigin.equals(OLD_UNKNOWN)) {
+          failureReason.setFailureOrigin(null);
+        }
+      }
+
+      fixedFailureReasons.add(failureReason);
+    });
+
+    failureSummary.setFailures(fixedFailureReasons);
+    return failureSummary;
+  }
+
+  /**
+   * The following classes are essentially a copy of the FailureReason and AttemptFailureSummary
+   * classes at the time of this migration. They support both deprecated and new enum values and are
+   * used for record deserialization in this migration because in the future, the real FailureReason
+   * class will have those deprecated enum values removed, which would break deserialization within
+   * this migration.
+ */ + + static class FailureReasonForMigration implements Serializable { + + private String failureOrigin; + private String failureType; + private String internalMessage; + private String externalMessage; + private Metadata metadata; + private String stacktrace; + private Boolean retryable; + private Long timestamp; + private final static long serialVersionUID = -1485119682657564218L; + + public String getFailureOrigin() { + return failureOrigin; + } + + public void setFailureOrigin(final String failureOrigin) { + this.failureOrigin = failureOrigin; + } + + public FailureReasonForMigration withFailureOrigin(final String failureOrigin) { + this.failureOrigin = failureOrigin; + return this; + } + + public String getFailureType() { + return failureType; + } + + public void setFailureType(final String failureType) { + this.failureType = failureType; + } + + public FailureReasonForMigration withFailureType(final String failureType) { + this.failureType = failureType; + return this; + } + + public String getInternalMessage() { + return internalMessage; + } + + public void setInternalMessage(String internalMessage) { + this.internalMessage = internalMessage; + } + + public FailureReasonForMigration withInternalMessage(String internalMessage) { + this.internalMessage = internalMessage; + return this; + } + + public String getExternalMessage() { + return externalMessage; + } + + public void setExternalMessage(String externalMessage) { + this.externalMessage = externalMessage; + } + + public FailureReasonForMigration withExternalMessage(String externalMessage) { + this.externalMessage = externalMessage; + return this; + } + + public Metadata getMetadata() { + return metadata; + } + + public void setMetadata(Metadata metadata) { + this.metadata = metadata; + } + + public FailureReasonForMigration withMetadata(Metadata metadata) { + this.metadata = metadata; + return this; + } + + public String getStacktrace() { + return stacktrace; + } + + public void setStacktrace(String stacktrace) { + this.stacktrace = stacktrace; + } + + public FailureReasonForMigration withStacktrace(String stacktrace) { + this.stacktrace = stacktrace; + return this; + } + + public Boolean getRetryable() { + return retryable; + } + + public void setRetryable(Boolean retryable) { + this.retryable = retryable; + } + + public FailureReasonForMigration withRetryable(Boolean retryable) { + this.retryable = retryable; + return this; + } + + public Long getTimestamp() { + return timestamp; + } + + public void setTimestamp(Long timestamp) { + this.timestamp = timestamp; + } + + public FailureReasonForMigration withTimestamp(Long timestamp) { + this.timestamp = timestamp; + return this; + } + + @Override + public String toString() { + final StringBuilder sb = new StringBuilder(); + sb.append(FailureReasonForMigration.class.getName()).append('@').append(Integer.toHexString(System.identityHashCode(this))).append('['); + sb.append("failureOrigin"); + sb.append('='); + sb.append(((this.failureOrigin == null) ? "" : this.failureOrigin)); + sb.append(','); + sb.append("failureType"); + sb.append('='); + sb.append(((this.failureType == null) ? "" : this.failureType)); + sb.append(','); + sb.append("internalMessage"); + sb.append('='); + sb.append(((this.internalMessage == null) ? "" : this.internalMessage)); + sb.append(','); + sb.append("externalMessage"); + sb.append('='); + sb.append(((this.externalMessage == null) ? 
"" : this.externalMessage)); + sb.append(','); + sb.append("metadata"); + sb.append('='); + sb.append(((this.metadata == null) ? "" : this.metadata)); + sb.append(','); + sb.append("stacktrace"); + sb.append('='); + sb.append(((this.stacktrace == null) ? "" : this.stacktrace)); + sb.append(','); + sb.append("retryable"); + sb.append('='); + sb.append(((this.retryable == null) ? "" : this.retryable)); + sb.append(','); + sb.append("timestamp"); + sb.append('='); + sb.append(((this.timestamp == null) ? "" : this.timestamp)); + sb.append(','); + if (sb.charAt((sb.length() - 1)) == ',') { + sb.setCharAt((sb.length() - 1), ']'); + } else { + sb.append(']'); + } + return sb.toString(); + } + + @Override + public int hashCode() { + int result = 1; + result = ((result * 31) + ((this.retryable == null) ? 0 : this.retryable.hashCode())); + result = ((result * 31) + ((this.metadata == null) ? 0 : this.metadata.hashCode())); + result = ((result * 31) + ((this.stacktrace == null) ? 0 : this.stacktrace.hashCode())); + result = ((result * 31) + ((this.failureOrigin == null) ? 0 : this.failureOrigin.hashCode())); + result = ((result * 31) + ((this.failureType == null) ? 0 : this.failureType.hashCode())); + result = ((result * 31) + ((this.internalMessage == null) ? 0 : this.internalMessage.hashCode())); + result = ((result * 31) + ((this.externalMessage == null) ? 0 : this.externalMessage.hashCode())); + result = ((result * 31) + ((this.timestamp == null) ? 0 : this.timestamp.hashCode())); + return result; + } + + @Override + public boolean equals(final Object other) { + if (other == this) { + return true; + } + if ((other instanceof FailureReasonForMigration) == false) { + return false; + } + final FailureReasonForMigration rhs = ((FailureReasonForMigration) other); + return (((((((((this.retryable == rhs.retryable) || ((this.retryable != null) && this.retryable.equals(rhs.retryable))) + && ((this.metadata == rhs.metadata) || ((this.metadata != null) && this.metadata.equals(rhs.metadata)))) + && ((this.stacktrace == rhs.stacktrace) || ((this.stacktrace != null) && this.stacktrace.equals(rhs.stacktrace)))) + && ((this.failureOrigin == rhs.failureOrigin) || ((this.failureOrigin != null) && this.failureOrigin.equals(rhs.failureOrigin)))) + && ((this.failureType == rhs.failureType) || ((this.failureType != null) && this.failureType.equals(rhs.failureType)))) + && ((this.internalMessage == rhs.internalMessage) || ((this.internalMessage != null) && this.internalMessage.equals(rhs.internalMessage)))) + && ((this.externalMessage == rhs.externalMessage) || ((this.externalMessage != null) && this.externalMessage.equals(rhs.externalMessage)))) + && ((this.timestamp == rhs.timestamp) || ((this.timestamp != null) && this.timestamp.equals(rhs.timestamp)))); + } + + } + + static class AttemptFailureSummaryForMigration implements Serializable { + + private List failures = new ArrayList<>(); + private Boolean partialSuccess; + private final static long serialVersionUID = -9065693637249217586L; + + public List getFailures() { + return failures; + } + + public void setFailures(final List failures) { + this.failures = failures; + } + + public AttemptFailureSummaryForMigration withFailures(final List failures) { + this.failures = failures; + return this; + } + + public Boolean getPartialSuccess() { + return partialSuccess; + } + + public void setPartialSuccess(Boolean partialSuccess) { + this.partialSuccess = partialSuccess; + } + + public AttemptFailureSummaryForMigration withPartialSuccess(Boolean partialSuccess) { + 
this.partialSuccess = partialSuccess; + return this; + } + + @Override + public String toString() { + final StringBuilder sb = new StringBuilder(); + sb.append(AttemptFailureSummaryForMigration.class.getName()).append('@').append(Integer.toHexString(System.identityHashCode(this))).append('['); + sb.append("failures"); + sb.append('='); + sb.append(((this.failures == null) ? "" : this.failures)); + sb.append(','); + sb.append("partialSuccess"); + sb.append('='); + sb.append(((this.partialSuccess == null) ? "" : this.partialSuccess)); + sb.append(','); + if (sb.charAt((sb.length() - 1)) == ',') { + sb.setCharAt((sb.length() - 1), ']'); + } else { + sb.append(']'); + } + return sb.toString(); + } + + @Override + public int hashCode() { + int result = 1; + result = ((result * 31) + ((this.partialSuccess == null) ? 0 : this.partialSuccess.hashCode())); + result = ((result * 31) + ((this.failures == null) ? 0 : this.failures.hashCode())); + return result; + } + + @Override + public boolean equals(final Object other) { + if (other == this) { + return true; + } + if ((other instanceof AttemptFailureSummaryForMigration) == false) { + return false; + } + final AttemptFailureSummaryForMigration rhs = ((AttemptFailureSummaryForMigration) other); + return (((this.partialSuccess == rhs.partialSuccess) || ((this.partialSuccess != null) && this.partialSuccess.equals(rhs.partialSuccess))) + && ((this.failures == rhs.failures) || ((this.failures != null) && this.failures.equals(rhs.failures)))); + } + + } + +} diff --git a/airbyte-db/lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_35_5_001__Add_failureSummary_col_to_Attempts.java b/airbyte-db/lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_35_5_001__Add_failureSummary_col_to_Attempts.java index 6bb63405b632..9b7c15ce726d 100644 --- a/airbyte-db/lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_35_5_001__Add_failureSummary_col_to_Attempts.java +++ b/airbyte-db/lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_35_5_001__Add_failureSummary_col_to_Attempts.java @@ -4,6 +4,7 @@ package io.airbyte.db.instance.jobs.migrations; +import com.google.common.annotations.VisibleForTesting; import org.flywaydb.core.api.migration.BaseJavaMigration; import org.flywaydb.core.api.migration.Context; import org.jooq.DSLContext; @@ -21,6 +22,11 @@ public void migrate(final Context context) throws Exception { LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); final DSLContext ctx = DSL.using(context.getConnection()); + migrate(ctx); + } + + @VisibleForTesting + public static void migrate(final DSLContext ctx) { addFailureSummaryColumn(ctx); } diff --git a/airbyte-db/lib/src/test/java/io/airbyte/db/instance/jobs/migrations/V0_35_40_001_MigrateFailureReasonEnumValues_Test.java b/airbyte-db/lib/src/test/java/io/airbyte/db/instance/jobs/migrations/V0_35_40_001_MigrateFailureReasonEnumValues_Test.java new file mode 100644 index 000000000000..b2d7e93f4dba --- /dev/null +++ b/airbyte-db/lib/src/test/java/io/airbyte/db/instance/jobs/migrations/V0_35_40_001_MigrateFailureReasonEnumValues_Test.java @@ -0,0 +1,180 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.db.instance.jobs.migrations; + +import static io.airbyte.db.instance.jobs.migrations.V0_35_40_001__MigrateFailureReasonEnumValues.NEW_CONFIG_ERROR; +import static io.airbyte.db.instance.jobs.migrations.V0_35_40_001__MigrateFailureReasonEnumValues.NEW_MANUAL_CANCELLATION; +import static io.airbyte.db.instance.jobs.migrations.V0_35_40_001__MigrateFailureReasonEnumValues.NEW_REPLICATION_ORIGIN; +import static io.airbyte.db.instance.jobs.migrations.V0_35_40_001__MigrateFailureReasonEnumValues.NEW_SYSTEM_ERROR; +import static io.airbyte.db.instance.jobs.migrations.V0_35_40_001__MigrateFailureReasonEnumValues.OLD_CONFIG_ERROR; +import static io.airbyte.db.instance.jobs.migrations.V0_35_40_001__MigrateFailureReasonEnumValues.OLD_MANUAL_CANCELLATION; +import static io.airbyte.db.instance.jobs.migrations.V0_35_40_001__MigrateFailureReasonEnumValues.OLD_REPLICATION_ORIGIN; +import static io.airbyte.db.instance.jobs.migrations.V0_35_40_001__MigrateFailureReasonEnumValues.OLD_SYSTEM_ERROR; +import static io.airbyte.db.instance.jobs.migrations.V0_35_40_001__MigrateFailureReasonEnumValues.OLD_UNKNOWN; +import static org.jooq.impl.DSL.asterisk; +import static org.junit.jupiter.api.Assertions.assertEquals; + +import io.airbyte.commons.json.Jsons; +import io.airbyte.config.Metadata; +import io.airbyte.db.Database; +import io.airbyte.db.instance.jobs.AbstractJobsDatabaseTest; +import io.airbyte.db.instance.jobs.migrations.V0_35_40_001__MigrateFailureReasonEnumValues.AttemptFailureSummaryForMigration; +import io.airbyte.db.instance.jobs.migrations.V0_35_40_001__MigrateFailureReasonEnumValues.FailureReasonForMigration; +import java.util.List; +import org.jooq.DSLContext; +import org.jooq.JSONB; +import org.jooq.Record; +import org.jooq.impl.DSL; +import org.jooq.impl.SQLDataType; +import org.junit.jupiter.api.Test; + +public class V0_35_40_001_MigrateFailureReasonEnumValues_Test extends AbstractJobsDatabaseTest { + + private static int currJobId = 1; + private static final long timeNowMillis = System.currentTimeMillis(); + private static final String ORIGIN_SOURCE = "source"; + + // create pairs of old failure reasons and their fixed versions. 
+ private static final FailureReasonForMigration originReplicationWorker = baseFailureReason().withFailureOrigin(OLD_REPLICATION_ORIGIN); + private static final FailureReasonForMigration fixedOriginReplicationWorker = baseFailureReason().withFailureOrigin(NEW_REPLICATION_ORIGIN); + + private static final FailureReasonForMigration originUnknown = baseFailureReason().withFailureOrigin(OLD_UNKNOWN); + private static final FailureReasonForMigration fixedOriginUnknown = baseFailureReason().withFailureOrigin(null); + + private static final FailureReasonForMigration typeManualCancellation = baseFailureReason().withFailureType(OLD_MANUAL_CANCELLATION); + private static final FailureReasonForMigration fixedTypeManualCancellation = baseFailureReason().withFailureType(NEW_MANUAL_CANCELLATION); + + private static final FailureReasonForMigration typeSystemError = baseFailureReason().withFailureType(OLD_SYSTEM_ERROR); + private static final FailureReasonForMigration fixedTypeSystemError = baseFailureReason().withFailureType(NEW_SYSTEM_ERROR); + + private static final FailureReasonForMigration typeConfigError = baseFailureReason().withFailureType(OLD_CONFIG_ERROR); + private static final FailureReasonForMigration fixedTypeConfigError = baseFailureReason().withFailureType(NEW_CONFIG_ERROR); + + private static final FailureReasonForMigration typeUnknown = baseFailureReason().withFailureType(OLD_UNKNOWN); + private static final FailureReasonForMigration fixedTypeUnknown = baseFailureReason().withFailureType(null); + + // enum values that don't need updating, or aren't recognized at all, should be left untouched + private static final FailureReasonForMigration noChangeNeeded = baseFailureReason().withFailureOrigin(ORIGIN_SOURCE); + private static final FailureReasonForMigration unrecognizedValue = baseFailureReason().withFailureType("someUnrecognizedValue"); + + // create failure summaries containing failure reasons that need fixing. + // mixing in noChangeNeeded reasons in different spots to make sure the migration properly leaves + // those untouched. 
+ private static final AttemptFailureSummaryForMigration summaryFixReplicationOrigin = getFailureSummary(noChangeNeeded, originReplicationWorker); + private static final AttemptFailureSummaryForMigration summaryFixReplicationOriginAndManualCancellationType = + getFailureSummary(originReplicationWorker, typeManualCancellation, noChangeNeeded); + private static final AttemptFailureSummaryForMigration summaryFixUnknownOriginAndUnknownType = + getFailureSummary(originUnknown, noChangeNeeded, typeUnknown); + private static final AttemptFailureSummaryForMigration summaryFixMultipleSystemErrorType = getFailureSummary(typeSystemError, typeSystemError); + private static final AttemptFailureSummaryForMigration summaryFixConfigErrorType = getFailureSummary(typeConfigError); + private static final AttemptFailureSummaryForMigration summaryNoChangeNeeded = getFailureSummary(noChangeNeeded, noChangeNeeded); + private static final AttemptFailureSummaryForMigration summaryFixOriginAndLeaveUnrecognizedValue = + getFailureSummary(originReplicationWorker, unrecognizedValue); + + // define attempt ids corresponding to each summary above + private static final Long attemptIdForFixReplicationOrigin = 1L; + private static final Long attemptIdForFixReplicationOriginAndManualCancellationType = 2L; + private static final Long attemptIdForFixUnknownOriginAndUnknownType = 3L; + private static final Long attemptIdForFixMultipleSystemErrorType = 4L; + private static final Long attemptIdForFixConfigErrorType = 5L; + private static final Long attemptIdForNoChangeNeeded = 6L; + private static final Long attemptIdForFixOriginAndLeaveUnrecognizedValue = 7L; + + // create expected fixed failure summaries after migration. + private static final AttemptFailureSummaryForMigration expectedSummaryFixReplicationOrigin = + getFailureSummary(noChangeNeeded, fixedOriginReplicationWorker); + private static final AttemptFailureSummaryForMigration expectedSummaryFixReplicationOriginAndManualCancellationType = + getFailureSummary(fixedOriginReplicationWorker, fixedTypeManualCancellation, noChangeNeeded); + private static final AttemptFailureSummaryForMigration expectedSummaryFixUnknownOriginAndUnknownType = + getFailureSummary(fixedOriginUnknown, noChangeNeeded, fixedTypeUnknown); + private static final AttemptFailureSummaryForMigration expectedSummaryFixMultipleSystemErrorType = + getFailureSummary(fixedTypeSystemError, fixedTypeSystemError); + private static final AttemptFailureSummaryForMigration expectedSummaryFixConfigErrorType = + getFailureSummary(fixedTypeConfigError); + private static final AttemptFailureSummaryForMigration expectedSummaryNoChangeNeeded = + getFailureSummary(noChangeNeeded, noChangeNeeded); + private static final AttemptFailureSummaryForMigration expectedFixOriginAndLeaveUnrecognizedValue = + getFailureSummary(fixedOriginReplicationWorker, unrecognizedValue); + + @Test + public void test() throws Exception { + final Database database = getDatabase(); + final DSLContext ctx = DSL.using(database.getDataSource().getConnection()); + + V0_35_5_001__Add_failureSummary_col_to_Attempts.migrate(ctx); + + addRecordsWithOldEnumValues(ctx); + + V0_35_40_001__MigrateFailureReasonEnumValues.updateRecordsWithNewEnumValues(ctx); + + verifyEnumValuesFixed(ctx); + } + + private static void addRecordsWithOldEnumValues(final DSLContext ctx) { + insertAttemptWithSummary(ctx, attemptIdForFixReplicationOrigin, summaryFixReplicationOrigin); + insertAttemptWithSummary(ctx, attemptIdForFixReplicationOriginAndManualCancellationType, 
summaryFixReplicationOriginAndManualCancellationType); + insertAttemptWithSummary(ctx, attemptIdForFixUnknownOriginAndUnknownType, summaryFixUnknownOriginAndUnknownType); + insertAttemptWithSummary(ctx, attemptIdForFixMultipleSystemErrorType, summaryFixMultipleSystemErrorType); + insertAttemptWithSummary(ctx, attemptIdForFixConfigErrorType, summaryFixConfigErrorType); + insertAttemptWithSummary(ctx, attemptIdForNoChangeNeeded, summaryNoChangeNeeded); + insertAttemptWithSummary(ctx, attemptIdForFixOriginAndLeaveUnrecognizedValue, summaryFixOriginAndLeaveUnrecognizedValue); + } + + private static void verifyEnumValuesFixed(final DSLContext ctx) { + assertEquals(expectedSummaryFixReplicationOrigin, fetchFailureSummary(ctx, attemptIdForFixReplicationOrigin)); + assertEquals(expectedSummaryFixReplicationOriginAndManualCancellationType, + fetchFailureSummary(ctx, attemptIdForFixReplicationOriginAndManualCancellationType)); + assertEquals(expectedSummaryFixUnknownOriginAndUnknownType, fetchFailureSummary(ctx, attemptIdForFixUnknownOriginAndUnknownType)); + assertEquals(expectedSummaryFixMultipleSystemErrorType, fetchFailureSummary(ctx, attemptIdForFixMultipleSystemErrorType)); + assertEquals(expectedSummaryFixConfigErrorType, fetchFailureSummary(ctx, attemptIdForFixConfigErrorType)); + assertEquals(expectedSummaryNoChangeNeeded, fetchFailureSummary(ctx, attemptIdForNoChangeNeeded)); + assertEquals(expectedFixOriginAndLeaveUnrecognizedValue, fetchFailureSummary(ctx, attemptIdForFixOriginAndLeaveUnrecognizedValue)); + } + + private static void insertAttemptWithSummary(final DSLContext ctx, final Long attemptId, final AttemptFailureSummaryForMigration summary) { + ctx.insertInto(DSL.table("attempts")) + .columns( + DSL.field("id"), + DSL.field("failure_summary"), + DSL.field("job_id"), + DSL.field("attempt_number")) + .values( + attemptId, + JSONB.valueOf(Jsons.serialize(summary)), + currJobId, + 1) + .execute(); + + currJobId++; + } + + private static AttemptFailureSummaryForMigration fetchFailureSummary(final DSLContext ctx, final Long attemptId) { + final Record record = ctx.fetchOne(DSL.select(asterisk()) + .from(DSL.table("attempts")) + .where(DSL.field("id").eq(attemptId))); + + return Jsons.deserialize( + record.get(DSL.field("failure_summary", SQLDataType.JSONB.nullable(true))).data(), + AttemptFailureSummaryForMigration.class); + + } + + private static FailureReasonForMigration baseFailureReason() { + return new FailureReasonForMigration() + .withInternalMessage("some internal message") + .withExternalMessage("some external message") + .withRetryable(false) + .withTimestamp(timeNowMillis) + .withStacktrace("some stacktrace") + .withMetadata(new Metadata().withAdditionalProperty("key1", "value1")); + } + + private static AttemptFailureSummaryForMigration getFailureSummary(final FailureReasonForMigration... 
failureReasons) {
+    return new AttemptFailureSummaryForMigration()
+        .withPartialSuccess(false)
+        .withFailures(List.of(failureReasons));
+  }
+
+}
diff --git a/airbyte-scheduler/persistence/src/test/java/io/airbyte/scheduler/persistence/job_tracker/JobTrackerTest.java b/airbyte-scheduler/persistence/src/test/java/io/airbyte/scheduler/persistence/job_tracker/JobTrackerTest.java
index 778c5fa4b195..db24f1868531 100644
--- a/airbyte-scheduler/persistence/src/test/java/io/airbyte/scheduler/persistence/job_tracker/JobTrackerTest.java
+++ b/airbyte-scheduler/persistence/src/test/java/io/airbyte/scheduler/persistence/job_tracker/JobTrackerTest.java
@@ -346,7 +346,7 @@ void testAsynchronousAttemptWithFailures(final ConfigType configType, final Map<
       {
         put("failureOrigin", "source");
-        put("failureType", "configError");
+        put("failureType", "config_error");
         put("internalMessage", "Internal config error error msg");
         put("externalMessage", "Config error related msg");
         put("metadata", ImmutableMap.of("some", "metadata"));
@@ -360,7 +360,7 @@ void testAsynchronousAttemptWithFailures(final ConfigType configType, final Map<
       {
         put("failureOrigin", "replication");
-        put("failureType", "systemError");
+        put("failureType", "system_error");
         put("internalMessage", "Internal system error error msg");
         put("externalMessage", "System error related msg");
         put("metadata", ImmutableMap.of("some", "metadata"));

From 6c649d981fb34ddf52a361103ee7519c31eac34f Mon Sep 17 00:00:00 2001
From: Octavia Squidington III <90398440+octavia-squidington-iii@users.noreply.github.com>
Date: Fri, 11 Mar 2022 22:09:42 +0100
Subject: [PATCH 24/38] Bump Airbyte version from 0.35.51-alpha to 0.35.52-alpha (#11075)

Co-authored-by: benmoriceau
---
 .bumpversion.cfg                                   |  2 +-
 .env                                               |  2 +-
 airbyte-bootloader/Dockerfile                      |  4 ++--
 airbyte-container-orchestrator/Dockerfile          |  6 +++---
 airbyte-metrics/reporter/Dockerfile                |  4 ++--
 airbyte-scheduler/app/Dockerfile                   |  4 ++--
 airbyte-server/Dockerfile                          |  4 ++--
 airbyte-webapp/package-lock.json                   |  4 ++--
 airbyte-webapp/package.json                        |  2 +-
 airbyte-workers/Dockerfile                         |  4 ++--
 charts/airbyte/Chart.yaml                          |  2 +-
 charts/airbyte/README.md                           | 10 +++++-----
 charts/airbyte/values.yaml                         | 10 +++++-----
 docs/operator-guides/upgrading-airbyte.md          |  2 +-
 kube/overlays/stable-with-resource-limits/.env     |  2 +-
 .../stable-with-resource-limits/kustomization.yaml | 12 ++++++------
 kube/overlays/stable/.env                          |  2 +-
 kube/overlays/stable/kustomization.yaml            | 12 ++++++------
 18 files changed, 44 insertions(+), 44 deletions(-)

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 7fc133cf5a4c..1f97b7fd455d 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 0.35.51-alpha
+current_version = 0.35.52-alpha
 commit = False
 tag = False
 parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(\-[a-z]+)?
diff --git a/.env b/.env index 32e477215504..bd361ceff14b 100644 --- a/.env +++ b/.env @@ -10,7 +10,7 @@ ### SHARED ### -VERSION=0.35.51-alpha +VERSION=0.35.52-alpha # When using the airbyte-db via default docker image CONFIG_ROOT=/data diff --git a/airbyte-bootloader/Dockerfile b/airbyte-bootloader/Dockerfile index 640d9779d1ae..61d505732da5 100644 --- a/airbyte-bootloader/Dockerfile +++ b/airbyte-bootloader/Dockerfile @@ -5,6 +5,6 @@ ENV APPLICATION airbyte-bootloader WORKDIR /app -ADD bin/${APPLICATION}-0.35.51-alpha.tar /app +ADD bin/${APPLICATION}-0.35.52-alpha.tar /app -ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.51-alpha/bin/${APPLICATION}"] +ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.52-alpha/bin/${APPLICATION}"] diff --git a/airbyte-container-orchestrator/Dockerfile b/airbyte-container-orchestrator/Dockerfile index a47fe4c6757b..0220ffe424d5 100644 --- a/airbyte-container-orchestrator/Dockerfile +++ b/airbyte-container-orchestrator/Dockerfile @@ -26,12 +26,12 @@ RUN echo "deb [signed-by=/usr/share/keyrings/kubernetes-archive-keyring.gpg] htt RUN apt-get update && apt-get install -y kubectl ENV APPLICATION airbyte-container-orchestrator -ENV AIRBYTE_ENTRYPOINT "/app/${APPLICATION}-0.35.51-alpha/bin/${APPLICATION}" +ENV AIRBYTE_ENTRYPOINT "/app/${APPLICATION}-0.35.52-alpha/bin/${APPLICATION}" WORKDIR /app # Move orchestrator app -ADD bin/${APPLICATION}-0.35.51-alpha.tar /app +ADD bin/${APPLICATION}-0.35.52-alpha.tar /app # wait for upstream dependencies to become available before starting server -ENTRYPOINT ["/bin/bash", "-c", "/app/${APPLICATION}-0.35.51-alpha/bin/${APPLICATION}"] +ENTRYPOINT ["/bin/bash", "-c", "/app/${APPLICATION}-0.35.52-alpha/bin/${APPLICATION}"] diff --git a/airbyte-metrics/reporter/Dockerfile b/airbyte-metrics/reporter/Dockerfile index 616a009d13a1..5fb87cf843bc 100644 --- a/airbyte-metrics/reporter/Dockerfile +++ b/airbyte-metrics/reporter/Dockerfile @@ -5,7 +5,7 @@ ENV APPLICATION airbyte-metrics-reporter WORKDIR /app -ADD bin/${APPLICATION}-0.35.51-alpha.tar /app +ADD bin/${APPLICATION}-0.35.52-alpha.tar /app # wait for upstream dependencies to become available before starting server -ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.51-alpha/bin/${APPLICATION}"] +ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.52-alpha/bin/${APPLICATION}"] diff --git a/airbyte-scheduler/app/Dockerfile b/airbyte-scheduler/app/Dockerfile index 4ede9a045cde..0a272a39ef69 100644 --- a/airbyte-scheduler/app/Dockerfile +++ b/airbyte-scheduler/app/Dockerfile @@ -5,7 +5,7 @@ ENV APPLICATION airbyte-scheduler WORKDIR /app -ADD bin/${APPLICATION}-0.35.51-alpha.tar /app +ADD bin/${APPLICATION}-0.35.52-alpha.tar /app # wait for upstream dependencies to become available before starting server -ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.51-alpha/bin/${APPLICATION}"] +ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.52-alpha/bin/${APPLICATION}"] diff --git a/airbyte-server/Dockerfile b/airbyte-server/Dockerfile index d7c48f467f5b..eeac4953e2fe 100644 --- a/airbyte-server/Dockerfile +++ b/airbyte-server/Dockerfile @@ -7,7 +7,7 @@ ENV APPLICATION airbyte-server WORKDIR /app -ADD bin/${APPLICATION}-0.35.51-alpha.tar /app +ADD bin/${APPLICATION}-0.35.52-alpha.tar /app # wait for upstream dependencies to become available before starting server -ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.51-alpha/bin/${APPLICATION}"] +ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.52-alpha/bin/${APPLICATION}"] diff --git a/airbyte-webapp/package-lock.json 
b/airbyte-webapp/package-lock.json index b00b7e17d959..c0a5fb9e8e46 100644 --- a/airbyte-webapp/package-lock.json +++ b/airbyte-webapp/package-lock.json @@ -1,12 +1,12 @@ { "name": "airbyte-webapp", - "version": "0.35.51-alpha", + "version": "0.35.52-alpha", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "airbyte-webapp", - "version": "0.35.51-alpha", + "version": "0.35.52-alpha", "dependencies": { "@fortawesome/fontawesome-svg-core": "^1.2.36", "@fortawesome/free-brands-svg-icons": "^5.15.4", diff --git a/airbyte-webapp/package.json b/airbyte-webapp/package.json index 23bbbd4a63f2..b6e60790f84e 100644 --- a/airbyte-webapp/package.json +++ b/airbyte-webapp/package.json @@ -1,6 +1,6 @@ { "name": "airbyte-webapp", - "version": "0.35.51-alpha", + "version": "0.35.52-alpha", "private": true, "engines": { "node": ">=16.0.0" diff --git a/airbyte-workers/Dockerfile b/airbyte-workers/Dockerfile index 59060d50da4b..2f2bc9df128f 100644 --- a/airbyte-workers/Dockerfile +++ b/airbyte-workers/Dockerfile @@ -30,7 +30,7 @@ ENV APPLICATION airbyte-workers WORKDIR /app # Move worker app -ADD bin/${APPLICATION}-0.35.51-alpha.tar /app +ADD bin/${APPLICATION}-0.35.52-alpha.tar /app # wait for upstream dependencies to become available before starting server -ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.51-alpha/bin/${APPLICATION}"] +ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.52-alpha/bin/${APPLICATION}"] diff --git a/charts/airbyte/Chart.yaml b/charts/airbyte/Chart.yaml index 84ad43a50c88..c7add842b161 100644 --- a/charts/airbyte/Chart.yaml +++ b/charts/airbyte/Chart.yaml @@ -21,7 +21,7 @@ version: 0.3.0 # incremented each time you make changes to the application. Versions are not expected to # follow Semantic Versioning. They should reflect the version the application is using. # It is recommended to use it with quotes. -appVersion: "0.35.51-alpha" +appVersion: "0.35.52-alpha" dependencies: - name: common diff --git a/charts/airbyte/README.md b/charts/airbyte/README.md index f1b49197621b..640e671de08e 100644 --- a/charts/airbyte/README.md +++ b/charts/airbyte/README.md @@ -29,7 +29,7 @@ | `webapp.replicaCount` | Number of webapp replicas | `1` | | `webapp.image.repository` | The repository to use for the airbyte webapp image. | `airbyte/webapp` | | `webapp.image.pullPolicy` | the pull policy to use for the airbyte webapp image | `IfNotPresent` | -| `webapp.image.tag` | The airbyte webapp image tag. Defaults to the chart's AppVersion | `0.35.51-alpha` | +| `webapp.image.tag` | The airbyte webapp image tag. Defaults to the chart's AppVersion | `0.35.52-alpha` | | `webapp.podAnnotations` | Add extra annotations to the webapp pod(s) | `{}` | | `webapp.containerSecurityContext` | Security context for the container | `{}` | | `webapp.livenessProbe.enabled` | Enable livenessProbe on the webapp | `true` | @@ -71,7 +71,7 @@ | `scheduler.replicaCount` | Number of scheduler replicas | `1` | | `scheduler.image.repository` | The repository to use for the airbyte scheduler image. | `airbyte/scheduler` | | `scheduler.image.pullPolicy` | the pull policy to use for the airbyte scheduler image | `IfNotPresent` | -| `scheduler.image.tag` | The airbyte scheduler image tag. Defaults to the chart's AppVersion | `0.35.51-alpha` | +| `scheduler.image.tag` | The airbyte scheduler image tag. 
Defaults to the chart's AppVersion | `0.35.52-alpha` | | `scheduler.podAnnotations` | Add extra annotations to the scheduler pod | `{}` | | `scheduler.resources.limits` | The resources limits for the scheduler container | `{}` | | `scheduler.resources.requests` | The requested resources for the scheduler container | `{}` | @@ -118,7 +118,7 @@ | `server.replicaCount` | Number of server replicas | `1` | | `server.image.repository` | The repository to use for the airbyte server image. | `airbyte/server` | | `server.image.pullPolicy` | the pull policy to use for the airbyte server image | `IfNotPresent` | -| `server.image.tag` | The airbyte server image tag. Defaults to the chart's AppVersion | `0.35.51-alpha` | +| `server.image.tag` | The airbyte server image tag. Defaults to the chart's AppVersion | `0.35.52-alpha` | | `server.podAnnotations` | Add extra annotations to the server pod | `{}` | | `server.containerSecurityContext` | Security context for the container | `{}` | | `server.livenessProbe.enabled` | Enable livenessProbe on the server | `true` | @@ -156,7 +156,7 @@ | `worker.replicaCount` | Number of worker replicas | `1` | | `worker.image.repository` | The repository to use for the airbyte worker image. | `airbyte/worker` | | `worker.image.pullPolicy` | the pull policy to use for the airbyte worker image | `IfNotPresent` | -| `worker.image.tag` | The airbyte worker image tag. Defaults to the chart's AppVersion | `0.35.51-alpha` | +| `worker.image.tag` | The airbyte worker image tag. Defaults to the chart's AppVersion | `0.35.52-alpha` | | `worker.podAnnotations` | Add extra annotations to the worker pod(s) | `{}` | | `worker.containerSecurityContext` | Security context for the container | `{}` | | `worker.livenessProbe.enabled` | Enable livenessProbe on the worker | `true` | @@ -188,7 +188,7 @@ | ----------------------------- | -------------------------------------------------------------------- | -------------------- | | `bootloader.image.repository` | The repository to use for the airbyte bootloader image. | `airbyte/bootloader` | | `bootloader.image.pullPolicy` | the pull policy to use for the airbyte bootloader image | `IfNotPresent` | -| `bootloader.image.tag` | The airbyte bootloader image tag. Defaults to the chart's AppVersion | `0.35.51-alpha` | +| `bootloader.image.tag` | The airbyte bootloader image tag. 
Defaults to the chart's AppVersion | `0.35.52-alpha` | ### Temporal parameters diff --git a/charts/airbyte/values.yaml b/charts/airbyte/values.yaml index bb240d21b623..9f3c106b2ef0 100644 --- a/charts/airbyte/values.yaml +++ b/charts/airbyte/values.yaml @@ -43,7 +43,7 @@ webapp: image: repository: airbyte/webapp pullPolicy: IfNotPresent - tag: 0.35.51-alpha + tag: 0.35.52-alpha ## @param webapp.podAnnotations [object] Add extra annotations to the webapp pod(s) ## @@ -209,7 +209,7 @@ scheduler: image: repository: airbyte/scheduler pullPolicy: IfNotPresent - tag: 0.35.51-alpha + tag: 0.35.52-alpha ## @param scheduler.podAnnotations [object] Add extra annotations to the scheduler pod ## @@ -440,7 +440,7 @@ server: image: repository: airbyte/server pullPolicy: IfNotPresent - tag: 0.35.51-alpha + tag: 0.35.52-alpha ## @param server.podAnnotations [object] Add extra annotations to the server pod ## @@ -581,7 +581,7 @@ worker: image: repository: airbyte/worker pullPolicy: IfNotPresent - tag: 0.35.51-alpha + tag: 0.35.52-alpha ## @param worker.podAnnotations [object] Add extra annotations to the worker pod(s) ## @@ -699,7 +699,7 @@ bootloader: image: repository: airbyte/bootloader pullPolicy: IfNotPresent - tag: 0.35.51-alpha + tag: 0.35.52-alpha ## @param bootloader.podAnnotations [object] Add extra annotations to the bootloader pod ## diff --git a/docs/operator-guides/upgrading-airbyte.md b/docs/operator-guides/upgrading-airbyte.md index 2a58b293dc54..cf731df7c3d5 100644 --- a/docs/operator-guides/upgrading-airbyte.md +++ b/docs/operator-guides/upgrading-airbyte.md @@ -101,7 +101,7 @@ If you are upgrading from \(i.e. your current version of Airbyte is\) Airbyte ve Here's an example of what it might look like with the values filled in. It assumes that the downloaded `airbyte_archive.tar.gz` is in `/tmp`. 
```bash - docker run --rm -v /tmp:/config airbyte/migration:0.35.51-alpha --\ + docker run --rm -v /tmp:/config airbyte/migration:0.35.52-alpha --\ --input /config/airbyte_archive.tar.gz\ --output /config/airbyte_archive_migrated.tar.gz ``` diff --git a/kube/overlays/stable-with-resource-limits/.env b/kube/overlays/stable-with-resource-limits/.env index 392ba4be0c50..fb98c8e0eae6 100644 --- a/kube/overlays/stable-with-resource-limits/.env +++ b/kube/overlays/stable-with-resource-limits/.env @@ -1,4 +1,4 @@ -AIRBYTE_VERSION=0.35.51-alpha +AIRBYTE_VERSION=0.35.52-alpha # Airbyte Internal Database, see https://docs.airbyte.io/operator-guides/configuring-airbyte-db DATABASE_HOST=airbyte-db-svc diff --git a/kube/overlays/stable-with-resource-limits/kustomization.yaml b/kube/overlays/stable-with-resource-limits/kustomization.yaml index f55592079ba4..01d959cbd377 100644 --- a/kube/overlays/stable-with-resource-limits/kustomization.yaml +++ b/kube/overlays/stable-with-resource-limits/kustomization.yaml @@ -8,17 +8,17 @@ bases: images: - name: airbyte/db - newTag: 0.35.51-alpha + newTag: 0.35.52-alpha - name: airbyte/bootloader - newTag: 0.35.51-alpha + newTag: 0.35.52-alpha - name: airbyte/scheduler - newTag: 0.35.51-alpha + newTag: 0.35.52-alpha - name: airbyte/server - newTag: 0.35.51-alpha + newTag: 0.35.52-alpha - name: airbyte/webapp - newTag: 0.35.51-alpha + newTag: 0.35.52-alpha - name: airbyte/worker - newTag: 0.35.51-alpha + newTag: 0.35.52-alpha - name: temporalio/auto-setup newTag: 1.7.0 diff --git a/kube/overlays/stable/.env b/kube/overlays/stable/.env index a052768e4249..298a0caa67bc 100644 --- a/kube/overlays/stable/.env +++ b/kube/overlays/stable/.env @@ -1,4 +1,4 @@ -AIRBYTE_VERSION=0.35.51-alpha +AIRBYTE_VERSION=0.35.52-alpha # Airbyte Internal Database, see https://docs.airbyte.io/operator-guides/configuring-airbyte-db DATABASE_HOST=airbyte-db-svc diff --git a/kube/overlays/stable/kustomization.yaml b/kube/overlays/stable/kustomization.yaml index 80461932255b..26f6258758ba 100644 --- a/kube/overlays/stable/kustomization.yaml +++ b/kube/overlays/stable/kustomization.yaml @@ -8,17 +8,17 @@ bases: images: - name: airbyte/db - newTag: 0.35.51-alpha + newTag: 0.35.52-alpha - name: airbyte/bootloader - newTag: 0.35.51-alpha + newTag: 0.35.52-alpha - name: airbyte/scheduler - newTag: 0.35.51-alpha + newTag: 0.35.52-alpha - name: airbyte/server - newTag: 0.35.51-alpha + newTag: 0.35.52-alpha - name: airbyte/webapp - newTag: 0.35.51-alpha + newTag: 0.35.52-alpha - name: airbyte/worker - newTag: 0.35.51-alpha + newTag: 0.35.52-alpha - name: temporalio/auto-setup newTag: 1.7.0 From 0e33cdcfe13c15afe2a7249e0e7ea53f4cc182a5 Mon Sep 17 00:00:00 2001 From: Greg Solovyev Date: Fri, 11 Mar 2022 15:12:07 -0800 Subject: [PATCH 25/38] destination-s3: add a test for listObjects permission on destination bucket (#10856) * Add a test for listObjects permission to destination-s3 connector * add testIAMUserHasListObjectPermission method to S3Destination and call this method from S3Destination::check. 
Method throws an exception if IAM user does not have listObjects permission on the destination bucket * add a unit test to S3DestinationTest to verify that S3Destination::check fails if listObjects throws an exception * add a unit test to S3DestinationTest to verify that S3Destination::check succeeds if listObjects succeeds * Add S3DestinationConfigFactory in order to be able to mock S3 client used in S3Destination::check * Addressing review comments: - separate positive and negative unit tests - fix formatting - reuse s3 client for both positive and negative tests * Add information about PR #10856 to the changelog * Prepare for publishing new version: * Bump version to 0.2.10 in Dockerfile * Bump version to 0.2.10 in changelog * Update destination-s3 version in connector index * Update seed spec for destination-s3 connector --- .../seed/destination_definitions.yaml | 2 +- .../resources/seed/destination_specs.yaml | 2 +- .../connectors/destination-s3/Dockerfile | 2 +- .../destination/s3/S3Destination.java | 22 +++++- .../destination/s3/S3DestinationConfig.java | 19 +++-- .../s3/S3DestinationConfigFactory.java | 15 ++++ .../destination/s3/S3DestinationTest.java | 70 ++++++++++++++++++- docs/integrations/destinations/s3.md | 53 +++++++------- 8 files changed, 150 insertions(+), 35 deletions(-) create mode 100644 airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3DestinationConfigFactory.java diff --git a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml index 9c046b76f5d9..c3d691f76143 100644 --- a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml @@ -203,7 +203,7 @@ - name: S3 destinationDefinitionId: 4816b78f-1489-44c1-9060-4b19d5fa9362 dockerRepository: airbyte/destination-s3 - dockerImageTag: 0.2.9 + dockerImageTag: 0.2.10 documentationUrl: https://docs.airbyte.io/integrations/destinations/s3 icon: s3.svg resourceRequirements: diff --git a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml index 69d55337b614..02f8e07a10d2 100644 --- a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml @@ -3454,7 +3454,7 @@ supported_destination_sync_modes: - "append" - "overwrite" -- dockerImage: "airbyte/destination-s3:0.2.9" +- dockerImage: "airbyte/destination-s3:0.2.10" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/s3" connectionSpecification: diff --git a/airbyte-integrations/connectors/destination-s3/Dockerfile b/airbyte-integrations/connectors/destination-s3/Dockerfile index 7ad019a76a77..ffd5d8c7eb1c 100644 --- a/airbyte-integrations/connectors/destination-s3/Dockerfile +++ b/airbyte-integrations/connectors/destination-s3/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION destination-s3 COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.2.9 +LABEL io.airbyte.version=0.2.10 LABEL io.airbyte.name=airbyte/destination-s3 diff --git a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3Destination.java b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3Destination.java index 4efd3c43a1d3..97ed6f6dac42 100644 --- 
a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3Destination.java +++ b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3Destination.java @@ -7,6 +7,7 @@ import alex.mojaki.s3upload.MultiPartOutputStream; import alex.mojaki.s3upload.StreamTransferManager; import com.amazonaws.services.s3.AmazonS3; +import com.amazonaws.services.s3.model.ListObjectsRequest; import com.fasterxml.jackson.databind.JsonNode; import com.google.common.annotations.VisibleForTesting; import io.airbyte.integrations.BaseConnector; @@ -33,6 +34,15 @@ public class S3Destination extends BaseConnector implements Destination { private static final Logger LOGGER = LoggerFactory.getLogger(S3Destination.class); + private final S3DestinationConfigFactory configFactory; + + public S3Destination() { + this.configFactory = new S3DestinationConfigFactory(); + } + + public S3Destination(final S3DestinationConfigFactory configFactory) { + this.configFactory = configFactory; + } public static void main(final String[] args) throws Exception { new IntegrationRunner(new S3Destination()).run(args); @@ -41,9 +51,12 @@ public static void main(final String[] args) throws Exception { @Override public AirbyteConnectionStatus check(final JsonNode config) { try { - final S3DestinationConfig destinationConfig = S3DestinationConfig.getS3DestinationConfig(config); + final S3DestinationConfig destinationConfig = this.configFactory.getS3DestinationConfig(config); final AmazonS3 s3Client = destinationConfig.getS3Client(); + // Test for listObjects permission + testIAMUserHasListObjectPermission(s3Client, destinationConfig.getBucketName()); + // Test single upload (for small files) permissions testSingleUpload(s3Client, destinationConfig.getBucketName()); @@ -60,6 +73,13 @@ public AirbyteConnectionStatus check(final JsonNode config) { } } + public static void testIAMUserHasListObjectPermission(final AmazonS3 s3Client, final String bucketName) { + LOGGER.info("Started testing if IAM user can call listObjects on the destination bucket"); + final ListObjectsRequest request = new ListObjectsRequest().withBucketName(bucketName).withMaxKeys(1); + s3Client.listObjects(request); + LOGGER.info("Finished checking for listObjects permission"); + } + public static void testSingleUpload(final AmazonS3 s3Client, final String bucketName) { LOGGER.info("Started testing if all required credentials assigned to user for single file uploading"); final String testFile = "test_" + System.currentTimeMillis(); diff --git a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3DestinationConfig.java b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3DestinationConfig.java index 82d8c9c4a1fa..9fea2e1ec844 100644 --- a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3DestinationConfig.java +++ b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3DestinationConfig.java @@ -64,7 +64,8 @@ public S3DestinationConfig(final String endpoint, final String accessKeyId, final String secretAccessKey, final Integer partSize, - final S3FormatConfig formatConfig) { + final S3FormatConfig formatConfig, + final AmazonS3 s3Client) { this.endpoint = endpoint; this.bucketName = bucketName; this.bucketPath = bucketPath; @@ -73,6 +74,18 @@ public S3DestinationConfig(final String endpoint, 
this.secretAccessKey = secretAccessKey; this.formatConfig = formatConfig; this.partSize = partSize; + this.s3Client = s3Client; + } + + public S3DestinationConfig(final String endpoint, + final String bucketName, + final String bucketPath, + final String bucketRegion, + final String accessKeyId, + final String secretAccessKey, + final Integer partSize, + final S3FormatConfig formatConfig) { + this(endpoint, bucketName, bucketPath, bucketRegion, accessKeyId, secretAccessKey, partSize, formatConfig, null); } public static S3DestinationConfig getS3DestinationConfig(final JsonNode config) { @@ -166,9 +179,7 @@ protected AmazonS3 createS3Client() { return AmazonS3ClientBuilder.standard() .withCredentials(new InstanceProfileCredentialsProvider(false)) .build(); - } - - else if (endpoint == null || endpoint.isEmpty()) { + } else if (endpoint == null || endpoint.isEmpty()) { return AmazonS3ClientBuilder.standard() .withCredentials(new AWSStaticCredentialsProvider(awsCreds)) .withRegion(bucketRegion) diff --git a/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3DestinationConfigFactory.java b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3DestinationConfigFactory.java new file mode 100644 index 000000000000..a1a4cb4668ad --- /dev/null +++ b/airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3DestinationConfigFactory.java @@ -0,0 +1,15 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.s3; + +import com.fasterxml.jackson.databind.JsonNode; + +public class S3DestinationConfigFactory { + + public S3DestinationConfig getS3DestinationConfig(final JsonNode config) { + return S3DestinationConfig.getS3DestinationConfig(config); + } + +} diff --git a/airbyte-integrations/connectors/destination-s3/src/test/java/io/airbyte/integrations/destination/s3/S3DestinationTest.java b/airbyte-integrations/connectors/destination-s3/src/test/java/io/airbyte/integrations/destination/s3/S3DestinationTest.java index 6b385831b791..d2c710df866e 100644 --- a/airbyte-integrations/connectors/destination-s3/src/test/java/io/airbyte/integrations/destination/s3/S3DestinationTest.java +++ b/airbyte-integrations/connectors/destination-s3/src/test/java/io/airbyte/integrations/destination/s3/S3DestinationTest.java @@ -4,13 +4,26 @@ package io.airbyte.integrations.destination.s3; +import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; import com.amazonaws.services.s3.AmazonS3; +import com.amazonaws.services.s3.model.AmazonS3Exception; +import com.amazonaws.services.s3.model.InitiateMultipartUploadRequest; +import com.amazonaws.services.s3.model.InitiateMultipartUploadResult; +import com.amazonaws.services.s3.model.ListObjectsRequest; +import com.amazonaws.services.s3.model.UploadPartRequest; +import com.amazonaws.services.s3.model.UploadPartResult; +import com.fasterxml.jackson.databind.JsonNode; +import io.airbyte.protocol.models.AirbyteConnectionStatus; +import io.airbyte.protocol.models.AirbyteConnectionStatus.Status; 
import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.mockito.ArgumentCaptor; @@ -25,6 +38,11 @@ public class S3DestinationTest { @BeforeEach public void setup() { s3 = mock(AmazonS3.class); + final InitiateMultipartUploadResult uploadResult = mock(InitiateMultipartUploadResult.class); + final UploadPartResult uploadPartResult = mock(UploadPartResult.class); + when(s3.uploadPart(any(UploadPartRequest.class))).thenReturn(uploadPartResult); + when(s3.initiateMultipartUpload(any(InitiateMultipartUploadRequest.class))).thenReturn(uploadResult); + config = new S3DestinationConfig( "fake-endpoint", "fake-bucket", @@ -32,7 +50,57 @@ public void setup() { "fake-region", "fake-accessKeyId", "fake-secretAccessKey", - null); + S3DestinationConfig.DEFAULT_PART_SIZE_MB, null, s3); + } + + @Test + /** + * Test that check will fail if IAM user does not have listObjects permission + */ + public void checksS3WithoutListObjectPermission() { + final S3Destination destinationFail = new S3Destination(new S3DestinationConfigFactory() { + + public S3DestinationConfig getS3DestinationConfig(final JsonNode config) { + return new S3DestinationConfig( + "fake-endpoint", + "fake-bucket", + "fake-bucketPath", + "fake-region", + "fake-accessKeyId", + "fake-secretAccessKey", + S3DestinationConfig.DEFAULT_PART_SIZE_MB, + null, s3); + } + + }); + doThrow(new AmazonS3Exception("Access Denied")).when(s3).listObjects(any(ListObjectsRequest.class)); + final AirbyteConnectionStatus status = destinationFail.check(null); + assertEquals(Status.FAILED, status.getStatus(), "Connection check should have failed"); + assertTrue(status.getMessage().indexOf("Access Denied") > 0, "Connection check returned wrong failure message"); + } + + @Test + /** + * Test that check will succeed when IAM user has all required permissions + */ + public void checksS3WithListObjectPermission() { + final S3Destination destinationSuccess = new S3Destination(new S3DestinationConfigFactory() { + + public S3DestinationConfig getS3DestinationConfig(final JsonNode config) { + return new S3DestinationConfig( + "fake-endpoint", + "fake-bucket", + "fake-bucketPath", + "fake-region", + "fake-accessKeyId", + "fake-secretAccessKey", + S3DestinationConfig.DEFAULT_PART_SIZE_MB, + null, s3); + } + + }); + final AirbyteConnectionStatus status = destinationSuccess.check(null); + assertEquals(Status.SUCCEEDED, status.getStatus(), "Connection check should have succeeded"); } @Test diff --git a/docs/integrations/destinations/s3.md b/docs/integrations/destinations/s3.md index 695ae8c06cd7..966241592ab1 100644 --- a/docs/integrations/destinations/s3.md +++ b/docs/integrations/destinations/s3.md @@ -224,31 +224,32 @@ Under the hood, an Airbyte data stream in Json schema is first converted to an A ## CHANGELOG -| Version | Date | Pull Request | Subject | -|:--------| :--- | :--- | :--- | -| 0.2.7 | 2022-02-14 | [\#10318](https://github.com/airbytehq/airbyte/pull/10318) | Prevented double slashes in S3 destination path | -| 0.2.6 | 2022-02-14 | [10256](https://github.com/airbytehq/airbyte/pull/10256) | Add `-XX:+ExitOnOutOfMemoryError` JVM option | -| 0.2.5 | 2022-01-13 | [\#9399](https://github.com/airbytehq/airbyte/pull/9399) | Use instance profile authentication if credentials are not provided | -| 0.2.4 | 2022-01-12 | [\#9415](https://github.com/airbytehq/airbyte/pull/9415) | BigQuery Destination : Fix GCS processing of Facebook data | -| 0.2.3 | 2022-01-11 | [\#9367](https://github.com/airbytehq/airbyte/pull/9367) | Avro & Parquet: support 
array field with unknown item type; default any improperly typed field to string. | -| 0.2.2 | 2021-12-21 | [\#8574](https://github.com/airbytehq/airbyte/pull/8574) | Added namespace to Avro and Parquet record types | -| 0.2.1 | 2021-12-20 | [\#8974](https://github.com/airbytehq/airbyte/pull/8974) | Release a new version to ensure there is no excessive logging. | +| Version | Date | Pull Request | Subject | +|:--------| :--- | :--- |:---------------------------------------------------------------------------------------------------------------------------| +| 0.2.10 | 2022-03-07 | [\#10856](https://github.com/airbytehq/airbyte/pull/10856) | `check` method now tests for listObjects permissions on the target bucket | +| 0.2.7 | 2022-02-14 | [\#10318](https://github.com/airbytehq/airbyte/pull/10318) | Prevented double slashes in S3 destination path | +| 0.2.6 | 2022-02-14 | [10256](https://github.com/airbytehq/airbyte/pull/10256) | Add `-XX:+ExitOnOutOfMemoryError` JVM option | +| 0.2.5 | 2022-01-13 | [\#9399](https://github.com/airbytehq/airbyte/pull/9399) | Use instance profile authentication if credentials are not provided | +| 0.2.4 | 2022-01-12 | [\#9415](https://github.com/airbytehq/airbyte/pull/9415) | BigQuery Destination : Fix GCS processing of Facebook data | +| 0.2.3 | 2022-01-11 | [\#9367](https://github.com/airbytehq/airbyte/pull/9367) | Avro & Parquet: support array field with unknown item type; default any improperly typed field to string. | +| 0.2.2 | 2021-12-21 | [\#8574](https://github.com/airbytehq/airbyte/pull/8574) | Added namespace to Avro and Parquet record types | +| 0.2.1 | 2021-12-20 | [\#8974](https://github.com/airbytehq/airbyte/pull/8974) | Release a new version to ensure there is no excessive logging. | | 0.2.0 | 2021-12-15 | [\#8607](https://github.com/airbytehq/airbyte/pull/8607) | Change the output filename for CSV files - it's now `bucketPath/namespace/streamName/timestamp_epochMillis_randomUuid.csv` | -| 0.1.16 | 2021-12-10 | [\#8562](https://github.com/airbytehq/airbyte/pull/8562) | Swap dependencies with destination-jdbc. | -| 0.1.15 | 2021-12-03 | [\#8501](https://github.com/airbytehq/airbyte/pull/8501) | Remove excessive logging for Avro and Parquet invalid date strings. | -| 0.1.14 | 2021-11-09 | [\#7732](https://github.com/airbytehq/airbyte/pull/7732) | Support timestamp in Avro and Parquet | -| 0.1.13 | 2021-11-03 | [\#7288](https://github.com/airbytehq/airbyte/issues/7288) | Support Json `additionalProperties`. | -| 0.1.12 | 2021-09-13 | [\#5720](https://github.com/airbytehq/airbyte/issues/5720) | Added configurable block size for stream. Each stream is limited to 10,000 by S3 | -| 0.1.11 | 2021-09-10 | [\#5729](https://github.com/airbytehq/airbyte/pull/5729) | For field names that start with a digit, a `_` will be appended at the beginning for the`Parquet` and `Avro` formats. | -| 0.1.10 | 2021-08-17 | [\#4699](https://github.com/airbytehq/airbyte/pull/4699) | Added json config validator | -| 0.1.9 | 2021-07-12 | [\#4666](https://github.com/airbytehq/airbyte/pull/4666) | Fix MinIO output for Parquet format. | -| 0.1.8 | 2021-07-07 | [\#4613](https://github.com/airbytehq/airbyte/pull/4613) | Patched schema converter to support combined restrictions. | -| 0.1.7 | 2021-06-23 | [\#4227](https://github.com/airbytehq/airbyte/pull/4227) | Added Avro and JSONL output. | -| 0.1.6 | 2021-06-16 | [\#4130](https://github.com/airbytehq/airbyte/pull/4130) | Patched the check to verify prefix access instead of full-bucket access. 
| -| 0.1.5 | 2021-06-14 | [\#3908](https://github.com/airbytehq/airbyte/pull/3908) | Fixed default `max_padding_size_mb` in `spec.json`. | -| 0.1.4 | 2021-06-14 | [\#3908](https://github.com/airbytehq/airbyte/pull/3908) | Added Parquet output. | -| 0.1.3 | 2021-06-13 | [\#4038](https://github.com/airbytehq/airbyte/pull/4038) | Added support for alternative S3. | -| 0.1.2 | 2021-06-10 | [\#4029](https://github.com/airbytehq/airbyte/pull/4029) | Fixed `_airbyte_emitted_at` field to be a UTC instead of local timestamp for consistency. | -| 0.1.1 | 2021-06-09 | [\#3973](https://github.com/airbytehq/airbyte/pull/3973) | Added `AIRBYTE_ENTRYPOINT` in base Docker image for Kubernetes support. | -| 0.1.0 | 2021-06-03 | [\#3672](https://github.com/airbytehq/airbyte/pull/3672) | Initial release with CSV output. | +| 0.1.16 | 2021-12-10 | [\#8562](https://github.com/airbytehq/airbyte/pull/8562) | Swap dependencies with destination-jdbc. | +| 0.1.15 | 2021-12-03 | [\#8501](https://github.com/airbytehq/airbyte/pull/8501) | Remove excessive logging for Avro and Parquet invalid date strings. | +| 0.1.14 | 2021-11-09 | [\#7732](https://github.com/airbytehq/airbyte/pull/7732) | Support timestamp in Avro and Parquet | +| 0.1.13 | 2021-11-03 | [\#7288](https://github.com/airbytehq/airbyte/issues/7288) | Support Json `additionalProperties`. | +| 0.1.12 | 2021-09-13 | [\#5720](https://github.com/airbytehq/airbyte/issues/5720) | Added configurable block size for stream. Each stream is limited to 10,000 by S3 | +| 0.1.11 | 2021-09-10 | [\#5729](https://github.com/airbytehq/airbyte/pull/5729) | For field names that start with a digit, a `_` will be appended at the beginning for the`Parquet` and `Avro` formats. | +| 0.1.10 | 2021-08-17 | [\#4699](https://github.com/airbytehq/airbyte/pull/4699) | Added json config validator | +| 0.1.9 | 2021-07-12 | [\#4666](https://github.com/airbytehq/airbyte/pull/4666) | Fix MinIO output for Parquet format. | +| 0.1.8 | 2021-07-07 | [\#4613](https://github.com/airbytehq/airbyte/pull/4613) | Patched schema converter to support combined restrictions. | +| 0.1.7 | 2021-06-23 | [\#4227](https://github.com/airbytehq/airbyte/pull/4227) | Added Avro and JSONL output. | +| 0.1.6 | 2021-06-16 | [\#4130](https://github.com/airbytehq/airbyte/pull/4130) | Patched the check to verify prefix access instead of full-bucket access. | +| 0.1.5 | 2021-06-14 | [\#3908](https://github.com/airbytehq/airbyte/pull/3908) | Fixed default `max_padding_size_mb` in `spec.json`. | +| 0.1.4 | 2021-06-14 | [\#3908](https://github.com/airbytehq/airbyte/pull/3908) | Added Parquet output. | +| 0.1.3 | 2021-06-13 | [\#4038](https://github.com/airbytehq/airbyte/pull/4038) | Added support for alternative S3. | +| 0.1.2 | 2021-06-10 | [\#4029](https://github.com/airbytehq/airbyte/pull/4029) | Fixed `_airbyte_emitted_at` field to be a UTC instead of local timestamp for consistency. | +| 0.1.1 | 2021-06-09 | [\#3973](https://github.com/airbytehq/airbyte/pull/3973) | Added `AIRBYTE_ENTRYPOINT` in base Docker image for Kubernetes support. | +| 0.1.0 | 2021-06-03 | [\#3672](https://github.com/airbytehq/airbyte/pull/3672) | Initial release with CSV output. | From 3bde91006b92dcf82ad5938e8fc70d755ef47975 Mon Sep 17 00:00:00 2001 From: Benoit Moriceau Date: Fri, 11 Mar 2022 15:56:03 -0800 Subject: [PATCH 26/38] Revert "Revert "Revert "Remove the attemptId notion in the connectionManagerWorkflow (#10780)" (#11057)" (#11073)" (#11081) This reverts commit e27bb7405073b89f21c5b8435789e0f190585ff8. 
--- .../scheduler/persistence/JobPersistence.java | 6 +- airbyte-workers/build.gradle | 2 +- .../ConnectionManagerWorkflowImpl.java | 134 +-- .../activities/GenerateInputActivity.java | 17 - .../activities/GenerateInputActivityImpl.java | 8 - .../JobCreationAndStatusUpdateActivity.java | 70 -- ...obCreationAndStatusUpdateActivityImpl.java | 43 - .../state/WorkflowInternalState.java | 2 +- .../ConnectionManagerWorkflowTest.java | 74 +- .../scheduling/WorkflowReplayingTest.java | 24 - ...obCreationAndStatusUpdateActivityTest.java | 52 - .../src/test/resources/workflowHistory.json | 939 ------------------ 12 files changed, 67 insertions(+), 1304 deletions(-) delete mode 100644 airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/WorkflowReplayingTest.java delete mode 100644 airbyte-workers/src/test/resources/workflowHistory.json diff --git a/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/JobPersistence.java b/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/JobPersistence.java index 20db344f2fd6..cda5b52f5864 100644 --- a/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/JobPersistence.java +++ b/airbyte-scheduler/persistence/src/main/java/io/airbyte/scheduler/persistence/JobPersistence.java @@ -75,12 +75,12 @@ public interface JobPersistence { // /** - * Create a new attempt for a job and return its attempt number. Throws - * {@link IllegalStateException} if the job is already in a terminal state. + * Create a new attempt for a job. Throws {@link IllegalStateException} if the job is already in a + * terminal state. * * @param jobId job for which an attempt will be created * @param logPath path where logs should be written for the attempt - * @return The attempt number of the created attempt (see {@link DefaultJobPersistence}) + * @return id of the attempt * @throws IOException exception due to interaction with persistence */ int createAttempt(long jobId, Path logPath) throws IOException; diff --git a/airbyte-workers/build.gradle b/airbyte-workers/build.gradle index ea4d418ed7c0..e37a5b9a4482 100644 --- a/airbyte-workers/build.gradle +++ b/airbyte-workers/build.gradle @@ -33,7 +33,7 @@ dependencies { implementation project(':airbyte-scheduler:models') testImplementation 'io.temporal:temporal-testing:1.8.1' - testImplementation 'com.jayway.jsonpath:json-path:2.7.0' + testImplementation 'io.temporal:temporal-testing-junit5:1.5.0' // versioned separately from rest of temporal testImplementation "org.flywaydb:flyway-core:7.14.0" testImplementation 'org.mockito:mockito-inline:4.0.0' testImplementation 'org.postgresql:postgresql:42.2.18' diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java index 16ecec71a2fb..c654f034880b 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java @@ -21,20 +21,15 @@ import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity; import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity.GeneratedJobInput; import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity.SyncInput; -import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity.SyncInputWithAttemptNumber; 
import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptCreationInput; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptCreationOutput; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptFailureInput; -import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptNumberCreationOutput; -import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptNumberFailureInput; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobCancelledInput; -import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobCancelledInputWithAttemptNumber; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobCreationInput; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobCreationOutput; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobFailureInput; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobSuccessInput; -import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobSuccessInputWithAttemptNumber; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.ReportJobStartInput; import io.airbyte.workers.temporal.scheduling.shared.ActivityConfiguration; import io.airbyte.workers.temporal.scheduling.state.WorkflowInternalState; @@ -65,9 +60,6 @@ public class ConnectionManagerWorkflowImpl implements ConnectionManagerWorkflow private static final int TASK_QUEUE_CHANGE_CURRENT_VERSION = 1; private static final int AUTO_DISABLE_FAILING_CONNECTION_CHANGE_CURRENT_VERSION = 1; - private static final String RENAME_ATTEMPT_ID_TO_NUMBER_TAG = "rename_attempt_id_to_number"; - private static final int RENAME_ATTEMPT_ID_TO_NUMBER_CURRENT_VERSION = 1; - private WorkflowState workflowState = new WorkflowState(UUID.randomUUID(), new NoopStateListener()); private final WorkflowInternalState workflowInternalState = new WorkflowInternalState(); @@ -155,7 +147,7 @@ private CancellationScope generateSyncWorkflowRunnable(final ConnectionUpdaterIn workflowInternalState.setJobId(getOrCreateJobId(connectionUpdaterInput)); - workflowInternalState.setAttemptNumber(createAttempt(workflowInternalState.getJobId())); + workflowInternalState.setAttemptId(createAttemptId(workflowInternalState.getJobId())); final GeneratedJobInput jobInputs = getJobInput(); @@ -191,13 +183,13 @@ private CancellationScope generateSyncWorkflowRunnable(final ConnectionUpdaterIn af.getActivityType(), af.getCause(), workflowInternalState.getJobId(), - workflowInternalState.getAttemptNumber())); + workflowInternalState.getAttemptId())); reportFailure(connectionUpdaterInput, standardSyncOutput); prepareForNextRunAndContinueAsNew(connectionUpdaterInput); } else { workflowInternalState.getFailures().add( FailureHelper.unknownOriginFailure(childWorkflowFailure.getCause(), workflowInternalState.getJobId(), - workflowInternalState.getAttemptNumber())); + workflowInternalState.getAttemptId())); reportFailure(connectionUpdaterInput, standardSyncOutput); prepareForNextRunAndContinueAsNew(connectionUpdaterInput); } @@ -207,41 +199,20 @@ private CancellationScope 
generateSyncWorkflowRunnable(final ConnectionUpdaterIn private void reportSuccess(final ConnectionUpdaterInput connectionUpdaterInput, final StandardSyncOutput standardSyncOutput) { workflowState.setSuccess(true); - final int attemptCreationVersion = - Workflow.getVersion(RENAME_ATTEMPT_ID_TO_NUMBER_TAG, Workflow.DEFAULT_VERSION, RENAME_ATTEMPT_ID_TO_NUMBER_CURRENT_VERSION); - - if (attemptCreationVersion < RENAME_ATTEMPT_ID_TO_NUMBER_CURRENT_VERSION) { - runMandatoryActivity(jobCreationAndStatusUpdateActivity::jobSuccess, new JobSuccessInput( - workflowInternalState.getJobId(), - workflowInternalState.getAttemptNumber(), - standardSyncOutput)); - } else { - runMandatoryActivity(jobCreationAndStatusUpdateActivity::jobSuccessWithAttemptNumber, new JobSuccessInputWithAttemptNumber( - workflowInternalState.getJobId(), - workflowInternalState.getAttemptNumber(), - standardSyncOutput)); - } + runMandatoryActivity(jobCreationAndStatusUpdateActivity::jobSuccess, new JobSuccessInput( + workflowInternalState.getJobId(), + workflowInternalState.getAttemptId(), + standardSyncOutput)); resetNewConnectionInput(connectionUpdaterInput); } private void reportFailure(final ConnectionUpdaterInput connectionUpdaterInput, final StandardSyncOutput standardSyncOutput) { - final int attemptCreationVersion = - Workflow.getVersion(RENAME_ATTEMPT_ID_TO_NUMBER_TAG, Workflow.DEFAULT_VERSION, RENAME_ATTEMPT_ID_TO_NUMBER_CURRENT_VERSION); - - if (attemptCreationVersion < RENAME_ATTEMPT_ID_TO_NUMBER_CURRENT_VERSION) { - runMandatoryActivity(jobCreationAndStatusUpdateActivity::attemptFailure, new AttemptFailureInput( - workflowInternalState.getJobId(), - workflowInternalState.getAttemptNumber(), - standardSyncOutput, - FailureHelper.failureSummary(workflowInternalState.getFailures(), workflowInternalState.getPartialSuccess()))); - } else { - runMandatoryActivity(jobCreationAndStatusUpdateActivity::attemptFailureWithAttemptNumber, new AttemptNumberFailureInput( - workflowInternalState.getJobId(), - workflowInternalState.getAttemptNumber(), - standardSyncOutput, - FailureHelper.failureSummary(workflowInternalState.getFailures(), workflowInternalState.getPartialSuccess()))); - } + runMandatoryActivity(jobCreationAndStatusUpdateActivity::attemptFailure, new AttemptFailureInput( + workflowInternalState.getJobId(), + workflowInternalState.getAttemptId(), + standardSyncOutput, + FailureHelper.failureSummary(workflowInternalState.getFailures(), workflowInternalState.getPartialSuccess()))); final int maxAttempt = configFetchActivity.getMaxAttempt().getMaxAttempt(); final int attemptNumber = connectionUpdaterInput.getAttemptNumber(); @@ -335,20 +306,20 @@ public WorkflowState getState() { @Override public JobInformation getJobInformation() { final Long jobId = workflowInternalState.getJobId(); - final Integer attemptNumber = workflowInternalState.getAttemptNumber(); + final Integer attemptId = workflowInternalState.getAttemptId(); return new JobInformation( jobId == null ? NON_RUNNING_JOB_ID : jobId, - attemptNumber == null ? NON_RUNNING_ATTEMPT_ID : attemptNumber); + attemptId == null ? NON_RUNNING_ATTEMPT_ID : attemptId); } @Override public QuarantinedInformation getQuarantinedInformation() { final Long jobId = workflowInternalState.getJobId(); - final Integer attemptNumber = workflowInternalState.getAttemptNumber(); + final Integer attemptId = workflowInternalState.getAttemptId(); return new QuarantinedInformation( connectionId, jobId == null ? NON_RUNNING_JOB_ID : jobId, - attemptNumber == null ? 
NON_RUNNING_ATTEMPT_ID : attemptNumber,
+        attemptId == null ? NON_RUNNING_ATTEMPT_ID : attemptId,
         workflowState.isQuarantined());
   }
 
@@ -448,31 +419,15 @@ private Long getOrCreateJobId(final ConnectionUpdaterInput connectionUpdaterInpu
 
   /**
    * Create a new attempt for a given jobId
-   *
-   * @param jobId - the jobId associated with the new attempt
-   *
-   * @return The attempt number
    */
-  private Integer createAttempt(final long jobId) {
-    final int attemptCreationVersion =
-        Workflow.getVersion(RENAME_ATTEMPT_ID_TO_NUMBER_TAG, Workflow.DEFAULT_VERSION, RENAME_ATTEMPT_ID_TO_NUMBER_CURRENT_VERSION);
-
-    // Retrieve the attempt number but name it attempt id
-    if (attemptCreationVersion < RENAME_ATTEMPT_ID_TO_NUMBER_CURRENT_VERSION) {
-      final AttemptCreationOutput attemptCreationOutput =
-          runMandatoryActivityWithOutput(
-              jobCreationAndStatusUpdateActivity::createNewAttempt,
-              new AttemptCreationInput(
-                  jobId));
-      return attemptCreationOutput.getAttemptId();
-    }
-
-    final AttemptNumberCreationOutput attemptNumberCreationOutput =
+  private Integer createAttemptId(final long jobId) {
+    final AttemptCreationOutput attemptCreationOutput =
         runMandatoryActivityWithOutput(
-            jobCreationAndStatusUpdateActivity::createNewAttemptNumber,
+            jobCreationAndStatusUpdateActivity::createNewAttempt,
             new AttemptCreationInput(
                 jobId));
-    return attemptNumberCreationOutput.getAttemptNumber();
+
+    return attemptCreationOutput.getAttemptId();
   }
 
   /**
@@ -481,30 +436,14 @@ private Integer createAttempt(final long jobId) {
    */
   private GeneratedJobInput getJobInput() {
     final Long jobId = workflowInternalState.getJobId();
-    final Integer attemptNumber = workflowInternalState.getAttemptNumber();
-    final int attemptCreationVersion =
-        Workflow.getVersion(RENAME_ATTEMPT_ID_TO_NUMBER_TAG, Workflow.DEFAULT_VERSION, RENAME_ATTEMPT_ID_TO_NUMBER_CURRENT_VERSION);
-
-    if (attemptCreationVersion < RENAME_ATTEMPT_ID_TO_NUMBER_CURRENT_VERSION) {
-      final SyncInput getSyncInputActivitySyncInput = new SyncInput(
-          attemptNumber,
-          jobId,
-          workflowState.isResetConnection());
-
-      final GeneratedJobInput syncWorkflowInputs = runMandatoryActivityWithOutput(
-          getSyncInputActivity::getSyncWorkflowInput,
-          getSyncInputActivitySyncInput);
-
-      return syncWorkflowInputs;
-    }
-
-    final SyncInputWithAttemptNumber getSyncInputActivitySyncInput = new SyncInputWithAttemptNumber(
-        attemptNumber,
+    final Integer attemptId = workflowInternalState.getAttemptId();
+    final SyncInput getSyncInputActivitySyncInput = new SyncInput(
+        attemptId,
         jobId,
         workflowState.isResetConnection());
 
     final GeneratedJobInput syncWorkflowInputs = runMandatoryActivityWithOutput(
-        getSyncInputActivity::getSyncWorkflowInputWithAttemptNumber,
+        getSyncInputActivity::getSyncWorkflowInput,
         getSyncInputActivitySyncInput);
 
     return syncWorkflowInputs;
@@ -590,25 +529,14 @@ private void deleteConnectionBeforeTerminatingTheWorkflow() {
   private void reportCancelledAndContinueWith(final boolean isReset, final ConnectionUpdaterInput connectionUpdaterInput) {
     workflowState.setContinueAsReset(isReset);
     final Long jobId = workflowInternalState.getJobId();
-    final Integer attemptNumber = workflowInternalState.getAttemptNumber();
+    final Integer attemptId = workflowInternalState.getAttemptId();
     final Set<FailureReason> failures = workflowInternalState.getFailures();
     final Boolean partialSuccess = workflowInternalState.getPartialSuccess();
-    final int attemptCreationVersion =
-        Workflow.getVersion(RENAME_ATTEMPT_ID_TO_NUMBER_TAG, Workflow.DEFAULT_VERSION, RENAME_ATTEMPT_ID_TO_NUMBER_CURRENT_VERSION);
-
-    if
(attemptCreationVersion < RENAME_ATTEMPT_ID_TO_NUMBER_CURRENT_VERSION) { - runMandatoryActivity(jobCreationAndStatusUpdateActivity::jobCancelled, - new JobCancelledInput( - jobId, - attemptNumber, - FailureHelper.failureSummaryForCancellation(jobId, attemptNumber, failures, partialSuccess))); - } else { - runMandatoryActivity(jobCreationAndStatusUpdateActivity::jobCancelledWithAttemptNumber, - new JobCancelledInputWithAttemptNumber( - jobId, - attemptNumber, - FailureHelper.failureSummaryForCancellation(jobId, attemptNumber, failures, partialSuccess))); - } + runMandatoryActivity(jobCreationAndStatusUpdateActivity::jobCancelled, + new JobCancelledInput( + jobId, + attemptId, + FailureHelper.failureSummaryForCancellation(jobId, attemptId, failures, partialSuccess))); resetNewConnectionInput(connectionUpdaterInput); prepareForNextRunAndContinueAsNew(connectionUpdaterInput); } diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/GenerateInputActivity.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/GenerateInputActivity.java index 60191311ef34..0a1ed70c3008 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/GenerateInputActivity.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/GenerateInputActivity.java @@ -27,17 +27,6 @@ class SyncInput { } - @Data - @NoArgsConstructor - @AllArgsConstructor - class SyncInputWithAttemptNumber { - - private int attemptNumber; - private long jobId; - private boolean reset; - - } - @Data @NoArgsConstructor @AllArgsConstructor @@ -56,10 +45,4 @@ class GeneratedJobInput { @ActivityMethod GeneratedJobInput getSyncWorkflowInput(SyncInput input); - /** - * This generate the input needed by the child sync workflow - */ - @ActivityMethod - GeneratedJobInput getSyncWorkflowInputWithAttemptNumber(SyncInputWithAttemptNumber input); - } diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/GenerateInputActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/GenerateInputActivityImpl.java index bcb939d022c8..30edaf108adc 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/GenerateInputActivityImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/GenerateInputActivityImpl.java @@ -76,12 +76,4 @@ public GeneratedJobInput getSyncWorkflowInput(final SyncInput input) { } } - @Override - public GeneratedJobInput getSyncWorkflowInputWithAttemptNumber(final SyncInputWithAttemptNumber input) { - return getSyncWorkflowInput(new SyncInput( - input.getAttemptNumber(), - input.getJobId(), - input.isReset())); - } - } diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivity.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivity.java index 2de587c2a15b..aa45b53b0e8c 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivity.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivity.java @@ -72,24 +72,6 @@ class AttemptCreationOutput { @ActivityMethod AttemptCreationOutput createNewAttempt(AttemptCreationInput input) throws RetryableException; - @Data - @NoArgsConstructor - @AllArgsConstructor - 
class AttemptNumberCreationOutput { - - private int attemptNumber; - - } - - /** - * Create a new attempt for a given job ID - * - * @param input POJO containing the jobId - * @return A POJO containing the attemptNumber - */ - @ActivityMethod - AttemptNumberCreationOutput createNewAttemptNumber(AttemptCreationInput input) throws RetryableException; - @Data @NoArgsConstructor @AllArgsConstructor @@ -107,23 +89,6 @@ class JobSuccessInput { @ActivityMethod void jobSuccess(JobSuccessInput input); - @Data - @NoArgsConstructor - @AllArgsConstructor - class JobSuccessInputWithAttemptNumber { - - private long jobId; - private int attemptNumber; - private StandardSyncOutput standardSyncOutput; - - } - - /** - * Set a job status as successful - */ - @ActivityMethod - void jobSuccessWithAttemptNumber(JobSuccessInputWithAttemptNumber input); - @Data @NoArgsConstructor @AllArgsConstructor @@ -158,24 +123,6 @@ class AttemptFailureInput { @ActivityMethod void attemptFailure(AttemptFailureInput input); - @Data - @NoArgsConstructor - @AllArgsConstructor - class AttemptNumberFailureInput { - - private long jobId; - private int attemptNumber; - private StandardSyncOutput standardSyncOutput; - private AttemptFailureSummary attemptFailureSummary; - - } - - /** - * Set an attempt status as failed - */ - @ActivityMethod - void attemptFailureWithAttemptNumber(AttemptNumberFailureInput input); - @Data @NoArgsConstructor @AllArgsConstructor @@ -193,23 +140,6 @@ class JobCancelledInput { @ActivityMethod void jobCancelled(JobCancelledInput input); - @Data - @NoArgsConstructor - @AllArgsConstructor - class JobCancelledInputWithAttemptNumber { - - private long jobId; - private int attemptNumber; - private AttemptFailureSummary attemptFailureSummary; - - } - - /** - * Set a job status as cancelled - */ - @ActivityMethod - void jobCancelledWithAttemptNumber(JobCancelledInputWithAttemptNumber input); - @Data @NoArgsConstructor @AllArgsConstructor diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityImpl.java index b9a76065e776..e02796675174 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityImpl.java @@ -128,24 +128,6 @@ public AttemptCreationOutput createNewAttempt(final AttemptCreationInput input) } } - @Override - public AttemptNumberCreationOutput createNewAttemptNumber(final AttemptCreationInput input) throws RetryableException { - try { - final long jobId = input.getJobId(); - final Job createdJob = jobPersistence.getJob(jobId); - - final WorkerRun workerRun = temporalWorkerRunFactory.create(createdJob); - final Path logFilePath = workerRun.getJobRoot().resolve(LogClientSingleton.LOG_FILENAME); - final int persistedAttemptNumber = jobPersistence.createAttempt(jobId, logFilePath); - emitJobIdToReleaseStagesMetric(MetricsRegistry.ATTEMPT_CREATED_BY_RELEASE_STAGE, jobId); - - LogClientSingleton.getInstance().setJobMdc(workerEnvironment, logConfigs, workerRun.getJobRoot()); - return new AttemptNumberCreationOutput(persistedAttemptNumber); - } catch (final IOException e) { - throw new RetryableException(e); - } - } - @Override public void jobSuccess(final JobSuccessInput input) { try { @@ -170,14 +152,6 @@ public void 
jobSuccess(final JobSuccessInput input) { } } - @Override - public void jobSuccessWithAttemptNumber(final JobSuccessInputWithAttemptNumber input) { - jobSuccess(new JobSuccessInput( - input.getJobId(), - input.getAttemptNumber(), - input.getStandardSyncOutput())); - } - @Override public void jobFailure(final JobFailureInput input) { try { @@ -217,15 +191,6 @@ public void attemptFailure(final AttemptFailureInput input) { } } - @Override - public void attemptFailureWithAttemptNumber(final AttemptNumberFailureInput input) { - attemptFailure(new AttemptFailureInput( - input.getJobId(), - input.getAttemptNumber(), - input.getStandardSyncOutput(), - input.getAttemptFailureSummary())); - } - @Override public void jobCancelled(final JobCancelledInput input) { try { @@ -244,14 +209,6 @@ public void jobCancelled(final JobCancelledInput input) { } } - @Override - public void jobCancelledWithAttemptNumber(final JobCancelledInputWithAttemptNumber input) { - jobCancelled(new JobCancelledInput( - input.getJobId(), - input.getAttemptNumber(), - input.getAttemptFailureSummary())); - } - @Override public void reportJobStart(final ReportJobStartInput input) { try { diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/state/WorkflowInternalState.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/state/WorkflowInternalState.java index d9bdcb2d5800..822bbfe0ba48 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/state/WorkflowInternalState.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/state/WorkflowInternalState.java @@ -17,7 +17,7 @@ public class WorkflowInternalState { private Long jobId = null; - private Integer attemptNumber = null; + private Integer attemptId = null; // StandardSyncOutput standardSyncOutput = null; private final Set failures = new HashSet<>(); diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowTest.java index 8353b9116940..ca897b5afe80 100644 --- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowTest.java +++ b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowTest.java @@ -16,14 +16,13 @@ import io.airbyte.workers.temporal.scheduling.activities.ConfigFetchActivity.ScheduleRetrieverOutput; import io.airbyte.workers.temporal.scheduling.activities.ConnectionDeletionActivity; import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity.GeneratedJobInput; -import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity.SyncInputWithAttemptNumber; +import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity.SyncInput; import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivityImpl; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity; -import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptNumberCreationOutput; -import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptNumberFailureInput; -import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobCancelledInputWithAttemptNumber; +import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptCreationOutput; +import 
io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptFailureInput; +import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobCancelledInput; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobCreationOutput; -import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobSuccessInputWithAttemptNumber; import io.airbyte.workers.temporal.scheduling.state.WorkflowState; import io.airbyte.workers.temporal.scheduling.state.listener.TestStateListener; import io.airbyte.workers.temporal.scheduling.state.listener.WorkflowStateChangedListener.ChangedStateEvent; @@ -122,11 +121,11 @@ public void setUp() { .thenReturn(new JobCreationOutput( 1L)); - Mockito.when(mJobCreationAndStatusUpdateActivity.createNewAttemptNumber(Mockito.any())) - .thenReturn(new AttemptNumberCreationOutput( + Mockito.when(mJobCreationAndStatusUpdateActivity.createNewAttempt(Mockito.any())) + .thenReturn(new AttemptCreationOutput( 1)); - Mockito.when(mGenerateInputActivityImpl.getSyncWorkflowInputWithAttemptNumber(Mockito.any(SyncInputWithAttemptNumber.class))) + Mockito.when(mGenerateInputActivityImpl.getSyncWorkflowInput(Mockito.any(SyncInput.class))) .thenReturn( new GeneratedJobInput( new JobRunConfig(), @@ -511,8 +510,7 @@ public void cancelRunning() throws InterruptedException { .filteredOn(changedStateEvent -> changedStateEvent.getField() == StateField.CANCELLED && changedStateEvent.isValue()) .hasSizeGreaterThanOrEqualTo(1); - Mockito.verify(mJobCreationAndStatusUpdateActivity) - .jobCancelledWithAttemptNumber(Mockito.argThat(new HasCancellationFailure(JOB_ID, ATTEMPT_ID))); + Mockito.verify(mJobCreationAndStatusUpdateActivity).jobCancelled(Mockito.argThat(new HasCancellationFailure(JOB_ID, ATTEMPT_ID))); } @RepeatedTest(10) @@ -596,7 +594,7 @@ public void resetCancelRunningWorkflow() throws InterruptedException { .filteredOn(changedStateEvent -> changedStateEvent.getField() == StateField.RESET && changedStateEvent.isValue()) .hasSizeGreaterThanOrEqualTo(1); - Mockito.verify(mJobCreationAndStatusUpdateActivity).jobCancelledWithAttemptNumber(Mockito.any(JobCancelledInputWithAttemptNumber.class)); + Mockito.verify(mJobCreationAndStatusUpdateActivity).jobCancelled(Mockito.any()); } @@ -689,7 +687,7 @@ public void updatedSignalReceivedWhileRunning() throws InterruptedException { .filteredOn(changedStateEvent -> changedStateEvent.getField() == StateField.UPDATED && changedStateEvent.isValue()) .hasSizeGreaterThanOrEqualTo(1); - Mockito.verify(mJobCreationAndStatusUpdateActivity).jobSuccessWithAttemptNumber(Mockito.any(JobSuccessInputWithAttemptNumber.class)); + Mockito.verify(mJobCreationAndStatusUpdateActivity).jobSuccess(Mockito.any()); } } @@ -749,10 +747,8 @@ public void testSourceAndDestinationFailuresRecorded() throws InterruptedExcepti workflow.submitManualSync(); testEnv.sleep(Duration.ofMinutes(1L)); // any time after no-waiting manual run - Mockito.verify(mJobCreationAndStatusUpdateActivity) - .attemptFailureWithAttemptNumber(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.SOURCE))); - Mockito.verify(mJobCreationAndStatusUpdateActivity) - .attemptFailureWithAttemptNumber(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.DESTINATION))); + Mockito.verify(mJobCreationAndStatusUpdateActivity).attemptFailure(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.SOURCE))); + 
Mockito.verify(mJobCreationAndStatusUpdateActivity).attemptFailure(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.DESTINATION))); } @RepeatedTest(10) @@ -787,8 +783,7 @@ public void testNormalizationFailure() throws InterruptedException { workflow.submitManualSync(); testEnv.sleep(Duration.ofMinutes(1L)); // any time after no-waiting manual run - Mockito.verify(mJobCreationAndStatusUpdateActivity) - .attemptFailureWithAttemptNumber(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.NORMALIZATION))); + Mockito.verify(mJobCreationAndStatusUpdateActivity).attemptFailure(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.NORMALIZATION))); } @RepeatedTest(10) @@ -823,8 +818,7 @@ public void testDbtFailureRecorded() throws InterruptedException { workflow.submitManualSync(); testEnv.sleep(Duration.ofMinutes(1L)); // any time after no-waiting manual run - Mockito.verify(mJobCreationAndStatusUpdateActivity) - .attemptFailureWithAttemptNumber(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.DBT))); + Mockito.verify(mJobCreationAndStatusUpdateActivity).attemptFailure(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.DBT))); } @RepeatedTest(10) @@ -859,8 +853,7 @@ public void testPersistenceFailureRecorded() throws InterruptedException { workflow.submitManualSync(); testEnv.sleep(Duration.ofMinutes(1L)); // any time after no-waiting manual run - Mockito.verify(mJobCreationAndStatusUpdateActivity) - .attemptFailureWithAttemptNumber(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.PERSISTENCE))); + Mockito.verify(mJobCreationAndStatusUpdateActivity).attemptFailure(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.PERSISTENCE))); } @RepeatedTest(10) @@ -895,8 +888,7 @@ public void testReplicationFailureRecorded() throws InterruptedException { workflow.submitManualSync(); testEnv.sleep(Duration.ofMinutes(1L)); // any time after no-waiting manual run - Mockito.verify(mJobCreationAndStatusUpdateActivity) - .attemptFailureWithAttemptNumber(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.REPLICATION))); + Mockito.verify(mJobCreationAndStatusUpdateActivity).attemptFailure(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.REPLICATION))); } } @@ -914,13 +906,12 @@ public static Stream getSetupFailingFailingActivityBeforeRun() { return Stream.of( Arguments.of(new Thread(() -> Mockito.when(mJobCreationAndStatusUpdateActivity.createNewJob(Mockito.any())) .thenThrow(ApplicationFailure.newNonRetryableFailure("", "")))), - Arguments.of(new Thread(() -> Mockito.when(mJobCreationAndStatusUpdateActivity.createNewAttemptNumber(Mockito.any())) + Arguments.of(new Thread(() -> Mockito.when(mJobCreationAndStatusUpdateActivity.createNewAttempt(Mockito.any())) .thenThrow(ApplicationFailure.newNonRetryableFailure("", "")))), Arguments.of(new Thread(() -> Mockito.doThrow(ApplicationFailure.newNonRetryableFailure("", "")) .when(mJobCreationAndStatusUpdateActivity).reportJobStart(Mockito.any()))), - Arguments.of(new Thread( - () -> Mockito.when(mGenerateInputActivityImpl.getSyncWorkflowInputWithAttemptNumber(Mockito.any(SyncInputWithAttemptNumber.class))) - .thenThrow(ApplicationFailure.newNonRetryableFailure("", ""))))); + Arguments.of(new Thread(() -> Mockito.when(mGenerateInputActivityImpl.getSyncWorkflowInput(Mockito.any())) + .thenThrow(ApplicationFailure.newNonRetryableFailure("", ""))))); } @ParameterizedTest @@ -1005,10 +996,10 @@ public static Stream getSetupFailingFailingActivityAfterRun() { return Stream.of( Arguments.of((Consumer) ((ConnectionManagerWorkflow workflow) -> 
System.out.println("do Nothing")), new Thread(() -> Mockito.doThrow(ApplicationFailure.newNonRetryableFailure("", "")) - .when(mJobCreationAndStatusUpdateActivity).jobSuccessWithAttemptNumber(Mockito.any(JobSuccessInputWithAttemptNumber.class)))), + .when(mJobCreationAndStatusUpdateActivity).jobSuccess(Mockito.any()))), Arguments.of((Consumer) ((ConnectionManagerWorkflow workflow) -> workflow.cancelJob()), new Thread(() -> Mockito.doThrow(ApplicationFailure.newNonRetryableFailure("", "")) - .when(mJobCreationAndStatusUpdateActivity).jobCancelledWithAttemptNumber(Mockito.any(JobCancelledInputWithAttemptNumber.class)))), + .when(mJobCreationAndStatusUpdateActivity).jobCancelled(Mockito.any()))), Arguments.of((Consumer) ((ConnectionManagerWorkflow workflow) -> workflow.deleteConnection()), new Thread(() -> Mockito.doThrow(ApplicationFailure.newNonRetryableFailure("", "")) .when(mConnectionDeletionActivity).deleteConnection(Mockito.any())))); @@ -1053,7 +1044,7 @@ void testGetStuckAfterRun(final Consumer signalSender Assertions.assertThat(events) .filteredOn(changedStateEvent -> changedStateEvent.getField() == StateField.QUARANTINED && changedStateEvent.isValue()) - .hasSizeGreaterThanOrEqualTo(1); + .hasSize(1); } } @@ -1136,10 +1127,7 @@ public void failedResetContinueAsReset() throws InterruptedException { @DisplayName("Test that we are getting stuck if the report of a failure happen") void testGetStuckAfterRun() throws InterruptedException { Mockito.doThrow(ApplicationFailure.newNonRetryableFailure("", "")) - .when(mJobCreationAndStatusUpdateActivity).attemptFailureWithAttemptNumber(Mockito.any()); - - Mockito.when(mConfigFetchActivity.getMaxAttempt()) - .thenReturn(new GetMaxAttemptOutput(3)); + .when(mJobCreationAndStatusUpdateActivity).attemptFailure(Mockito.any()); final UUID testId = UUID.randomUUID(); final TestStateListener testStateListener = new TestStateListener(); @@ -1218,7 +1206,7 @@ public void failedResetJobWaitsOnRestart() throws InterruptedException { } - private class HasFailureFromOrigin implements ArgumentMatcher { + private class HasFailureFromOrigin implements ArgumentMatcher { private final FailureOrigin expectedFailureOrigin; @@ -1227,26 +1215,26 @@ public HasFailureFromOrigin(final FailureOrigin failureOrigin) { } @Override - public boolean matches(final AttemptNumberFailureInput arg) { + public boolean matches(final AttemptFailureInput arg) { return arg.getAttemptFailureSummary().getFailures().stream().anyMatch(f -> f.getFailureOrigin().equals(expectedFailureOrigin)); } } - private class HasCancellationFailure implements ArgumentMatcher { + private class HasCancellationFailure implements ArgumentMatcher { private final long expectedJobId; - private final int expectedAttemptNumber; + private final int expectedAttemptId; - public HasCancellationFailure(final long jobId, final int attemptNumber) { + public HasCancellationFailure(final long jobId, final int attemptId) { this.expectedJobId = jobId; - this.expectedAttemptNumber = attemptNumber; + this.expectedAttemptId = attemptId; } @Override - public boolean matches(final JobCancelledInputWithAttemptNumber arg) { + public boolean matches(final JobCancelledInput arg) { return arg.getAttemptFailureSummary().getFailures().stream().anyMatch(f -> f.getFailureType().equals(FailureType.MANUAL_CANCELLATION)) - && arg.getJobId() == expectedJobId && arg.getAttemptNumber() == expectedAttemptNumber; + && arg.getJobId() == expectedJobId && arg.getAttemptId() == expectedAttemptId; } } diff --git 
a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/WorkflowReplayingTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/WorkflowReplayingTest.java deleted file mode 100644 index bba04f6cea42..000000000000 --- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/WorkflowReplayingTest.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright (c) 2021 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.temporal.scheduling; - -import io.temporal.testing.WorkflowReplayer; -import java.io.File; -import java.net.URL; -import org.junit.jupiter.api.Test; - -// TODO: Auto generation of the input and more scenario coverage -public class WorkflowReplayingTest { - - @Test - public void replaySimpleSuccessfulWorkflow() throws Exception { - final URL historyPath = getClass().getClassLoader().getResource("workflowHistory.json"); - - final File historyFile = new File(historyPath.toURI()); - - WorkflowReplayer.replayWorkflowExecution(historyFile, ConnectionManagerWorkflowImpl.class); - } - -} diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityTest.java index f643bc4bd07e..5f480cf4fb80 100644 --- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityTest.java +++ b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityTest.java @@ -29,7 +29,6 @@ import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptCreationInput; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptCreationOutput; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptFailureInput; -import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.AttemptNumberCreationOutput; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobCancelledInput; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobCreationInput; import io.airbyte.workers.temporal.scheduling.activities.JobCreationAndStatusUpdateActivity.JobCreationOutput; @@ -85,7 +84,6 @@ public class JobCreationAndStatusUpdateActivityTest { private static final UUID CONNECTION_ID = UUID.randomUUID(); private static final long JOB_ID = 123L; private static final int ATTEMPT_ID = 0; - private static final int ATTEMPT_NUMBER = 1; private static final StandardSyncOutput standardSyncOutput = new StandardSyncOutput() .withStandardSyncSummary( new StandardSyncSummary() @@ -166,56 +164,6 @@ public void createAttemptThrowException() throws IOException { .hasCauseInstanceOf(IOException.class); } - @Test - @DisplayName("Test attempt creation") - public void createAttemptNumber() throws IOException { - Mockito.when(mConfigRepository.getDatabase()).thenReturn(Mockito.mock(ExceptionWrappingDatabase.class)); - - final Job mJob = Mockito.mock(Job.class); - - Mockito.when(mJobPersistence.getJob(JOB_ID)) - .thenReturn(mJob); - - final WorkerRun mWorkerRun = Mockito.mock(WorkerRun.class); - - Mockito.when(mTemporalWorkerRunFactory.create(mJob)) - .thenReturn(mWorkerRun); - - final Path mPath = Mockito.mock(Path.class); - final Path path = Path.of("test"); - 
Mockito.when(mPath.resolve(Mockito.anyString())) - .thenReturn(path); - Mockito.when(mWorkerRun.getJobRoot()) - .thenReturn(mPath); - - Mockito.when(mJobPersistence.createAttempt(JOB_ID, path)) - .thenReturn(ATTEMPT_NUMBER); - - final LogClientSingleton mLogClientSingleton = Mockito.mock(LogClientSingleton.class); - try (final MockedStatic utilities = Mockito.mockStatic(LogClientSingleton.class)) { - utilities.when(() -> LogClientSingleton.getInstance()) - .thenReturn(mLogClientSingleton); - - final AttemptNumberCreationOutput output = jobCreationAndStatusUpdateActivity.createNewAttemptNumber(new AttemptCreationInput( - JOB_ID)); - - Mockito.verify(mLogClientSingleton).setJobMdc(mWorkerEnvironment, mLogConfigs, mPath); - Assertions.assertThat(output.getAttemptNumber()).isEqualTo(ATTEMPT_NUMBER); - } - } - - @Test - @DisplayName("Test exception errors are properly wrapped") - public void createAttemptNumberThrowException() throws IOException { - Mockito.when(mJobPersistence.getJob(JOB_ID)) - .thenThrow(new IOException()); - - Assertions.assertThatThrownBy(() -> jobCreationAndStatusUpdateActivity.createNewAttemptNumber(new AttemptCreationInput( - JOB_ID))) - .isInstanceOf(RetryableException.class) - .hasCauseInstanceOf(IOException.class); - } - } @Nested diff --git a/airbyte-workers/src/test/resources/workflowHistory.json b/airbyte-workers/src/test/resources/workflowHistory.json deleted file mode 100644 index a83619c8ea29..000000000000 --- a/airbyte-workers/src/test/resources/workflowHistory.json +++ /dev/null @@ -1,939 +0,0 @@ -{ - "events": [ - { - "eventId": "1", - "eventTime": "2022-03-08T22:47:57.534705300Z", - "eventType": "WorkflowExecutionStarted", - "taskId": "1048664", - "workflowExecutionStartedEventAttributes": { - "workflowType": { - "name": "ConnectionManagerWorkflow" - }, - "taskQueue": { - "name": "CONNECTION_UPDATER", - "kind": "Normal" - }, - "input": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJjb25uZWN0aW9uSWQiOiJiOTE2ZmQ4Ni1hNWE2LTQ4YWMtODg4MC00OTc4NzRjZjUzY2YiLCJqb2JJZCI6bnVsbCwiYXR0ZW1wdElkIjpudWxsLCJmcm9tRmFpbHVyZSI6ZmFsc2UsImF0dGVtcHROdW1iZXIiOjEsIndvcmtmbG93U3RhdGUiOm51bGwsInJlc2V0Q29ubmVjdGlvbiI6ZmFsc2V9" - } - ] - }, - "workflowExecutionTimeout": "0s", - "workflowRunTimeout": "0s", - "workflowTaskTimeout": "10s", - "originalExecutionRunId": "4e6584e1-b030-4e42-a7bc-15dee5d195c0", - "identity": "1@3de809efb2ed", - "firstExecutionRunId": "4e6584e1-b030-4e42-a7bc-15dee5d195c0", - "retryPolicy": { - "initialInterval": "1s", - "backoffCoefficient": 2, - "maximumInterval": "100s", - "maximumAttempts": 1 - }, - "attempt": 1, - "firstWorkflowTaskBackoff": "0s", - "header": {} - } - }, - { - "eventId": "2", - "eventTime": "2022-03-08T22:47:57.534975800Z", - "eventType": "WorkflowTaskScheduled", - "taskId": "1048665", - "workflowTaskScheduledEventAttributes": { - "taskQueue": { - "name": "CONNECTION_UPDATER", - "kind": "Normal" - }, - "startToCloseTimeout": "10s", - "attempt": 1 - } - }, - { - "eventId": "3", - "eventTime": "2022-03-08T22:47:57.563121800Z", - "eventType": "WorkflowTaskStarted", - "taskId": "1048669", - "workflowTaskStartedEventAttributes": { - "scheduledEventId": "2", - "identity": "1@2741f9c3f558", - "requestId": "e9cf205d-de02-4139-b00d-fab56c4b9fd7" - } - }, - { - "eventId": "4", - "eventTime": "2022-03-08T22:47:57.646973200Z", - "eventType": "WorkflowTaskCompleted", - "taskId": "1048672", - "workflowTaskCompletedEventAttributes": { - "scheduledEventId": "2", - "startedEventId": "3", - "identity": 
"1@2741f9c3f558" - } - }, - { - "eventId": "5", - "eventTime": "2022-03-08T22:47:57.647029Z", - "eventType": "ActivityTaskScheduled", - "taskId": "1048673", - "activityTaskScheduledEventAttributes": { - "activityId": "c45be44b-784b-3a0c-9473-e80129b65969", - "activityType": { - "name": "GetTimeToWait" - }, - "taskQueue": { - "name": "CONNECTION_UPDATER", - "kind": "Normal" - }, - "header": {}, - "input": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJjb25uZWN0aW9uSWQiOiJiOTE2ZmQ4Ni1hNWE2LTQ4YWMtODg4MC00OTc4NzRjZjUzY2YifQ==" - } - ] - }, - "scheduleToCloseTimeout": "0s", - "scheduleToStartTimeout": "0s", - "startToCloseTimeout": "120s", - "heartbeatTimeout": "30s", - "workflowTaskCompletedEventId": "4", - "retryPolicy": { - "initialInterval": "30s", - "backoffCoefficient": 2, - "maximumInterval": "3000s", - "maximumAttempts": 10 - } - } - }, - { - "eventId": "6", - "eventTime": "2022-03-08T22:47:57.668813100Z", - "eventType": "ActivityTaskStarted", - "taskId": "1048677", - "activityTaskStartedEventAttributes": { - "scheduledEventId": "5", - "identity": "1@2741f9c3f558", - "requestId": "df9001bd-0c42-4415-a631-0a37ee3f7698", - "attempt": 1 - } - }, - { - "eventId": "7", - "eventTime": "2022-03-08T22:47:57.856240400Z", - "eventType": "ActivityTaskCompleted", - "taskId": "1048678", - "activityTaskCompletedEventAttributes": { - "result": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJ0aW1lVG9XYWl0IjowLjB9" - } - ] - }, - "scheduledEventId": "5", - "startedEventId": "6", - "identity": "1@2741f9c3f558" - } - }, - { - "eventId": "8", - "eventTime": "2022-03-08T22:47:57.856293200Z", - "eventType": "WorkflowTaskScheduled", - "taskId": "1048679", - "workflowTaskScheduledEventAttributes": { - "taskQueue": { - "name": "1@2741f9c3f558:e7111d28-2117-4614-9bb1-8a73524b789f", - "kind": "Sticky" - }, - "startToCloseTimeout": "10s", - "attempt": 1 - } - }, - { - "eventId": "9", - "eventTime": "2022-03-08T22:47:57.876328300Z", - "eventType": "WorkflowTaskStarted", - "taskId": "1048683", - "workflowTaskStartedEventAttributes": { - "scheduledEventId": "8", - "identity": "e7111d28-2117-4614-9bb1-8a73524b789f", - "requestId": "80932560-7d5d-4f5f-9982-561857b07f50" - } - }, - { - "eventId": "10", - "eventTime": "2022-03-08T22:47:57.921753Z", - "eventType": "WorkflowTaskCompleted", - "taskId": "1048686", - "workflowTaskCompletedEventAttributes": { - "scheduledEventId": "8", - "startedEventId": "9", - "identity": "1@2741f9c3f558" - } - }, - { - "eventId": "11", - "eventTime": "2022-03-08T22:47:57.921814200Z", - "eventType": "ActivityTaskScheduled", - "taskId": "1048687", - "activityTaskScheduledEventAttributes": { - "activityId": "c7b20a16-db46-3dd1-b8ac-e2a93d3a8e0d", - "activityType": { - "name": "CreateNewJob" - }, - "taskQueue": { - "name": "CONNECTION_UPDATER", - "kind": "Normal" - }, - "header": {}, - "input": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJjb25uZWN0aW9uSWQiOiJiOTE2ZmQ4Ni1hNWE2LTQ4YWMtODg4MC00OTc4NzRjZjUzY2YiLCJyZXNldCI6ZmFsc2V9" - } - ] - }, - "scheduleToCloseTimeout": "0s", - "scheduleToStartTimeout": "0s", - "startToCloseTimeout": "120s", - "heartbeatTimeout": "30s", - "workflowTaskCompletedEventId": "10", - "retryPolicy": { - "initialInterval": "30s", - "backoffCoefficient": 2, - "maximumInterval": "3000s", - "maximumAttempts": 10 - } - } - }, - { - "eventId": "12", - "eventTime": "2022-03-08T22:47:57.942341Z", - "eventType": "ActivityTaskStarted", - "taskId": 
"1048691", - "activityTaskStartedEventAttributes": { - "scheduledEventId": "11", - "identity": "1@2741f9c3f558", - "requestId": "29e77ce9-f715-4f19-9fe2-b5b94201d0b3", - "attempt": 1 - } - }, - { - "eventId": "13", - "eventTime": "2022-03-08T22:47:58.268669700Z", - "eventType": "ActivityTaskCompleted", - "taskId": "1048692", - "activityTaskCompletedEventAttributes": { - "result": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJqb2JJZCI6MX0=" - } - ] - }, - "scheduledEventId": "11", - "startedEventId": "12", - "identity": "1@2741f9c3f558" - } - }, - { - "eventId": "14", - "eventTime": "2022-03-08T22:47:58.268723100Z", - "eventType": "WorkflowTaskScheduled", - "taskId": "1048693", - "workflowTaskScheduledEventAttributes": { - "taskQueue": { - "name": "1@2741f9c3f558:e7111d28-2117-4614-9bb1-8a73524b789f", - "kind": "Sticky" - }, - "startToCloseTimeout": "10s", - "attempt": 1 - } - }, - { - "eventId": "15", - "eventTime": "2022-03-08T22:47:58.283301600Z", - "eventType": "WorkflowTaskStarted", - "taskId": "1048697", - "workflowTaskStartedEventAttributes": { - "scheduledEventId": "14", - "identity": "e7111d28-2117-4614-9bb1-8a73524b789f", - "requestId": "18d2f139-0794-4dfb-b36d-1448df3eb350" - } - }, - { - "eventId": "16", - "eventTime": "2022-03-08T22:47:58.302388600Z", - "eventType": "WorkflowTaskCompleted", - "taskId": "1048700", - "workflowTaskCompletedEventAttributes": { - "scheduledEventId": "14", - "startedEventId": "15", - "identity": "1@2741f9c3f558" - } - }, - { - "eventId": "17", - "eventTime": "2022-03-08T22:47:58.302431Z", - "eventType": "ActivityTaskScheduled", - "taskId": "1048701", - "activityTaskScheduledEventAttributes": { - "activityId": "3c1b8fa0-437b-3bc2-a365-352e9a5d765d", - "activityType": { - "name": "CreateNewAttempt" - }, - "taskQueue": { - "name": "CONNECTION_UPDATER", - "kind": "Normal" - }, - "header": {}, - "input": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJqb2JJZCI6MX0=" - } - ] - }, - "scheduleToCloseTimeout": "0s", - "scheduleToStartTimeout": "0s", - "startToCloseTimeout": "120s", - "heartbeatTimeout": "30s", - "workflowTaskCompletedEventId": "16", - "retryPolicy": { - "initialInterval": "30s", - "backoffCoefficient": 2, - "maximumInterval": "3000s", - "maximumAttempts": 10 - } - } - }, - { - "eventId": "18", - "eventTime": "2022-03-08T22:47:58.314956300Z", - "eventType": "ActivityTaskStarted", - "taskId": "1048705", - "activityTaskStartedEventAttributes": { - "scheduledEventId": "17", - "identity": "1@2741f9c3f558", - "requestId": "0221b660-4f40-4bcf-9e6a-2a9d5898bb91", - "attempt": 1 - } - }, - { - "eventId": "19", - "eventTime": "2022-03-08T22:47:58.400030800Z", - "eventType": "ActivityTaskCompleted", - "taskId": "1048706", - "activityTaskCompletedEventAttributes": { - "result": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJhdHRlbXB0SWQiOjB9" - } - ] - }, - "scheduledEventId": "17", - "startedEventId": "18", - "identity": "1@2741f9c3f558" - } - }, - { - "eventId": "20", - "eventTime": "2022-03-08T22:47:58.400072800Z", - "eventType": "WorkflowTaskScheduled", - "taskId": "1048707", - "workflowTaskScheduledEventAttributes": { - "taskQueue": { - "name": "1@2741f9c3f558:e7111d28-2117-4614-9bb1-8a73524b789f", - "kind": "Sticky" - }, - "startToCloseTimeout": "10s", - "attempt": 1 - } - }, - { - "eventId": "21", - "eventTime": "2022-03-08T22:47:58.414415400Z", - "eventType": "WorkflowTaskStarted", - "taskId": "1048711", - 
"workflowTaskStartedEventAttributes": { - "scheduledEventId": "20", - "identity": "e7111d28-2117-4614-9bb1-8a73524b789f", - "requestId": "e4110bad-579c-4ac2-a3eb-3836d7d6f841" - } - }, - { - "eventId": "22", - "eventTime": "2022-03-08T22:47:58.431563800Z", - "eventType": "WorkflowTaskCompleted", - "taskId": "1048714", - "workflowTaskCompletedEventAttributes": { - "scheduledEventId": "20", - "startedEventId": "21", - "identity": "1@2741f9c3f558" - } - }, - { - "eventId": "23", - "eventTime": "2022-03-08T22:47:58.431607100Z", - "eventType": "ActivityTaskScheduled", - "taskId": "1048715", - "activityTaskScheduledEventAttributes": { - "activityId": "5aa065bf-5ef2-3e24-b560-c6b3c1f4e2bc", - "activityType": { - "name": "GetSyncWorkflowInput" - }, - "taskQueue": { - "name": "CONNECTION_UPDATER", - "kind": "Normal" - }, - "header": {}, - "input": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJhdHRlbXB0SWQiOjAsImpvYklkIjoxLCJyZXNldCI6ZmFsc2V9" - } - ] - }, - "scheduleToCloseTimeout": "0s", - "scheduleToStartTimeout": "0s", - "startToCloseTimeout": "120s", - "heartbeatTimeout": "30s", - "workflowTaskCompletedEventId": "22", - "retryPolicy": { - "initialInterval": "30s", - "backoffCoefficient": 2, - "maximumInterval": "3000s", - "maximumAttempts": 10 - } - } - }, - { - "eventId": "24", - "eventTime": "2022-03-08T22:47:58.445218800Z", - "eventType": "ActivityTaskStarted", - "taskId": "1048719", - "activityTaskStartedEventAttributes": { - "scheduledEventId": "23", - "identity": "1@2741f9c3f558", - "requestId": "7165cc86-d137-4b0f-906b-a7e52a1074e4", - "attempt": 1 - } - }, - { - "eventId": "25", - "eventTime": "2022-03-08T22:47:58.471038600Z", - "eventType": "ActivityTaskCompleted", - "taskId": "1048720", - "activityTaskCompletedEventAttributes": { - "result": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": 
"eyJqb2JSdW5Db25maWciOnsiam9iSWQiOiIxIiwiYXR0ZW1wdElkIjowfSwic291cmNlTGF1bmNoZXJDb25maWciOnsiam9iSWQiOiIxIiwiYXR0ZW1wdElkIjowLCJkb2NrZXJJbWFnZSI6ImFpcmJ5dGUvc291cmNlLXBva2VhcGk6MC4xLjQifSwiZGVzdGluYXRpb25MYXVuY2hlckNvbmZpZyI6eyJqb2JJZCI6IjEiLCJhdHRlbXB0SWQiOjAsImRvY2tlckltYWdlIjoiYWlyYnl0ZS9kZXN0aW5hdGlvbi1sb2NhbC1qc29uOjAuMi4xMCJ9LCJzeW5jSW5wdXQiOnsibmFtZXNwYWNlRGVmaW5pdGlvbiI6ImRlc3RpbmF0aW9uIiwibmFtZXNwYWNlRm9ybWF0IjoiJHtTT1VSQ0VfTkFNRVNQQUNFfSIsInByZWZpeCI6IiIsInNvdXJjZUNvbmZpZ3VyYXRpb24iOnsicG9rZW1vbl9uYW1lIjoiZGl0dG8ifSwiZGVzdGluYXRpb25Db25maWd1cmF0aW9uIjp7ImRlc3RpbmF0aW9uX3BhdGgiOiIvdG1wIn0sIm9wZXJhdGlvblNlcXVlbmNlIjpbXSwiY2F0YWxvZyI6eyJzdHJlYW1zIjpbeyJzdHJlYW0iOnsibmFtZSI6InBva2Vtb24iLCJqc29uX3NjaGVtYSI6eyJ0eXBlIjoib2JqZWN0IiwiJHNjaGVtYSI6Imh0dHA6Ly9qc29uLXNjaGVtYS5vcmcvZHJhZnQtMDcvc2NoZW1hIyIsInByb3BlcnRpZXMiOnsiaWQiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sImZvcm1zIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX19LCJtb3ZlcyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJtb3ZlIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX0sInZlcnNpb25fZ3JvdXBfZGV0YWlscyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ2ZXJzaW9uX2dyb3VwIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX0sImxldmVsX2xlYXJuZWRfYXQiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwibW92ZV9sZWFybl9tZXRob2QiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fX19fX19fSwib3JkZXIiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwic3RhdHMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsic3RhdCI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19LCJlZmZvcnQiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwiYmFzZV9zdGF0Ijp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX19fX0sInR5cGVzIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InNsb3QiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwidHlwZSI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19fX19LCJoZWlnaHQiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwid2VpZ2h0Ijp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sInNwZWNpZXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwic3ByaXRlcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJiYWNrX3NoaW55Ijp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiYmFja19mZW1hbGUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJmcm9udF9zaGlueSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sImJhY2tfZGVmYXVsdCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sImZyb250X2ZlbWFsZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sImZyb250X2RlZmF1bHQiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJiYWNrX3NoaW55X2ZlbWFsZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sImZyb250X3NoaW55X2ZlbWFsZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwiYWJpbGl0aWVzIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7In
Nsb3QiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwiYWJpbGl0eSI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19LCJpc19oaWRkZW4iOnsidHlwZSI6WyJudWxsIiwiYm9vbGVhbiJdfX19fSwiaGVsZF9pdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJpdGVtIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX0sInZlcnNpb25fZGV0YWlscyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJyYXJpdHkiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwidmVyc2lvbiI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19fX19fX19LCJpc19kZWZhdWx0ICI6eyJ0eXBlIjpbIm51bGwiLCJib29sZWFuIl19LCJnYW1lX2luZGljZXMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidmVyc2lvbiI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19LCJnYW1lX2luZGV4Ijp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX19fX0sImJhc2VfZXhwZXJpZW5jZSI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJsb2NhdGlvbl9hcmVhX2VuY291bnRlcnMiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX0sInN1cHBvcnRlZF9zeW5jX21vZGVzIjpbImZ1bGxfcmVmcmVzaCJdLCJkZWZhdWx0X2N1cnNvcl9maWVsZCI6W10sInNvdXJjZV9kZWZpbmVkX3ByaW1hcnlfa2V5IjpbXX0sInN5bmNfbW9kZSI6ImZ1bGxfcmVmcmVzaCIsImN1cnNvcl9maWVsZCI6W10sImRlc3RpbmF0aW9uX3N5bmNfbW9kZSI6ImFwcGVuZCIsInByaW1hcnlfa2V5IjpbXX1dfSwicmVzb3VyY2VSZXF1aXJlbWVudHMiOnt9LCJzb3VyY2VSZXNvdXJjZVJlcXVpcmVtZW50cyI6e30sImRlc3RpbmF0aW9uUmVzb3VyY2VSZXF1aXJlbWVudHMiOnt9fX0=" - } - ] - }, - "scheduledEventId": "23", - "startedEventId": "24", - "identity": "1@2741f9c3f558" - } - }, - { - "eventId": "26", - "eventTime": "2022-03-08T22:47:58.471218800Z", - "eventType": "WorkflowTaskScheduled", - "taskId": "1048721", - "workflowTaskScheduledEventAttributes": { - "taskQueue": { - "name": "1@2741f9c3f558:e7111d28-2117-4614-9bb1-8a73524b789f", - "kind": "Sticky" - }, - "startToCloseTimeout": "10s", - "attempt": 1 - } - }, - { - "eventId": "27", - "eventTime": "2022-03-08T22:47:58.485851600Z", - "eventType": "WorkflowTaskStarted", - "taskId": "1048725", - "workflowTaskStartedEventAttributes": { - "scheduledEventId": "26", - "identity": "e7111d28-2117-4614-9bb1-8a73524b789f", - "requestId": "289a7723-efac-4cfa-bad2-f0c022b27421" - } - }, - { - "eventId": "28", - "eventTime": "2022-03-08T22:47:58.513022200Z", - "eventType": "WorkflowTaskCompleted", - "taskId": "1048728", - "workflowTaskCompletedEventAttributes": { - "scheduledEventId": "26", - "startedEventId": "27", - "identity": "1@2741f9c3f558" - } - }, - { - "eventId": "29", - "eventTime": "2022-03-08T22:47:58.513073500Z", - "eventType": "ActivityTaskScheduled", - "taskId": "1048729", - "activityTaskScheduledEventAttributes": { - "activityId": "95686aea-a2ac-3e1e-a512-0790d3a4e95f", - "activityType": { - "name": "ReportJobStart" - }, - "taskQueue": { - "name": "CONNECTION_UPDATER", - "kind": "Normal" - }, - "header": {}, - "input": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJqb2JJZCI6MX0=" - } - ] - }, - "scheduleToCloseTimeout": "0s", - "scheduleToStartTimeout": "0s", - "startToCloseTimeout": "120s", - "heartbeatTimeout": "30s", - "workflowTaskCompletedEventId": "28", - "retryPolicy": { - "initialInterval": "30s", - 
"backoffCoefficient": 2, - "maximumInterval": "3000s", - "maximumAttempts": 10 - } - } - }, - { - "eventId": "30", - "eventTime": "2022-03-08T22:47:58.528653400Z", - "eventType": "ActivityTaskStarted", - "taskId": "1048733", - "activityTaskStartedEventAttributes": { - "scheduledEventId": "29", - "identity": "1@2741f9c3f558", - "requestId": "0e4b03aa-2493-4f7d-b832-4e98e13551da", - "attempt": 1 - } - }, - { - "eventId": "31", - "eventTime": "2022-03-08T22:47:58.668827900Z", - "eventType": "ActivityTaskCompleted", - "taskId": "1048734", - "activityTaskCompletedEventAttributes": { - "scheduledEventId": "29", - "startedEventId": "30", - "identity": "1@2741f9c3f558" - } - }, - { - "eventId": "32", - "eventTime": "2022-03-08T22:47:58.668874600Z", - "eventType": "WorkflowTaskScheduled", - "taskId": "1048735", - "workflowTaskScheduledEventAttributes": { - "taskQueue": { - "name": "1@2741f9c3f558:e7111d28-2117-4614-9bb1-8a73524b789f", - "kind": "Sticky" - }, - "startToCloseTimeout": "10s", - "attempt": 1 - } - }, - { - "eventId": "33", - "eventTime": "2022-03-08T22:47:58.682929200Z", - "eventType": "WorkflowTaskStarted", - "taskId": "1048739", - "workflowTaskStartedEventAttributes": { - "scheduledEventId": "32", - "identity": "e7111d28-2117-4614-9bb1-8a73524b789f", - "requestId": "caa61b0a-5d34-48a8-ab16-997d3ba9eab5" - } - }, - { - "eventId": "34", - "eventTime": "2022-03-08T22:47:58.735519100Z", - "eventType": "WorkflowTaskCompleted", - "taskId": "1048742", - "workflowTaskCompletedEventAttributes": { - "scheduledEventId": "32", - "startedEventId": "33", - "identity": "1@2741f9c3f558" - } - }, - { - "eventId": "35", - "eventTime": "2022-03-08T22:47:58.735579100Z", - "eventType": "MarkerRecorded", - "taskId": "1048743", - "markerRecordedEventAttributes": { - "markerName": "Version", - "details": { - "changeId": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "InRhc2tfcXVldWVfY2hhbmdlX2Zyb21fY29ubmVjdGlvbl91cGRhdGVyX3RvX3N5bmMi" - } - ] - }, - "version": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "MQ==" - } - ] - } - }, - "workflowTaskCompletedEventId": "34" - } - }, - { - "eventId": "36", - "eventTime": "2022-03-08T22:47:58.735800Z", - "eventType": "StartChildWorkflowExecutionInitiated", - "taskId": "1048744", - "startChildWorkflowExecutionInitiatedEventAttributes": { - "namespace": "default", - "workflowId": "sync_1", - "workflowType": { - "name": "SyncWorkflow" - }, - "taskQueue": { - "name": "SYNC" - }, - "input": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJqb2JJZCI6IjEiLCJhdHRlbXB0SWQiOjB9" - }, - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJqb2JJZCI6IjEiLCJhdHRlbXB0SWQiOjAsImRvY2tlckltYWdlIjoiYWlyYnl0ZS9zb3VyY2UtcG9rZWFwaTowLjEuNCJ9" - }, - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJqb2JJZCI6IjEiLCJhdHRlbXB0SWQiOjAsImRvY2tlckltYWdlIjoiYWlyYnl0ZS9kZXN0aW5hdGlvbi1sb2NhbC1qc29uOjAuMi4xMCJ9" - }, - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": 
"eyJuYW1lc3BhY2VEZWZpbml0aW9uIjoiZGVzdGluYXRpb24iLCJuYW1lc3BhY2VGb3JtYXQiOiIke1NPVVJDRV9OQU1FU1BBQ0V9IiwicHJlZml4IjoiIiwic291cmNlQ29uZmlndXJhdGlvbiI6eyJwb2tlbW9uX25hbWUiOiJkaXR0byJ9LCJkZXN0aW5hdGlvbkNvbmZpZ3VyYXRpb24iOnsiZGVzdGluYXRpb25fcGF0aCI6Ii90bXAifSwib3BlcmF0aW9uU2VxdWVuY2UiOltdLCJjYXRhbG9nIjp7InN0cmVhbXMiOlt7InN0cmVhbSI6eyJuYW1lIjoicG9rZW1vbiIsImpzb25fc2NoZW1hIjp7InR5cGUiOiJvYmplY3QiLCIkc2NoZW1hIjoiaHR0cDovL2pzb24tc2NoZW1hLm9yZy9kcmFmdC0wNy9zY2hlbWEjIiwicHJvcGVydGllcyI6eyJpZCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiZm9ybXMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fX0sIm1vdmVzIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7Im1vdmUiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwidmVyc2lvbl9ncm91cF9kZXRhaWxzIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InZlcnNpb25fZ3JvdXAiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwibGV2ZWxfbGVhcm5lZF9hdCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJtb3ZlX2xlYXJuX21ldGhvZCI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19fX19fX19LCJvcmRlciI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJzdGF0cyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJzdGF0Ijp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX0sImVmZm9ydCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJiYXNlX3N0YXQiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfX19fSwidHlwZXMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsic2xvdCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJ0eXBlIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX19fX0sImhlaWdodCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJ3ZWlnaHQiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwic3BlY2llcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19LCJzcHJpdGVzIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7ImJhY2tfc2hpbnkiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJiYWNrX2ZlbWFsZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sImZyb250X3NoaW55Ijp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiYmFja19kZWZhdWx0Ijp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiZnJvbnRfZmVtYWxlIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiZnJvbnRfZGVmYXVsdCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sImJhY2tfc2hpbnlfZmVtYWxlIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiZnJvbnRfc2hpbnlfZmVtYWxlIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19LCJhYmlsaXRpZXMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsic2xvdCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJhYmlsaXR5Ijp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX0sImlzX2hpZGRlbiI6eyJ0eXBlIjpbIm51bGwiLCJib29sZWFuIl19fX19LCJoZWxkX2l0ZW1zIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iam
VjdCJdLCJwcm9wZXJ0aWVzIjp7Iml0ZW0iOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwidmVyc2lvbl9kZXRhaWxzIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InJhcml0eSI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJ2ZXJzaW9uIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX19fX19fX0sImlzX2RlZmF1bHQgIjp7InR5cGUiOlsibnVsbCIsImJvb2xlYW4iXX0sImdhbWVfaW5kaWNlcyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ2ZXJzaW9uIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX0sImdhbWVfaW5kZXgiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfX19fSwiYmFzZV9leHBlcmllbmNlIjp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sImxvY2F0aW9uX2FyZWFfZW5jb3VudGVycyI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwic3VwcG9ydGVkX3N5bmNfbW9kZXMiOlsiZnVsbF9yZWZyZXNoIl0sImRlZmF1bHRfY3Vyc29yX2ZpZWxkIjpbXSwic291cmNlX2RlZmluZWRfcHJpbWFyeV9rZXkiOltdfSwic3luY19tb2RlIjoiZnVsbF9yZWZyZXNoIiwiY3Vyc29yX2ZpZWxkIjpbXSwiZGVzdGluYXRpb25fc3luY19tb2RlIjoiYXBwZW5kIiwicHJpbWFyeV9rZXkiOltdfV19LCJyZXNvdXJjZVJlcXVpcmVtZW50cyI6e30sInNvdXJjZVJlc291cmNlUmVxdWlyZW1lbnRzIjp7fSwiZGVzdGluYXRpb25SZXNvdXJjZVJlcXVpcmVtZW50cyI6e319" - }, - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "ImI5MTZmZDg2LWE1YTYtNDhhYy04ODgwLTQ5Nzg3NGNmNTNjZiI=" - } - ] - }, - "workflowExecutionTimeout": "0s", - "workflowRunTimeout": "0s", - "workflowTaskTimeout": "10s", - "parentClosePolicy": "RequestCancel", - "workflowTaskCompletedEventId": "34", - "workflowIdReusePolicy": "AllowDuplicate", - "header": {} - } - }, - { - "eventId": "37", - "eventTime": "2022-03-08T22:47:58.762930500Z", - "eventType": "ChildWorkflowExecutionStarted", - "taskId": "1048747", - "childWorkflowExecutionStartedEventAttributes": { - "namespace": "default", - "initiatedEventId": "36", - "workflowExecution": { - "workflowId": "sync_1", - "runId": "140640f0-c577-4d9c-8777-4e7e40b64241" - }, - "workflowType": { - "name": "SyncWorkflow" - }, - "header": {} - } - }, - { - "eventId": "38", - "eventTime": "2022-03-08T22:47:58.762971900Z", - "eventType": "WorkflowTaskScheduled", - "taskId": "1048748", - "workflowTaskScheduledEventAttributes": { - "taskQueue": { - "name": "1@2741f9c3f558:e7111d28-2117-4614-9bb1-8a73524b789f", - "kind": "Sticky" - }, - "startToCloseTimeout": "10s", - "attempt": 1 - } - }, - { - "eventId": "39", - "eventTime": "2022-03-08T22:47:58.774051900Z", - "eventType": "WorkflowTaskStarted", - "taskId": "1048752", - "workflowTaskStartedEventAttributes": { - "scheduledEventId": "38", - "identity": "e7111d28-2117-4614-9bb1-8a73524b789f", - "requestId": "ca98144b-47f8-486d-b260-2e1dc42bd68e" - } - }, - { - "eventId": "40", - "eventTime": "2022-03-08T22:47:58.794905100Z", - "eventType": "WorkflowTaskCompleted", - "taskId": "1048755", - "workflowTaskCompletedEventAttributes": { - "scheduledEventId": "38", - "startedEventId": "39", - "identity": "1@2741f9c3f558" - } - }, - { - "eventId": "41", - "eventTime": "2022-03-08T22:48:02.408058300Z", - "eventType": "ChildWorkflowExecutionCompleted", - "taskId": "1048757", - "childWorkflowExecutionCompletedEventAttributes": { - "result": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": 
"eyJzdGFuZGFyZFN5bmNTdW1tYXJ5Ijp7InN0YXR1cyI6ImNvbXBsZXRlZCIsInJlY29yZHNTeW5jZWQiOjEsImJ5dGVzU3luY2VkIjoyMjcxNSwic3RhcnRUaW1lIjoxNjQ2Nzc5Njc4OTM5LCJlbmRUaW1lIjoxNjQ2Nzc5NjgyMjM4LCJ0b3RhbFN0YXRzIjp7InJlY29yZHNFbWl0dGVkIjoxLCJieXRlc0VtaXR0ZWQiOjIyNzE1LCJzdGF0ZU1lc3NhZ2VzRW1pdHRlZCI6MCwicmVjb3Jkc0NvbW1pdHRlZCI6MX0sInN0cmVhbVN0YXRzIjpbeyJzdHJlYW1OYW1lIjoicG9rZW1vbiIsInN0YXRzIjp7InJlY29yZHNFbWl0dGVkIjoxLCJieXRlc0VtaXR0ZWQiOjIyNzE1LCJyZWNvcmRzQ29tbWl0dGVkIjoxfX1dfSwib3V0cHV0X2NhdGFsb2ciOnsic3RyZWFtcyI6W3sic3RyZWFtIjp7Im5hbWUiOiJwb2tlbW9uIiwianNvbl9zY2hlbWEiOnsidHlwZSI6Im9iamVjdCIsIiRzY2hlbWEiOiJodHRwOi8vanNvbi1zY2hlbWEub3JnL2RyYWZ0LTA3L3NjaGVtYSMiLCJwcm9wZXJ0aWVzIjp7ImlkIjp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJmb3JtcyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19fSwibW92ZXMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsibW92ZSI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19LCJ2ZXJzaW9uX2dyb3VwX2RldGFpbHMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidmVyc2lvbl9ncm91cCI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19LCJsZXZlbF9sZWFybmVkX2F0Ijp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sIm1vdmVfbGVhcm5fbWV0aG9kIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX19fX19fX0sIm9yZGVyIjp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sInN0YXRzIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InN0YXQiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwiZWZmb3J0Ijp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sImJhc2Vfc3RhdCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19fX19LCJ0eXBlcyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJzbG90Ijp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sInR5cGUiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fX19fSwiaGVpZ2h0Ijp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sIndlaWdodCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJzcGVjaWVzIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX0sInNwcml0ZXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsiYmFja19zaGlueSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sImJhY2tfZmVtYWxlIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiZnJvbnRfc2hpbnkiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJiYWNrX2RlZmF1bHQiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJmcm9udF9mZW1hbGUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJmcm9udF9kZWZhdWx0Ijp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiYmFja19zaGlueV9mZW1hbGUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJmcm9udF9zaGlueV9mZW1hbGUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX0sImFiaWxpdGllcyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJzbG90Ijp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sImFiaWxpdHkiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZS
I6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwiaXNfaGlkZGVuIjp7InR5cGUiOlsibnVsbCIsImJvb2xlYW4iXX19fX0sImhlbGRfaXRlbXMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsiaXRlbSI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19LCJ2ZXJzaW9uX2RldGFpbHMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsicmFyaXR5Ijp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sInZlcnNpb24iOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fX19fX19fSwiaXNfZGVmYXVsdCAiOnsidHlwZSI6WyJudWxsIiwiYm9vbGVhbiJdfSwiZ2FtZV9pbmRpY2VzIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InZlcnNpb24iOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwiZ2FtZV9pbmRleCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19fX19LCJiYXNlX2V4cGVyaWVuY2UiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwibG9jYXRpb25fYXJlYV9lbmNvdW50ZXJzIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19LCJzdXBwb3J0ZWRfc3luY19tb2RlcyI6WyJmdWxsX3JlZnJlc2giXSwiZGVmYXVsdF9jdXJzb3JfZmllbGQiOltdLCJzb3VyY2VfZGVmaW5lZF9wcmltYXJ5X2tleSI6W119LCJzeW5jX21vZGUiOiJmdWxsX3JlZnJlc2giLCJjdXJzb3JfZmllbGQiOltdLCJkZXN0aW5hdGlvbl9zeW5jX21vZGUiOiJhcHBlbmQiLCJwcmltYXJ5X2tleSI6W119XX0sImZhaWx1cmVzIjpbXX0=" - } - ] - }, - "namespace": "default", - "workflowExecution": { - "workflowId": "sync_1", - "runId": "140640f0-c577-4d9c-8777-4e7e40b64241" - }, - "workflowType": { - "name": "SyncWorkflow" - }, - "initiatedEventId": "36", - "startedEventId": "37" - } - }, - { - "eventId": "42", - "eventTime": "2022-03-08T22:48:02.408127200Z", - "eventType": "WorkflowTaskScheduled", - "taskId": "1048758", - "workflowTaskScheduledEventAttributes": { - "taskQueue": { - "name": "1@2741f9c3f558:e7111d28-2117-4614-9bb1-8a73524b789f", - "kind": "Sticky" - }, - "startToCloseTimeout": "10s", - "attempt": 1 - } - }, - { - "eventId": "43", - "eventTime": "2022-03-08T22:48:02.422112800Z", - "eventType": "WorkflowTaskStarted", - "taskId": "1048762", - "workflowTaskStartedEventAttributes": { - "scheduledEventId": "42", - "identity": "e7111d28-2117-4614-9bb1-8a73524b789f", - "requestId": "c0916d42-c83c-4e9d-805a-29ca5f979624" - } - }, - { - "eventId": "44", - "eventTime": "2022-03-08T22:48:02.454203Z", - "eventType": "WorkflowTaskCompleted", - "taskId": "1048765", - "workflowTaskCompletedEventAttributes": { - "scheduledEventId": "42", - "startedEventId": "43", - "identity": "1@2741f9c3f558" - } - }, - { - "eventId": "45", - "eventTime": "2022-03-08T22:48:02.454256Z", - "eventType": "ActivityTaskScheduled", - "taskId": "1048766", - "activityTaskScheduledEventAttributes": { - "activityId": "b169a729-47bc-38f7-a315-c1a4b6d96466", - "activityType": { - "name": "JobSuccess" - }, - "taskQueue": { - "name": "CONNECTION_UPDATER", - "kind": "Normal" - }, - "header": {}, - "input": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": 
"eyJqb2JJZCI6MSwiYXR0ZW1wdElkIjowLCJzdGFuZGFyZFN5bmNPdXRwdXQiOnsic3RhbmRhcmRTeW5jU3VtbWFyeSI6eyJzdGF0dXMiOiJjb21wbGV0ZWQiLCJyZWNvcmRzU3luY2VkIjoxLCJieXRlc1N5bmNlZCI6MjI3MTUsInN0YXJ0VGltZSI6MTY0Njc3OTY3ODkzOSwiZW5kVGltZSI6MTY0Njc3OTY4MjIzOCwidG90YWxTdGF0cyI6eyJyZWNvcmRzRW1pdHRlZCI6MSwiYnl0ZXNFbWl0dGVkIjoyMjcxNSwic3RhdGVNZXNzYWdlc0VtaXR0ZWQiOjAsInJlY29yZHNDb21taXR0ZWQiOjF9LCJzdHJlYW1TdGF0cyI6W3sic3RyZWFtTmFtZSI6InBva2Vtb24iLCJzdGF0cyI6eyJyZWNvcmRzRW1pdHRlZCI6MSwiYnl0ZXNFbWl0dGVkIjoyMjcxNSwicmVjb3Jkc0NvbW1pdHRlZCI6MX19XX0sIm91dHB1dF9jYXRhbG9nIjp7InN0cmVhbXMiOlt7InN0cmVhbSI6eyJuYW1lIjoicG9rZW1vbiIsImpzb25fc2NoZW1hIjp7InR5cGUiOiJvYmplY3QiLCIkc2NoZW1hIjoiaHR0cDovL2pzb24tc2NoZW1hLm9yZy9kcmFmdC0wNy9zY2hlbWEjIiwicHJvcGVydGllcyI6eyJpZCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiZm9ybXMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fX0sIm1vdmVzIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7Im1vdmUiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwidmVyc2lvbl9ncm91cF9kZXRhaWxzIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InZlcnNpb25fZ3JvdXAiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwibGV2ZWxfbGVhcm5lZF9hdCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJtb3ZlX2xlYXJuX21ldGhvZCI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19fX19fX19LCJvcmRlciI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJzdGF0cyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJzdGF0Ijp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX0sImVmZm9ydCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJiYXNlX3N0YXQiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfX19fSwidHlwZXMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsic2xvdCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJ0eXBlIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX19fX0sImhlaWdodCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJ3ZWlnaHQiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwic3BlY2llcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19LCJzcHJpdGVzIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7ImJhY2tfc2hpbnkiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJiYWNrX2ZlbWFsZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sImZyb250X3NoaW55Ijp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiYmFja19kZWZhdWx0Ijp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiZnJvbnRfZmVtYWxlIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiZnJvbnRfZGVmYXVsdCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sImJhY2tfc2hpbnlfZmVtYWxlIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiZnJvbnRfc2hpbnlfZmVtYWxlIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19LCJhYmlsaXRpZXMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsic2xvdCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJhYmlsaXR5Ijp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZX
J0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX0sImlzX2hpZGRlbiI6eyJ0eXBlIjpbIm51bGwiLCJib29sZWFuIl19fX19LCJoZWxkX2l0ZW1zIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7Iml0ZW0iOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwidmVyc2lvbl9kZXRhaWxzIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InJhcml0eSI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJ2ZXJzaW9uIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX19fX19fX0sImlzX2RlZmF1bHQgIjp7InR5cGUiOlsibnVsbCIsImJvb2xlYW4iXX0sImdhbWVfaW5kaWNlcyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ2ZXJzaW9uIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fX0sImdhbWVfaW5kZXgiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfX19fSwiYmFzZV9leHBlcmllbmNlIjp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sImxvY2F0aW9uX2FyZWFfZW5jb3VudGVycyI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19fSwic3VwcG9ydGVkX3N5bmNfbW9kZXMiOlsiZnVsbF9yZWZyZXNoIl0sImRlZmF1bHRfY3Vyc29yX2ZpZWxkIjpbXSwic291cmNlX2RlZmluZWRfcHJpbWFyeV9rZXkiOltdfSwic3luY19tb2RlIjoiZnVsbF9yZWZyZXNoIiwiY3Vyc29yX2ZpZWxkIjpbXSwiZGVzdGluYXRpb25fc3luY19tb2RlIjoiYXBwZW5kIiwicHJpbWFyeV9rZXkiOltdfV19LCJmYWlsdXJlcyI6W119fQ==" - } - ] - }, - "scheduleToCloseTimeout": "0s", - "scheduleToStartTimeout": "0s", - "startToCloseTimeout": "120s", - "heartbeatTimeout": "30s", - "workflowTaskCompletedEventId": "44", - "retryPolicy": { - "initialInterval": "30s", - "backoffCoefficient": 2, - "maximumInterval": "3000s", - "maximumAttempts": 10 - } - } - }, - { - "eventId": "46", - "eventTime": "2022-03-08T22:48:02.437049800Z", - "eventType": "ActivityTaskStarted", - "taskId": "1048770", - "activityTaskStartedEventAttributes": { - "scheduledEventId": "45", - "identity": "1@2741f9c3f558", - "requestId": "9d2fc180-ec33-42a2-a259-d29afb281992", - "attempt": 1 - } - }, - { - "eventId": "47", - "eventTime": "2022-03-08T22:48:02.664164100Z", - "eventType": "ActivityTaskCompleted", - "taskId": "1048771", - "activityTaskCompletedEventAttributes": { - "scheduledEventId": "45", - "startedEventId": "46", - "identity": "1@2741f9c3f558" - } - }, - { - "eventId": "48", - "eventTime": "2022-03-08T22:48:02.664217700Z", - "eventType": "WorkflowTaskScheduled", - "taskId": "1048772", - "workflowTaskScheduledEventAttributes": { - "taskQueue": { - "name": "1@2741f9c3f558:e7111d28-2117-4614-9bb1-8a73524b789f", - "kind": "Sticky" - }, - "startToCloseTimeout": "10s", - "attempt": 1 - } - }, - { - "eventId": "49", - "eventTime": "2022-03-08T22:48:02.676895300Z", - "eventType": "WorkflowTaskStarted", - "taskId": "1048776", - "workflowTaskStartedEventAttributes": { - "scheduledEventId": "48", - "identity": "e7111d28-2117-4614-9bb1-8a73524b789f", - "requestId": "b43c8f30-2500-47d6-a8f6-aa2cd0d99218" - } - }, - { - "eventId": "50", - "eventTime": "2022-03-08T22:48:02.709745Z", - "eventType": "WorkflowTaskCompleted", - "taskId": "1048779", - "workflowTaskCompletedEventAttributes": { - "scheduledEventId": "48", - "startedEventId": "49", - "identity": "1@2741f9c3f558" - } - }, - { - "eventId": "51", - "eventTime": "2022-03-08T22:48:02.709811400Z", - "eventType": "WorkflowExecutionContinuedAsNew", - 
"taskId": "1048780", - "workflowExecutionContinuedAsNewEventAttributes": { - "newExecutionRunId": "e81cf38b-7f11-4eeb-8c85-301778bf2671", - "workflowType": { - "name": "ConnectionManagerWorkflow" - }, - "taskQueue": { - "name": "CONNECTION_UPDATER", - "kind": "Normal" - }, - "input": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJjb25uZWN0aW9uSWQiOiJiOTE2ZmQ4Ni1hNWE2LTQ4YWMtODg4MC00OTc4NzRjZjUzY2YiLCJqb2JJZCI6bnVsbCwiYXR0ZW1wdElkIjpudWxsLCJmcm9tRmFpbHVyZSI6ZmFsc2UsImF0dGVtcHROdW1iZXIiOjEsIndvcmtmbG93U3RhdGUiOm51bGwsInJlc2V0Q29ubmVjdGlvbiI6ZmFsc2V9" - } - ] - }, - "workflowRunTimeout": "0s", - "workflowTaskTimeout": "10s", - "workflowTaskCompletedEventId": "50", - "header": {} - } - } - ] -} From 110891068562f0b7e6b8e6a37b74a60b19a23d0e Mon Sep 17 00:00:00 2001 From: Benoit Moriceau Date: Fri, 11 Mar 2022 16:16:37 -0800 Subject: [PATCH 27/38] Revert "Jamakase/select default sync modes (#10320)" (#11080) This reverts commit ff4762c7fb6dd266074887afd789af5d9fdbad98. --- .../Markdown/__mocks__/Markdown.tsx | 7 - .../components/Markdown/__mocks__/index.ts | 1 - airbyte-webapp/src/core/domain/catalog/api.ts | 6 +- .../src/{setupTests.tsx => setupTests.ts} | 3 - .../ConnectionForm/ConnectionForm.tsx | 2 +- .../ConnectionForm/formConfig.test.tsx | 358 ------------------ .../Connection/ConnectionForm/formConfig.tsx | 109 +++--- .../ServiceForm/ServiceForm.test.tsx | 10 + 8 files changed, 58 insertions(+), 438 deletions(-) delete mode 100644 airbyte-webapp/src/components/Markdown/__mocks__/Markdown.tsx delete mode 100644 airbyte-webapp/src/components/Markdown/__mocks__/index.ts rename airbyte-webapp/src/{setupTests.tsx => setupTests.ts} (72%) delete mode 100644 airbyte-webapp/src/views/Connection/ConnectionForm/formConfig.test.tsx diff --git a/airbyte-webapp/src/components/Markdown/__mocks__/Markdown.tsx b/airbyte-webapp/src/components/Markdown/__mocks__/Markdown.tsx deleted file mode 100644 index 6330f6e774c4..000000000000 --- a/airbyte-webapp/src/components/Markdown/__mocks__/Markdown.tsx +++ /dev/null @@ -1,7 +0,0 @@ -import React from "react"; - -function ReactMarkdown({ children }: React.PropsWithChildren) { - return <>{children}; -} - -export default ReactMarkdown; diff --git a/airbyte-webapp/src/components/Markdown/__mocks__/index.ts b/airbyte-webapp/src/components/Markdown/__mocks__/index.ts deleted file mode 100644 index f14dafbcd5b6..000000000000 --- a/airbyte-webapp/src/components/Markdown/__mocks__/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { default as Markdown } from "./Markdown"; diff --git a/airbyte-webapp/src/core/domain/catalog/api.ts b/airbyte-webapp/src/core/domain/catalog/api.ts index 62c29abd1736..84c567eea541 100644 --- a/airbyte-webapp/src/core/domain/catalog/api.ts +++ b/airbyte-webapp/src/core/domain/catalog/api.ts @@ -19,7 +19,7 @@ export enum DestinationSyncMode { Dedupted = "append_dedup", } -export type AirbyteSchemaStream = { +export type SyncSchemaStreamInner = { stream: AirbyteStream; config: AirbyteStreamConfiguration; }; @@ -54,8 +54,4 @@ export type SyncSchema = { streams: SyncSchemaStream[]; }; -export type AirbyteSyncSchema = { - streams: AirbyteSchemaStream[]; -}; - export type Path = string[]; diff --git a/airbyte-webapp/src/setupTests.tsx b/airbyte-webapp/src/setupTests.ts similarity index 72% rename from airbyte-webapp/src/setupTests.tsx rename to airbyte-webapp/src/setupTests.ts index 6fc622d9c8c6..1557cba04fa9 100644 --- a/airbyte-webapp/src/setupTests.tsx +++ b/airbyte-webapp/src/setupTests.ts @@ -4,6 +4,3 @@ 
// learn more: https://github.com/testing-library/jest-dom import "@testing-library/jest-dom/extend-expect"; import "@testing-library/jest-dom"; - -// hack to fix tests. https://github.com/remarkjs/react-markdown/issues/635 -jest.mock("components/Markdown"); diff --git a/airbyte-webapp/src/views/Connection/ConnectionForm/ConnectionForm.tsx b/airbyte-webapp/src/views/Connection/ConnectionForm/ConnectionForm.tsx index fccf08de77cc..59c68eba26f9 100644 --- a/airbyte-webapp/src/views/Connection/ConnectionForm/ConnectionForm.tsx +++ b/airbyte-webapp/src/views/Connection/ConnectionForm/ConnectionForm.tsx @@ -30,7 +30,7 @@ import { import { OperationsSection } from "./components/OperationsSection"; const EditLaterMessage = styled(Label)` - margin: 20px 0 29px; + margin: -20px 0 29px; `; const ConnectorLabel = styled(ControlLabels)` diff --git a/airbyte-webapp/src/views/Connection/ConnectionForm/formConfig.test.tsx b/airbyte-webapp/src/views/Connection/ConnectionForm/formConfig.test.tsx deleted file mode 100644 index 97ad4cc4bb52..000000000000 --- a/airbyte-webapp/src/views/Connection/ConnectionForm/formConfig.test.tsx +++ /dev/null @@ -1,358 +0,0 @@ -import { - AirbyteStreamConfiguration, - DestinationSyncMode, - SyncMode, -} from "core/domain/catalog"; -import { calculateInitialCatalog } from "./formConfig"; - -describe("calculateInitialCatalog", () => { - it("should assign ids to all streams", () => { - const values = calculateInitialCatalog( - { - streams: [ - { - stream: { - sourceDefinedCursor: null, - defaultCursorField: [], - sourceDefinedPrimaryKey: [], - jsonSchema: {}, - name: "name", - supportedSyncModes: [], - }, - config: { - destinationSyncMode: DestinationSyncMode.Overwrite, - selected: false, - syncMode: SyncMode.FullRefresh, - cursorField: [], - primaryKey: [], - aliasName: "", - }, - }, - ], - }, - { - connectionSpecification: {}, - destinationDefinitionId: "", - documentationUrl: "", - supportsDbt: false, - supportsNormalization: false, - supportedDestinationSyncModes: [], - }, - false - ); - - values.streams.forEach((stream) => { - expect(stream).toHaveProperty("id"); - }); - }); - - it("should select append_dedup if destination supports it", () => { - const values = calculateInitialCatalog( - { - streams: [ - { - stream: { - sourceDefinedCursor: null, - defaultCursorField: [], - sourceDefinedPrimaryKey: [], - jsonSchema: {}, - name: "name", - supportedSyncModes: [], - }, - config: { - destinationSyncMode: DestinationSyncMode.Overwrite, - selected: false, - syncMode: SyncMode.FullRefresh, - cursorField: [], - primaryKey: [], - aliasName: "", - }, - }, - { - stream: { - sourceDefinedCursor: null, - defaultCursorField: [], - sourceDefinedPrimaryKey: [], - jsonSchema: {}, - name: "name", - supportedSyncModes: [], - }, - config: { - destinationSyncMode: DestinationSyncMode.Dedupted, - selected: false, - syncMode: SyncMode.FullRefresh, - cursorField: [], - primaryKey: [], - aliasName: "", - }, - }, - { - stream: { - sourceDefinedCursor: null, - defaultCursorField: [], - sourceDefinedPrimaryKey: [], - jsonSchema: {}, - name: "name", - supportedSyncModes: [], - }, - config: { - destinationSyncMode: DestinationSyncMode.Append, - selected: false, - syncMode: SyncMode.FullRefresh, - cursorField: [], - primaryKey: [], - aliasName: "", - }, - }, - ], - }, - { - connectionSpecification: {}, - destinationDefinitionId: "", - documentationUrl: "", - supportsDbt: false, - supportsNormalization: false, - supportedDestinationSyncModes: [DestinationSyncMode.Dedupted], - }, - false - ); - - 
values.streams.forEach((stream) => - expect(stream).toHaveProperty( - "config.destinationSyncMode", - DestinationSyncMode.Dedupted - ) - ); - }); - - it("should not change syncMode and destinationSyncMode in EditMode", () => { - const { streams } = calculateInitialCatalog( - { - streams: [ - { - stream: { - sourceDefinedCursor: null, - defaultCursorField: [], - sourceDefinedPrimaryKey: [], - jsonSchema: {}, - name: "name", - supportedSyncModes: [SyncMode.Incremental], - }, - config: { - destinationSyncMode: DestinationSyncMode.Overwrite, - selected: false, - syncMode: SyncMode.FullRefresh, - cursorField: [], - primaryKey: [], - aliasName: "", - }, - }, - ], - }, - { - connectionSpecification: {}, - destinationDefinitionId: "", - documentationUrl: "", - supportsDbt: false, - supportsNormalization: false, - supportedDestinationSyncModes: [DestinationSyncMode.Dedupted], - }, - true - ); - - expect(streams[0]).toHaveProperty("config.syncMode", SyncMode.FullRefresh); - expect(streams[0]).toHaveProperty( - "config.destinationSyncMode", - DestinationSyncMode.Overwrite - ); - }); - - it("should prefer incremental sync mode", () => { - const { streams } = calculateInitialCatalog( - { - streams: [ - { - stream: { - sourceDefinedCursor: null, - defaultCursorField: [], - sourceDefinedPrimaryKey: [], - jsonSchema: {}, - name: "name", - supportedSyncModes: [SyncMode.Incremental], - }, - config: { - destinationSyncMode: DestinationSyncMode.Overwrite, - selected: false, - syncMode: SyncMode.FullRefresh, - cursorField: [], - primaryKey: [], - aliasName: "", - }, - }, - { - stream: { - sourceDefinedCursor: null, - defaultCursorField: [], - sourceDefinedPrimaryKey: [], - jsonSchema: {}, - name: "name", - supportedSyncModes: [SyncMode.Incremental, SyncMode.FullRefresh], - }, - config: { - destinationSyncMode: DestinationSyncMode.Overwrite, - selected: false, - syncMode: SyncMode.FullRefresh, - cursorField: [], - primaryKey: [], - aliasName: "", - }, - }, - { - stream: { - sourceDefinedCursor: null, - defaultCursorField: [], - sourceDefinedPrimaryKey: [], - jsonSchema: {}, - name: "name", - supportedSyncModes: [SyncMode.FullRefresh], - }, - config: { - destinationSyncMode: DestinationSyncMode.Overwrite, - selected: false, - syncMode: SyncMode.FullRefresh, - cursorField: [], - primaryKey: [], - aliasName: "", - }, - }, - ], - }, - { - connectionSpecification: {}, - destinationDefinitionId: "", - documentationUrl: "", - supportsDbt: false, - supportsNormalization: false, - supportedDestinationSyncModes: [DestinationSyncMode.Dedupted], - }, - false - ); - - expect(streams[0]).toHaveProperty("config.syncMode", SyncMode.Incremental); - expect(streams[1]).toHaveProperty("config.syncMode", SyncMode.Incremental); - expect(streams[2]).toHaveProperty("config.syncMode", SyncMode.FullRefresh); - }); - - it("should assign default value cursorField when it is available and no cursorField is selected", () => { - const { streams } = calculateInitialCatalog( - { - streams: [ - { - stream: { - sourceDefinedCursor: null, - defaultCursorField: ["default_path"], - sourceDefinedPrimaryKey: [], - jsonSchema: {}, - name: "name", - supportedSyncModes: [SyncMode.Incremental], - }, - config: { - syncMode: SyncMode.FullRefresh, - destinationSyncMode: DestinationSyncMode.Overwrite, - selected: false, - cursorField: [], - primaryKey: [], - aliasName: "", - }, - }, - { - stream: { - sourceDefinedCursor: null, - defaultCursorField: ["default_path"], - sourceDefinedPrimaryKey: [], - jsonSchema: {}, - name: "name", - supportedSyncModes: 
[SyncMode.Incremental], - }, - config: { - syncMode: SyncMode.FullRefresh, - destinationSyncMode: DestinationSyncMode.Overwrite, - selected: false, - cursorField: ["selected_path"], - primaryKey: [], - aliasName: "", - }, - }, - { - stream: { - sourceDefinedCursor: null, - defaultCursorField: [], - sourceDefinedPrimaryKey: [], - jsonSchema: {}, - name: "name", - supportedSyncModes: [SyncMode.FullRefresh], - }, - config: { - syncMode: SyncMode.FullRefresh, - destinationSyncMode: DestinationSyncMode.Overwrite, - selected: false, - cursorField: [], - primaryKey: [], - aliasName: "", - }, - }, - ], - }, - { - connectionSpecification: {}, - destinationDefinitionId: "", - documentationUrl: "", - supportsDbt: false, - supportsNormalization: false, - supportedDestinationSyncModes: [DestinationSyncMode.Dedupted], - }, - false - ); - - expect(streams[0]).toHaveProperty("config.cursorField", ["default_path"]); - expect(streams[1]).toHaveProperty("config.cursorField", ["selected_path"]); - expect(streams[2]).toHaveProperty("config.cursorField", []); - }); - - it("should pick first syncMode when it is somehow nullable", () => { - const { streams } = calculateInitialCatalog( - { - streams: [ - { - stream: { - sourceDefinedCursor: null, - defaultCursorField: [], - sourceDefinedPrimaryKey: [], - jsonSchema: {}, - name: "name", - supportedSyncModes: [SyncMode.FullRefresh], - }, - config: ({ - destinationSyncMode: DestinationSyncMode.Overwrite, - selected: false, - cursorField: [], - primaryKey: [], - aliasName: "", - } as unknown) as AirbyteStreamConfiguration, - }, - ], - }, - { - connectionSpecification: {}, - destinationDefinitionId: "", - documentationUrl: "", - supportsDbt: false, - supportsNormalization: false, - supportedDestinationSyncModes: [DestinationSyncMode.Dedupted], - }, - false - ); - - expect(streams[0]).toHaveProperty("config.syncMode", SyncMode.FullRefresh); - }); -}); diff --git a/airbyte-webapp/src/views/Connection/ConnectionForm/formConfig.tsx b/airbyte-webapp/src/views/Connection/ConnectionForm/formConfig.tsx index 40b0a7fbd7ba..5912ca6406c9 100644 --- a/airbyte-webapp/src/views/Connection/ConnectionForm/formConfig.tsx +++ b/airbyte-webapp/src/views/Connection/ConnectionForm/formConfig.tsx @@ -5,7 +5,6 @@ import { setIn } from "formik"; import { AirbyteStreamConfiguration, - AirbyteSyncSchema, DestinationSyncMode, SyncMode, SyncSchema, @@ -205,59 +204,52 @@ function getDefaultCursorField(streamNode: SyncSchemaStream): string[] { return streamNode.config.cursorField; } -const calculateInitialCatalog = ( - schema: AirbyteSyncSchema, - destDefinition: DestinationDefinitionSpecification, - isEditMode?: boolean -): SyncSchema => ({ - streams: schema.streams.map((apiNode, id) => { - const nodeWithId: SyncSchemaStream = { ...apiNode, id: id.toString() }; - - // If the value in supportedSyncModes is empty assume the only supported sync mode is FULL_REFRESH. - // Otherwise, it supports whatever sync modes are present. - const streamNode = nodeWithId.stream.supportedSyncModes?.length - ? 
nodeWithId - : setIn(nodeWithId, "stream.supportedSyncModes", [SyncMode.FullRefresh]); - - // If syncMode isn't null and we are in create mode - don't change item - // According to types syncMode is a non-null field, but it is a legacy check for older versions - if (streamNode.config.syncMode && isEditMode) { - return streamNode; - } - - const updatedConfig: AirbyteStreamConfiguration = { - ...streamNode.config, - }; - - if ( - destDefinition.supportedDestinationSyncModes.includes( - DestinationSyncMode.Dedupted - ) - ) { - updatedConfig.destinationSyncMode = DestinationSyncMode.Dedupted; - } - - const supportedSyncModes = streamNode.stream.supportedSyncModes; - - // Prefer INCREMENTAL sync mode over other sync modes - if (supportedSyncModes.includes(SyncMode.Incremental)) { - updatedConfig.syncMode = SyncMode.Incremental; - updatedConfig.cursorField = streamNode.config.cursorField.length - ? streamNode.config.cursorField - : getDefaultCursorField(streamNode); - } - - // If source syncMode is somehow nullable - just pick one from supportedSyncModes - if (!updatedConfig.syncMode) { - updatedConfig.syncMode = streamNode.stream.supportedSyncModes[0]; - } +const useInitialSchema = (schema: SyncSchema): SyncSchema => + useMemo( + () => ({ + streams: schema.streams.map((apiNode, id) => { + const nodeWithId: SyncSchemaStream = { ...apiNode, id: id.toString() }; + + // If the value in supportedSyncModes is empty assume the only supported sync mode is FULL_REFRESH. + // Otherwise, it supports whatever sync modes are present. + const streamNode = nodeWithId.stream.supportedSyncModes?.length + ? nodeWithId + : setIn(nodeWithId, "stream.supportedSyncModes", [ + SyncMode.FullRefresh, + ]); + + // If syncMode isn't null - don't change item + if (streamNode.config.syncMode) { + return streamNode; + } + + const updateStreamConfig = ( + config: Partial + ): SyncSchemaStream => ({ + ...streamNode, + config: { ...streamNode.config, ...config }, + }); - return { - ...streamNode, - config: updatedConfig, - }; - }), -}); + const supportedSyncModes = streamNode.stream.supportedSyncModes; + + // Prefer INCREMENTAL sync mode over other sync modes + if (supportedSyncModes.includes(SyncMode.Incremental)) { + return updateStreamConfig({ + cursorField: streamNode.config.cursorField.length + ? 
streamNode.config.cursorField + : getDefaultCursorField(streamNode), + syncMode: SyncMode.Incremental, + }); + } + + // If source don't support INCREMENTAL and FULL_REFRESH - set first value from supportedSyncModes list + return updateStreamConfig({ + syncMode: streamNode.stream.supportedSyncModes[0], + }); + }), + }), + [schema.streams] + ); const getInitialTransformations = (operations: Operation[]): Transformation[] => operations.filter(isDbtTransformation); @@ -285,15 +277,7 @@ const useInitialValues = ( destDefinition: DestinationDefinitionSpecification, isEditMode?: boolean ): FormikConnectionFormValues => { - const initialSchema = useMemo( - () => - calculateInitialCatalog( - connection.syncCatalog, - destDefinition, - isEditMode - ), - [connection.syncCatalog, destDefinition, isEditMode] - ); + const initialSchema = useInitialSchema(connection.syncCatalog); return useMemo(() => { const initialValues: FormikConnectionFormValues = { @@ -350,7 +334,6 @@ const useFrequencyDropdownData = (): DropDownRow.IDataItem[] => { export type { ConnectionFormValues, FormikConnectionFormValues }; export { connectionValidationSchema, - calculateInitialCatalog, useInitialValues, useFrequencyDropdownData, mapFormPropsToOperation, diff --git a/airbyte-webapp/src/views/Connector/ServiceForm/ServiceForm.test.tsx b/airbyte-webapp/src/views/Connector/ServiceForm/ServiceForm.test.tsx index 8b6ceab979b5..a50cc99979bf 100644 --- a/airbyte-webapp/src/views/Connector/ServiceForm/ServiceForm.test.tsx +++ b/airbyte-webapp/src/views/Connector/ServiceForm/ServiceForm.test.tsx @@ -1,3 +1,4 @@ +import React from "react"; import userEvent from "@testing-library/user-event"; import { getByTestId, screen, waitFor } from "@testing-library/react"; import selectEvent from "react-select-event"; @@ -7,6 +8,15 @@ import { render } from "utils/testutils"; import { ServiceFormValues } from "./types"; import { AirbyteJSONSchema } from "core/jsonSchema"; +// hack to fix tests. 
https://github.com/remarkjs/react-markdown/issues/635
+jest.mock(
+  "components/Markdown",
+  () =>
+    function ReactMarkdown({ children }: React.PropsWithChildren) {
+      return <>{children}</>;
+    }
+);
+
 jest.setTimeout(10000);
 
 const schema: AirbyteJSONSchema = {

From a1a13525272b4ceef804888ae236b137b9d1c5a5 Mon Sep 17 00:00:00 2001
From: Edward Gao
Date: Fri, 11 Mar 2022 16:38:37 -0800
Subject: [PATCH 28/38] =?UTF-8?q?=F0=9F=8E=89=20upgrade=20dbt=20to=201.0.0?=
 =?UTF-8?q?=20(except=20for=20oracle=20and=20mysql)=20(#11051)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .../bases/base-normalization/Dockerfile | 4 +--
 .../base-normalization/clickhouse.Dockerfile | 7 ++--
 .../dbt_project.yml | 32 +++++++++----------
 .../packages.yml | 2 +-
 .../dbt_project.yml | 32 +++++++++----------
 .../dbt-project-template-mssql/packages.yml | 2 +-
 .../dbt_project.yml | 10 +++---
 .../dbt-project-template/dbt_project.yml | 32 +++++++++----------
 .../dbt-project-template/packages.yml | 2 +-
 .../test_nested_streams/dbt_project.yml | 32 +++++++++----------
 .../test_simple_streams/dbt_project.yml | 32 +++++++++----------
 .../test_simple_streams/first_dbt_project.yml | 32 +++++++++----------
 .../test_normalization/exchange_rate.sql | 2 +-
 .../test_normalization/exchange_rate.sql | 2 +-
 .../dedup_exchange_rate_scd.sql | 6 ++--
 .../dedup_exchange_rate.sql | 6 ++--
 .../test_normalization/exchange_rate.sql | 2 +-
 .../test_nested_streams/dbt_project.yml | 32 +++++++++----------
 ...ream_with_c__lting_into_long_names_scd.sql | 11 ++++---
 .../some_stream_that_was_empty_scd.sql | 11 ++++---
 ...d_stream_with_c___long_names_partition.sql | 6 ++--
 ...d_stream_with_c___names_partition_data.sql | 6 ++--
 ...d_stream_with_c__ion_double_array_data.sql | 6 ++--
 ...d_stream_with_c__lting_into_long_names.sql | 11 ++++---
 ...ream_with_c__lting_into_long_names_stg.sql | 11 ++++---
 .../some_stream_that_was_empty.sql | 11 ++++---
 .../some_stream_that_was_empty_stg.sql | 11 ++++---
 ...e_stream_with_n__lting_into_long_names.sql | 11 ++++---
 .../test_simple_streams/dbt_project.yml | 32 +++++++++----------
 .../test_simple_streams/first_dbt_project.yml | 32 +++++++++----------
 .../1_prefix_startwith_number_scd.sql | 11 ++++---
 .../dedup_cdc_excluded_scd.sql | 11 ++++---
 .../dedup_exchange_rate_scd.sql | 11 ++++---
 .../multiple_column_names_conflicts_scd.sql | 11 ++++---
 .../test_normalization/pos_dedup_cdcx_scd.sql | 11 ++++---
 .../renamed_dedup_cdc_excluded_scd.sql | 11 ++++---
 .../1_prefix_startwith_number.sql | 11 ++++---
 .../1_prefix_startwith_number_stg.sql | 11 ++++---
 .../test_normalization/dedup_cdc_excluded.sql | 11 ++++---
 .../dedup_cdc_excluded_stg.sql | 11 ++++---
 .../dedup_exchange_rate.sql | 11 ++++---
 .../dedup_exchange_rate_stg.sql | 11 ++++---
 .../multiple_column_names_conflicts.sql | 11 ++++---
 .../multiple_column_names_conflicts_stg.sql | 11 ++++---
 .../test_normalization/pos_dedup_cdcx.sql | 11 ++++---
 .../test_normalization/pos_dedup_cdcx_stg.sql | 11 ++++---
 .../renamed_dedup_cdc_excluded.sql | 11 ++++---
 .../renamed_dedup_cdc_excluded_stg.sql | 11 ++++---
 .../dedup_exchange_rate_scd.sql | 13 ++++----
 .../renamed_dedup_cdc_excluded_scd.sql | 13 ++++----
 .../dedup_exchange_rate.sql | 13 ++++----
 .../dedup_exchange_rate_stg.sql | 13 ++++----
 .../renamed_dedup_cdc_excluded.sql | 13 ++++----
 .../renamed_dedup_cdc_excluded_stg.sql | 13 ++++----
 .../test_nested_streams/dbt_project.yml | 32 +++++++++----------
 ..._columns_resulting_into_long_names_scd.sql | 1 +
 ...plex_columns_resulting_into_long_names.sql | 1 +
 ...ns_resulting_into_long_names_partition.sql | 1 +
 ...sulting_into_long_names_partition_data.sql | 1 +
 ...long_names_partition_double_array_data.sql | 1 +
 ..._columns_resulting_into_long_names_scd.sql | 11 ++++---
 ...plex_columns_resulting_into_long_names.sql | 11 ++++---
 ...ns_resulting_into_long_names_partition.sql | 6 ++--
 ...sulting_into_long_names_partition_data.sql | 6 ++--
 ...long_names_partition_double_array_data.sql | 6 ++--
 .../test_simple_streams/dbt_project.yml | 32 +++++++++----------
 .../test_simple_streams/first_dbt_project.yml | 32 +++++++++----------
 .../dedup_exchange_rate_scd.sql | 1 +
 .../dedup_exchange_rate.sql | 1 +
 .../test_normalization/exchange_rate.sql | 1 +
 .../dedup_exchange_rate_scd.sql | 11 ++++---
 .../dedup_exchange_rate.sql | 11 ++++---
 .../test_normalization/exchange_rate.sql | 1 +
 .../dedup_exchange_rate_scd.sql | 13 ++++----
 .../dedup_exchange_rate.sql | 13 ++++----
 .../test_normalization/exchange_rate.sql | 1 +
 .../test_nested_streams/dbt_project.yml | 10 +++---
 .../test_simple_streams/dbt_project.yml | 10 +++---
 .../DEDUP_EXCHANGE_RATE_STG.sql | 4 ++-
 .../MULTIPLE_COLUMN_NAMES_CONFLICTS_STG.sql | 4 ++-
 .../DEDUP_EXCHANGE_RATE_STG.sql | 4 ++-
 .../integration_tests/test_normalization.py | 4 +--
 .../bases/base-normalization/mssql.Dockerfile | 6 ++--
 .../bases/base-normalization/mysql.Dockerfile | 1 +
 .../base-normalization/oracle.Dockerfile | 1 +
 .../bases/base-normalization/setup.py | 6 +---
 .../base-normalization/snowflake.Dockerfile | 4 +--
 .../NormalizationRunnerFactory.java | 2 +-
 .../basic-normalization.md | 2 ++
 89 files changed, 503 insertions(+), 453 deletions(-)

diff --git a/airbyte-integrations/bases/base-normalization/Dockerfile b/airbyte-integrations/bases/base-normalization/Dockerfile
index 7a969ad5ed0b..7b5882af96a2 100644
--- a/airbyte-integrations/bases/base-normalization/Dockerfile
+++ b/airbyte-integrations/bases/base-normalization/Dockerfile
@@ -1,4 +1,4 @@
-FROM fishtownanalytics/dbt:0.21.1
+FROM fishtownanalytics/dbt:1.0.0
 COPY --from=airbyte/base-airbyte-protocol-python:0.1.1 /airbyte /airbyte
 
 # Install SSH Tunneling dependencies
@@ -28,5 +28,5 @@ WORKDIR /airbyte
 ENV AIRBYTE_ENTRYPOINT "/airbyte/entrypoint.sh"
 ENTRYPOINT ["/airbyte/entrypoint.sh"]
 
-LABEL io.airbyte.version=0.1.69
+LABEL io.airbyte.version=0.1.70
 LABEL io.airbyte.name=airbyte/normalization
diff --git a/airbyte-integrations/bases/base-normalization/clickhouse.Dockerfile b/airbyte-integrations/bases/base-normalization/clickhouse.Dockerfile
index ba365769c958..1b9b0276fdc0 100644
--- a/airbyte-integrations/bases/base-normalization/clickhouse.Dockerfile
+++ b/airbyte-integrations/bases/base-normalization/clickhouse.Dockerfile
@@ -1,4 +1,4 @@
-FROM fishtownanalytics/dbt:0.21.0
+FROM fishtownanalytics/dbt:1.0.0
 COPY --from=airbyte/base-airbyte-protocol-python:0.1.1 /airbyte /airbyte
 
 # Install SSH Tunneling dependencies
@@ -20,10 +20,7 @@ WORKDIR /airbyte/normalization_code
 RUN pip install .
 WORKDIR /airbyte/normalization_code/dbt-template/
-#RUN pip install dbt-clickhouse
-# dbt-clickhouse adapter has some bugs, use our own just for now
-# https://github.com/silentsokolov/dbt-clickhouse/issues/20
-RUN pip install git+https://github.com/burmecia/dbt-clickhouse.git
+RUN pip install dbt-clickhouse==1.0.0
 
 # Download external dbt dependencies
 RUN dbt deps
diff --git a/airbyte-integrations/bases/base-normalization/dbt-project-template-clickhouse/dbt_project.yml b/airbyte-integrations/bases/base-normalization/dbt-project-template-clickhouse/dbt_project.yml
index 37eca7fcd73f..083036c3ee94 100755
--- a/airbyte-integrations/bases/base-normalization/dbt-project-template-clickhouse/dbt_project.yml
+++ b/airbyte-integrations/bases/base-normalization/dbt-project-template-clickhouse/dbt_project.yml
@@ -4,40 +4,40 @@
 # Name your package! Package names should contain only lowercase characters
 # and underscores. A good package name should reflect your organization's
 # name or the intended use of these models
-name: 'airbyte_utils'
-version: '1.0'
+name: "airbyte_utils"
+version: "1.0"
 config-version: 2
 
 # This setting configures which "profile" dbt uses for this project. Profiles contain
 # database connection information, and should be configured in the ~/.dbt/profiles.yml file
-profile: 'normalize'
+profile: "normalize"
 
 # These configurations specify where dbt should look for different types of files.
-# The `source-paths` config, for example, states that source models can be found
+# The `model-paths` config, for example, states that source models can be found
 # in the "models/" directory. You probably won't need to change these!
-source-paths: ["models"]
+model-paths: ["models"]
 docs-paths: ["docs"]
 analysis-paths: ["analysis"]
 test-paths: ["tests"]
-data-paths: ["data"]
+seed-paths: ["data"]
 macro-paths: ["macros"]
 
-target-path: "../build" # directory which will store compiled SQL files
-log-path: "../logs" # directory which will store DBT logs
-modules-path: "/tmp/dbt_modules" # directory which will store external DBT dependencies
+target-path: "../build" # directory which will store compiled SQL files
+log-path: "../logs" # directory which will store DBT logs
+packages-install-path: "/tmp/dbt_modules" # directory which will store external DBT dependencies
 
-clean-targets: # directories to be removed by `dbt clean`
-  - "build"
-  - "dbt_modules"
+clean-targets: # directories to be removed by `dbt clean`
+  - "build"
+  - "dbt_modules"
 
 quoting:
   database: true
-# Temporarily disabling the behavior of the ExtendedNameTransformer on table/schema names, see (issue #1785)
-# all schemas should be unquoted
+  # Temporarily disabling the behavior of the ExtendedNameTransformer on table/schema names, see (issue #1785)
+  # all schemas should be unquoted
   schema: false
   identifier: true
 
-# You can define configurations for models in the `source-paths` directory here.
+# You can define configurations for models in the `model-paths` directory here.
 # Using these configurations, you can enable or disable models, change how they
 # are materialized, and more!
models: @@ -61,4 +61,4 @@ models: dispatch: - macro_namespace: dbt_utils - search_order: ['airbyte_utils', 'dbt_utils'] + search_order: ["airbyte_utils", "dbt_utils"] diff --git a/airbyte-integrations/bases/base-normalization/dbt-project-template-clickhouse/packages.yml b/airbyte-integrations/bases/base-normalization/dbt-project-template-clickhouse/packages.yml index 4b74445a3df3..33b4edd58c8c 100755 --- a/airbyte-integrations/bases/base-normalization/dbt-project-template-clickhouse/packages.yml +++ b/airbyte-integrations/bases/base-normalization/dbt-project-template-clickhouse/packages.yml @@ -2,4 +2,4 @@ packages: - git: "https://github.com/fishtown-analytics/dbt-utils.git" - revision: 0.7.4 + revision: 0.8.2 diff --git a/airbyte-integrations/bases/base-normalization/dbt-project-template-mssql/dbt_project.yml b/airbyte-integrations/bases/base-normalization/dbt-project-template-mssql/dbt_project.yml index 8c7494fdc58f..3aeabe88ab2a 100755 --- a/airbyte-integrations/bases/base-normalization/dbt-project-template-mssql/dbt_project.yml +++ b/airbyte-integrations/bases/base-normalization/dbt-project-template-mssql/dbt_project.yml @@ -4,40 +4,40 @@ # Name your package! Package names should contain only lowercase characters # and underscores. A good package name should reflect your organization's # name or the intended use of these models -name: 'airbyte_utils' -version: '1.0' +name: "airbyte_utils" +version: "1.0" config-version: 2 # This setting configures which "profile" dbt uses for this project. Profiles contain # database connection information, and should be configured in the ~/.dbt/profiles.yml file -profile: 'normalize' +profile: "normalize" # These configurations specify where dbt should look for different types of files. -# The `source-paths` config, for example, states that source models can be found +# The `model-paths` config, for example, states that source models can be found # in the "models/" directory. You probably won't need to change these! -source-paths: ["models"] +model-paths: ["models"] docs-paths: ["docs"] analysis-paths: ["analysis"] test-paths: ["tests"] -data-paths: ["data"] +seed-paths: ["data"] macro-paths: ["macros"] -target-path: "../build" # directory which will store compiled SQL files -log-path: "../logs" # directory which will store DBT logs -modules-path: "/tmp/dbt_modules" # directory which will store external DBT dependencies +target-path: "../build" # directory which will store compiled SQL files +log-path: "../logs" # directory which will store DBT logs +packages-install-path: "/tmp/dbt_modules" # directory which will store external DBT dependencies -clean-targets: # directories to be removed by `dbt clean` - - "build" - - "dbt_modules" +clean-targets: # directories to be removed by `dbt clean` + - "build" + - "dbt_modules" quoting: database: true -# Temporarily disabling the behavior of the ExtendedNameTransformer on table/schema names, see (issue #1785) -# all schemas should be unquoted + # Temporarily disabling the behavior of the ExtendedNameTransformer on table/schema names, see (issue #1785) + # all schemas should be unquoted schema: false identifier: true -# You can define configurations for models in the `source-paths` directory here. +# You can define configurations for models in the `model-paths` directory here. # Using these configurations, you can enable or disable models, change how they # are materialized, and more! 
models: @@ -58,4 +58,4 @@ models: +materialized: view vars: - dbt_utils_dispatch_list: ['airbyte_utils'] + dbt_utils_dispatch_list: ["airbyte_utils"] diff --git a/airbyte-integrations/bases/base-normalization/dbt-project-template-mssql/packages.yml b/airbyte-integrations/bases/base-normalization/dbt-project-template-mssql/packages.yml index 4b74445a3df3..33b4edd58c8c 100755 --- a/airbyte-integrations/bases/base-normalization/dbt-project-template-mssql/packages.yml +++ b/airbyte-integrations/bases/base-normalization/dbt-project-template-mssql/packages.yml @@ -2,4 +2,4 @@ packages: - git: "https://github.com/fishtown-analytics/dbt-utils.git" - revision: 0.7.4 + revision: 0.8.2 diff --git a/airbyte-integrations/bases/base-normalization/dbt-project-template-snowflake/dbt_project.yml b/airbyte-integrations/bases/base-normalization/dbt-project-template-snowflake/dbt_project.yml index c22ddc2282c2..79e4498a7548 100644 --- a/airbyte-integrations/bases/base-normalization/dbt-project-template-snowflake/dbt_project.yml +++ b/airbyte-integrations/bases/base-normalization/dbt-project-template-snowflake/dbt_project.yml @@ -13,18 +13,18 @@ config-version: 2 profile: "normalize" # These configurations specify where dbt should look for different types of files. -# The `source-paths` config, for example, states that source models can be found +# The `model-paths` config, for example, states that source models can be found # in the "models/" directory. You probably won't need to change these! -source-paths: ["models"] +model-paths: ["models"] docs-paths: ["docs"] analysis-paths: ["analysis"] test-paths: ["tests"] -data-paths: ["data"] +seed-paths: ["data"] macro-paths: ["macros"] target-path: "../build" # directory which will store compiled SQL files log-path: "../logs" # directory which will store DBT logs -modules-path: "/tmp/dbt_modules" # directory which will store external DBT dependencies +packages-install-path: "/tmp/dbt_modules" # directory which will store external DBT dependencies clean-targets: # directories to be removed by `dbt clean` - "build" @@ -37,7 +37,7 @@ quoting: schema: false identifier: true -# You can define configurations for models in the `source-paths` directory here. +# You can define configurations for models in the `model-paths` directory here. # Using these configurations, you can enable or disable models, change how they # are materialized, and more! models: diff --git a/airbyte-integrations/bases/base-normalization/dbt-project-template/dbt_project.yml b/airbyte-integrations/bases/base-normalization/dbt-project-template/dbt_project.yml index 9ad815875900..eb867a236e5c 100755 --- a/airbyte-integrations/bases/base-normalization/dbt-project-template/dbt_project.yml +++ b/airbyte-integrations/bases/base-normalization/dbt-project-template/dbt_project.yml @@ -4,40 +4,40 @@ # Name your package! Package names should contain only lowercase characters # and underscores. A good package name should reflect your organization's # name or the intended use of these models -name: 'airbyte_utils' -version: '1.0' +name: "airbyte_utils" +version: "1.0" config-version: 2 # This setting configures which "profile" dbt uses for this project. Profiles contain # database connection information, and should be configured in the ~/.dbt/profiles.yml file -profile: 'normalize' +profile: "normalize" # These configurations specify where dbt should look for different types of files. 
-# The `source-paths` config, for example, states that source models can be found +# The `model-paths` config, for example, states that source models can be found # in the "models/" directory. You probably won't need to change these! -source-paths: ["models"] +model-paths: ["models"] docs-paths: ["docs"] analysis-paths: ["analysis"] test-paths: ["tests"] -data-paths: ["data"] +seed-paths: ["data"] macro-paths: ["macros"] -target-path: "../build" # directory which will store compiled SQL files -log-path: "../logs" # directory which will store DBT logs -modules-path: "/tmp/dbt_modules" # directory which will store external DBT dependencies +target-path: "../build" # directory which will store compiled SQL files +log-path: "../logs" # directory which will store DBT logs +packages-install-path: "/tmp/dbt_modules" # directory which will store external DBT dependencies -clean-targets: # directories to be removed by `dbt clean` - - "build" - - "dbt_modules" +clean-targets: # directories to be removed by `dbt clean` + - "build" + - "dbt_modules" quoting: database: true -# Temporarily disabling the behavior of the ExtendedNameTransformer on table/schema names, see (issue #1785) -# all schemas should be unquoted + # Temporarily disabling the behavior of the ExtendedNameTransformer on table/schema names, see (issue #1785) + # all schemas should be unquoted schema: false identifier: true -# You can define configurations for models in the `source-paths` directory here. +# You can define configurations for models in the `model-paths` directory here. # Using these configurations, you can enable or disable models, change how they # are materialized, and more! models: @@ -60,4 +60,4 @@ models: dispatch: - macro_namespace: dbt_utils - search_order: ['airbyte_utils', 'dbt_utils'] + search_order: ["airbyte_utils", "dbt_utils"] diff --git a/airbyte-integrations/bases/base-normalization/dbt-project-template/packages.yml b/airbyte-integrations/bases/base-normalization/dbt-project-template/packages.yml index 4b74445a3df3..33b4edd58c8c 100755 --- a/airbyte-integrations/bases/base-normalization/dbt-project-template/packages.yml +++ b/airbyte-integrations/bases/base-normalization/dbt-project-template/packages.yml @@ -2,4 +2,4 @@ packages: - git: "https://github.com/fishtown-analytics/dbt-utils.git" - revision: 0.7.4 + revision: 0.8.2 diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_nested_streams/dbt_project.yml b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_nested_streams/dbt_project.yml index 9ad815875900..eb867a236e5c 100755 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_nested_streams/dbt_project.yml +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_nested_streams/dbt_project.yml @@ -4,40 +4,40 @@ # Name your package! Package names should contain only lowercase characters # and underscores. A good package name should reflect your organization's # name or the intended use of these models -name: 'airbyte_utils' -version: '1.0' +name: "airbyte_utils" +version: "1.0" config-version: 2 # This setting configures which "profile" dbt uses for this project. 
Profiles contain # database connection information, and should be configured in the ~/.dbt/profiles.yml file -profile: 'normalize' +profile: "normalize" # These configurations specify where dbt should look for different types of files. -# The `source-paths` config, for example, states that source models can be found +# The `model-paths` config, for example, states that source models can be found # in the "models/" directory. You probably won't need to change these! -source-paths: ["models"] +model-paths: ["models"] docs-paths: ["docs"] analysis-paths: ["analysis"] test-paths: ["tests"] -data-paths: ["data"] +seed-paths: ["data"] macro-paths: ["macros"] -target-path: "../build" # directory which will store compiled SQL files -log-path: "../logs" # directory which will store DBT logs -modules-path: "/tmp/dbt_modules" # directory which will store external DBT dependencies +target-path: "../build" # directory which will store compiled SQL files +log-path: "../logs" # directory which will store DBT logs +packages-install-path: "/tmp/dbt_modules" # directory which will store external DBT dependencies -clean-targets: # directories to be removed by `dbt clean` - - "build" - - "dbt_modules" +clean-targets: # directories to be removed by `dbt clean` + - "build" + - "dbt_modules" quoting: database: true -# Temporarily disabling the behavior of the ExtendedNameTransformer on table/schema names, see (issue #1785) -# all schemas should be unquoted + # Temporarily disabling the behavior of the ExtendedNameTransformer on table/schema names, see (issue #1785) + # all schemas should be unquoted schema: false identifier: true -# You can define configurations for models in the `source-paths` directory here. +# You can define configurations for models in the `model-paths` directory here. # Using these configurations, you can enable or disable models, change how they # are materialized, and more! models: @@ -60,4 +60,4 @@ models: dispatch: - macro_namespace: dbt_utils - search_order: ['airbyte_utils', 'dbt_utils'] + search_order: ["airbyte_utils", "dbt_utils"] diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/dbt_project.yml b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/dbt_project.yml index 522b1e595e8a..7dcef9c28cd5 100755 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/dbt_project.yml +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/dbt_project.yml @@ -4,40 +4,40 @@ # Name your package! Package names should contain only lowercase characters # and underscores. A good package name should reflect your organization's # name or the intended use of these models -name: 'airbyte_utils' -version: '1.0' +name: "airbyte_utils" +version: "1.0" config-version: 2 # This setting configures which "profile" dbt uses for this project. Profiles contain # database connection information, and should be configured in the ~/.dbt/profiles.yml file -profile: 'normalize' +profile: "normalize" # These configurations specify where dbt should look for different types of files. -# The `source-paths` config, for example, states that source models can be found +# The `model-paths` config, for example, states that source models can be found # in the "models/" directory. You probably won't need to change these! 
-source-paths: ["modified_models"] +model-paths: ["modified_models"] docs-paths: ["docs"] analysis-paths: ["analysis"] test-paths: ["tests"] -data-paths: ["data"] +seed-paths: ["data"] macro-paths: ["macros"] -target-path: "../build" # directory which will store compiled SQL files -log-path: "../logs" # directory which will store DBT logs -modules-path: "/tmp/dbt_modules" # directory which will store external DBT dependencies +target-path: "../build" # directory which will store compiled SQL files +log-path: "../logs" # directory which will store DBT logs +packages-install-path: "/tmp/dbt_modules" # directory which will store external DBT dependencies -clean-targets: # directories to be removed by `dbt clean` - - "build" - - "dbt_modules" +clean-targets: # directories to be removed by `dbt clean` + - "build" + - "dbt_modules" quoting: database: true -# Temporarily disabling the behavior of the ExtendedNameTransformer on table/schema names, see (issue #1785) -# all schemas should be unquoted + # Temporarily disabling the behavior of the ExtendedNameTransformer on table/schema names, see (issue #1785) + # all schemas should be unquoted schema: false identifier: true -# You can define configurations for models in the `source-paths` directory here. +# You can define configurations for models in the `model-paths` directory here. # Using these configurations, you can enable or disable models, change how they # are materialized, and more! models: @@ -60,4 +60,4 @@ models: dispatch: - macro_namespace: dbt_utils - search_order: ['airbyte_utils', 'dbt_utils'] + search_order: ["airbyte_utils", "dbt_utils"] diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/first_dbt_project.yml b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/first_dbt_project.yml index 9ad815875900..eb867a236e5c 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/first_dbt_project.yml +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/first_dbt_project.yml @@ -4,40 +4,40 @@ # Name your package! Package names should contain only lowercase characters # and underscores. A good package name should reflect your organization's # name or the intended use of these models -name: 'airbyte_utils' -version: '1.0' +name: "airbyte_utils" +version: "1.0" config-version: 2 # This setting configures which "profile" dbt uses for this project. Profiles contain # database connection information, and should be configured in the ~/.dbt/profiles.yml file -profile: 'normalize' +profile: "normalize" # These configurations specify where dbt should look for different types of files. -# The `source-paths` config, for example, states that source models can be found +# The `model-paths` config, for example, states that source models can be found # in the "models/" directory. You probably won't need to change these! 
-source-paths: ["models"] +model-paths: ["models"] docs-paths: ["docs"] analysis-paths: ["analysis"] test-paths: ["tests"] -data-paths: ["data"] +seed-paths: ["data"] macro-paths: ["macros"] -target-path: "../build" # directory which will store compiled SQL files -log-path: "../logs" # directory which will store DBT logs -modules-path: "/tmp/dbt_modules" # directory which will store external DBT dependencies +target-path: "../build" # directory which will store compiled SQL files +log-path: "../logs" # directory which will store DBT logs +packages-install-path: "/tmp/dbt_modules" # directory which will store external DBT dependencies -clean-targets: # directories to be removed by `dbt clean` - - "build" - - "dbt_modules" +clean-targets: # directories to be removed by `dbt clean` + - "build" + - "dbt_modules" quoting: database: true -# Temporarily disabling the behavior of the ExtendedNameTransformer on table/schema names, see (issue #1785) -# all schemas should be unquoted + # Temporarily disabling the behavior of the ExtendedNameTransformer on table/schema names, see (issue #1785) + # all schemas should be unquoted schema: false identifier: true -# You can define configurations for models in the `source-paths` directory here. +# You can define configurations for models in the `model-paths` directory here. # Using these configurations, you can enable or disable models, change how they # are materialized, and more! models: @@ -60,4 +60,4 @@ models: dispatch: - macro_namespace: dbt_utils - search_order: ['airbyte_utils', 'dbt_utils'] + search_order: ["airbyte_utils", "dbt_utils"] diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/first_output/airbyte_tables/test_normalization/exchange_rate.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/first_output/airbyte_tables/test_normalization/exchange_rate.sql index a06574b3c89e..068e8a1fcf6f 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/first_output/airbyte_tables/test_normalization/exchange_rate.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/first_output/airbyte_tables/test_normalization/exchange_rate.sql @@ -114,4 +114,4 @@ from __dbt__cte__exchange_rate_ab3 -- exchange_rate from `dataline-integration-testing`.test_normalization._airbyte_raw_exchange_rate where 1 = 1 ); - \ No newline at end of file + \ No newline at end of file diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/second_output/airbyte_tables/test_normalization/exchange_rate.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/second_output/airbyte_tables/test_normalization/exchange_rate.sql index a06574b3c89e..068e8a1fcf6f 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/second_output/airbyte_tables/test_normalization/exchange_rate.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/second_output/airbyte_tables/test_normalization/exchange_rate.sql @@ -114,4 +114,4 @@ from __dbt__cte__exchange_rate_ab3 -- exchange_rate from 
`dataline-integration-testing`.test_normalization._airbyte_raw_exchange_rate where 1 = 1 ); - \ No newline at end of file + \ No newline at end of file diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/third_output/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/third_output/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql index 79e9dea40754..ac1136c84b7a 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/third_output/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/third_output/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql @@ -15,13 +15,13 @@ when matched then update set - `_airbyte_unique_key` = DBT_INTERNAL_SOURCE.`_airbyte_unique_key`,`_airbyte_unique_key_scd` = DBT_INTERNAL_SOURCE.`_airbyte_unique_key_scd`,`id` = DBT_INTERNAL_SOURCE.`id`,`currency` = DBT_INTERNAL_SOURCE.`currency`,`date` = DBT_INTERNAL_SOURCE.`date`,`timestamp_col` = DBT_INTERNAL_SOURCE.`timestamp_col`,`HKD_special___characters` = DBT_INTERNAL_SOURCE.`HKD_special___characters`,`NZD` = DBT_INTERNAL_SOURCE.`NZD`,`USD` = DBT_INTERNAL_SOURCE.`USD`,`_airbyte_start_at` = DBT_INTERNAL_SOURCE.`_airbyte_start_at`,`_airbyte_end_at` = DBT_INTERNAL_SOURCE.`_airbyte_end_at`,`_airbyte_active_row` = DBT_INTERNAL_SOURCE.`_airbyte_active_row`,`_airbyte_ab_id` = DBT_INTERNAL_SOURCE.`_airbyte_ab_id`,`_airbyte_emitted_at` = DBT_INTERNAL_SOURCE.`_airbyte_emitted_at`,`_airbyte_normalized_at` = DBT_INTERNAL_SOURCE.`_airbyte_normalized_at`,`_airbyte_dedup_exchange_rate_hashid` = DBT_INTERNAL_SOURCE.`_airbyte_dedup_exchange_rate_hashid`,`new_column` = DBT_INTERNAL_SOURCE.`new_column` + `_airbyte_unique_key` = DBT_INTERNAL_SOURCE.`_airbyte_unique_key`,`_airbyte_unique_key_scd` = DBT_INTERNAL_SOURCE.`_airbyte_unique_key_scd`,`id` = DBT_INTERNAL_SOURCE.`id`,`currency` = DBT_INTERNAL_SOURCE.`currency`,`new_column` = DBT_INTERNAL_SOURCE.`new_column`,`date` = DBT_INTERNAL_SOURCE.`date`,`timestamp_col` = DBT_INTERNAL_SOURCE.`timestamp_col`,`HKD_special___characters` = DBT_INTERNAL_SOURCE.`HKD_special___characters`,`NZD` = DBT_INTERNAL_SOURCE.`NZD`,`USD` = DBT_INTERNAL_SOURCE.`USD`,`_airbyte_start_at` = DBT_INTERNAL_SOURCE.`_airbyte_start_at`,`_airbyte_end_at` = DBT_INTERNAL_SOURCE.`_airbyte_end_at`,`_airbyte_active_row` = DBT_INTERNAL_SOURCE.`_airbyte_active_row`,`_airbyte_ab_id` = DBT_INTERNAL_SOURCE.`_airbyte_ab_id`,`_airbyte_emitted_at` = DBT_INTERNAL_SOURCE.`_airbyte_emitted_at`,`_airbyte_normalized_at` = DBT_INTERNAL_SOURCE.`_airbyte_normalized_at`,`_airbyte_dedup_exchange_rate_hashid` = DBT_INTERNAL_SOURCE.`_airbyte_dedup_exchange_rate_hashid` when not matched then insert - (`_airbyte_unique_key`, `_airbyte_unique_key_scd`, `id`, `currency`, `date`, `timestamp_col`, `HKD_special___characters`, `NZD`, `USD`, `_airbyte_start_at`, `_airbyte_end_at`, `_airbyte_active_row`, `_airbyte_ab_id`, `_airbyte_emitted_at`, `_airbyte_normalized_at`, `_airbyte_dedup_exchange_rate_hashid`, `new_column`) + (`_airbyte_unique_key`, `_airbyte_unique_key_scd`, `id`, `currency`, `new_column`, `date`, `timestamp_col`, `HKD_special___characters`, `NZD`, `USD`, 
`_airbyte_start_at`, `_airbyte_end_at`, `_airbyte_active_row`, `_airbyte_ab_id`, `_airbyte_emitted_at`, `_airbyte_normalized_at`, `_airbyte_dedup_exchange_rate_hashid`) values - (`_airbyte_unique_key`, `_airbyte_unique_key_scd`, `id`, `currency`, `date`, `timestamp_col`, `HKD_special___characters`, `NZD`, `USD`, `_airbyte_start_at`, `_airbyte_end_at`, `_airbyte_active_row`, `_airbyte_ab_id`, `_airbyte_emitted_at`, `_airbyte_normalized_at`, `_airbyte_dedup_exchange_rate_hashid`, `new_column`) + (`_airbyte_unique_key`, `_airbyte_unique_key_scd`, `id`, `currency`, `new_column`, `date`, `timestamp_col`, `HKD_special___characters`, `NZD`, `USD`, `_airbyte_start_at`, `_airbyte_end_at`, `_airbyte_active_row`, `_airbyte_ab_id`, `_airbyte_emitted_at`, `_airbyte_normalized_at`, `_airbyte_dedup_exchange_rate_hashid`) \ No newline at end of file diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/third_output/airbyte_incremental/test_normalization/dedup_exchange_rate.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/third_output/airbyte_incremental/test_normalization/dedup_exchange_rate.sql index f5f7e5ca80df..a36197a213f4 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/third_output/airbyte_incremental/test_normalization/dedup_exchange_rate.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/third_output/airbyte_incremental/test_normalization/dedup_exchange_rate.sql @@ -15,13 +15,13 @@ when matched then update set - `_airbyte_unique_key` = DBT_INTERNAL_SOURCE.`_airbyte_unique_key`,`id` = DBT_INTERNAL_SOURCE.`id`,`currency` = DBT_INTERNAL_SOURCE.`currency`,`date` = DBT_INTERNAL_SOURCE.`date`,`timestamp_col` = DBT_INTERNAL_SOURCE.`timestamp_col`,`HKD_special___characters` = DBT_INTERNAL_SOURCE.`HKD_special___characters`,`NZD` = DBT_INTERNAL_SOURCE.`NZD`,`USD` = DBT_INTERNAL_SOURCE.`USD`,`_airbyte_ab_id` = DBT_INTERNAL_SOURCE.`_airbyte_ab_id`,`_airbyte_emitted_at` = DBT_INTERNAL_SOURCE.`_airbyte_emitted_at`,`_airbyte_normalized_at` = DBT_INTERNAL_SOURCE.`_airbyte_normalized_at`,`_airbyte_dedup_exchange_rate_hashid` = DBT_INTERNAL_SOURCE.`_airbyte_dedup_exchange_rate_hashid`,`new_column` = DBT_INTERNAL_SOURCE.`new_column` + `_airbyte_unique_key` = DBT_INTERNAL_SOURCE.`_airbyte_unique_key`,`id` = DBT_INTERNAL_SOURCE.`id`,`currency` = DBT_INTERNAL_SOURCE.`currency`,`new_column` = DBT_INTERNAL_SOURCE.`new_column`,`date` = DBT_INTERNAL_SOURCE.`date`,`timestamp_col` = DBT_INTERNAL_SOURCE.`timestamp_col`,`HKD_special___characters` = DBT_INTERNAL_SOURCE.`HKD_special___characters`,`NZD` = DBT_INTERNAL_SOURCE.`NZD`,`USD` = DBT_INTERNAL_SOURCE.`USD`,`_airbyte_ab_id` = DBT_INTERNAL_SOURCE.`_airbyte_ab_id`,`_airbyte_emitted_at` = DBT_INTERNAL_SOURCE.`_airbyte_emitted_at`,`_airbyte_normalized_at` = DBT_INTERNAL_SOURCE.`_airbyte_normalized_at`,`_airbyte_dedup_exchange_rate_hashid` = DBT_INTERNAL_SOURCE.`_airbyte_dedup_exchange_rate_hashid` when not matched then insert - (`_airbyte_unique_key`, `id`, `currency`, `date`, `timestamp_col`, `HKD_special___characters`, `NZD`, `USD`, `_airbyte_ab_id`, `_airbyte_emitted_at`, `_airbyte_normalized_at`, `_airbyte_dedup_exchange_rate_hashid`, `new_column`) + (`_airbyte_unique_key`, `id`, `currency`, `new_column`, `date`, `timestamp_col`, `HKD_special___characters`, 
`NZD`, `USD`, `_airbyte_ab_id`, `_airbyte_emitted_at`, `_airbyte_normalized_at`, `_airbyte_dedup_exchange_rate_hashid`) values - (`_airbyte_unique_key`, `id`, `currency`, `date`, `timestamp_col`, `HKD_special___characters`, `NZD`, `USD`, `_airbyte_ab_id`, `_airbyte_emitted_at`, `_airbyte_normalized_at`, `_airbyte_dedup_exchange_rate_hashid`, `new_column`) + (`_airbyte_unique_key`, `id`, `currency`, `new_column`, `date`, `timestamp_col`, `HKD_special___characters`, `NZD`, `USD`, `_airbyte_ab_id`, `_airbyte_emitted_at`, `_airbyte_normalized_at`, `_airbyte_dedup_exchange_rate_hashid`) \ No newline at end of file diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/third_output/airbyte_tables/test_normalization/exchange_rate.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/third_output/airbyte_tables/test_normalization/exchange_rate.sql index 133b78a3854c..49688da71ec1 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/third_output/airbyte_tables/test_normalization/exchange_rate.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/bigquery/test_simple_streams/third_output/airbyte_tables/test_normalization/exchange_rate.sql @@ -114,4 +114,4 @@ from __dbt__cte__exchange_rate_ab3 -- exchange_rate from `dataline-integration-testing`.test_normalization._airbyte_raw_exchange_rate where 1 = 1 ); - \ No newline at end of file + \ No newline at end of file diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/dbt_project.yml b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/dbt_project.yml index 9ad815875900..eb867a236e5c 100755 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/dbt_project.yml +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/dbt_project.yml @@ -4,40 +4,40 @@ # Name your package! Package names should contain only lowercase characters # and underscores. A good package name should reflect your organization's # name or the intended use of these models -name: 'airbyte_utils' -version: '1.0' +name: "airbyte_utils" +version: "1.0" config-version: 2 # This setting configures which "profile" dbt uses for this project. Profiles contain # database connection information, and should be configured in the ~/.dbt/profiles.yml file -profile: 'normalize' +profile: "normalize" # These configurations specify where dbt should look for different types of files. -# The `source-paths` config, for example, states that source models can be found +# The `model-paths` config, for example, states that source models can be found # in the "models/" directory. You probably won't need to change these! 
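The three BigQuery third_output hunks above change only column order: once the schema-change test adds new_column, it is emitted next to currency instead of being appended after the hashid column. For orientation, a trimmed sketch of the MERGE shape these fixtures contain; the DBT_INTERNAL_DEST alias, the on clause, and the table names are illustrative assumptions here (only DBT_INTERNAL_SOURCE and the column lists appear in the fixtures), and most columns are elided:

    merge into `dataline-integration-testing`.test_normalization.`dedup_exchange_rate` as DBT_INTERNAL_DEST
    using `dedup_exchange_rate__dbt_tmp` as DBT_INTERNAL_SOURCE
    on DBT_INTERNAL_SOURCE.`_airbyte_unique_key` = DBT_INTERNAL_DEST.`_airbyte_unique_key`
    -- on a key match, overwrite every tracked column; `new_column` now sits beside `currency`
    when matched then update set
        `id` = DBT_INTERNAL_SOURCE.`id`,
        `currency` = DBT_INTERNAL_SOURCE.`currency`,
        `new_column` = DBT_INTERNAL_SOURCE.`new_column`
    -- otherwise insert the full row, in the same column order
    when not matched then insert
        (`id`, `currency`, `new_column`)
    values
        (`id`, `currency`, `new_column`)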
-source-paths: ["models"] +model-paths: ["models"] docs-paths: ["docs"] analysis-paths: ["analysis"] test-paths: ["tests"] -data-paths: ["data"] +seed-paths: ["data"] macro-paths: ["macros"] -target-path: "../build" # directory which will store compiled SQL files -log-path: "../logs" # directory which will store DBT logs -modules-path: "/tmp/dbt_modules" # directory which will store external DBT dependencies +target-path: "../build" # directory which will store compiled SQL files +log-path: "../logs" # directory which will store DBT logs +packages-install-path: "/tmp/dbt_modules" # directory which will store external DBT dependencies -clean-targets: # directories to be removed by `dbt clean` - - "build" - - "dbt_modules" +clean-targets: # directories to be removed by `dbt clean` + - "build" + - "dbt_modules" quoting: database: true -# Temporarily disabling the behavior of the ExtendedNameTransformer on table/schema names, see (issue #1785) -# all schemas should be unquoted + # Temporarily disabling the behavior of the ExtendedNameTransformer on table/schema names, see (issue #1785) + # all schemas should be unquoted schema: false identifier: true -# You can define configurations for models in the `source-paths` directory here. +# You can define configurations for models in the `model-paths` directory here. # Using these configurations, you can enable or disable models, change how they # are materialized, and more! models: @@ -60,4 +60,4 @@ models: dispatch: - macro_namespace: dbt_utils - search_order: ['airbyte_utils', 'dbt_utils'] + search_order: ["airbyte_utils", "dbt_utils"] diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_incremental/scd/test_normalization/nested_stream_with_c__lting_into_long_names_scd.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_incremental/scd/test_normalization/nested_stream_with_c__lting_into_long_names_scd.sql index 327e14609a57..7026a868cc5d 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_incremental/scd/test_normalization/nested_stream_with_c__lting_into_long_names_scd.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_incremental/scd/test_normalization/nested_stream_with_c__lting_into_long_names_scd.sql @@ -1,14 +1,15 @@ - delete - from "postgres".test_normalization."nested_stream_with_c__lting_into_long_names_scd" + + delete from "postgres".test_normalization."nested_stream_with_c__lting_into_long_names_scd" where (_airbyte_unique_key_scd) in ( select (_airbyte_unique_key_scd) from "nested_stream_with_c__lting_into_long_name__dbt_tmp" ); + insert into "postgres".test_normalization."nested_stream_with_c__lting_into_long_names_scd" ("_airbyte_unique_key", "_airbyte_unique_key_scd", "id", "date", "partition", "_airbyte_start_at", "_airbyte_end_at", "_airbyte_active_row", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_nested_stre__nto_long_names_hashid") ( - select "_airbyte_unique_key", "_airbyte_unique_key_scd", "id", "date", "partition", "_airbyte_start_at", "_airbyte_end_at", "_airbyte_active_row", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_nested_stre__nto_long_names_hashid" - from 
"nested_stream_with_c__lting_into_long_name__dbt_tmp" - ); + select "_airbyte_unique_key", "_airbyte_unique_key_scd", "id", "date", "partition", "_airbyte_start_at", "_airbyte_end_at", "_airbyte_active_row", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_nested_stre__nto_long_names_hashid" + from "nested_stream_with_c__lting_into_long_name__dbt_tmp" + ) \ No newline at end of file diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_incremental/scd/test_normalization/some_stream_that_was_empty_scd.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_incremental/scd/test_normalization/some_stream_that_was_empty_scd.sql index ba1bf91f3e1f..cb4ff47eeea7 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_incremental/scd/test_normalization/some_stream_that_was_empty_scd.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_incremental/scd/test_normalization/some_stream_that_was_empty_scd.sql @@ -1,14 +1,15 @@ - delete - from "postgres".test_normalization."some_stream_that_was_empty_scd" + + delete from "postgres".test_normalization."some_stream_that_was_empty_scd" where (_airbyte_unique_key_scd) in ( select (_airbyte_unique_key_scd) from "some_stream_that_was_empty_scd__dbt_tmp" ); + insert into "postgres".test_normalization."some_stream_that_was_empty_scd" ("_airbyte_unique_key", "_airbyte_unique_key_scd", "id", "date", "_airbyte_start_at", "_airbyte_end_at", "_airbyte_active_row", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_some_stream_that_was_empty_hashid") ( - select "_airbyte_unique_key", "_airbyte_unique_key_scd", "id", "date", "_airbyte_start_at", "_airbyte_end_at", "_airbyte_active_row", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_some_stream_that_was_empty_hashid" - from "some_stream_that_was_empty_scd__dbt_tmp" - ); + select "_airbyte_unique_key", "_airbyte_unique_key_scd", "id", "date", "_airbyte_start_at", "_airbyte_end_at", "_airbyte_active_row", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_some_stream_that_was_empty_hashid" + from "some_stream_that_was_empty_scd__dbt_tmp" + ) \ No newline at end of file diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_incremental/test_normalization/nested_stream_with_c___long_names_partition.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_incremental/test_normalization/nested_stream_with_c___long_names_partition.sql index 17af7dd9eb48..3a98824ffdd1 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_incremental/test_normalization/nested_stream_with_c___long_names_partition.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_incremental/test_normalization/nested_stream_with_c___long_names_partition.sql @@ -3,7 +3,7 @@ insert into 
"postgres".test_normalization."nested_stream_with_c___long_names_partition" ("_airbyte_nested_stre__nto_long_names_hashid", "double_array_data", "DATA", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_partition_hashid") ( - select "_airbyte_nested_stre__nto_long_names_hashid", "double_array_data", "DATA", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_partition_hashid" - from "nested_stream_with_c___long_names_partitio__dbt_tmp" - ); + select "_airbyte_nested_stre__nto_long_names_hashid", "double_array_data", "DATA", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_partition_hashid" + from "nested_stream_with_c___long_names_partitio__dbt_tmp" + ) \ No newline at end of file diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_incremental/test_normalization/nested_stream_with_c___names_partition_data.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_incremental/test_normalization/nested_stream_with_c___names_partition_data.sql index 5c12b7ee08da..b90c9cb238e0 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_incremental/test_normalization/nested_stream_with_c___names_partition_data.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_incremental/test_normalization/nested_stream_with_c___names_partition_data.sql @@ -3,7 +3,7 @@ insert into "postgres".test_normalization."nested_stream_with_c___names_partition_data" ("_airbyte_partition_hashid", "currency", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_data_hashid") ( - select "_airbyte_partition_hashid", "currency", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_data_hashid" - from "nested_stream_with_c___names_partition_dat__dbt_tmp" - ); + select "_airbyte_partition_hashid", "currency", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_data_hashid" + from "nested_stream_with_c___names_partition_dat__dbt_tmp" + ) \ No newline at end of file diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_incremental/test_normalization/nested_stream_with_c__ion_double_array_data.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_incremental/test_normalization/nested_stream_with_c__ion_double_array_data.sql index 8b12de7eda4a..98dfb2ba788b 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_incremental/test_normalization/nested_stream_with_c__ion_double_array_data.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_incremental/test_normalization/nested_stream_with_c__ion_double_array_data.sql @@ -3,7 +3,7 @@ insert into "postgres".test_normalization."nested_stream_with_c__ion_double_array_data" ("_airbyte_partition_hashid", "id", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", 
"_airbyte_double_array_data_hashid") ( - select "_airbyte_partition_hashid", "id", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_double_array_data_hashid" - from "nested_stream_with_c__ion_double_array_dat__dbt_tmp" - ); + select "_airbyte_partition_hashid", "id", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_double_array_data_hashid" + from "nested_stream_with_c__ion_double_array_dat__dbt_tmp" + ) \ No newline at end of file diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_incremental/test_normalization/nested_stream_with_c__lting_into_long_names.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_incremental/test_normalization/nested_stream_with_c__lting_into_long_names.sql index 384d4a63247d..bf109e096b70 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_incremental/test_normalization/nested_stream_with_c__lting_into_long_names.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_incremental/test_normalization/nested_stream_with_c__lting_into_long_names.sql @@ -1,14 +1,15 @@ - delete - from "postgres".test_normalization."nested_stream_with_c__lting_into_long_names" + + delete from "postgres".test_normalization."nested_stream_with_c__lting_into_long_names" where (_airbyte_unique_key) in ( select (_airbyte_unique_key) from "nested_stream_with_c__lting_into_long_name__dbt_tmp" ); + insert into "postgres".test_normalization."nested_stream_with_c__lting_into_long_names" ("_airbyte_unique_key", "id", "date", "partition", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_nested_stre__nto_long_names_hashid") ( - select "_airbyte_unique_key", "id", "date", "partition", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_nested_stre__nto_long_names_hashid" - from "nested_stream_with_c__lting_into_long_name__dbt_tmp" - ); + select "_airbyte_unique_key", "id", "date", "partition", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_nested_stre__nto_long_names_hashid" + from "nested_stream_with_c__lting_into_long_name__dbt_tmp" + ) \ No newline at end of file diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_incremental/test_normalization/nested_stream_with_c__lting_into_long_names_stg.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_incremental/test_normalization/nested_stream_with_c__lting_into_long_names_stg.sql index 9b73172c55bd..275461f97657 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_incremental/test_normalization/nested_stream_with_c__lting_into_long_names_stg.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_incremental/test_normalization/nested_stream_with_c__lting_into_long_names_stg.sql @@ -1,14 +1,15 @@ - delete - from 
"postgres"._airbyte_test_normalization."nested_stream_with_c__lting_into_long_names_stg" + + delete from "postgres"._airbyte_test_normalization."nested_stream_with_c__lting_into_long_names_stg" where (_airbyte_ab_id) in ( select (_airbyte_ab_id) from "nested_stream_with_c__lting_into_long_name__dbt_tmp" ); + insert into "postgres"._airbyte_test_normalization."nested_stream_with_c__lting_into_long_names_stg" ("_airbyte_nested_stre__nto_long_names_hashid", "id", "date", "partition", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at") ( - select "_airbyte_nested_stre__nto_long_names_hashid", "id", "date", "partition", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at" - from "nested_stream_with_c__lting_into_long_name__dbt_tmp" - ); + select "_airbyte_nested_stre__nto_long_names_hashid", "id", "date", "partition", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at" + from "nested_stream_with_c__lting_into_long_name__dbt_tmp" + ) \ No newline at end of file diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_incremental/test_normalization/some_stream_that_was_empty.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_incremental/test_normalization/some_stream_that_was_empty.sql index ca596108dd1f..97759325fe3d 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_incremental/test_normalization/some_stream_that_was_empty.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_incremental/test_normalization/some_stream_that_was_empty.sql @@ -1,14 +1,15 @@ - delete - from "postgres".test_normalization."some_stream_that_was_empty" + + delete from "postgres".test_normalization."some_stream_that_was_empty" where (_airbyte_unique_key) in ( select (_airbyte_unique_key) from "some_stream_that_was_empty__dbt_tmp" ); + insert into "postgres".test_normalization."some_stream_that_was_empty" ("_airbyte_unique_key", "id", "date", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_some_stream_that_was_empty_hashid") ( - select "_airbyte_unique_key", "id", "date", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_some_stream_that_was_empty_hashid" - from "some_stream_that_was_empty__dbt_tmp" - ); + select "_airbyte_unique_key", "id", "date", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_some_stream_that_was_empty_hashid" + from "some_stream_that_was_empty__dbt_tmp" + ) \ No newline at end of file diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_incremental/test_normalization/some_stream_that_was_empty_stg.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_incremental/test_normalization/some_stream_that_was_empty_stg.sql index 6bf13bfb92e9..a0aa7cb30dd6 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_incremental/test_normalization/some_stream_that_was_empty_stg.sql +++ 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_incremental/test_normalization/some_stream_that_was_empty_stg.sql @@ -1,14 +1,15 @@ - delete - from "postgres"._airbyte_test_normalization."some_stream_that_was_empty_stg" + + delete from "postgres"._airbyte_test_normalization."some_stream_that_was_empty_stg" where (_airbyte_ab_id) in ( select (_airbyte_ab_id) from "some_stream_that_was_empty_stg__dbt_tmp" ); + insert into "postgres"._airbyte_test_normalization."some_stream_that_was_empty_stg" ("_airbyte_some_stream_that_was_empty_hashid", "id", "date", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at") ( - select "_airbyte_some_stream_that_was_empty_hashid", "id", "date", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at" - from "some_stream_that_was_empty_stg__dbt_tmp" - ); + select "_airbyte_some_stream_that_was_empty_hashid", "id", "date", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at" + from "some_stream_that_was_empty_stg__dbt_tmp" + ) \ No newline at end of file diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_incremental/test_normalization_namespace/simple_stream_with_n__lting_into_long_names.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_incremental/test_normalization_namespace/simple_stream_with_n__lting_into_long_names.sql index 7fbbe6459fa0..b3397712e600 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_incremental/test_normalization_namespace/simple_stream_with_n__lting_into_long_names.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_nested_streams/second_output/airbyte_incremental/test_normalization_namespace/simple_stream_with_n__lting_into_long_names.sql @@ -1,14 +1,15 @@ - delete - from "postgres".test_normalization_namespace."simple_stream_with_n__lting_into_long_names" + + delete from "postgres".test_normalization_namespace."simple_stream_with_n__lting_into_long_names" where (_airbyte_ab_id) in ( select (_airbyte_ab_id) from "simple_stream_with_n__lting_into_long_name__dbt_tmp" ); + insert into "postgres".test_normalization_namespace."simple_stream_with_n__lting_into_long_names" ("id", "date", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_simple_stre__nto_long_names_hashid") ( - select "id", "date", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_simple_stre__nto_long_names_hashid" - from "simple_stream_with_n__lting_into_long_name__dbt_tmp" - ); + select "id", "date", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_simple_stre__nto_long_names_hashid" + from "simple_stream_with_n__lting_into_long_name__dbt_tmp" + ) \ No newline at end of file diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/dbt_project.yml b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/dbt_project.yml index 522b1e595e8a..7dcef9c28cd5 100755 --- 
a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/dbt_project.yml +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/dbt_project.yml @@ -4,40 +4,40 @@ # Name your package! Package names should contain only lowercase characters # and underscores. A good package name should reflect your organization's # name or the intended use of these models -name: 'airbyte_utils' -version: '1.0' +name: "airbyte_utils" +version: "1.0" config-version: 2 # This setting configures which "profile" dbt uses for this project. Profiles contain # database connection information, and should be configured in the ~/.dbt/profiles.yml file -profile: 'normalize' +profile: "normalize" # These configurations specify where dbt should look for different types of files. -# The `source-paths` config, for example, states that source models can be found +# The `model-paths` config, for example, states that source models can be found # in the "models/" directory. You probably won't need to change these! -source-paths: ["modified_models"] +model-paths: ["modified_models"] docs-paths: ["docs"] analysis-paths: ["analysis"] test-paths: ["tests"] -data-paths: ["data"] +seed-paths: ["data"] macro-paths: ["macros"] -target-path: "../build" # directory which will store compiled SQL files -log-path: "../logs" # directory which will store DBT logs -modules-path: "/tmp/dbt_modules" # directory which will store external DBT dependencies +target-path: "../build" # directory which will store compiled SQL files +log-path: "../logs" # directory which will store DBT logs +packages-install-path: "/tmp/dbt_modules" # directory which will store external DBT dependencies -clean-targets: # directories to be removed by `dbt clean` - - "build" - - "dbt_modules" +clean-targets: # directories to be removed by `dbt clean` + - "build" + - "dbt_modules" quoting: database: true -# Temporarily disabling the behavior of the ExtendedNameTransformer on table/schema names, see (issue #1785) -# all schemas should be unquoted + # Temporarily disabling the behavior of the ExtendedNameTransformer on table/schema names, see (issue #1785) + # all schemas should be unquoted schema: false identifier: true -# You can define configurations for models in the `source-paths` directory here. +# You can define configurations for models in the `model-paths` directory here. # Using these configurations, you can enable or disable models, change how they # are materialized, and more! models: @@ -60,4 +60,4 @@ models: dispatch: - macro_namespace: dbt_utils - search_order: ['airbyte_utils', 'dbt_utils'] + search_order: ["airbyte_utils", "dbt_utils"] diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_dbt_project.yml b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_dbt_project.yml index 9ad815875900..eb867a236e5c 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_dbt_project.yml +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/first_dbt_project.yml @@ -4,40 +4,40 @@ # Name your package! Package names should contain only lowercase characters # and underscores. 
A good package name should reflect your organization's # name or the intended use of these models -name: 'airbyte_utils' -version: '1.0' +name: "airbyte_utils" +version: "1.0" config-version: 2 # This setting configures which "profile" dbt uses for this project. Profiles contain # database connection information, and should be configured in the ~/.dbt/profiles.yml file -profile: 'normalize' +profile: "normalize" # These configurations specify where dbt should look for different types of files. -# The `source-paths` config, for example, states that source models can be found +# The `model-paths` config, for example, states that source models can be found # in the "models/" directory. You probably won't need to change these! -source-paths: ["models"] +model-paths: ["models"] docs-paths: ["docs"] analysis-paths: ["analysis"] test-paths: ["tests"] -data-paths: ["data"] +seed-paths: ["data"] macro-paths: ["macros"] -target-path: "../build" # directory which will store compiled SQL files -log-path: "../logs" # directory which will store DBT logs -modules-path: "/tmp/dbt_modules" # directory which will store external DBT dependencies +target-path: "../build" # directory which will store compiled SQL files +log-path: "../logs" # directory which will store DBT logs +packages-install-path: "/tmp/dbt_modules" # directory which will store external DBT dependencies -clean-targets: # directories to be removed by `dbt clean` - - "build" - - "dbt_modules" +clean-targets: # directories to be removed by `dbt clean` + - "build" + - "dbt_modules" quoting: database: true -# Temporarily disabling the behavior of the ExtendedNameTransformer on table/schema names, see (issue #1785) -# all schemas should be unquoted + # Temporarily disabling the behavior of the ExtendedNameTransformer on table/schema names, see (issue #1785) + # all schemas should be unquoted schema: false identifier: true -# You can define configurations for models in the `source-paths` directory here. +# You can define configurations for models in the `model-paths` directory here. # Using these configurations, you can enable or disable models, change how they # are materialized, and more! 
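Unlike the BigQuery fixtures, the Postgres incremental fixtures in the hunks below use dbt's delete+insert strategy, and the regenerated output only reflows whitespace, collapses delete/from onto one line, and drops the trailing semicolon and final newline. The recurring shape, with the column list trimmed to three columns for readability:

    -- drop any existing rows whose unique key reappears in the new batch
    delete from "postgres".test_normalization."dedup_exchange_rate"
    where (_airbyte_unique_key) in (
        select (_airbyte_unique_key)
        from "dedup_exchange_rate__dbt_tmp"
    );

    -- then append the fresh version of those rows from the temp relation
    insert into "postgres".test_normalization."dedup_exchange_rate" ("_airbyte_unique_key", "id", "currency")
    (
        select "_airbyte_unique_key", "id", "currency"
        from "dedup_exchange_rate__dbt_tmp"
    )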
 models:
@@ -60,4 +60,4 @@ models:

 dispatch:
   - macro_namespace: dbt_utils
-    search_order: ['airbyte_utils', 'dbt_utils']
+    search_order: ["airbyte_utils", "dbt_utils"]
diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/scd/test_normalization/1_prefix_startwith_number_scd.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/scd/test_normalization/1_prefix_startwith_number_scd.sql
index 3eb10166dd31..6fe661c181e0 100644
--- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/scd/test_normalization/1_prefix_startwith_number_scd.sql
+++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/scd/test_normalization/1_prefix_startwith_number_scd.sql
@@ -1,14 +1,15 @@
-    delete
-    from "postgres".test_normalization."1_prefix_startwith_number_scd"
+
+    delete from "postgres".test_normalization."1_prefix_startwith_number_scd"
     where (_airbyte_unique_key_scd) in (
         select (_airbyte_unique_key_scd)
        from "1_prefix_startwith_number_scd__dbt_tmp"
     );
+
     insert into "postgres".test_normalization."1_prefix_startwith_number_scd" ("_airbyte_unique_key", "_airbyte_unique_key_scd", "id", "date", "text", "_airbyte_start_at", "_airbyte_end_at", "_airbyte_active_row", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_1_prefix_startwith_number_hashid")
     (
-    select "_airbyte_unique_key", "_airbyte_unique_key_scd", "id", "date", "text", "_airbyte_start_at", "_airbyte_end_at", "_airbyte_active_row", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_1_prefix_startwith_number_hashid"
-    from "1_prefix_startwith_number_scd__dbt_tmp"
-    );
+        select "_airbyte_unique_key", "_airbyte_unique_key_scd", "id", "date", "text", "_airbyte_start_at", "_airbyte_end_at", "_airbyte_active_row", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_1_prefix_startwith_number_hashid"
+        from "1_prefix_startwith_number_scd__dbt_tmp"
+    )
\ No newline at end of file
diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/scd/test_normalization/dedup_cdc_excluded_scd.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/scd/test_normalization/dedup_cdc_excluded_scd.sql
index ff39afab32ca..a1fba0a6d7ff 100644
--- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/scd/test_normalization/dedup_cdc_excluded_scd.sql
+++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/scd/test_normalization/dedup_cdc_excluded_scd.sql
@@ -1,14 +1,15 @@
-    delete
-    from "postgres".test_normalization."dedup_cdc_excluded_scd"
+
+    delete from "postgres".test_normalization."dedup_cdc_excluded_scd"
     where (_airbyte_unique_key_scd) in (
         select (_airbyte_unique_key_scd)
        from "dedup_cdc_excluded_scd__dbt_tmp"
     );
+
     insert into "postgres".test_normalization."dedup_cdc_excluded_scd" ("_airbyte_unique_key", "_airbyte_unique_key_scd", "id", "name", "_ab_cdc_lsn", "_ab_cdc_updated_at", "_ab_cdc_deleted_at", "_airbyte_start_at", "_airbyte_end_at", "_airbyte_active_row", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_dedup_cdc_excluded_hashid")
     (
-    select "_airbyte_unique_key", "_airbyte_unique_key_scd", "id", "name", "_ab_cdc_lsn", "_ab_cdc_updated_at", "_ab_cdc_deleted_at", "_airbyte_start_at", "_airbyte_end_at", "_airbyte_active_row", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_dedup_cdc_excluded_hashid"
-    from "dedup_cdc_excluded_scd__dbt_tmp"
-    );
+        select "_airbyte_unique_key", "_airbyte_unique_key_scd", "id", "name", "_ab_cdc_lsn", "_ab_cdc_updated_at", "_ab_cdc_deleted_at", "_airbyte_start_at", "_airbyte_end_at", "_airbyte_active_row", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_dedup_cdc_excluded_hashid"
+        from "dedup_cdc_excluded_scd__dbt_tmp"
+    )
\ No newline at end of file
diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql
index fb60e5523174..0155cd0360b1 100644
--- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql
+++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql
@@ -1,14 +1,15 @@
-    delete
-    from "postgres".test_normalization."dedup_exchange_rate_scd"
+
+    delete from "postgres".test_normalization."dedup_exchange_rate_scd"
     where (_airbyte_unique_key_scd) in (
         select (_airbyte_unique_key_scd)
        from "dedup_exchange_rate_scd__dbt_tmp"
     );
+
     insert into "postgres".test_normalization."dedup_exchange_rate_scd" ("_airbyte_unique_key", "_airbyte_unique_key_scd", "id", "currency", "date", "timestamp_col", "HKD@spéçiäl & characters", "hkd_special___characters", "nzd", "usd", "_airbyte_start_at", "_airbyte_end_at", "_airbyte_active_row", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_dedup_exchange_rate_hashid")
     (
-    select "_airbyte_unique_key", "_airbyte_unique_key_scd", "id", "currency", "date", "timestamp_col", "HKD@spéçiäl & characters", "hkd_special___characters", "nzd", "usd", "_airbyte_start_at", "_airbyte_end_at", "_airbyte_active_row", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_dedup_exchange_rate_hashid"
-    from "dedup_exchange_rate_scd__dbt_tmp"
-    );
+        select "_airbyte_unique_key", "_airbyte_unique_key_scd", "id", "currency", "date", "timestamp_col", "HKD@spéçiäl & characters", "hkd_special___characters", "nzd", "usd", "_airbyte_start_at", "_airbyte_end_at", "_airbyte_active_row", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_dedup_exchange_rate_hashid"
+        from "dedup_exchange_rate_scd__dbt_tmp"
+    )
\ No newline at end of file
diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/scd/test_normalization/multiple_column_names_conflicts_scd.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/scd/test_normalization/multiple_column_names_conflicts_scd.sql
index 77ba6202fe81..76e853912437 100644
--- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/scd/test_normalization/multiple_column_names_conflicts_scd.sql
+++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/scd/test_normalization/multiple_column_names_conflicts_scd.sql
@@ -1,14 +1,15 @@
-    delete
-    from "postgres".test_normalization."multiple_column_names_conflicts_scd"
+
+    delete from "postgres".test_normalization."multiple_column_names_conflicts_scd"
     where (_airbyte_unique_key_scd) in (
         select (_airbyte_unique_key_scd)
        from "multiple_column_names_conflicts_scd__dbt_tmp"
     );
+
     insert into "postgres".test_normalization."multiple_column_names_conflicts_scd" ("_airbyte_unique_key", "_airbyte_unique_key_scd", "id", "User Id", "user_id", "User id", "user id", "User@Id", "userid", "_airbyte_start_at", "_airbyte_end_at", "_airbyte_active_row", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_multiple_co__ames_conflicts_hashid")
     (
-    select "_airbyte_unique_key", "_airbyte_unique_key_scd", "id", "User Id", "user_id", "User id", "user id", "User@Id", "userid", "_airbyte_start_at", "_airbyte_end_at", "_airbyte_active_row", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_multiple_co__ames_conflicts_hashid"
-    from "multiple_column_names_conflicts_scd__dbt_tmp"
-    );
+        select "_airbyte_unique_key", "_airbyte_unique_key_scd", "id", "User Id", "user_id", "User id", "user id", "User@Id", "userid", "_airbyte_start_at", "_airbyte_end_at", "_airbyte_active_row", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_multiple_co__ames_conflicts_hashid"
+        from "multiple_column_names_conflicts_scd__dbt_tmp"
+    )
\ No newline at end of file
diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/scd/test_normalization/pos_dedup_cdcx_scd.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/scd/test_normalization/pos_dedup_cdcx_scd.sql
index ebe6ae45a576..cafd98c1c127 100644
--- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/scd/test_normalization/pos_dedup_cdcx_scd.sql
+++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/scd/test_normalization/pos_dedup_cdcx_scd.sql
@@ -1,14 +1,15 @@
-    delete
-    from "postgres".test_normalization."pos_dedup_cdcx_scd"
+
+    delete from "postgres".test_normalization."pos_dedup_cdcx_scd"
     where (_airbyte_unique_key_scd) in (
         select (_airbyte_unique_key_scd)
        from "pos_dedup_cdcx_scd__dbt_tmp"
     );
+
     insert into "postgres".test_normalization."pos_dedup_cdcx_scd" ("_airbyte_unique_key", "_airbyte_unique_key_scd", "id", "name", "_ab_cdc_lsn", "_ab_cdc_updated_at", "_ab_cdc_deleted_at", "_ab_cdc_log_pos", "_airbyte_start_at", "_airbyte_end_at", "_airbyte_active_row", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_pos_dedup_cdcx_hashid")
     (
-    select "_airbyte_unique_key", "_airbyte_unique_key_scd", "id", "name", "_ab_cdc_lsn", "_ab_cdc_updated_at", "_ab_cdc_deleted_at", "_ab_cdc_log_pos", "_airbyte_start_at", "_airbyte_end_at", "_airbyte_active_row", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_pos_dedup_cdcx_hashid"
-    from "pos_dedup_cdcx_scd__dbt_tmp"
-    );
+        select "_airbyte_unique_key", "_airbyte_unique_key_scd", "id", "name", "_ab_cdc_lsn", "_ab_cdc_updated_at", "_ab_cdc_deleted_at", "_ab_cdc_log_pos", "_airbyte_start_at", "_airbyte_end_at", "_airbyte_active_row", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_pos_dedup_cdcx_hashid"
+        from "pos_dedup_cdcx_scd__dbt_tmp"
+    )
\ No newline at end of file
diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/scd/test_normalization/renamed_dedup_cdc_excluded_scd.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/scd/test_normalization/renamed_dedup_cdc_excluded_scd.sql
index 3ea9e1c724fb..e6d1d5fd0160 100644
--- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/scd/test_normalization/renamed_dedup_cdc_excluded_scd.sql
+++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/scd/test_normalization/renamed_dedup_cdc_excluded_scd.sql
@@ -1,14 +1,15 @@
-    delete
-    from "postgres".test_normalization."renamed_dedup_cdc_excluded_scd"
+
+    delete from "postgres".test_normalization."renamed_dedup_cdc_excluded_scd"
     where (_airbyte_unique_key_scd) in (
         select (_airbyte_unique_key_scd)
        from "renamed_dedup_cdc_excluded_scd__dbt_tmp"
     );
+
     insert into "postgres".test_normalization."renamed_dedup_cdc_excluded_scd" ("_airbyte_unique_key", "_airbyte_unique_key_scd", "id", "_ab_cdc_updated_at", "_airbyte_start_at", "_airbyte_end_at", "_airbyte_active_row", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_renamed_dedup_cdc_excluded_hashid")
     (
-    select "_airbyte_unique_key", "_airbyte_unique_key_scd", "id", "_ab_cdc_updated_at", "_airbyte_start_at", "_airbyte_end_at", "_airbyte_active_row", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_renamed_dedup_cdc_excluded_hashid"
-    from "renamed_dedup_cdc_excluded_scd__dbt_tmp"
-    );
+        select "_airbyte_unique_key", "_airbyte_unique_key_scd", "id", "_ab_cdc_updated_at", "_airbyte_start_at", "_airbyte_end_at", "_airbyte_active_row", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_renamed_dedup_cdc_excluded_hashid"
+        from "renamed_dedup_cdc_excluded_scd__dbt_tmp"
+    )
\ No newline at end of file
diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/test_normalization/1_prefix_startwith_number.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/test_normalization/1_prefix_startwith_number.sql
index a95bdd8ef269..181af872ea06 100644
--- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/test_normalization/1_prefix_startwith_number.sql
+++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/test_normalization/1_prefix_startwith_number.sql
@@ -1,14 +1,15 @@
-    delete
-    from "postgres".test_normalization."1_prefix_startwith_number"
+
+    delete from "postgres".test_normalization."1_prefix_startwith_number"
     where (_airbyte_unique_key) in (
         select (_airbyte_unique_key)
        from "1_prefix_startwith_number__dbt_tmp"
     );
+
     insert into "postgres".test_normalization."1_prefix_startwith_number" ("_airbyte_unique_key", "id", "date", "text", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_1_prefix_startwith_number_hashid")
     (
-    select "_airbyte_unique_key", "id", "date", "text", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_1_prefix_startwith_number_hashid"
-    from "1_prefix_startwith_number__dbt_tmp"
-    );
+        select "_airbyte_unique_key", "id", "date", "text", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_1_prefix_startwith_number_hashid"
+        from "1_prefix_startwith_number__dbt_tmp"
+    )
\ No newline at end of file
diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/test_normalization/1_prefix_startwith_number_stg.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/test_normalization/1_prefix_startwith_number_stg.sql
index c69b8a655495..d9a69c73ea41 100644
--- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/test_normalization/1_prefix_startwith_number_stg.sql
+++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/test_normalization/1_prefix_startwith_number_stg.sql
@@ -1,14 +1,15 @@
-    delete
-    from "postgres"._airbyte_test_normalization."1_prefix_startwith_number_stg"
+
+    delete from "postgres"._airbyte_test_normalization."1_prefix_startwith_number_stg"
     where (_airbyte_ab_id) in (
         select (_airbyte_ab_id)
        from "1_prefix_startwith_number_stg__dbt_tmp"
     );
+
     insert into "postgres"._airbyte_test_normalization."1_prefix_startwith_number_stg" ("_airbyte_1_prefix_startwith_number_hashid", "id", "date", "text", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at")
     (
-    select "_airbyte_1_prefix_startwith_number_hashid", "id", "date", "text", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at"
-    from "1_prefix_startwith_number_stg__dbt_tmp"
-    );
+        select "_airbyte_1_prefix_startwith_number_hashid", "id", "date", "text", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at"
+        from "1_prefix_startwith_number_stg__dbt_tmp"
+    )
\ No newline at end of file
diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/test_normalization/dedup_cdc_excluded.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/test_normalization/dedup_cdc_excluded.sql
index c905b883e251..b3012059b462 100644
--- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/test_normalization/dedup_cdc_excluded.sql
+++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/test_normalization/dedup_cdc_excluded.sql
@@ -1,14 +1,15 @@
-    delete
-    from "postgres".test_normalization."dedup_cdc_excluded"
+
+    delete from "postgres".test_normalization."dedup_cdc_excluded"
     where (_airbyte_unique_key) in (
         select (_airbyte_unique_key)
        from "dedup_cdc_excluded__dbt_tmp"
     );
+
     insert into "postgres".test_normalization."dedup_cdc_excluded" ("_airbyte_unique_key", "id", "name", "_ab_cdc_lsn", "_ab_cdc_updated_at", "_ab_cdc_deleted_at", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_dedup_cdc_excluded_hashid")
     (
-    select "_airbyte_unique_key", "id", "name", "_ab_cdc_lsn", "_ab_cdc_updated_at", "_ab_cdc_deleted_at", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_dedup_cdc_excluded_hashid"
-    from "dedup_cdc_excluded__dbt_tmp"
-    );
+        select "_airbyte_unique_key", "id", "name", "_ab_cdc_lsn", "_ab_cdc_updated_at", "_ab_cdc_deleted_at", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_dedup_cdc_excluded_hashid"
+        from "dedup_cdc_excluded__dbt_tmp"
+    )
\ No newline at end of file
diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/test_normalization/dedup_cdc_excluded_stg.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/test_normalization/dedup_cdc_excluded_stg.sql
index 2f89a8be61da..d9f833d441bf 100644
--- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/test_normalization/dedup_cdc_excluded_stg.sql
+++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/test_normalization/dedup_cdc_excluded_stg.sql
@@ -1,14 +1,15 @@
-    delete
-    from "postgres"._airbyte_test_normalization."dedup_cdc_excluded_stg"
+
+    delete from "postgres"._airbyte_test_normalization."dedup_cdc_excluded_stg"
     where (_airbyte_ab_id) in (
         select (_airbyte_ab_id)
        from "dedup_cdc_excluded_stg__dbt_tmp"
     );
+
     insert into "postgres"._airbyte_test_normalization."dedup_cdc_excluded_stg" ("_airbyte_dedup_cdc_excluded_hashid", "id", "name", "_ab_cdc_lsn", "_ab_cdc_updated_at", "_ab_cdc_deleted_at", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at")
     (
-    select "_airbyte_dedup_cdc_excluded_hashid", "id", "name", "_ab_cdc_lsn", "_ab_cdc_updated_at", "_ab_cdc_deleted_at", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at"
-    from "dedup_cdc_excluded_stg__dbt_tmp"
-    );
+        select "_airbyte_dedup_cdc_excluded_hashid", "id", "name", "_ab_cdc_lsn", "_ab_cdc_updated_at", "_ab_cdc_deleted_at", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at"
+        from "dedup_cdc_excluded_stg__dbt_tmp"
+    )
\ No newline at end of file
diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/test_normalization/dedup_exchange_rate.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/test_normalization/dedup_exchange_rate.sql
index cc0c70d79775..871b95c607c9 100644
--- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/test_normalization/dedup_exchange_rate.sql
+++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/test_normalization/dedup_exchange_rate.sql
@@ -1,14 +1,15 @@
-    delete
-    from "postgres".test_normalization."dedup_exchange_rate"
+
+    delete from "postgres".test_normalization."dedup_exchange_rate"
     where (_airbyte_unique_key) in (
         select (_airbyte_unique_key)
        from "dedup_exchange_rate__dbt_tmp"
     );
+
     insert into "postgres".test_normalization."dedup_exchange_rate" ("_airbyte_unique_key", "id", "currency", "date", "timestamp_col", "HKD@spéçiäl & characters", "hkd_special___characters", "nzd", "usd", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_dedup_exchange_rate_hashid")
     (
-    select "_airbyte_unique_key", "id", "currency", "date", "timestamp_col", "HKD@spéçiäl & characters", "hkd_special___characters", "nzd", "usd", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_dedup_exchange_rate_hashid"
-    from "dedup_exchange_rate__dbt_tmp"
-    );
+        select "_airbyte_unique_key", "id", "currency", "date", "timestamp_col", "HKD@spéçiäl & characters", "hkd_special___characters", "nzd", "usd", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_dedup_exchange_rate_hashid"
+        from "dedup_exchange_rate__dbt_tmp"
+    )
\ No newline at end of file
diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/test_normalization/dedup_exchange_rate_stg.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/test_normalization/dedup_exchange_rate_stg.sql
index 52b404234f41..1be7a088845e 100644
--- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/test_normalization/dedup_exchange_rate_stg.sql
+++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/test_normalization/dedup_exchange_rate_stg.sql
@@ -1,14 +1,15 @@
-    delete
-    from "postgres"._airbyte_test_normalization."dedup_exchange_rate_stg"
+
+    delete from "postgres"._airbyte_test_normalization."dedup_exchange_rate_stg"
     where (_airbyte_ab_id) in (
         select (_airbyte_ab_id)
        from "dedup_exchange_rate_stg__dbt_tmp"
     );
+
     insert into "postgres"._airbyte_test_normalization."dedup_exchange_rate_stg" ("_airbyte_dedup_exchange_rate_hashid", "id", "currency", "date", "timestamp_col", "HKD@spéçiäl & characters", "hkd_special___characters", "nzd", "usd", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at")
     (
-    select "_airbyte_dedup_exchange_rate_hashid", "id", "currency", "date", "timestamp_col", "HKD@spéçiäl & characters", "hkd_special___characters", "nzd", "usd", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at"
-    from "dedup_exchange_rate_stg__dbt_tmp"
-    );
+        select "_airbyte_dedup_exchange_rate_hashid", "id", "currency", "date", "timestamp_col", "HKD@spéçiäl & characters", "hkd_special___characters", "nzd", "usd", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at"
+        from "dedup_exchange_rate_stg__dbt_tmp"
+    )
\ No newline at end of file
diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/test_normalization/multiple_column_names_conflicts.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/test_normalization/multiple_column_names_conflicts.sql
index 55bd30540c22..525dc2add907 100644
--- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/test_normalization/multiple_column_names_conflicts.sql
+++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/test_normalization/multiple_column_names_conflicts.sql
@@ -1,14 +1,15 @@
-    delete
-    from "postgres".test_normalization."multiple_column_names_conflicts"
+
+    delete from "postgres".test_normalization."multiple_column_names_conflicts"
     where (_airbyte_unique_key) in (
         select (_airbyte_unique_key)
        from "multiple_column_names_conflicts__dbt_tmp"
     );
+
     insert into "postgres".test_normalization."multiple_column_names_conflicts" ("_airbyte_unique_key", "id", "User Id", "user_id", "User id", "user id", "User@Id", "userid", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_multiple_co__ames_conflicts_hashid")
     (
-    select "_airbyte_unique_key", "id", "User Id", "user_id", "User id", "user id", "User@Id", "userid", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_multiple_co__ames_conflicts_hashid"
-    from "multiple_column_names_conflicts__dbt_tmp"
-    );
+        select "_airbyte_unique_key", "id", "User Id", "user_id", "User id", "user id", "User@Id", "userid", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_multiple_co__ames_conflicts_hashid"
+        from "multiple_column_names_conflicts__dbt_tmp"
+    )
\ No newline at end of file
diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/test_normalization/multiple_column_names_conflicts_stg.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/test_normalization/multiple_column_names_conflicts_stg.sql
index 9acf3e0a0ee3..391889ecb40a 100644
--- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/test_normalization/multiple_column_names_conflicts_stg.sql
+++
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/test_normalization/multiple_column_names_conflicts_stg.sql @@ -1,14 +1,15 @@ - delete - from "postgres"._airbyte_test_normalization."multiple_column_names_conflicts_stg" + + delete from "postgres"._airbyte_test_normalization."multiple_column_names_conflicts_stg" where (_airbyte_ab_id) in ( select (_airbyte_ab_id) from "multiple_column_names_conflicts_stg__dbt_tmp" ); + insert into "postgres"._airbyte_test_normalization."multiple_column_names_conflicts_stg" ("_airbyte_multiple_co__ames_conflicts_hashid", "id", "User Id", "user_id", "User id", "user id", "User@Id", "userid", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at") ( - select "_airbyte_multiple_co__ames_conflicts_hashid", "id", "User Id", "user_id", "User id", "user id", "User@Id", "userid", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at" - from "multiple_column_names_conflicts_stg__dbt_tmp" - ); + select "_airbyte_multiple_co__ames_conflicts_hashid", "id", "User Id", "user_id", "User id", "user id", "User@Id", "userid", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at" + from "multiple_column_names_conflicts_stg__dbt_tmp" + ) \ No newline at end of file diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/test_normalization/pos_dedup_cdcx.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/test_normalization/pos_dedup_cdcx.sql index 6da0cd1efa17..1d618406e5c6 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/test_normalization/pos_dedup_cdcx.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/test_normalization/pos_dedup_cdcx.sql @@ -1,14 +1,15 @@ - delete - from "postgres".test_normalization."pos_dedup_cdcx" + + delete from "postgres".test_normalization."pos_dedup_cdcx" where (_airbyte_unique_key) in ( select (_airbyte_unique_key) from "pos_dedup_cdcx__dbt_tmp" ); + insert into "postgres".test_normalization."pos_dedup_cdcx" ("_airbyte_unique_key", "id", "name", "_ab_cdc_lsn", "_ab_cdc_updated_at", "_ab_cdc_deleted_at", "_ab_cdc_log_pos", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_pos_dedup_cdcx_hashid") ( - select "_airbyte_unique_key", "id", "name", "_ab_cdc_lsn", "_ab_cdc_updated_at", "_ab_cdc_deleted_at", "_ab_cdc_log_pos", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_pos_dedup_cdcx_hashid" - from "pos_dedup_cdcx__dbt_tmp" - ); + select "_airbyte_unique_key", "id", "name", "_ab_cdc_lsn", "_ab_cdc_updated_at", "_ab_cdc_deleted_at", "_ab_cdc_log_pos", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_pos_dedup_cdcx_hashid" + from "pos_dedup_cdcx__dbt_tmp" + ) \ No newline at end of file diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/test_normalization/pos_dedup_cdcx_stg.sql 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/test_normalization/pos_dedup_cdcx_stg.sql index 1d42123687e9..c627c7bea1b0 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/test_normalization/pos_dedup_cdcx_stg.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/test_normalization/pos_dedup_cdcx_stg.sql @@ -1,14 +1,15 @@ - delete - from "postgres"._airbyte_test_normalization."pos_dedup_cdcx_stg" + + delete from "postgres"._airbyte_test_normalization."pos_dedup_cdcx_stg" where (_airbyte_ab_id) in ( select (_airbyte_ab_id) from "pos_dedup_cdcx_stg__dbt_tmp" ); + insert into "postgres"._airbyte_test_normalization."pos_dedup_cdcx_stg" ("_airbyte_pos_dedup_cdcx_hashid", "id", "name", "_ab_cdc_lsn", "_ab_cdc_updated_at", "_ab_cdc_deleted_at", "_ab_cdc_log_pos", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at") ( - select "_airbyte_pos_dedup_cdcx_hashid", "id", "name", "_ab_cdc_lsn", "_ab_cdc_updated_at", "_ab_cdc_deleted_at", "_ab_cdc_log_pos", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at" - from "pos_dedup_cdcx_stg__dbt_tmp" - ); + select "_airbyte_pos_dedup_cdcx_hashid", "id", "name", "_ab_cdc_lsn", "_ab_cdc_updated_at", "_ab_cdc_deleted_at", "_ab_cdc_log_pos", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at" + from "pos_dedup_cdcx_stg__dbt_tmp" + ) \ No newline at end of file diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/test_normalization/renamed_dedup_cdc_excluded.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/test_normalization/renamed_dedup_cdc_excluded.sql index 3fec1976ed9d..de66b557fa18 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/test_normalization/renamed_dedup_cdc_excluded.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/test_normalization/renamed_dedup_cdc_excluded.sql @@ -1,14 +1,15 @@ - delete - from "postgres".test_normalization."renamed_dedup_cdc_excluded" + + delete from "postgres".test_normalization."renamed_dedup_cdc_excluded" where (_airbyte_unique_key) in ( select (_airbyte_unique_key) from "renamed_dedup_cdc_excluded__dbt_tmp" ); + insert into "postgres".test_normalization."renamed_dedup_cdc_excluded" ("_airbyte_unique_key", "id", "_ab_cdc_updated_at", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_renamed_dedup_cdc_excluded_hashid") ( - select "_airbyte_unique_key", "id", "_ab_cdc_updated_at", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_renamed_dedup_cdc_excluded_hashid" - from "renamed_dedup_cdc_excluded__dbt_tmp" - ); + select "_airbyte_unique_key", "id", "_ab_cdc_updated_at", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_renamed_dedup_cdc_excluded_hashid" + from "renamed_dedup_cdc_excluded__dbt_tmp" + ) \ No newline at end of file diff 
--git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/test_normalization/renamed_dedup_cdc_excluded_stg.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/test_normalization/renamed_dedup_cdc_excluded_stg.sql index 5f39006c6490..6711170dbc9c 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/test_normalization/renamed_dedup_cdc_excluded_stg.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/second_output/airbyte_incremental/test_normalization/renamed_dedup_cdc_excluded_stg.sql @@ -1,14 +1,15 @@ - delete - from "postgres"._airbyte_test_normalization."renamed_dedup_cdc_excluded_stg" + + delete from "postgres"._airbyte_test_normalization."renamed_dedup_cdc_excluded_stg" where (_airbyte_ab_id) in ( select (_airbyte_ab_id) from "renamed_dedup_cdc_excluded_stg__dbt_tmp" ); + insert into "postgres"._airbyte_test_normalization."renamed_dedup_cdc_excluded_stg" ("_airbyte_renamed_dedup_cdc_excluded_hashid", "id", "_ab_cdc_updated_at", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at") ( - select "_airbyte_renamed_dedup_cdc_excluded_hashid", "id", "_ab_cdc_updated_at", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at" - from "renamed_dedup_cdc_excluded_stg__dbt_tmp" - ); + select "_airbyte_renamed_dedup_cdc_excluded_hashid", "id", "_ab_cdc_updated_at", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at" + from "renamed_dedup_cdc_excluded_stg__dbt_tmp" + ) \ No newline at end of file diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/third_output/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/third_output/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql index a5de1de2333d..521c016411b8 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/third_output/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/third_output/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql @@ -1,14 +1,15 @@ - delete - from "postgres".test_normalization."dedup_exchange_rate_scd" + + delete from "postgres".test_normalization."dedup_exchange_rate_scd" where (_airbyte_unique_key_scd) in ( select (_airbyte_unique_key_scd) from "dedup_exchange_rate_scd__dbt_tmp" ); + - insert into "postgres".test_normalization."dedup_exchange_rate_scd" ("_airbyte_unique_key", "_airbyte_unique_key_scd", "currency", "date", "timestamp_col", "HKD@spéçiäl & characters", "nzd", "usd", "_airbyte_start_at", "_airbyte_end_at", "_airbyte_active_row", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_dedup_exchange_rate_hashid", "new_column", "id") + insert into "postgres".test_normalization."dedup_exchange_rate_scd" ("_airbyte_unique_key", "_airbyte_unique_key_scd", "id", "currency", "new_column", "date", 
"timestamp_col", "HKD@spéçiäl & characters", "nzd", "usd", "_airbyte_start_at", "_airbyte_end_at", "_airbyte_active_row", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_dedup_exchange_rate_hashid") ( - select "_airbyte_unique_key", "_airbyte_unique_key_scd", "currency", "date", "timestamp_col", "HKD@spéçiäl & characters", "nzd", "usd", "_airbyte_start_at", "_airbyte_end_at", "_airbyte_active_row", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_dedup_exchange_rate_hashid", "new_column", "id" - from "dedup_exchange_rate_scd__dbt_tmp" - ); + select "_airbyte_unique_key", "_airbyte_unique_key_scd", "id", "currency", "new_column", "date", "timestamp_col", "HKD@spéçiäl & characters", "nzd", "usd", "_airbyte_start_at", "_airbyte_end_at", "_airbyte_active_row", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_dedup_exchange_rate_hashid" + from "dedup_exchange_rate_scd__dbt_tmp" + ) \ No newline at end of file diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/third_output/airbyte_incremental/scd/test_normalization/renamed_dedup_cdc_excluded_scd.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/third_output/airbyte_incremental/scd/test_normalization/renamed_dedup_cdc_excluded_scd.sql index dfe10c6da794..3a30f5175e85 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/third_output/airbyte_incremental/scd/test_normalization/renamed_dedup_cdc_excluded_scd.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/third_output/airbyte_incremental/scd/test_normalization/renamed_dedup_cdc_excluded_scd.sql @@ -1,14 +1,15 @@ - delete - from "postgres".test_normalization."renamed_dedup_cdc_excluded_scd" + + delete from "postgres".test_normalization."renamed_dedup_cdc_excluded_scd" where (_airbyte_unique_key_scd) in ( select (_airbyte_unique_key_scd) from "renamed_dedup_cdc_excluded_scd__dbt_tmp" ); + - insert into "postgres".test_normalization."renamed_dedup_cdc_excluded_scd" ("_airbyte_unique_key", "_airbyte_unique_key_scd", "id", "_ab_cdc_updated_at", "_airbyte_start_at", "_airbyte_end_at", "_airbyte_active_row", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_renamed_dedup_cdc_excluded_hashid", "name", "_ab_cdc_lsn", "_ab_cdc_deleted_at") + insert into "postgres".test_normalization."renamed_dedup_cdc_excluded_scd" ("_airbyte_unique_key", "_airbyte_unique_key_scd", "id", "name", "_ab_cdc_lsn", "_ab_cdc_updated_at", "_ab_cdc_deleted_at", "_airbyte_start_at", "_airbyte_end_at", "_airbyte_active_row", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_renamed_dedup_cdc_excluded_hashid") ( - select "_airbyte_unique_key", "_airbyte_unique_key_scd", "id", "_ab_cdc_updated_at", "_airbyte_start_at", "_airbyte_end_at", "_airbyte_active_row", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_renamed_dedup_cdc_excluded_hashid", "name", "_ab_cdc_lsn", "_ab_cdc_deleted_at" - from "renamed_dedup_cdc_excluded_scd__dbt_tmp" - ); + select "_airbyte_unique_key", "_airbyte_unique_key_scd", "id", "name", "_ab_cdc_lsn", "_ab_cdc_updated_at", "_ab_cdc_deleted_at", "_airbyte_start_at", "_airbyte_end_at", "_airbyte_active_row", "_airbyte_ab_id", 
"_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_renamed_dedup_cdc_excluded_hashid" + from "renamed_dedup_cdc_excluded_scd__dbt_tmp" + ) \ No newline at end of file diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/third_output/airbyte_incremental/test_normalization/dedup_exchange_rate.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/third_output/airbyte_incremental/test_normalization/dedup_exchange_rate.sql index ecc81c8883b0..9c85a5929377 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/third_output/airbyte_incremental/test_normalization/dedup_exchange_rate.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/third_output/airbyte_incremental/test_normalization/dedup_exchange_rate.sql @@ -1,14 +1,15 @@ - delete - from "postgres".test_normalization."dedup_exchange_rate" + + delete from "postgres".test_normalization."dedup_exchange_rate" where (_airbyte_unique_key) in ( select (_airbyte_unique_key) from "dedup_exchange_rate__dbt_tmp" ); + - insert into "postgres".test_normalization."dedup_exchange_rate" ("_airbyte_unique_key", "currency", "date", "timestamp_col", "HKD@spéçiäl & characters", "nzd", "usd", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_dedup_exchange_rate_hashid", "new_column", "id") + insert into "postgres".test_normalization."dedup_exchange_rate" ("_airbyte_unique_key", "id", "currency", "new_column", "date", "timestamp_col", "HKD@spéçiäl & characters", "nzd", "usd", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_dedup_exchange_rate_hashid") ( - select "_airbyte_unique_key", "currency", "date", "timestamp_col", "HKD@spéçiäl & characters", "nzd", "usd", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_dedup_exchange_rate_hashid", "new_column", "id" - from "dedup_exchange_rate__dbt_tmp" - ); + select "_airbyte_unique_key", "id", "currency", "new_column", "date", "timestamp_col", "HKD@spéçiäl & characters", "nzd", "usd", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_dedup_exchange_rate_hashid" + from "dedup_exchange_rate__dbt_tmp" + ) \ No newline at end of file diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/third_output/airbyte_incremental/test_normalization/dedup_exchange_rate_stg.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/third_output/airbyte_incremental/test_normalization/dedup_exchange_rate_stg.sql index f35951198e0f..1cca43917331 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/third_output/airbyte_incremental/test_normalization/dedup_exchange_rate_stg.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/third_output/airbyte_incremental/test_normalization/dedup_exchange_rate_stg.sql @@ -1,14 +1,15 @@ - delete - from "postgres"._airbyte_test_normalization."dedup_exchange_rate_stg" + + delete from "postgres"._airbyte_test_normalization."dedup_exchange_rate_stg" where (_airbyte_ab_id) in ( select (_airbyte_ab_id) 
from "dedup_exchange_rate_stg__dbt_tmp" ); + - insert into "postgres"._airbyte_test_normalization."dedup_exchange_rate_stg" ("_airbyte_dedup_exchange_rate_hashid", "currency", "date", "timestamp_col", "HKD@spéçiäl & characters", "nzd", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "new_column", "id", "usd") + insert into "postgres"._airbyte_test_normalization."dedup_exchange_rate_stg" ("_airbyte_dedup_exchange_rate_hashid", "id", "currency", "new_column", "date", "timestamp_col", "HKD@spéçiäl & characters", "nzd", "usd", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at") ( - select "_airbyte_dedup_exchange_rate_hashid", "currency", "date", "timestamp_col", "HKD@spéçiäl & characters", "nzd", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "new_column", "id", "usd" - from "dedup_exchange_rate_stg__dbt_tmp" - ); + select "_airbyte_dedup_exchange_rate_hashid", "id", "currency", "new_column", "date", "timestamp_col", "HKD@spéçiäl & characters", "nzd", "usd", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at" + from "dedup_exchange_rate_stg__dbt_tmp" + ) \ No newline at end of file diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/third_output/airbyte_incremental/test_normalization/renamed_dedup_cdc_excluded.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/third_output/airbyte_incremental/test_normalization/renamed_dedup_cdc_excluded.sql index c1d1c310179d..f4ce2e830582 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/third_output/airbyte_incremental/test_normalization/renamed_dedup_cdc_excluded.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/third_output/airbyte_incremental/test_normalization/renamed_dedup_cdc_excluded.sql @@ -1,14 +1,15 @@ - delete - from "postgres".test_normalization."renamed_dedup_cdc_excluded" + + delete from "postgres".test_normalization."renamed_dedup_cdc_excluded" where (_airbyte_unique_key) in ( select (_airbyte_unique_key) from "renamed_dedup_cdc_excluded__dbt_tmp" ); + - insert into "postgres".test_normalization."renamed_dedup_cdc_excluded" ("_airbyte_unique_key", "id", "_ab_cdc_updated_at", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_renamed_dedup_cdc_excluded_hashid", "name", "_ab_cdc_lsn", "_ab_cdc_deleted_at") + insert into "postgres".test_normalization."renamed_dedup_cdc_excluded" ("_airbyte_unique_key", "id", "name", "_ab_cdc_lsn", "_ab_cdc_updated_at", "_ab_cdc_deleted_at", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_renamed_dedup_cdc_excluded_hashid") ( - select "_airbyte_unique_key", "id", "_ab_cdc_updated_at", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_renamed_dedup_cdc_excluded_hashid", "name", "_ab_cdc_lsn", "_ab_cdc_deleted_at" - from "renamed_dedup_cdc_excluded__dbt_tmp" - ); + select "_airbyte_unique_key", "id", "name", "_ab_cdc_lsn", "_ab_cdc_updated_at", "_ab_cdc_deleted_at", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_renamed_dedup_cdc_excluded_hashid" + from "renamed_dedup_cdc_excluded__dbt_tmp" + ) \ No newline at end of file diff --git 
a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/third_output/airbyte_incremental/test_normalization/renamed_dedup_cdc_excluded_stg.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/third_output/airbyte_incremental/test_normalization/renamed_dedup_cdc_excluded_stg.sql index 55db812277ae..18d5b4ab827c 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/third_output/airbyte_incremental/test_normalization/renamed_dedup_cdc_excluded_stg.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/postgres/test_simple_streams/third_output/airbyte_incremental/test_normalization/renamed_dedup_cdc_excluded_stg.sql @@ -1,14 +1,15 @@ - delete - from "postgres"._airbyte_test_normalization."renamed_dedup_cdc_excluded_stg" + + delete from "postgres"._airbyte_test_normalization."renamed_dedup_cdc_excluded_stg" where (_airbyte_ab_id) in ( select (_airbyte_ab_id) from "renamed_dedup_cdc_excluded_stg__dbt_tmp" ); + - insert into "postgres"._airbyte_test_normalization."renamed_dedup_cdc_excluded_stg" ("_airbyte_renamed_dedup_cdc_excluded_hashid", "id", "_ab_cdc_updated_at", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "name", "_ab_cdc_lsn", "_ab_cdc_deleted_at") + insert into "postgres"._airbyte_test_normalization."renamed_dedup_cdc_excluded_stg" ("_airbyte_renamed_dedup_cdc_excluded_hashid", "id", "name", "_ab_cdc_lsn", "_ab_cdc_updated_at", "_ab_cdc_deleted_at", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at") ( - select "_airbyte_renamed_dedup_cdc_excluded_hashid", "id", "_ab_cdc_updated_at", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "name", "_ab_cdc_lsn", "_ab_cdc_deleted_at" - from "renamed_dedup_cdc_excluded_stg__dbt_tmp" - ); + select "_airbyte_renamed_dedup_cdc_excluded_hashid", "id", "name", "_ab_cdc_lsn", "_ab_cdc_updated_at", "_ab_cdc_deleted_at", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at" + from "renamed_dedup_cdc_excluded_stg__dbt_tmp" + ) \ No newline at end of file diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/dbt_project.yml b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/dbt_project.yml index 9ad815875900..eb867a236e5c 100755 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/dbt_project.yml +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/dbt_project.yml @@ -4,40 +4,40 @@ # Name your package! Package names should contain only lowercase characters # and underscores. A good package name should reflect your organization's # name or the intended use of these models -name: 'airbyte_utils' -version: '1.0' +name: "airbyte_utils" +version: "1.0" config-version: 2 # This setting configures which "profile" dbt uses for this project. Profiles contain # database connection information, and should be configured in the ~/.dbt/profiles.yml file -profile: 'normalize' +profile: "normalize" # These configurations specify where dbt should look for different types of files. 
-# The `source-paths` config, for example, states that source models can be found +# The `model-paths` config, for example, states that source models can be found # in the "models/" directory. You probably won't need to change these! -source-paths: ["models"] +model-paths: ["models"] docs-paths: ["docs"] analysis-paths: ["analysis"] test-paths: ["tests"] -data-paths: ["data"] +seed-paths: ["data"] macro-paths: ["macros"] -target-path: "../build" # directory which will store compiled SQL files -log-path: "../logs" # directory which will store DBT logs -modules-path: "/tmp/dbt_modules" # directory which will store external DBT dependencies +target-path: "../build" # directory which will store compiled SQL files +log-path: "../logs" # directory which will store DBT logs +packages-install-path: "/tmp/dbt_modules" # directory which will store external DBT dependencies -clean-targets: # directories to be removed by `dbt clean` - - "build" - - "dbt_modules" +clean-targets: # directories to be removed by `dbt clean` + - "build" + - "dbt_modules" quoting: database: true -# Temporarily disabling the behavior of the ExtendedNameTransformer on table/schema names, see (issue #1785) -# all schemas should be unquoted + # Temporarily disabling the behavior of the ExtendedNameTransformer on table/schema names, see (issue #1785) + # all schemas should be unquoted schema: false identifier: true -# You can define configurations for models in the `source-paths` directory here. +# You can define configurations for models in the `model-paths` directory here. # Using these configurations, you can enable or disable models, change how they # are materialized, and more! models: @@ -60,4 +60,4 @@ models: dispatch: - macro_namespace: dbt_utils - search_order: ['airbyte_utils', 'dbt_utils'] + search_order: ["airbyte_utils", "dbt_utils"] diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/first_output/airbyte_incremental/scd/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_scd.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/first_output/airbyte_incremental/scd/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_scd.sql index 0f50d2944487..caeba18c2477 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/first_output/airbyte_incremental/scd/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_scd.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/first_output/airbyte_incremental/scd/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_scd.sql @@ -6,6 +6,7 @@ compound sortkey(_airbyte_active_row,_airbyte_unique_key_scd,_airbyte_emitted_at) + as ( -- depends_on: ref('nested_stream_with_complex_columns_resulting_into_long_names_stg') diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names.sql 
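The dbt_project.yml hunk above, repeated essentially verbatim for the other generated test projects later in this patch, mixes two mechanical updates: a YAML formatter normalizes single quotes to double quotes and re-indents comments, and the project file is migrated to the configuration keys introduced in dbt 1.0, which renamed several project-level settings. The key renames as a small before/after sketch (values carry over unchanged):

    # dbt < 1.0
    source-paths: ["models"]
    data-paths: ["data"]
    modules-path: "/tmp/dbt_modules"

    # dbt >= 1.0
    model-paths: ["models"]
    seed-paths: ["data"]
    packages-install-path: "/tmp/dbt_modules"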
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names.sql index 7cc7e57d7252..6961ef2198dc 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names.sql @@ -6,6 +6,7 @@ compound sortkey(_airbyte_unique_key,_airbyte_emitted_at) + as ( -- Final base SQL model diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition.sql index 540c237472f1..25a5f72a235c 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition.sql @@ -6,6 +6,7 @@ compound sortkey(_airbyte_emitted_at) + as ( with __dbt__cte__nested_stream_with_complex_columns_resulting_into_long_names_partition_ab1 as ( diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_data.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_data.sql index cd290444f4b2..0cd481382f10 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_data.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_data.sql @@ -6,6 +6,7 @@ compound sortkey(_airbyte_emitted_at) + as ( with __dbt__cte__nested_stream_with_complex_columns_resulting_into_long_names_partition_data_ab1 as ( diff --git 
a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_double_array_data.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_double_array_data.sql index 57d7b695e202..0ef9e77bb055 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_double_array_data.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/first_output/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_double_array_data.sql @@ -6,6 +6,7 @@ compound sortkey(_airbyte_emitted_at) + as ( with __dbt__cte__nested_stream_with_complex_columns_resulting_into_long_names_partition_double_array_data_ab1 as ( diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/second_output/airbyte_incremental/scd/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_scd.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/second_output/airbyte_incremental/scd/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_scd.sql index b3b47010b2a0..a3986d744fa8 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/second_output/airbyte_incremental/scd/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_scd.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/second_output/airbyte_incremental/scd/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_scd.sql @@ -1,14 +1,15 @@ - delete - from "integrationtests".test_normalization."nested_stream_with_complex_columns_resulting_into_long_names_scd" + + delete from "integrationtests".test_normalization."nested_stream_with_complex_columns_resulting_into_long_names_scd" where (_airbyte_unique_key_scd) in ( select (_airbyte_unique_key_scd) from "nested_stream_with_complex_columns_resulti__dbt_tmp" ); + insert into "integrationtests".test_normalization."nested_stream_with_complex_columns_resulting_into_long_names_scd" ("_airbyte_unique_key", "_airbyte_unique_key_scd", "id", "date", "partition", "_airbyte_start_at", "_airbyte_end_at", "_airbyte_active_row", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_nested_stream_with_complex_columns_resulting_into_long_names_hashid") ( - select "_airbyte_unique_key", "_airbyte_unique_key_scd", "id", "date", "partition", "_airbyte_start_at", "_airbyte_end_at", "_airbyte_active_row", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_nested_stream_with_complex_columns_resulting_into_long_names_hashid" - from "nested_stream_with_complex_columns_resulti__dbt_tmp" - ); + select "_airbyte_unique_key", 
"_airbyte_unique_key_scd", "id", "date", "partition", "_airbyte_start_at", "_airbyte_end_at", "_airbyte_active_row", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_nested_stream_with_complex_columns_resulting_into_long_names_hashid" + from "nested_stream_with_complex_columns_resulti__dbt_tmp" + ) \ No newline at end of file diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/second_output/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/second_output/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names.sql index d307b3232ef7..842e9a979c68 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/second_output/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/second_output/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names.sql @@ -1,14 +1,15 @@ - delete - from "integrationtests".test_normalization."nested_stream_with_complex_columns_resulting_into_long_names" + + delete from "integrationtests".test_normalization."nested_stream_with_complex_columns_resulting_into_long_names" where (_airbyte_unique_key) in ( select (_airbyte_unique_key) from "nested_stream_with_complex_columns_resulti__dbt_tmp" ); + insert into "integrationtests".test_normalization."nested_stream_with_complex_columns_resulting_into_long_names" ("_airbyte_unique_key", "id", "date", "partition", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_nested_stream_with_complex_columns_resulting_into_long_names_hashid") ( - select "_airbyte_unique_key", "id", "date", "partition", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_nested_stream_with_complex_columns_resulting_into_long_names_hashid" - from "nested_stream_with_complex_columns_resulti__dbt_tmp" - ); + select "_airbyte_unique_key", "id", "date", "partition", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_nested_stream_with_complex_columns_resulting_into_long_names_hashid" + from "nested_stream_with_complex_columns_resulti__dbt_tmp" + ) \ No newline at end of file diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/second_output/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/second_output/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition.sql index 5b6aa3e7994a..9bf3ef8552d7 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/second_output/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition.sql +++ 
b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/second_output/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition.sql @@ -3,7 +3,7 @@ insert into "integrationtests".test_normalization."nested_stream_with_complex_columns_resulting_into_long_names_partition" ("_airbyte_nested_stream_with_complex_columns_resulting_into_long_names_hashid", "double_array_data", "data", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_partition_hashid") ( - select "_airbyte_nested_stream_with_complex_columns_resulting_into_long_names_hashid", "double_array_data", "data", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_partition_hashid" - from "nested_stream_with_complex_columns_resulti__dbt_tmp" - ); + select "_airbyte_nested_stream_with_complex_columns_resulting_into_long_names_hashid", "double_array_data", "data", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_partition_hashid" + from "nested_stream_with_complex_columns_resulti__dbt_tmp" + ) \ No newline at end of file diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/second_output/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_data.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/second_output/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_data.sql index 6ce45aa678ca..ec83ef6267a0 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/second_output/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_data.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/second_output/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_data.sql @@ -3,7 +3,7 @@ insert into "integrationtests".test_normalization."nested_stream_with_complex_columns_resulting_into_long_names_partition_data" ("_airbyte_partition_hashid", "currency", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_data_hashid") ( - select "_airbyte_partition_hashid", "currency", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_data_hashid" - from "nested_stream_with_complex_columns_resulti__dbt_tmp" - ); + select "_airbyte_partition_hashid", "currency", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_data_hashid" + from "nested_stream_with_complex_columns_resulti__dbt_tmp" + ) \ No newline at end of file diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/second_output/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_double_array_data.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/second_output/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_double_array_data.sql index 491418bbabac..05efdbcc1ce2 
100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/second_output/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_double_array_data.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_nested_streams/second_output/airbyte_incremental/test_normalization/nested_stream_with_complex_columns_resulting_into_long_names_partition_double_array_data.sql @@ -3,7 +3,7 @@ insert into "integrationtests".test_normalization."nested_stream_with_complex_columns_resulting_into_long_names_partition_double_array_data" ("_airbyte_partition_hashid", "id", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_double_array_data_hashid") ( - select "_airbyte_partition_hashid", "id", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_double_array_data_hashid" - from "nested_stream_with_complex_columns_resulti__dbt_tmp" - ); + select "_airbyte_partition_hashid", "id", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_double_array_data_hashid" + from "nested_stream_with_complex_columns_resulti__dbt_tmp" + ) \ No newline at end of file diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/dbt_project.yml b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/dbt_project.yml index 522b1e595e8a..7dcef9c28cd5 100755 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/dbt_project.yml +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/dbt_project.yml @@ -4,40 +4,40 @@ # Name your package! Package names should contain only lowercase characters # and underscores. A good package name should reflect your organization's # name or the intended use of these models -name: 'airbyte_utils' -version: '1.0' +name: "airbyte_utils" +version: "1.0" config-version: 2 # This setting configures which "profile" dbt uses for this project. Profiles contain # database connection information, and should be configured in the ~/.dbt/profiles.yml file -profile: 'normalize' +profile: "normalize" # These configurations specify where dbt should look for different types of files. -# The `source-paths` config, for example, states that source models can be found +# The `model-paths` config, for example, states that source models can be found # in the "models/" directory. You probably won't need to change these! 
-source-paths: ["modified_models"] +model-paths: ["modified_models"] docs-paths: ["docs"] analysis-paths: ["analysis"] test-paths: ["tests"] -data-paths: ["data"] +seed-paths: ["data"] macro-paths: ["macros"] -target-path: "../build" # directory which will store compiled SQL files -log-path: "../logs" # directory which will store DBT logs -modules-path: "/tmp/dbt_modules" # directory which will store external DBT dependencies +target-path: "../build" # directory which will store compiled SQL files +log-path: "../logs" # directory which will store DBT logs +packages-install-path: "/tmp/dbt_modules" # directory which will store external DBT dependencies -clean-targets: # directories to be removed by `dbt clean` - - "build" - - "dbt_modules" +clean-targets: # directories to be removed by `dbt clean` + - "build" + - "dbt_modules" quoting: database: true -# Temporarily disabling the behavior of the ExtendedNameTransformer on table/schema names, see (issue #1785) -# all schemas should be unquoted + # Temporarily disabling the behavior of the ExtendedNameTransformer on table/schema names, see (issue #1785) + # all schemas should be unquoted schema: false identifier: true -# You can define configurations for models in the `source-paths` directory here. +# You can define configurations for models in the `model-paths` directory here. # Using these configurations, you can enable or disable models, change how they # are materialized, and more! models: @@ -60,4 +60,4 @@ models: dispatch: - macro_namespace: dbt_utils - search_order: ['airbyte_utils', 'dbt_utils'] + search_order: ["airbyte_utils", "dbt_utils"] diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/first_dbt_project.yml b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/first_dbt_project.yml index 9ad815875900..eb867a236e5c 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/first_dbt_project.yml +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/first_dbt_project.yml @@ -4,40 +4,40 @@ # Name your package! Package names should contain only lowercase characters # and underscores. A good package name should reflect your organization's # name or the intended use of these models -name: 'airbyte_utils' -version: '1.0' +name: "airbyte_utils" +version: "1.0" config-version: 2 # This setting configures which "profile" dbt uses for this project. Profiles contain # database connection information, and should be configured in the ~/.dbt/profiles.yml file -profile: 'normalize' +profile: "normalize" # These configurations specify where dbt should look for different types of files. -# The `source-paths` config, for example, states that source models can be found +# The `model-paths` config, for example, states that source models can be found # in the "models/" directory. You probably won't need to change these! 
-source-paths: ["models"] +model-paths: ["models"] docs-paths: ["docs"] analysis-paths: ["analysis"] test-paths: ["tests"] -data-paths: ["data"] +seed-paths: ["data"] macro-paths: ["macros"] -target-path: "../build" # directory which will store compiled SQL files -log-path: "../logs" # directory which will store DBT logs -modules-path: "/tmp/dbt_modules" # directory which will store external DBT dependencies +target-path: "../build" # directory which will store compiled SQL files +log-path: "../logs" # directory which will store DBT logs +packages-install-path: "/tmp/dbt_modules" # directory which will store external DBT dependencies -clean-targets: # directories to be removed by `dbt clean` - - "build" - - "dbt_modules" +clean-targets: # directories to be removed by `dbt clean` + - "build" + - "dbt_modules" quoting: database: true -# Temporarily disabling the behavior of the ExtendedNameTransformer on table/schema names, see (issue #1785) -# all schemas should be unquoted + # Temporarily disabling the behavior of the ExtendedNameTransformer on table/schema names, see (issue #1785) + # all schemas should be unquoted schema: false identifier: true -# You can define configurations for models in the `source-paths` directory here. +# You can define configurations for models in the `model-paths` directory here. # Using these configurations, you can enable or disable models, change how they # are materialized, and more! models: @@ -60,4 +60,4 @@ models: dispatch: - macro_namespace: dbt_utils - search_order: ['airbyte_utils', 'dbt_utils'] + search_order: ["airbyte_utils", "dbt_utils"] diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/first_output/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/first_output/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql index e2fb4b8024b0..14ff0512e8af 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/first_output/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/first_output/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql @@ -6,6 +6,7 @@ compound sortkey(_airbyte_active_row,_airbyte_unique_key_scd,_airbyte_emitted_at) + as ( -- depends_on: ref('dedup_exchange_rate_stg') diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/first_output/airbyte_incremental/test_normalization/dedup_exchange_rate.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/first_output/airbyte_incremental/test_normalization/dedup_exchange_rate.sql index 4b8fa21709fb..b7670479fa0c 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/first_output/airbyte_incremental/test_normalization/dedup_exchange_rate.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/first_output/airbyte_incremental/test_normalization/dedup_exchange_rate.sql @@ -6,6 +6,7 @@ compound 
sortkey(_airbyte_unique_key,_airbyte_emitted_at) + as ( -- Final base SQL model diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/first_output/airbyte_tables/test_normalization/exchange_rate.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/first_output/airbyte_tables/test_normalization/exchange_rate.sql index 0302e318b718..0d13846cdfd4 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/first_output/airbyte_tables/test_normalization/exchange_rate.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/first_output/airbyte_tables/test_normalization/exchange_rate.sql @@ -5,6 +5,7 @@ compound sortkey(_airbyte_emitted_at) + as ( with __dbt__cte__exchange_rate_ab1 as ( diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/second_output/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/second_output/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql index 53aa2cf9aca6..fd784e2bb931 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/second_output/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/second_output/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql @@ -1,14 +1,15 @@ - delete - from "integrationtests".test_normalization."dedup_exchange_rate_scd" + + delete from "integrationtests".test_normalization."dedup_exchange_rate_scd" where (_airbyte_unique_key_scd) in ( select (_airbyte_unique_key_scd) from "dedup_exchange_rate_scd__dbt_tmp" ); + insert into "integrationtests".test_normalization."dedup_exchange_rate_scd" ("_airbyte_unique_key", "_airbyte_unique_key_scd", "id", "currency", "date", "timestamp_col", "hkd@spéçiäl & characters", "hkd_special___characters", "nzd", "usd", "_airbyte_start_at", "_airbyte_end_at", "_airbyte_active_row", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_dedup_exchange_rate_hashid") ( - select "_airbyte_unique_key", "_airbyte_unique_key_scd", "id", "currency", "date", "timestamp_col", "hkd@spéçiäl & characters", "hkd_special___characters", "nzd", "usd", "_airbyte_start_at", "_airbyte_end_at", "_airbyte_active_row", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_dedup_exchange_rate_hashid" - from "dedup_exchange_rate_scd__dbt_tmp" - ); + select "_airbyte_unique_key", "_airbyte_unique_key_scd", "id", "currency", "date", "timestamp_col", "hkd@spéçiäl & characters", "hkd_special___characters", "nzd", "usd", "_airbyte_start_at", "_airbyte_end_at", "_airbyte_active_row", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_dedup_exchange_rate_hashid" + from "dedup_exchange_rate_scd__dbt_tmp" + ) \ No newline at end of file diff --git 
a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/second_output/airbyte_incremental/test_normalization/dedup_exchange_rate.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/second_output/airbyte_incremental/test_normalization/dedup_exchange_rate.sql index 977b187c317b..0cda4f2641d5 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/second_output/airbyte_incremental/test_normalization/dedup_exchange_rate.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/second_output/airbyte_incremental/test_normalization/dedup_exchange_rate.sql @@ -1,14 +1,15 @@ - delete - from "integrationtests".test_normalization."dedup_exchange_rate" + + delete from "integrationtests".test_normalization."dedup_exchange_rate" where (_airbyte_unique_key) in ( select (_airbyte_unique_key) from "dedup_exchange_rate__dbt_tmp" ); + insert into "integrationtests".test_normalization."dedup_exchange_rate" ("_airbyte_unique_key", "id", "currency", "date", "timestamp_col", "hkd@spéçiäl & characters", "hkd_special___characters", "nzd", "usd", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_dedup_exchange_rate_hashid") ( - select "_airbyte_unique_key", "id", "currency", "date", "timestamp_col", "hkd@spéçiäl & characters", "hkd_special___characters", "nzd", "usd", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_dedup_exchange_rate_hashid" - from "dedup_exchange_rate__dbt_tmp" - ); + select "_airbyte_unique_key", "id", "currency", "date", "timestamp_col", "hkd@spéçiäl & characters", "hkd_special___characters", "nzd", "usd", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_dedup_exchange_rate_hashid" + from "dedup_exchange_rate__dbt_tmp" + ) \ No newline at end of file diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/second_output/airbyte_tables/test_normalization/exchange_rate.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/second_output/airbyte_tables/test_normalization/exchange_rate.sql index 0302e318b718..0d13846cdfd4 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/second_output/airbyte_tables/test_normalization/exchange_rate.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/second_output/airbyte_tables/test_normalization/exchange_rate.sql @@ -5,6 +5,7 @@ compound sortkey(_airbyte_emitted_at) + as ( with __dbt__cte__exchange_rate_ab1 as ( diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/third_output/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/third_output/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql index 35192e5d2a73..0e0d5f46f449 100644 --- 
a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/third_output/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/third_output/airbyte_incremental/scd/test_normalization/dedup_exchange_rate_scd.sql @@ -1,14 +1,15 @@ - delete - from "integrationtests".test_normalization."dedup_exchange_rate_scd" + + delete from "integrationtests".test_normalization."dedup_exchange_rate_scd" where (_airbyte_unique_key_scd) in ( select (_airbyte_unique_key_scd) from "dedup_exchange_rate_scd__dbt_tmp" ); + - insert into "integrationtests".test_normalization."dedup_exchange_rate_scd" ("_airbyte_unique_key", "_airbyte_unique_key_scd", "currency", "date", "timestamp_col", "hkd@spéçiäl & characters", "nzd", "usd", "_airbyte_start_at", "_airbyte_end_at", "_airbyte_active_row", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_dedup_exchange_rate_hashid", "new_column", "id") + insert into "integrationtests".test_normalization."dedup_exchange_rate_scd" ("_airbyte_unique_key", "_airbyte_unique_key_scd", "id", "currency", "new_column", "date", "timestamp_col", "hkd@spéçiäl & characters", "nzd", "usd", "_airbyte_start_at", "_airbyte_end_at", "_airbyte_active_row", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_dedup_exchange_rate_hashid") ( - select "_airbyte_unique_key", "_airbyte_unique_key_scd", "currency", "date", "timestamp_col", "hkd@spéçiäl & characters", "nzd", "usd", "_airbyte_start_at", "_airbyte_end_at", "_airbyte_active_row", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_dedup_exchange_rate_hashid", "new_column", "id" - from "dedup_exchange_rate_scd__dbt_tmp" - ); + select "_airbyte_unique_key", "_airbyte_unique_key_scd", "id", "currency", "new_column", "date", "timestamp_col", "hkd@spéçiäl & characters", "nzd", "usd", "_airbyte_start_at", "_airbyte_end_at", "_airbyte_active_row", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_dedup_exchange_rate_hashid" + from "dedup_exchange_rate_scd__dbt_tmp" + ) \ No newline at end of file diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/third_output/airbyte_incremental/test_normalization/dedup_exchange_rate.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/third_output/airbyte_incremental/test_normalization/dedup_exchange_rate.sql index 47aaec909812..d506183c78f7 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/third_output/airbyte_incremental/test_normalization/dedup_exchange_rate.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/third_output/airbyte_incremental/test_normalization/dedup_exchange_rate.sql @@ -1,14 +1,15 @@ - delete - from "integrationtests".test_normalization."dedup_exchange_rate" + + delete from "integrationtests".test_normalization."dedup_exchange_rate" where (_airbyte_unique_key) in ( select (_airbyte_unique_key) from "dedup_exchange_rate__dbt_tmp" ); + - insert into "integrationtests".test_normalization."dedup_exchange_rate" ("_airbyte_unique_key", "currency", "date", "timestamp_col", "hkd@spéçiäl & 
characters", "nzd", "usd", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_dedup_exchange_rate_hashid", "new_column", "id") + insert into "integrationtests".test_normalization."dedup_exchange_rate" ("_airbyte_unique_key", "id", "currency", "new_column", "date", "timestamp_col", "hkd@spéçiäl & characters", "nzd", "usd", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_dedup_exchange_rate_hashid") ( - select "_airbyte_unique_key", "currency", "date", "timestamp_col", "hkd@spéçiäl & characters", "nzd", "usd", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_dedup_exchange_rate_hashid", "new_column", "id" - from "dedup_exchange_rate__dbt_tmp" - ); + select "_airbyte_unique_key", "id", "currency", "new_column", "date", "timestamp_col", "hkd@spéçiäl & characters", "nzd", "usd", "_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_normalized_at", "_airbyte_dedup_exchange_rate_hashid" + from "dedup_exchange_rate__dbt_tmp" + ) \ No newline at end of file diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/third_output/airbyte_tables/test_normalization/exchange_rate.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/third_output/airbyte_tables/test_normalization/exchange_rate.sql index b459b001a531..5fb76893d793 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/third_output/airbyte_tables/test_normalization/exchange_rate.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/redshift/test_simple_streams/third_output/airbyte_tables/test_normalization/exchange_rate.sql @@ -5,6 +5,7 @@ compound sortkey(_airbyte_emitted_at) + as ( with __dbt__cte__exchange_rate_ab1 as ( diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_nested_streams/dbt_project.yml b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_nested_streams/dbt_project.yml index c22ddc2282c2..79e4498a7548 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_nested_streams/dbt_project.yml +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_nested_streams/dbt_project.yml @@ -13,18 +13,18 @@ config-version: 2 profile: "normalize" # These configurations specify where dbt should look for different types of files. -# The `source-paths` config, for example, states that source models can be found +# The `model-paths` config, for example, states that source models can be found # in the "models/" directory. You probably won't need to change these! 
-source-paths: ["models"] +model-paths: ["models"] docs-paths: ["docs"] analysis-paths: ["analysis"] test-paths: ["tests"] -data-paths: ["data"] +seed-paths: ["data"] macro-paths: ["macros"] target-path: "../build" # directory which will store compiled SQL files log-path: "../logs" # directory which will store DBT logs -modules-path: "/tmp/dbt_modules" # directory which will store external DBT dependencies +packages-install-path: "/tmp/dbt_modules" # directory which will store external DBT dependencies clean-targets: # directories to be removed by `dbt clean` - "build" @@ -37,7 +37,7 @@ quoting: schema: false identifier: true -# You can define configurations for models in the `source-paths` directory here. +# You can define configurations for models in the `model-paths` directory here. # Using these configurations, you can enable or disable models, change how they # are materialized, and more! models: diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_simple_streams/dbt_project.yml b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_simple_streams/dbt_project.yml index c22ddc2282c2..79e4498a7548 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_simple_streams/dbt_project.yml +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_simple_streams/dbt_project.yml @@ -13,18 +13,18 @@ config-version: 2 profile: "normalize" # These configurations specify where dbt should look for different types of files. -# The `source-paths` config, for example, states that source models can be found +# The `model-paths` config, for example, states that source models can be found # in the "models/" directory. You probably won't need to change these! -source-paths: ["models"] +model-paths: ["models"] docs-paths: ["docs"] analysis-paths: ["analysis"] test-paths: ["tests"] -data-paths: ["data"] +seed-paths: ["data"] macro-paths: ["macros"] target-path: "../build" # directory which will store compiled SQL files log-path: "../logs" # directory which will store DBT logs -modules-path: "/tmp/dbt_modules" # directory which will store external DBT dependencies +packages-install-path: "/tmp/dbt_modules" # directory which will store external DBT dependencies clean-targets: # directories to be removed by `dbt clean` - "build" @@ -37,7 +37,7 @@ quoting: schema: false identifier: true -# You can define configurations for models in the `source-paths` directory here. +# You can define configurations for models in the `model-paths` directory here. # Using these configurations, you can enable or disable models, change how they # are materialized, and more! 
models: diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_simple_streams/first_output/airbyte_views/TEST_NORMALIZATION/DEDUP_EXCHANGE_RATE_STG.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_simple_streams/first_output/airbyte_views/TEST_NORMALIZATION/DEDUP_EXCHANGE_RATE_STG.sql index ec267a669005..6e3a836710ab 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_simple_streams/first_output/airbyte_views/TEST_NORMALIZATION/DEDUP_EXCHANGE_RATE_STG.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_simple_streams/first_output/airbyte_views/TEST_NORMALIZATION/DEDUP_EXCHANGE_RATE_STG.sql @@ -1,5 +1,7 @@ - create or replace view "AIRBYTE_DATABASE"._AIRBYTE_TEST_NORMALIZATION."DEDUP_EXCHANGE_RATE_STG" as ( + create or replace view "AIRBYTE_DATABASE"._AIRBYTE_TEST_NORMALIZATION."DEDUP_EXCHANGE_RATE_STG" + + as ( with __dbt__cte__DEDUP_EXCHANGE_RATE_AB1 as ( diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_simple_streams/first_output/airbyte_views/TEST_NORMALIZATION/MULTIPLE_COLUMN_NAMES_CONFLICTS_STG.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_simple_streams/first_output/airbyte_views/TEST_NORMALIZATION/MULTIPLE_COLUMN_NAMES_CONFLICTS_STG.sql index c9a26f11445b..0d8620316af6 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_simple_streams/first_output/airbyte_views/TEST_NORMALIZATION/MULTIPLE_COLUMN_NAMES_CONFLICTS_STG.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_simple_streams/first_output/airbyte_views/TEST_NORMALIZATION/MULTIPLE_COLUMN_NAMES_CONFLICTS_STG.sql @@ -1,5 +1,7 @@ - create or replace view "AIRBYTE_DATABASE"._AIRBYTE_TEST_NORMALIZATION."MULTIPLE_COLUMN_NAMES_CONFLICTS_STG" as ( + create or replace view "AIRBYTE_DATABASE"._AIRBYTE_TEST_NORMALIZATION."MULTIPLE_COLUMN_NAMES_CONFLICTS_STG" + + as ( with __dbt__cte__MULTIPLE_COLUMN_NAMES_CONFLICTS_AB1 as ( diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_simple_streams/second_output/airbyte_views/TEST_NORMALIZATION/DEDUP_EXCHANGE_RATE_STG.sql b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_simple_streams/second_output/airbyte_views/TEST_NORMALIZATION/DEDUP_EXCHANGE_RATE_STG.sql index ec267a669005..6e3a836710ab 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_simple_streams/second_output/airbyte_views/TEST_NORMALIZATION/DEDUP_EXCHANGE_RATE_STG.sql +++ b/airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_simple_streams/second_output/airbyte_views/TEST_NORMALIZATION/DEDUP_EXCHANGE_RATE_STG.sql @@ -1,5 +1,7 @@ - create or replace view "AIRBYTE_DATABASE"._AIRBYTE_TEST_NORMALIZATION."DEDUP_EXCHANGE_RATE_STG" as ( + create or replace view "AIRBYTE_DATABASE"._AIRBYTE_TEST_NORMALIZATION."DEDUP_EXCHANGE_RATE_STG" + + as ( with __dbt__cte__DEDUP_EXCHANGE_RATE_AB1 as ( diff --git a/airbyte-integrations/bases/base-normalization/integration_tests/test_normalization.py 
b/airbyte-integrations/bases/base-normalization/integration_tests/test_normalization.py index 0af945ede630..da21a548eff5 100644 --- a/airbyte-integrations/bases/base-normalization/integration_tests/test_normalization.py +++ b/airbyte-integrations/bases/base-normalization/integration_tests/test_normalization.py @@ -249,8 +249,8 @@ def setup_schema_change_data(destination_type: DestinationType, test_resource_na dbt_test_utils.copy_replace( os.path.join(test_root_dir, "first_dbt_project.yml"), os.path.join(test_root_dir, "dbt_project.yml"), - pattern=r'source-paths: \["models"\]', - replace_value='source-paths: ["modified_models"]', + pattern=r'model-paths: \["models"\]', + replace_value='model-paths: ["modified_models"]', ) # Run a sync to update raw tables in destinations return run_destination_process(destination_type, test_root_dir, message_file, "destination_catalog.json") diff --git a/airbyte-integrations/bases/base-normalization/mssql.Dockerfile b/airbyte-integrations/bases/base-normalization/mssql.Dockerfile index e379782b5019..a5f30879f862 100644 --- a/airbyte-integrations/bases/base-normalization/mssql.Dockerfile +++ b/airbyte-integrations/bases/base-normalization/mssql.Dockerfile @@ -1,4 +1,4 @@ -FROM fishtownanalytics/dbt:0.21.1 +FROM fishtownanalytics/dbt:1.0.0 COPY --from=airbyte/base-airbyte-protocol-python:0.1.1 /airbyte /airbyte # Install curl & gnupg dependencies @@ -53,8 +53,8 @@ RUN pip install . WORKDIR /airbyte/normalization_code RUN pip install . -# Based of https://github.com/dbt-msft/dbt-sqlserver/tree/v0.21.1 -RUN pip install dbt-sqlserver==0.21.1 +# Based off https://github.com/dbt-msft/dbt-sqlserver/tree/v1.0.0 +RUN pip install dbt-sqlserver==1.0.0 WORKDIR /airbyte/normalization_code/dbt-template/ # Download external dbt dependencies diff --git a/airbyte-integrations/bases/base-normalization/mysql.Dockerfile b/airbyte-integrations/bases/base-normalization/mysql.Dockerfile index f0f1d4124fa9..e0928a591bab 100644 --- a/airbyte-integrations/bases/base-normalization/mysql.Dockerfile +++ b/airbyte-integrations/bases/base-normalization/mysql.Dockerfile @@ -1,3 +1,4 @@ +# As of today, dbt-mysql doesn't support 1.0.0 FROM fishtownanalytics/dbt:0.19.0 COPY --from=airbyte/base-airbyte-protocol-python:0.1.1 /airbyte /airbyte diff --git a/airbyte-integrations/bases/base-normalization/oracle.Dockerfile b/airbyte-integrations/bases/base-normalization/oracle.Dockerfile index da6749f1d266..236e36c2ac23 100644 --- a/airbyte-integrations/bases/base-normalization/oracle.Dockerfile +++ b/airbyte-integrations/bases/base-normalization/oracle.Dockerfile @@ -1,3 +1,4 @@ +# As of today, dbt-oracle doesn't support 1.0.0 FROM fishtownanalytics/dbt:0.19.1 USER root diff --git a/airbyte-integrations/bases/base-normalization/setup.py b/airbyte-integrations/bases/base-normalization/setup.py index cab07e57764a..a8da6449374e 100644 --- a/airbyte-integrations/bases/base-normalization/setup.py +++ b/airbyte-integrations/bases/base-normalization/setup.py @@ -12,11 +12,7 @@ author_email="contact@airbyte.io", url="https://github.com/airbytehq/airbyte", packages=setuptools.find_packages(), - install_requires=[ - "airbyte-protocol", - "pyyaml", - "jinja2", - ], + install_requires=["airbyte-protocol", "pyyaml", "jinja2", "types-PyYAML"], package_data={"": ["*.yml"]}, setup_requires=["pytest-runner"], entry_points={ diff --git a/airbyte-integrations/bases/base-normalization/snowflake.Dockerfile b/airbyte-integrations/bases/base-normalization/snowflake.Dockerfile index aa7349270dd3..8fd374047988 100644 ---
a/airbyte-integrations/bases/base-normalization/snowflake.Dockerfile +++ b/airbyte-integrations/bases/base-normalization/snowflake.Dockerfile @@ -1,4 +1,4 @@ -FROM fishtownanalytics/dbt:0.21.1 +FROM fishtownanalytics/dbt:1.0.0 COPY --from=airbyte/base-airbyte-protocol-python:0.1.1 /airbyte /airbyte # Install SSH Tunneling dependencies @@ -29,5 +29,5 @@ WORKDIR /airbyte ENV AIRBYTE_ENTRYPOINT "/airbyte/entrypoint.sh" ENTRYPOINT ["/airbyte/entrypoint.sh"] -LABEL io.airbyte.version=0.1.62 +LABEL io.airbyte.version=0.1.70 LABEL io.airbyte.name=airbyte/normalization-snowflake diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/normalization/NormalizationRunnerFactory.java b/airbyte-workers/src/main/java/io/airbyte/workers/normalization/NormalizationRunnerFactory.java index f7a23168e384..1ab4e0998190 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/normalization/NormalizationRunnerFactory.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/normalization/NormalizationRunnerFactory.java @@ -14,7 +14,7 @@ public class NormalizationRunnerFactory { public static final String BASE_NORMALIZATION_IMAGE_NAME = "airbyte/normalization"; - public static final String NORMALIZATION_VERSION = "0.1.69"; + public static final String NORMALIZATION_VERSION = "0.1.70"; static final Map> NORMALIZATION_MAPPING = ImmutableMap.>builder() diff --git a/docs/understanding-airbyte/basic-normalization.md b/docs/understanding-airbyte/basic-normalization.md index cb72ef1142a2..6dd81a13cade 100644 --- a/docs/understanding-airbyte/basic-normalization.md +++ b/docs/understanding-airbyte/basic-normalization.md @@ -350,6 +350,8 @@ Therefore, in order to "upgrade" to the desired normalization version, you need | Airbyte Version | Normalization Version | Date | Pull Request | Subject | |:----------------| :--- | :--- | :--- | :--- | +| 0.35.49-alpha | 0.1.70 | 2022-03-11 | [\#11051](https://github.com/airbytehq/airbyte/pull/11051) | Upgrade dbt to 1.0.0 (except for MySQL and Oracle) | +| 0.35.45-alpha | 0.1.69 | 2022-03-04 | [\#10754](https://github.com/airbytehq/airbyte/pull/10754) | Enable Clickhouse normalization over SSL | | 0.35.32-alpha | 0.1.68 | 2022-02-20 | [\#10485](https://github.com/airbytehq/airbyte/pull/10485) | Fix row size too large for table with numerous `string` fields | | | 0.1.66 | 2022-02-04 | [\#9341](https://github.com/airbytehq/airbyte/pull/9341) | Fix normalization for bigquery datasetId and tables | | 0.35.13-alpha | 0.1.65 | 2021-01-28 | [\#9846](https://github.com/airbytehq/airbyte/pull/9846) | Tweak dbt multi-thread parameter down | From 94065597a5a91dd5cb3623b71c245f6496957f0d Mon Sep 17 00:00:00 2001 From: Octavia Squidington III <90398440+octavia-squidington-iii@users.noreply.github.com> Date: Sat, 12 Mar 2022 01:53:26 +0100 Subject: [PATCH 29/38] Bump Airbyte version from 0.35.52-alpha to 0.35.53-alpha (#11085) Co-authored-by: benmoriceau --- .bumpversion.cfg | 2 +- .env | 2 +- airbyte-bootloader/Dockerfile | 4 ++-- airbyte-container-orchestrator/Dockerfile | 6 +++--- airbyte-metrics/reporter/Dockerfile | 4 ++-- airbyte-scheduler/app/Dockerfile | 4 ++-- airbyte-server/Dockerfile | 4 ++-- airbyte-webapp/package-lock.json | 4 ++-- airbyte-webapp/package.json | 2 +- airbyte-workers/Dockerfile | 4 ++-- charts/airbyte/Chart.yaml | 2 +- charts/airbyte/README.md | 10 +++++----- charts/airbyte/values.yaml | 10 +++++----- docs/operator-guides/upgrading-airbyte.md | 2 +- kube/overlays/stable-with-resource-limits/.env | 2 +- .../stable-with-resource-limits/kustomization.yaml | 12 
++++++------ kube/overlays/stable/.env | 2 +- kube/overlays/stable/kustomization.yaml | 12 ++++++------ 18 files changed, 44 insertions(+), 44 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 1f97b7fd455d..4dbeac162e2d 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.35.52-alpha +current_version = 0.35.53-alpha commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(\-[a-z]+)? diff --git a/.env b/.env index bd361ceff14b..c372b59b2273 100644 --- a/.env +++ b/.env @@ -10,7 +10,7 @@ ### SHARED ### -VERSION=0.35.52-alpha +VERSION=0.35.53-alpha # When using the airbyte-db via default docker image CONFIG_ROOT=/data diff --git a/airbyte-bootloader/Dockerfile b/airbyte-bootloader/Dockerfile index 61d505732da5..f03c4b65b790 100644 --- a/airbyte-bootloader/Dockerfile +++ b/airbyte-bootloader/Dockerfile @@ -5,6 +5,6 @@ ENV APPLICATION airbyte-bootloader WORKDIR /app -ADD bin/${APPLICATION}-0.35.52-alpha.tar /app +ADD bin/${APPLICATION}-0.35.53-alpha.tar /app -ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.52-alpha/bin/${APPLICATION}"] +ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.53-alpha/bin/${APPLICATION}"] diff --git a/airbyte-container-orchestrator/Dockerfile b/airbyte-container-orchestrator/Dockerfile index 0220ffe424d5..c6b26893c9dd 100644 --- a/airbyte-container-orchestrator/Dockerfile +++ b/airbyte-container-orchestrator/Dockerfile @@ -26,12 +26,12 @@ RUN echo "deb [signed-by=/usr/share/keyrings/kubernetes-archive-keyring.gpg] htt RUN apt-get update && apt-get install -y kubectl ENV APPLICATION airbyte-container-orchestrator -ENV AIRBYTE_ENTRYPOINT "/app/${APPLICATION}-0.35.52-alpha/bin/${APPLICATION}" +ENV AIRBYTE_ENTRYPOINT "/app/${APPLICATION}-0.35.53-alpha/bin/${APPLICATION}" WORKDIR /app # Move orchestrator app -ADD bin/${APPLICATION}-0.35.52-alpha.tar /app +ADD bin/${APPLICATION}-0.35.53-alpha.tar /app # wait for upstream dependencies to become available before starting server -ENTRYPOINT ["/bin/bash", "-c", "/app/${APPLICATION}-0.35.52-alpha/bin/${APPLICATION}"] +ENTRYPOINT ["/bin/bash", "-c", "/app/${APPLICATION}-0.35.53-alpha/bin/${APPLICATION}"] diff --git a/airbyte-metrics/reporter/Dockerfile b/airbyte-metrics/reporter/Dockerfile index 5fb87cf843bc..058c36d2a2f3 100644 --- a/airbyte-metrics/reporter/Dockerfile +++ b/airbyte-metrics/reporter/Dockerfile @@ -5,7 +5,7 @@ ENV APPLICATION airbyte-metrics-reporter WORKDIR /app -ADD bin/${APPLICATION}-0.35.52-alpha.tar /app +ADD bin/${APPLICATION}-0.35.53-alpha.tar /app # wait for upstream dependencies to become available before starting server -ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.52-alpha/bin/${APPLICATION}"] +ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.53-alpha/bin/${APPLICATION}"] diff --git a/airbyte-scheduler/app/Dockerfile b/airbyte-scheduler/app/Dockerfile index 0a272a39ef69..378a7b1ae12d 100644 --- a/airbyte-scheduler/app/Dockerfile +++ b/airbyte-scheduler/app/Dockerfile @@ -5,7 +5,7 @@ ENV APPLICATION airbyte-scheduler WORKDIR /app -ADD bin/${APPLICATION}-0.35.52-alpha.tar /app +ADD bin/${APPLICATION}-0.35.53-alpha.tar /app # wait for upstream dependencies to become available before starting server -ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.52-alpha/bin/${APPLICATION}"] +ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.53-alpha/bin/${APPLICATION}"] diff --git a/airbyte-server/Dockerfile b/airbyte-server/Dockerfile index eeac4953e2fe..8c65252cac31 100644 --- a/airbyte-server/Dockerfile +++ 
b/airbyte-server/Dockerfile @@ -7,7 +7,7 @@ ENV APPLICATION airbyte-server WORKDIR /app -ADD bin/${APPLICATION}-0.35.52-alpha.tar /app +ADD bin/${APPLICATION}-0.35.53-alpha.tar /app # wait for upstream dependencies to become available before starting server -ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.52-alpha/bin/${APPLICATION}"] +ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.53-alpha/bin/${APPLICATION}"] diff --git a/airbyte-webapp/package-lock.json b/airbyte-webapp/package-lock.json index c0a5fb9e8e46..fd81a0d7974d 100644 --- a/airbyte-webapp/package-lock.json +++ b/airbyte-webapp/package-lock.json @@ -1,12 +1,12 @@ { "name": "airbyte-webapp", - "version": "0.35.52-alpha", + "version": "0.35.53-alpha", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "airbyte-webapp", - "version": "0.35.52-alpha", + "version": "0.35.53-alpha", "dependencies": { "@fortawesome/fontawesome-svg-core": "^1.2.36", "@fortawesome/free-brands-svg-icons": "^5.15.4", diff --git a/airbyte-webapp/package.json b/airbyte-webapp/package.json index b6e60790f84e..06e30eaabdf2 100644 --- a/airbyte-webapp/package.json +++ b/airbyte-webapp/package.json @@ -1,6 +1,6 @@ { "name": "airbyte-webapp", - "version": "0.35.52-alpha", + "version": "0.35.53-alpha", "private": true, "engines": { "node": ">=16.0.0" diff --git a/airbyte-workers/Dockerfile b/airbyte-workers/Dockerfile index 2f2bc9df128f..14ac95da665b 100644 --- a/airbyte-workers/Dockerfile +++ b/airbyte-workers/Dockerfile @@ -30,7 +30,7 @@ ENV APPLICATION airbyte-workers WORKDIR /app # Move worker app -ADD bin/${APPLICATION}-0.35.52-alpha.tar /app +ADD bin/${APPLICATION}-0.35.53-alpha.tar /app # wait for upstream dependencies to become available before starting server -ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.52-alpha/bin/${APPLICATION}"] +ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.53-alpha/bin/${APPLICATION}"] diff --git a/charts/airbyte/Chart.yaml b/charts/airbyte/Chart.yaml index c7add842b161..6032a5fcace3 100644 --- a/charts/airbyte/Chart.yaml +++ b/charts/airbyte/Chart.yaml @@ -21,7 +21,7 @@ version: 0.3.0 # incremented each time you make changes to the application. Versions are not expected to # follow Semantic Versioning. They should reflect the version the application is using. # It is recommended to use it with quotes. -appVersion: "0.35.52-alpha" +appVersion: "0.35.53-alpha" dependencies: - name: common diff --git a/charts/airbyte/README.md b/charts/airbyte/README.md index 640e671de08e..4b3975fc6540 100644 --- a/charts/airbyte/README.md +++ b/charts/airbyte/README.md @@ -29,7 +29,7 @@ | `webapp.replicaCount` | Number of webapp replicas | `1` | | `webapp.image.repository` | The repository to use for the airbyte webapp image. | `airbyte/webapp` | | `webapp.image.pullPolicy` | the pull policy to use for the airbyte webapp image | `IfNotPresent` | -| `webapp.image.tag` | The airbyte webapp image tag. Defaults to the chart's AppVersion | `0.35.52-alpha` | +| `webapp.image.tag` | The airbyte webapp image tag. Defaults to the chart's AppVersion | `0.35.53-alpha` | | `webapp.podAnnotations` | Add extra annotations to the webapp pod(s) | `{}` | | `webapp.containerSecurityContext` | Security context for the container | `{}` | | `webapp.livenessProbe.enabled` | Enable livenessProbe on the webapp | `true` | @@ -71,7 +71,7 @@ | `scheduler.replicaCount` | Number of scheduler replicas | `1` | | `scheduler.image.repository` | The repository to use for the airbyte scheduler image. 
| `airbyte/scheduler` | | `scheduler.image.pullPolicy` | the pull policy to use for the airbyte scheduler image | `IfNotPresent` | -| `scheduler.image.tag` | The airbyte scheduler image tag. Defaults to the chart's AppVersion | `0.35.52-alpha` | +| `scheduler.image.tag` | The airbyte scheduler image tag. Defaults to the chart's AppVersion | `0.35.53-alpha` | | `scheduler.podAnnotations` | Add extra annotations to the scheduler pod | `{}` | | `scheduler.resources.limits` | The resources limits for the scheduler container | `{}` | | `scheduler.resources.requests` | The requested resources for the scheduler container | `{}` | @@ -118,7 +118,7 @@ | `server.replicaCount` | Number of server replicas | `1` | | `server.image.repository` | The repository to use for the airbyte server image. | `airbyte/server` | | `server.image.pullPolicy` | the pull policy to use for the airbyte server image | `IfNotPresent` | -| `server.image.tag` | The airbyte server image tag. Defaults to the chart's AppVersion | `0.35.52-alpha` | +| `server.image.tag` | The airbyte server image tag. Defaults to the chart's AppVersion | `0.35.53-alpha` | | `server.podAnnotations` | Add extra annotations to the server pod | `{}` | | `server.containerSecurityContext` | Security context for the container | `{}` | | `server.livenessProbe.enabled` | Enable livenessProbe on the server | `true` | @@ -156,7 +156,7 @@ | `worker.replicaCount` | Number of worker replicas | `1` | | `worker.image.repository` | The repository to use for the airbyte worker image. | `airbyte/worker` | | `worker.image.pullPolicy` | the pull policy to use for the airbyte worker image | `IfNotPresent` | -| `worker.image.tag` | The airbyte worker image tag. Defaults to the chart's AppVersion | `0.35.52-alpha` | +| `worker.image.tag` | The airbyte worker image tag. Defaults to the chart's AppVersion | `0.35.53-alpha` | | `worker.podAnnotations` | Add extra annotations to the worker pod(s) | `{}` | | `worker.containerSecurityContext` | Security context for the container | `{}` | | `worker.livenessProbe.enabled` | Enable livenessProbe on the worker | `true` | @@ -188,7 +188,7 @@ | ----------------------------- | -------------------------------------------------------------------- | -------------------- | | `bootloader.image.repository` | The repository to use for the airbyte bootloader image. | `airbyte/bootloader` | | `bootloader.image.pullPolicy` | the pull policy to use for the airbyte bootloader image | `IfNotPresent` | -| `bootloader.image.tag` | The airbyte bootloader image tag. Defaults to the chart's AppVersion | `0.35.52-alpha` | +| `bootloader.image.tag` | The airbyte bootloader image tag. 
Defaults to the chart's AppVersion | `0.35.53-alpha` | ### Temporal parameters diff --git a/charts/airbyte/values.yaml b/charts/airbyte/values.yaml index 9f3c106b2ef0..788b3b3c43df 100644 --- a/charts/airbyte/values.yaml +++ b/charts/airbyte/values.yaml @@ -43,7 +43,7 @@ webapp: image: repository: airbyte/webapp pullPolicy: IfNotPresent - tag: 0.35.52-alpha + tag: 0.35.53-alpha ## @param webapp.podAnnotations [object] Add extra annotations to the webapp pod(s) ## @@ -209,7 +209,7 @@ scheduler: image: repository: airbyte/scheduler pullPolicy: IfNotPresent - tag: 0.35.52-alpha + tag: 0.35.53-alpha ## @param scheduler.podAnnotations [object] Add extra annotations to the scheduler pod ## @@ -440,7 +440,7 @@ server: image: repository: airbyte/server pullPolicy: IfNotPresent - tag: 0.35.52-alpha + tag: 0.35.53-alpha ## @param server.podAnnotations [object] Add extra annotations to the server pod ## @@ -581,7 +581,7 @@ worker: image: repository: airbyte/worker pullPolicy: IfNotPresent - tag: 0.35.52-alpha + tag: 0.35.53-alpha ## @param worker.podAnnotations [object] Add extra annotations to the worker pod(s) ## @@ -699,7 +699,7 @@ bootloader: image: repository: airbyte/bootloader pullPolicy: IfNotPresent - tag: 0.35.52-alpha + tag: 0.35.53-alpha ## @param bootloader.podAnnotations [object] Add extra annotations to the bootloader pod ## diff --git a/docs/operator-guides/upgrading-airbyte.md b/docs/operator-guides/upgrading-airbyte.md index cf731df7c3d5..931559f4e9ef 100644 --- a/docs/operator-guides/upgrading-airbyte.md +++ b/docs/operator-guides/upgrading-airbyte.md @@ -101,7 +101,7 @@ If you are upgrading from \(i.e. your current version of Airbyte is\) Airbyte ve Here's an example of what it might look like with the values filled in. It assumes that the downloaded `airbyte_archive.tar.gz` is in `/tmp`. 
```bash - docker run --rm -v /tmp:/config airbyte/migration:0.35.52-alpha --\ + docker run --rm -v /tmp:/config airbyte/migration:0.35.53-alpha --\ --input /config/airbyte_archive.tar.gz\ --output /config/airbyte_archive_migrated.tar.gz ``` diff --git a/kube/overlays/stable-with-resource-limits/.env b/kube/overlays/stable-with-resource-limits/.env index fb98c8e0eae6..472e9a81e925 100644 --- a/kube/overlays/stable-with-resource-limits/.env +++ b/kube/overlays/stable-with-resource-limits/.env @@ -1,4 +1,4 @@ -AIRBYTE_VERSION=0.35.52-alpha +AIRBYTE_VERSION=0.35.53-alpha # Airbyte Internal Database, see https://docs.airbyte.io/operator-guides/configuring-airbyte-db DATABASE_HOST=airbyte-db-svc diff --git a/kube/overlays/stable-with-resource-limits/kustomization.yaml b/kube/overlays/stable-with-resource-limits/kustomization.yaml index 01d959cbd377..e746c3862bea 100644 --- a/kube/overlays/stable-with-resource-limits/kustomization.yaml +++ b/kube/overlays/stable-with-resource-limits/kustomization.yaml @@ -8,17 +8,17 @@ bases: images: - name: airbyte/db - newTag: 0.35.52-alpha + newTag: 0.35.53-alpha - name: airbyte/bootloader - newTag: 0.35.52-alpha + newTag: 0.35.53-alpha - name: airbyte/scheduler - newTag: 0.35.52-alpha + newTag: 0.35.53-alpha - name: airbyte/server - newTag: 0.35.52-alpha + newTag: 0.35.53-alpha - name: airbyte/webapp - newTag: 0.35.52-alpha + newTag: 0.35.53-alpha - name: airbyte/worker - newTag: 0.35.52-alpha + newTag: 0.35.53-alpha - name: temporalio/auto-setup newTag: 1.7.0 diff --git a/kube/overlays/stable/.env b/kube/overlays/stable/.env index 298a0caa67bc..022fe045e9e7 100644 --- a/kube/overlays/stable/.env +++ b/kube/overlays/stable/.env @@ -1,4 +1,4 @@ -AIRBYTE_VERSION=0.35.52-alpha +AIRBYTE_VERSION=0.35.53-alpha # Airbyte Internal Database, see https://docs.airbyte.io/operator-guides/configuring-airbyte-db DATABASE_HOST=airbyte-db-svc diff --git a/kube/overlays/stable/kustomization.yaml b/kube/overlays/stable/kustomization.yaml index 26f6258758ba..d469046f8300 100644 --- a/kube/overlays/stable/kustomization.yaml +++ b/kube/overlays/stable/kustomization.yaml @@ -8,17 +8,17 @@ bases: images: - name: airbyte/db - newTag: 0.35.52-alpha + newTag: 0.35.53-alpha - name: airbyte/bootloader - newTag: 0.35.52-alpha + newTag: 0.35.53-alpha - name: airbyte/scheduler - newTag: 0.35.52-alpha + newTag: 0.35.53-alpha - name: airbyte/server - newTag: 0.35.52-alpha + newTag: 0.35.53-alpha - name: airbyte/webapp - newTag: 0.35.52-alpha + newTag: 0.35.53-alpha - name: airbyte/worker - newTag: 0.35.52-alpha + newTag: 0.35.53-alpha - name: temporalio/auto-setup newTag: 1.7.0 From f2d274f51a6b6e9fd3c0c624cdff29d7fae74d08 Mon Sep 17 00:00:00 2001 From: Charles Date: Sat, 12 Mar 2022 15:45:16 -0800 Subject: [PATCH 30/38] add pod name to logs when KubePodProcess fails because pod was not found (#9290) --- .../workers/process/KubePodProcess.java | 52 ++++++++++++------- 1 file changed, 34 insertions(+), 18 deletions(-) diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/process/KubePodProcess.java b/airbyte-workers/src/main/java/io/airbyte/workers/process/KubePodProcess.java index 339277295132..e296dc86a637 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/process/KubePodProcess.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/process/KubePodProcess.java @@ -99,6 +99,7 @@ public class KubePodProcess extends Process implements KubePod { private static final Logger LOGGER = LoggerFactory.getLogger(KubePodProcess.class); + public static final String 
MAIN_CONTAINER_NAME = "main"; private static final String INIT_CONTAINER_NAME = "init"; private static final String DEFAULT_MEMORY_REQUEST = "25Mi"; private static final String DEFAULT_MEMORY_LIMIT = "50Mi"; @@ -117,7 +118,6 @@ public class KubePodProcess extends Process implements KubePod { private static final String TERMINATION_FILE_MAIN = TERMINATION_DIR + "/main"; private static final String TERMINATION_FILE_CHECK = TERMINATION_DIR + "/check"; public static final String SUCCESS_FILE_NAME = "FINISHED_UPLOADING"; - public static final String MAIN_CONTAINER_NAME = "main"; // 143 is the typical SIGTERM exit code. private static final int KILLED_EXIT_CODE = 143; @@ -152,10 +152,10 @@ public class KubePodProcess extends Process implements KubePod { private final int stderrLocalPort; private final ExecutorService executorService; - public static String getPodIP(final KubernetesClient client, final String podName, final String namespace) { - final var pod = client.pods().inNamespace(namespace).withName(podName).get(); + public static String getPodIP(final KubernetesClient client, final String podName, final String podNamespace) { + final var pod = client.pods().inNamespace(podNamespace).withName(podName).get(); if (pod == null) { - throw new RuntimeException("Error: unable to find pod!"); + throw new RuntimeException(prependPodInfo("Error: unable to find pod!", podNamespace, podName)); } return pod.getStatus().getPodIP(); } @@ -617,13 +617,16 @@ public boolean waitFor(final long timeout, final TimeUnit unit) throws Interrupt */ @Override public void destroy() { - LOGGER.info("Destroying Kube process: {}", podDefinition.getMetadata().getName()); + final String podName = podDefinition.getMetadata().getName(); + final String podNamespace = podDefinition.getMetadata().getNamespace(); + + LOGGER.info(prependPodInfo("Destroying Kube process.", podNamespace, podName)); try { fabricClient.resource(podDefinition).withPropagationPolicy(DeletionPropagation.FOREGROUND).delete(); wasKilled.set(true); } finally { close(); - LOGGER.info("Destroyed Kube process: {}", podDefinition.getMetadata().getName()); + LOGGER.info(prependPodInfo("Destroyed Kube process.", podNamespace, podName)); } } @@ -668,7 +671,7 @@ private void close() { KubePortManagerSingleton.getInstance().offer(stdoutLocalPort); KubePortManagerSingleton.getInstance().offer(stderrLocalPort); - LOGGER.debug("Closed {}", podDefinition.getMetadata().getName()); + LOGGER.debug(prependPodInfo("Closed", podDefinition.getMetadata().getNamespace(), podDefinition.getMetadata().getName())); } public static boolean isTerminal(final Pod pod) { @@ -696,27 +699,34 @@ private int getReturnCode(final Pod pod) { return returnCode; } - // Reuse the last status check result to prevent overloading the Kube Api server. + final String podName = pod.getMetadata().getName(); + final String podNamespace = pod.getMetadata().getNamespace(); + + // If there is no return code (see above) and we have checked the status recently, assume the pod is + // still running. We do this to avoid overloading the Kube Api server. if (lastStatusCheck != null && System.currentTimeMillis() - lastStatusCheck < statusCheckInterval.toMillis()) { - throw new IllegalThreadStateException("Kube pod process has not exited yet."); + throw new IllegalThreadStateException(prependPodInfo("Kube pod process has not exited yet. 
(cached)", podNamespace, podName)); } - final var name = pod.getMetadata().getName(); - final Pod refreshedPod = fabricClient.pods().inNamespace(pod.getMetadata().getNamespace()).withName(name).get(); + final Pod refreshedPod = fabricClient.pods().inNamespace(podNamespace).withName(podName).get(); if (refreshedPod == null) { if (wasKilled.get()) { - LOGGER.info("Unable to find pod {} to retrieve exit value. Defaulting to value {}. This is expected if the job was cancelled.", name, - KILLED_EXIT_CODE); + LOGGER.info(prependPodInfo( + String.format("Unable to find pod to retrieve exit value. Defaulting to value %s. This is expected if the job was cancelled.", + KILLED_EXIT_CODE), + podNamespace, podName)); return KILLED_EXIT_CODE; } // If the pod cannot be found and was not killed, it either means 1) the pod was not created // properly 2) this method is incorrectly called. - throw new RuntimeException("Cannot find pod while trying to retrieve exit code. This probably means the Pod was not correctly created."); + throw new RuntimeException( + prependPodInfo("Cannot find pod %s : %s while trying to retrieve exit code. This probably means the pod was not correctly created.", + podNamespace, podName)); } if (!isTerminal(refreshedPod)) { lastStatusCheck = System.currentTimeMillis(); - throw new IllegalThreadStateException("Kube pod process has not exited yet."); + throw new IllegalThreadStateException(prependPodInfo("Kube pod process has not exited yet.", podNamespace, podName)); } final ContainerStatus mainContainerStatus = refreshedPod.getStatus().getContainerStatuses() @@ -725,12 +735,12 @@ private int getReturnCode(final Pod pod) { .collect(MoreCollectors.onlyElement()); if (mainContainerStatus.getState() == null || mainContainerStatus.getState().getTerminated() == null) { - throw new IllegalThreadStateException("Main container in kube pod has not terminated yet."); + throw new IllegalThreadStateException(prependPodInfo("Main container in kube pod has not terminated yet.", podNamespace, podName)); } returnCode = mainContainerStatus.getState().getTerminated().getExitCode(); - LOGGER.info("Exit code for pod {} is {}", name, returnCode); + LOGGER.info(prependPodInfo(String.format("Exit code for pod is %s", returnCode), podNamespace, podName)); return returnCode; } @@ -745,7 +755,9 @@ public int exitValue() { // Further, since the local resources are used to talk to Kubernetes resources, shut local resources // down after Kubernetes resources are shut down, regardless of Kube termination status. close(); - LOGGER.info("Closed all resources for pod {}", podDefinition.getMetadata().getName()); + LOGGER.info(prependPodInfo("Closed all resources for pod", + podDefinition.getMetadata().getNamespace(), + podDefinition.getMetadata().getName())); return returnCode; } @@ -773,4 +785,8 @@ public static ResourceRequirementsBuilder getResourceRequirementsBuilder(final R return null; } + private static String prependPodInfo(final String message, final String podNamespace, final String podName) { + return String.format("(pod: %s / %s) - %s", podNamespace, podName, message); + } + } From 348fc6e7c1c10d8729b497827356f4e9b9ca388c Mon Sep 17 00:00:00 2001 From: Davin Chia Date: Sun, 13 Mar 2022 11:48:56 +0800 Subject: [PATCH 31/38] Reporter App Monitoring. (#11074) Add a metric to monitor the monitoring app's rate of publishing metrics. Though this isn't perfect, it gives us some insight into whether metric publishing is okay or running into issues. 
--- .../io/airbyte/commons/lang/Exceptions.java | 16 --------- .../airbyte/metrics/lib/MetricsRegistry.java | 4 +++ .../airbyte/metrics/reporter/ReporterApp.java | 2 +- .../io/airbyte/metrics/reporter/ToEmit.java | 34 +++++++++++++++---- 4 files changed, 32 insertions(+), 24 deletions(-) diff --git a/airbyte-commons/src/main/java/io/airbyte/commons/lang/Exceptions.java b/airbyte-commons/src/main/java/io/airbyte/commons/lang/Exceptions.java index a2417c77d964..00a1bffc7ce5 100644 --- a/airbyte-commons/src/main/java/io/airbyte/commons/lang/Exceptions.java +++ b/airbyte-commons/src/main/java/io/airbyte/commons/lang/Exceptions.java @@ -41,22 +41,6 @@ public static void toRuntime(final Procedure voidCallable) { castCheckedToRuntime(voidCallable, RuntimeException::new); } - /** - * Return a Runnable that logs anonymous function exceptions. - * - * @param voidCallable - * @return - */ - public static Runnable toSwallowExceptionRunnable(final Procedure voidCallable) { - return () -> { - try { - voidCallable.call(); - } catch (Exception e) { - log.error("Exception: ", e); - } - }; - } - public static void toIllegalState(final Procedure voidCallable) { castCheckedToRuntime(voidCallable, IllegalStateException::new); } diff --git a/airbyte-metrics/lib/src/main/java/io/airbyte/metrics/lib/MetricsRegistry.java b/airbyte-metrics/lib/src/main/java/io/airbyte/metrics/lib/MetricsRegistry.java index 936eeb5ac3a1..456a6274f8da 100644 --- a/airbyte-metrics/lib/src/main/java/io/airbyte/metrics/lib/MetricsRegistry.java +++ b/airbyte-metrics/lib/src/main/java/io/airbyte/metrics/lib/MetricsRegistry.java @@ -47,6 +47,10 @@ public enum MetricsRegistry { MetricEmittingApps.WORKER, "attempt_succeeded_by_release_stage", "increments when an attempts succeeds. attempts are double counted as this is tagged by release stage."), + EST_NUM_METRICS_EMITTED_BY_REPORTER( + MetricEmittingApps.METRICS_REPORTER, + "est_num_metrics_emitted_by_reporter", + "estimated metrics emitted by the reporter in the last interval. 
this is estimated since the count is not precise."), JOB_CANCELLED_BY_RELEASE_STAGE( MetricEmittingApps.WORKER, "job_cancelled_by_release_stage", diff --git a/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/ReporterApp.java b/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/ReporterApp.java index c03d53596f43..be2a17ae645a 100644 --- a/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/ReporterApp.java +++ b/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/ReporterApp.java @@ -20,7 +20,7 @@ public class ReporterApp { public static Database configDatabase; - public static void main(final String[] args) throws IOException, InterruptedException { + public static void main(final String[] args) throws IOException { final Configs configs = new EnvConfigs(); DogStatsDMetricSingleton.initialize(MetricEmittingApps.METRICS_REPORTER, new DatadogClientConfiguration(configs)); diff --git a/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/ToEmit.java b/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/ToEmit.java index 8a746d875359..a7115bc1d128 100644 --- a/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/ToEmit.java +++ b/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/ToEmit.java @@ -4,7 +4,7 @@ package io.airbyte.metrics.reporter; -import io.airbyte.commons.lang.Exceptions; +import io.airbyte.commons.lang.Exceptions.Procedure; import io.airbyte.db.instance.jobs.jooq.enums.JobStatus; import io.airbyte.metrics.lib.DogStatsDMetricSingleton; import io.airbyte.metrics.lib.MetricQueries; @@ -12,37 +12,39 @@ import io.airbyte.metrics.lib.MetricsRegistry; import java.util.concurrent.TimeUnit; import lombok.AllArgsConstructor; +import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.tuple.Pair; /** * This class contains all metrics emitted by the {@link ReporterApp}. 
*/ +@Slf4j @AllArgsConstructor public enum ToEmit { - NUM_PENDING_JOBS(Exceptions.toSwallowExceptionRunnable(() -> { + NUM_PENDING_JOBS(countMetricEmission(() -> { final var pendingJobs = ReporterApp.configDatabase.query(MetricQueries::numberOfPendingJobs); DogStatsDMetricSingleton.gauge(MetricsRegistry.NUM_PENDING_JOBS, pendingJobs); })), - NUM_RUNNING_JOBS(Exceptions.toSwallowExceptionRunnable(() -> { + NUM_RUNNING_JOBS(countMetricEmission(() -> { final var runningJobs = ReporterApp.configDatabase.query(MetricQueries::numberOfRunningJobs); DogStatsDMetricSingleton.gauge(MetricsRegistry.NUM_RUNNING_JOBS, runningJobs); })), - OLDEST_RUNNING_JOB_AGE_SECS(Exceptions.toSwallowExceptionRunnable(() -> { + OLDEST_RUNNING_JOB_AGE_SECS(countMetricEmission(() -> { final var age = ReporterApp.configDatabase.query(MetricQueries::oldestRunningJobAgeSecs); DogStatsDMetricSingleton.gauge(MetricsRegistry.OLDEST_RUNNING_JOB_AGE_SECS, age); })), - OLDEST_PENDING_JOB_AGE_SECS(Exceptions.toSwallowExceptionRunnable(() -> { + OLDEST_PENDING_JOB_AGE_SECS(countMetricEmission(() -> { final var age = ReporterApp.configDatabase.query(MetricQueries::oldestPendingJobAgeSecs); DogStatsDMetricSingleton.gauge(MetricsRegistry.OLDEST_PENDING_JOB_AGE_SECS, age); })), - NUM_ACTIVE_CONN_PER_WORKSPACE(Exceptions.toSwallowExceptionRunnable(() -> { + NUM_ACTIVE_CONN_PER_WORKSPACE(countMetricEmission(() -> { final var age = ReporterApp.configDatabase.query(MetricQueries::numberOfActiveConnPerWorkspace); for (long count : age) { DogStatsDMetricSingleton.percentile(MetricsRegistry.NUM_ACTIVE_CONN_PER_WORKSPACE, count); } })), - OVERALL_JOB_RUNTIME_IN_LAST_HOUR_BY_TERMINAL_STATE_SECS(Exceptions.toSwallowExceptionRunnable(() -> { + OVERALL_JOB_RUNTIME_IN_LAST_HOUR_BY_TERMINAL_STATE_SECS(countMetricEmission(() -> { final var times = ReporterApp.configDatabase.query(MetricQueries::overallJobRuntimeForTerminalJobsInLastHour); for (Pair pair : times) { DogStatsDMetricSingleton.recordTimeGlobal( @@ -59,4 +61,22 @@ public enum ToEmit { this(toEmit, 15, TimeUnit.SECONDS); } + /** + * Wrapper callable to handle 1) query exception logging and 2) counting metric emissions so the + * reporter app can be monitored too. + * + * @param metricQuery the database query and metric emission to run + * @return a Runnable that logs and swallows any exception from the query and counts each successful emission + */ + private static Runnable countMetricEmission(Procedure metricQuery) { + return () -> { + try { + metricQuery.call(); + DogStatsDMetricSingleton.count(MetricsRegistry.EST_NUM_METRICS_EMITTED_BY_REPORTER, 1); + } catch (Exception e) { + log.error("Exception querying database for metric: ", e); + } + }; + } + } From 5347ff172e95d83aca72ab966b05bdab1f0fc562 Mon Sep 17 00:00:00 2001 From: Davin Chia Date: Sun, 13 Mar 2022 17:22:59 +0800 Subject: [PATCH 32/38] Initial script to start removing zombie build instances. (#11088) * Set up script to remove old instances. * Switch to push for testing. * Debugging. * Authenticate. * Try again. * Set region. * Add JQ. * Add iso check. * Add curr time. Add JQ query. * Correctly set env. * Format command. * Format command. * Should be inverted. * Only get running. * Filter for instance name. * Space instead of comma. * Debug. * Put back running instance filter. * Undo changes. * Test. * Move comment.
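The workflow added below selects running c5.2xlarge build instances launched more than an hour ago, using the AWS CLI plus a jq date filter; as an initial step it only lists the zombies rather than terminating them. For illustration only, here is a rough sketch of the same selection logic in Java with the AWS SDK v2, assuming the SDK dependency and ambient AWS credentials are available; this is not part of the patch and ignores result pagination:

```java
import java.time.Duration;
import java.time.Instant;
import java.util.List;
import software.amazon.awssdk.services.ec2.Ec2Client;
import software.amazon.awssdk.services.ec2.model.DescribeInstancesRequest;
import software.amazon.awssdk.services.ec2.model.Filter;
import software.amazon.awssdk.services.ec2.model.Instance;

public final class ZombieBuildInstanceFinder {

  public static void main(final String[] args) {
    // Anything launched before this instant has been running for over an hour.
    final Instant cutoff = Instant.now().minus(Duration.ofHours(1));

    try (Ec2Client ec2 = Ec2Client.create()) {
      // Same filters as the workflow: build-runner instance type, running state.
      final DescribeInstancesRequest request = DescribeInstancesRequest.builder()
          .filters(
              Filter.builder().name("instance-type").values("c5.2xlarge").build(),
              Filter.builder().name("instance-state-name").values("running").build())
          .build();

      // Flatten reservations into instances and keep only the old ones.
      // (A real version would use describeInstancesPaginator to handle paging.)
      final List<Instance> zombies = ec2.describeInstances(request).reservations().stream()
          .flatMap(reservation -> reservation.instances().stream())
          .filter(instance -> instance.launchTime().isBefore(cutoff))
          .toList();

      zombies.forEach(instance ->
          System.out.println(instance.instanceId() + " launched at " + instance.launchTime()));
    }
  }
}
```

The SDK returns launch times as java.time.Instant directly, which sidesteps the ISO-offset-to-Z timestamp conversion the jq filter has to do by hand.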
--- .github/workflows/gradle.yml | 1 - .../terminate-zombie-build-instances.yml | 27 +++++++++++++++++++ 2 files changed, 27 insertions(+), 1 deletion(-) create mode 100644 .github/workflows/terminate-zombie-build-instances.yml diff --git a/.github/workflows/gradle.yml b/.github/workflows/gradle.yml index d6f72d81eaf2..0b3ed9d47136 100644 --- a/.github/workflows/gradle.yml +++ b/.github/workflows/gradle.yml @@ -368,7 +368,6 @@ jobs: label: ${{ needs.start-platform-new-scheduler-acceptance-runner.outputs.label }} ec2-instance-id: ${{ needs.start-platform-new-scheduler-acceptance-runner.outputs.ec2-instance-id }} - # In case of self-hosted EC2 errors, remove this block. stop-platform-build-runner: name: "Platform: Stop Build EC2 Runner" diff --git a/.github/workflows/terminate-zombie-build-instances.yml b/.github/workflows/terminate-zombie-build-instances.yml new file mode 100644 index 000000000000..9065743b9d04 --- /dev/null +++ b/.github/workflows/terminate-zombie-build-instances.yml @@ -0,0 +1,27 @@ +name: Terminate Zombie Build Instances + +on: + push: + schedule: + - cron: "0 */1 * * *" + +jobs: + terminate: + runs-on: ubuntu-latest + steps: + - name: List Instances Older Than an Hour + env: + AWS_ACCESS_KEY_ID: ${{ secrets.SELF_RUNNER_AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.SELF_RUNNER_AWS_SECRET_ACCESS_KEY }} + # See https://github.com/aws/aws-cli/issues/5623 + AWS_EC2_METADATA_DISABLED: true + run: | + set -euxo pipefail + + aws configure set default.region us-east-2 + + # Since the AWS cli returns an ISO HH:MM timestamp, and Jq only accepts Z timestamps, we define a function toZ to convert this. + aws ec2 describe-instances --no-paginate --filters Name=instance-type,Values=c5.2xlarge Name=instance-state-name,Values=running \ + --query 'Reservations[*].Instances[*].{Instance:InstanceId,LaunchTime:LaunchTime}' --output json \ + | jq 'def toZ(str): str | (split("+")[0] + "Z") | fromdate ; + flatten | map( { InstanceId: .Instance, LaunchTime: toZ(.LaunchTime) } ) | map( select ( .LaunchTime < (now - 3600) ) )' From f5204d7bd7768d6acbfd3d2c22cff45ed8dc93b4 Mon Sep 17 00:00:00 2001 From: Charles Date: Sun, 13 Mar 2022 10:19:19 -0700 Subject: [PATCH 33/38] Extract secrets handling out of ConfigRepository (#8898) --- .../io/airbyte/bootloader/BootloaderApp.java | 2 +- .../config/persistence/ConfigRepository.java | 359 +++++------------- .../persistence/SecretsRepositoryReader.java | 99 +++++ .../persistence/SecretsRepositoryWriter.java | 267 +++++++++++++ .../MemorySecretPersistence.java | 4 + .../split_secrets/SecretsHelpers.java | 16 +- .../ConfigRepositoryE2EReadWriteTest.java | 22 +- .../persistence/ConfigRepositoryTest.java | 7 +- .../SecretsRepositoryReaderTest.java | 119 ++++++ .../SecretsRepositoryWriterTest.java | 215 +++++++++++ .../airbyte/scheduler/app/SchedulerApp.java | 9 +- .../persistence/WorkspaceHelperTest.java | 5 - .../io/airbyte/server/ConfigDumpExporter.java | 14 +- .../io/airbyte/server/ConfigDumpImporter.java | 15 +- .../server/ConfigurationApiFactory.java | 10 + .../java/io/airbyte/server/ServerApp.java | 10 +- .../java/io/airbyte/server/ServerFactory.java | 8 + .../airbyte/server/apis/ConfigurationApi.java | 22 +- .../converters/ConfigurationUpdate.java | 15 +- .../server/handlers/ArchiveHandler.java | 8 +- .../server/handlers/DestinationHandler.java | 20 +- .../server/handlers/SchedulerHandler.java | 18 +- .../server/handlers/SourceHandler.java | 20 +- .../server/ConfigDumpImporterTest.java | 38 +- .../server/apis/ConfigurationApiTest.java | 4 + 
.../converters/ConfigurationUpdateTest.java | 9 +- .../server/handlers/ArchiveHandlerTest.java | 34 +- .../handlers/DestinationHandlerTest.java | 21 +- .../server/handlers/SchedulerHandlerTest.java | 26 +- .../server/handlers/SourceHandlerTest.java | 25 +- .../java/io/airbyte/workers/WorkerApp.java | 5 +- 31 files changed, 1060 insertions(+), 386 deletions(-) create mode 100644 airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/SecretsRepositoryReader.java create mode 100644 airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/SecretsRepositoryWriter.java create mode 100644 airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/SecretsRepositoryReaderTest.java create mode 100644 airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/SecretsRepositoryWriterTest.java diff --git a/airbyte-bootloader/src/main/java/io/airbyte/bootloader/BootloaderApp.java b/airbyte-bootloader/src/main/java/io/airbyte/bootloader/BootloaderApp.java index 2b473d5c1753..3e38348c2931 100644 --- a/airbyte-bootloader/src/main/java/io/airbyte/bootloader/BootloaderApp.java +++ b/airbyte-bootloader/src/main/java/io/airbyte/bootloader/BootloaderApp.java @@ -114,7 +114,7 @@ public void load() throws Exception { final ConfigPersistence configPersistence = DatabaseConfigPersistence.createWithValidation(configDatabase); final ConfigRepository configRepository = - new ConfigRepository(configPersistence, null, Optional.empty(), Optional.empty(), configDatabase); + new ConfigRepository(configPersistence, configDatabase); createWorkspaceIfNoneExists(configRepository); LOGGER.info("Default workspace created.."); diff --git a/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/ConfigRepository.java b/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/ConfigRepository.java index a9428d84521b..4f88e2f7b5f4 100644 --- a/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/ConfigRepository.java +++ b/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/ConfigRepository.java @@ -16,7 +16,6 @@ import com.google.common.hash.HashFunction; import com.google.common.hash.Hashing; import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.lang.Exceptions; import io.airbyte.commons.lang.MoreBooleans; import io.airbyte.config.ActorCatalog; import io.airbyte.config.ActorCatalogFetchEvent; @@ -33,22 +32,15 @@ import io.airbyte.config.StandardSyncState; import io.airbyte.config.StandardWorkspace; import io.airbyte.config.State; -import io.airbyte.config.persistence.split_secrets.SecretPersistence; -import io.airbyte.config.persistence.split_secrets.SecretsHelpers; -import io.airbyte.config.persistence.split_secrets.SecretsHydrator; -import io.airbyte.config.persistence.split_secrets.SplitSecretConfig; import io.airbyte.db.Database; import io.airbyte.db.ExceptionWrappingDatabase; import io.airbyte.db.instance.configs.jooq.enums.ActorType; import io.airbyte.protocol.models.AirbyteCatalog; -import io.airbyte.protocol.models.ConnectorSpecification; -import io.airbyte.validation.json.JsonSchemaValidator; import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; import java.time.OffsetDateTime; import java.util.ArrayList; import java.util.HashMap; -import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Objects; @@ -67,35 +59,24 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; 
-@SuppressWarnings("OptionalUsedAsFieldOrParameterType")
 public class ConfigRepository {

   private static final Logger LOGGER = LoggerFactory.getLogger(ConfigRepository.class);

-  private static final UUID NO_WORKSPACE = UUID.fromString("00000000-0000-0000-0000-000000000000");
-
   private final ConfigPersistence persistence;
-  private final SecretsHydrator secretsHydrator;
-  private final Optional<SecretPersistence> longLivedSecretPersistence;
-  private final Optional<SecretPersistence> ephemeralSecretPersistence;
   private final ExceptionWrappingDatabase database;

-  public ConfigRepository(final ConfigPersistence persistence,
-                          final SecretsHydrator secretsHydrator,
-                          final Optional<SecretPersistence> longLivedSecretPersistence,
-                          final Optional<SecretPersistence> ephemeralSecretPersistence,
-                          final Database database) {
-    this.persistence = persistence;
-    this.secretsHydrator = secretsHydrator;
-    this.longLivedSecretPersistence = longLivedSecretPersistence;
-    this.ephemeralSecretPersistence = ephemeralSecretPersistence;
-    this.database = new ExceptionWrappingDatabase(database);
-  }
-
+  // todo (cgardens) - very bad that this is exposed. usages should be removed. do not use it.
+  @Deprecated
   public ExceptionWrappingDatabase getDatabase() {
     return database;
   }

+  public ConfigRepository(final ConfigPersistence persistence, final Database database) {
+    this.persistence = persistence;
+    this.database = new ExceptionWrappingDatabase(database);
+  }
+
   public StandardWorkspace getStandardWorkspace(final UUID workspaceId, final boolean includeTombstone)
       throws JsonValidationException, IOException, ConfigNotFoundException {
     final StandardWorkspace workspace = persistence.getConfig(ConfigSchema.STANDARD_WORKSPACE, workspaceId.toString(), StandardWorkspace.class);
@@ -318,170 +299,89 @@ private void deleteConnectorDefinitionAndAssociations(
     persistence.deleteConfig(definitionType, definitionId.toString());
   }

-  public SourceConnection getSourceConnection(final UUID sourceId) throws JsonValidationException, ConfigNotFoundException, IOException {
-    return persistence.getConfig(ConfigSchema.SOURCE_CONNECTION, sourceId.toString(), SourceConnection.class);
-  }
-
-  public SourceConnection getSourceConnectionWithSecrets(final UUID sourceId) throws JsonValidationException, IOException, ConfigNotFoundException {
-    final var source = getSourceConnection(sourceId);
-    final var fullConfig = secretsHydrator.hydrate(source.getConfiguration());
-    return Jsons.clone(source).withConfiguration(fullConfig);
-  }
-
-  private Optional<SourceConnection> getOptionalSourceConnection(final UUID sourceId) throws JsonValidationException, IOException {
-    try {
-      return Optional.of(getSourceConnection(sourceId));
-    } catch (final ConfigNotFoundException e) {
-      return Optional.empty();
-    }
-  }
-
-  public void writeSourceConnection(final SourceConnection source, final ConnectorSpecification connectorSpecification)
-      throws JsonValidationException, IOException {
-    // actual validation is only for sanity checking
-    final JsonSchemaValidator validator = new JsonSchemaValidator();
-    validator.ensure(connectorSpecification.getConnectionSpecification(), source.getConfiguration());
-
-    final var previousSourceConnection = getOptionalSourceConnection(source.getSourceId())
-        .map(SourceConnection::getConfiguration);
-
-    final var partialConfig =
-        statefulUpdateSecrets(source.getWorkspaceId(), previousSourceConnection, source.getConfiguration(), connectorSpecification);
-    final var partialSource = Jsons.clone(source).withConfiguration(partialConfig);
-
-    persistence.writeConfig(ConfigSchema.SOURCE_CONNECTION, source.getSourceId().toString(),
partialSource); - } - /** - * @param workspaceId workspace id for the config - * @param fullConfig full config - * @param spec connector specification - * @return partial config + * Returns source with a given id. Does not contain secrets. To hydrate with secrets see { @link + * SecretsRepositoryReader#getSourceConnectionWithSecrets(final UUID sourceId) }. + * + * @param sourceId - id of source to fetch. + * @return sources + * @throws JsonValidationException - throws if returned sources are invalid + * @throws IOException - you never know when you IO + * @throws ConfigNotFoundException - throws if no source with that id can be found. */ - public JsonNode statefulSplitSecrets(final UUID workspaceId, final JsonNode fullConfig, final ConnectorSpecification spec) { - return splitSecretConfig(workspaceId, fullConfig, spec, longLivedSecretPersistence); + public SourceConnection getSourceConnection(final UUID sourceId) throws JsonValidationException, ConfigNotFoundException, IOException { + return persistence.getConfig(ConfigSchema.SOURCE_CONNECTION, sourceId.toString(), SourceConnection.class); } /** - * @param workspaceId workspace id for the config - * @param oldConfig old full config - * @param fullConfig new full config - * @param spec connector specification - * @return partial config + * MUST NOT ACCEPT SECRETS - Should only be called from { @link SecretsRepositoryWriter } + * + * Write a SourceConnection to the database. The configuration of the Source will be a partial + * configuration (no secrets, just pointer to the secrets store). + * + * @param partialSource - The configuration of the Source will be a partial configuration (no + * secrets, just pointer to the secrets store) + * @throws JsonValidationException - throws is the source is invalid + * @throws IOException - you never know when you IO */ - public JsonNode statefulUpdateSecrets(final UUID workspaceId, - final Optional oldConfig, - final JsonNode fullConfig, - final ConnectorSpecification spec) { - if (longLivedSecretPersistence.isPresent()) { - if (oldConfig.isPresent()) { - final var splitSecretConfig = SecretsHelpers.splitAndUpdateConfig( - workspaceId, - oldConfig.get(), - fullConfig, - spec, - longLivedSecretPersistence.get()); - - splitSecretConfig.getCoordinateToPayload().forEach(longLivedSecretPersistence.get()::write); - - return splitSecretConfig.getPartialConfig(); - } else { - final var splitSecretConfig = SecretsHelpers.splitConfig( - workspaceId, - fullConfig, - spec); - - splitSecretConfig.getCoordinateToPayload().forEach(longLivedSecretPersistence.get()::write); - - return splitSecretConfig.getPartialConfig(); - } - } else { - return fullConfig; - } + public void writeSourceConnectionNoSecrets(final SourceConnection partialSource) throws JsonValidationException, IOException { + persistence.writeConfig(ConfigSchema.SOURCE_CONNECTION, partialSource.getSourceId().toString(), partialSource); } /** - * @param fullConfig full config - * @param spec connector specification - * @return partial config + * Returns all sources in the database. Does not contain secrets. To hydrate with secrets see + * { @link SecretsRepositoryReader#listSourceConnectionWithSecrets() }. 
+   *
+   * @return sources
+   * @throws JsonValidationException - throws if returned sources are invalid
+   * @throws IOException - you never know when you IO
+   */
-  public JsonNode statefulSplitEphemeralSecrets(final JsonNode fullConfig, final ConnectorSpecification spec) {
-    return splitSecretConfig(NO_WORKSPACE, fullConfig, spec, ephemeralSecretPersistence);
-  }
-
-  private JsonNode splitSecretConfig(final UUID workspaceId,
-                                     final JsonNode fullConfig,
-                                     final ConnectorSpecification spec,
-                                     final Optional<SecretPersistence> secretPersistence) {
-    if (secretPersistence.isPresent()) {
-      final SplitSecretConfig splitSecretConfig = SecretsHelpers.splitConfig(workspaceId, fullConfig, spec);
-      splitSecretConfig.getCoordinateToPayload().forEach(secretPersistence.get()::write);
-      return splitSecretConfig.getPartialConfig();
-    } else {
-      return fullConfig;
-    }
-  }
-
   public List<SourceConnection> listSourceConnection() throws JsonValidationException, IOException {
     return persistence.listConfigs(ConfigSchema.SOURCE_CONNECTION, SourceConnection.class);
   }

-  public List<SourceConnection> listSourceConnectionWithSecrets() throws JsonValidationException, IOException {
-    final var sources = listSourceConnection();
-
-    return sources.stream()
-        .map(partialSource -> Exceptions.toRuntime(() -> getSourceConnectionWithSecrets(partialSource.getSourceId())))
-        .collect(Collectors.toList());
-  }
-
+  /**
+   * Returns destination with a given id. Does not contain secrets. To hydrate with secrets see
+   * {@link SecretsRepositoryReader#getDestinationConnectionWithSecrets(final UUID destinationId)}.
+   *
+   * @param destinationId - id of destination to fetch.
+   * @return destination
+   * @throws JsonValidationException - throws if returned destinations are invalid
+   * @throws IOException - you never know when you IO
+   * @throws ConfigNotFoundException - throws if no destination with that id can be found.
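+   *
+   * (Illustration added for clarity, not part of the original patch: a partial configuration keeps
+   * non-secret fields inline and replaces each secret with a pointer into the secrets store, e.g.
+   * { "username": "airbyte", "password": { "_secret": "<coordinate>" } }.)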
+   */
  public DestinationConnection getDestinationConnection(final UUID destinationId)
      throws JsonValidationException, IOException, ConfigNotFoundException {
    return persistence.getConfig(ConfigSchema.DESTINATION_CONNECTION, destinationId.toString(), DestinationConnection.class);
  }

-  public DestinationConnection getDestinationConnectionWithSecrets(final UUID destinationId)
-      throws JsonValidationException, IOException, ConfigNotFoundException {
-    final var destination = getDestinationConnection(destinationId);
-    final var fullConfig = secretsHydrator.hydrate(destination.getConfiguration());
-    return Jsons.clone(destination).withConfiguration(fullConfig);
-  }
-
-  private Optional<DestinationConnection> getOptionalDestinationConnection(final UUID destinationId) throws JsonValidationException, IOException {
-    try {
-      return Optional.of(getDestinationConnection(destinationId));
-    } catch (final ConfigNotFoundException e) {
-      return Optional.empty();
-    }
-  }
-
-  public void writeDestinationConnection(final DestinationConnection destination, final ConnectorSpecification connectorSpecification)
-      throws JsonValidationException, IOException {
-    // actual validation is only for sanity checking
-    final JsonSchemaValidator validator = new JsonSchemaValidator();
-    validator.ensure(connectorSpecification.getConnectionSpecification(), destination.getConfiguration());
-
-    final var previousDestinationConnection = getOptionalDestinationConnection(destination.getDestinationId())
-        .map(DestinationConnection::getConfiguration);
-
-    final var partialConfig =
-        statefulUpdateSecrets(destination.getWorkspaceId(), previousDestinationConnection, destination.getConfiguration(), connectorSpecification);
-    final var partialDestination = Jsons.clone(destination).withConfiguration(partialConfig);
-
-    persistence.writeConfig(ConfigSchema.DESTINATION_CONNECTION, destination.getDestinationId().toString(), partialDestination);
+  /**
+   * MUST NOT ACCEPT SECRETS - Should only be called from {@link SecretsRepositoryWriter}
+   *
+   * Write a DestinationConnection to the database. The configuration of the Destination will be a
+   * partial configuration (no secrets, just pointer to the secrets store).
+   *
+   * @param partialDestination - The configuration of the Destination will be a partial configuration
+   *        (no secrets, just pointer to the secrets store)
+   * @throws JsonValidationException - throws if the destination is invalid
+   * @throws IOException - you never know when you IO
+   */
+  public void writeDestinationConnectionNoSecrets(final DestinationConnection partialDestination) throws JsonValidationException, IOException {
+    persistence.writeConfig(ConfigSchema.DESTINATION_CONNECTION, partialDestination.getDestinationId().toString(), partialDestination);
+  }

+  /**
+   * Returns all destinations in the database. Does not contain secrets. To hydrate with secrets see
+   * {@link SecretsRepositoryReader#listDestinationConnectionWithSecrets()}.
+   *
+   * @return destinations
+   * @throws JsonValidationException - throws if returned destinations are invalid
+   * @throws IOException - you never know when you IO
+   */
  public List<DestinationConnection> listDestinationConnection() throws JsonValidationException, IOException {
    return persistence.listConfigs(ConfigSchema.DESTINATION_CONNECTION, DestinationConnection.class);
  }

-  public List<DestinationConnection> listDestinationConnectionWithSecrets() throws JsonValidationException, IOException {
-    final var destinations = listDestinationConnection();
-
-    return destinations.stream()
-        .map(partialDestination -> Exceptions.toRuntime(() -> getDestinationConnectionWithSecrets(partialDestination.getDestinationId())))
-        .collect(Collectors.toList());
-  }
-
  public StandardSync getStandardSync(final UUID connectionId) throws JsonValidationException, IOException, ConfigNotFoundException {
    return persistence.getConfig(ConfigSchema.STANDARD_SYNC, connectionId.toString(), StandardSync.class);
  }
@@ -790,110 +690,43 @@ public int countDestinationsForWorkspace(final UUID workspaceId) throws IOExcept
  }

  /**
-   * Converts between a dumpConfig() output and a replaceAllConfigs() input, by deserializing the
-   * string/jsonnode into the AirbyteConfig, Stream<Object<AirbyteConfig.getClassName()>>
+   * MUST NOT ACCEPT SECRETS - Package private so that secrets are not accidentally passed in. Should
+   * only be called from {@link SecretsRepositoryWriter}
    *
-   * @param configurations from dumpConfig()
-   * @return input suitable for replaceAllConfigs()
+   * Takes as inputs configurations that it then uses to overwrite the contents of the existing Config
+   * Database.
+   *
+   * @param configs - configurations to load.
+   * @param dryRun - whether to do a test run of the load
+   * @throws IOException - you never know when you IO.
    */
-  public static Map<AirbyteConfig, Stream<?>> deserialize(final Map<String, Stream<JsonNode>> configurations) {
-    final Map<AirbyteConfig, Stream<?>> deserialized = new LinkedHashMap<>();
-    for (final String configSchemaName : configurations.keySet()) {
-      deserialized.put(
-          ConfigSchema.valueOf(configSchemaName),
-          configurations.get(configSchemaName).map(jsonNode -> Jsons.object(jsonNode, ConfigSchema.valueOf(configSchemaName).getClassName())));
-    }
-    return deserialized;
-  }
-
-  public void replaceAllConfigsDeserializing(final Map<String, Stream<JsonNode>> configs, final boolean dryRun) throws IOException {
-    replaceAllConfigs(deserialize(configs), dryRun);
+  void replaceAllConfigsNoSecrets(final Map<AirbyteConfig, Stream<?>> configs, final boolean dryRun) throws IOException {
+    persistence.replaceAllConfigs(configs, dryRun);
  }

-  public void replaceAllConfigs(final Map<AirbyteConfig, Stream<?>> configs, final boolean dryRun) throws IOException {
-    if (longLivedSecretPersistence.isPresent()) {
-      final var augmentedMap = new HashMap<>(configs);
-
-      // get all source defs so that we can use their specs when storing secrets.
-      @SuppressWarnings("unchecked")
-      final List<StandardSourceDefinition> sourceDefs =
-          (List<StandardSourceDefinition>) augmentedMap.get(ConfigSchema.STANDARD_SOURCE_DEFINITION).collect(Collectors.toList());
-      // restore data in the map that gets consumed downstream.
-      augmentedMap.put(ConfigSchema.STANDARD_SOURCE_DEFINITION, sourceDefs.stream());
-      final Map<UUID, ConnectorSpecification> sourceDefIdToSpec = sourceDefs
-          .stream()
-          .collect(Collectors.toMap(StandardSourceDefinition::getSourceDefinitionId, StandardSourceDefinition::getSpec));
-
-      // get all destination defs so that we can use their specs when storing secrets.
-      @SuppressWarnings("unchecked")
-      final List<StandardDestinationDefinition> destinationDefs =
-          (List<StandardDestinationDefinition>) augmentedMap.get(ConfigSchema.STANDARD_DESTINATION_DEFINITION).collect(Collectors.toList());
-      augmentedMap.put(ConfigSchema.STANDARD_DESTINATION_DEFINITION, destinationDefs.stream());
-      final Map<UUID, ConnectorSpecification> destinationDefIdToSpec = destinationDefs
-          .stream()
-          .collect(Collectors.toMap(StandardDestinationDefinition::getDestinationDefinitionId, StandardDestinationDefinition::getSpec));
-
-      if (augmentedMap.containsKey(ConfigSchema.SOURCE_CONNECTION)) {
-        final Stream<?> augmentedValue = augmentedMap.get(ConfigSchema.SOURCE_CONNECTION)
-            .map(config -> {
-              final SourceConnection source = (SourceConnection) config;
-
-              if (!sourceDefIdToSpec.containsKey(source.getSourceDefinitionId())) {
-                throw new RuntimeException(new ConfigNotFoundException(ConfigSchema.STANDARD_SOURCE_DEFINITION, source.getSourceDefinitionId()));
-              }
-
-              final var connectionConfig =
-                  statefulSplitSecrets(source.getWorkspaceId(), source.getConfiguration(), sourceDefIdToSpec.get(source.getSourceDefinitionId()));
-
-              return source.withConfiguration(connectionConfig);
-            });
-        augmentedMap.put(ConfigSchema.SOURCE_CONNECTION, augmentedValue);
-      }
-
-      if (augmentedMap.containsKey(ConfigSchema.DESTINATION_CONNECTION)) {
-        final Stream<?> augmentedValue = augmentedMap.get(ConfigSchema.DESTINATION_CONNECTION)
-            .map(config -> {
-              final DestinationConnection destination = (DestinationConnection) config;
-
-              if (!destinationDefIdToSpec.containsKey(destination.getDestinationDefinitionId())) {
-                throw new RuntimeException(
-                    new ConfigNotFoundException(ConfigSchema.STANDARD_DESTINATION_DEFINITION, destination.getDestinationDefinitionId()));
-              }
-
-              final var connectionConfig = statefulSplitSecrets(destination.getWorkspaceId(), destination.getConfiguration(),
-                  destinationDefIdToSpec.get(destination.getDestinationDefinitionId()));
-
-              return destination.withConfiguration(connectionConfig);
-            });
-        augmentedMap.put(ConfigSchema.DESTINATION_CONNECTION, augmentedValue);
-      }
-
-      persistence.replaceAllConfigs(augmentedMap, dryRun);
-    } else {
-      persistence.replaceAllConfigs(configs, dryRun);
-    }
-  }
-
-  public Map<String, Stream<JsonNode>> dumpConfigs() throws IOException {
-    final var map = new HashMap<>(persistence.dumpConfigs());
-    final var sourceKey = ConfigSchema.SOURCE_CONNECTION.name();
-    final var destinationKey = ConfigSchema.DESTINATION_CONNECTION.name();
-
-    if (map.containsKey(sourceKey)) {
-      final Stream<JsonNode> augmentedValue = map.get(sourceKey).map(secretsHydrator::hydrate);
-      map.put(sourceKey, augmentedValue);
-    }
-
-    if (map.containsKey(destinationKey)) {
-      final Stream<JsonNode> augmentedValue = map.get(destinationKey).map(secretsHydrator::hydrate);
-      map.put(destinationKey, augmentedValue);
-    }
-
-    return map;
+  /**
+   * Dumps all configurations in the Config Database. Note: It will not contain secrets as the Config
+   * Database does not contain connector configurations that include secrets. In order to hydrate with
+   * secrets see {@link SecretsRepositoryReader#dumpConfigsWithSecrets()}.
+   *
+   * @return all configurations in the Config Database
+   * @throws IOException - you never know when you IO
+   */
+  public Map<String, Stream<JsonNode>> dumpConfigsNoSecrets() throws IOException {
+    return persistence.dumpConfigs();
  }

-  public void loadData(final ConfigPersistence seedPersistence) throws IOException {
-    persistence.loadData(seedPersistence);
+  /**
+   * MUST NOT ACCEPT SECRETS - Package private so that secrets are not accidentally passed in.
Should + * only be called from { @link SecretsRepositoryWriter } + * + * Loads all Data from a ConfigPersistence into the database. + * + * @param seedPersistenceWithoutSecrets - seed persistence WITHOUT secrets + * @throws IOException - you never know when you IO + */ + public void loadDataNoSecrets(final ConfigPersistence seedPersistenceWithoutSecrets) throws IOException { + persistence.loadData(seedPersistenceWithoutSecrets); } } diff --git a/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/SecretsRepositoryReader.java b/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/SecretsRepositoryReader.java new file mode 100644 index 000000000000..525743e55907 --- /dev/null +++ b/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/SecretsRepositoryReader.java @@ -0,0 +1,99 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.config.persistence; + +import com.fasterxml.jackson.databind.JsonNode; +import io.airbyte.commons.json.Jsons; +import io.airbyte.commons.lang.Exceptions; +import io.airbyte.config.ConfigSchema; +import io.airbyte.config.DestinationConnection; +import io.airbyte.config.SourceConnection; +import io.airbyte.config.persistence.split_secrets.SecretsHydrator; +import io.airbyte.validation.json.JsonValidationException; +import java.io.IOException; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.UUID; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * This class is responsible for fetching both connectors and their secrets (from separate secrets + * stores). All methods in this class return secrets! Use it carefully. 
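+ *
+ * Hypothetical usage sketch (added for illustration, not part of the original patch; assumes a
+ * SecretsHydrator wired to the same store the writer used):
+ *
+ *   final SecretsRepositoryReader reader = new SecretsRepositoryReader(configRepository, secretsHydrator);
+ *   final SourceConnection withSecrets = reader.getSourceConnectionWithSecrets(sourceId);
+ *   // withSecrets.getConfiguration() now holds real secret values instead of { "_secret": ... } pointers.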
+ */
+public class SecretsRepositoryReader {
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(SecretsRepositoryReader.class);
+
+  private final ConfigRepository configRepository;
+  private final SecretsHydrator secretsHydrator;
+
+  public SecretsRepositoryReader(final ConfigRepository configRepository,
+                                 final SecretsHydrator secretsHydrator) {
+    this.configRepository = configRepository;
+    this.secretsHydrator = secretsHydrator;
+  }
+
+  public SourceConnection getSourceConnectionWithSecrets(final UUID sourceId) throws JsonValidationException, IOException, ConfigNotFoundException {
+    final var source = configRepository.getSourceConnection(sourceId);
+    return hydrateSourcePartialConfig(source);
+  }
+
+  public List<SourceConnection> listSourceConnectionWithSecrets() throws JsonValidationException, IOException {
+    final var sources = configRepository.listSourceConnection();
+
+    return sources
+        .stream()
+        .map(partialSource -> Exceptions.toRuntime(() -> hydrateSourcePartialConfig(partialSource)))
+        .collect(Collectors.toList());
+  }
+
+  public DestinationConnection getDestinationConnectionWithSecrets(final UUID destinationId)
+      throws JsonValidationException, IOException, ConfigNotFoundException {
+    final var destination = configRepository.getDestinationConnection(destinationId);
+    return hydrateDestinationPartialConfig(destination);
+  }
+
+  public List<DestinationConnection> listDestinationConnectionWithSecrets() throws JsonValidationException, IOException {
+    final var destinations = configRepository.listDestinationConnection();
+
+    return destinations
+        .stream()
+        .map(partialDestination -> Exceptions.toRuntime(() -> hydrateDestinationPartialConfig(partialDestination)))
+        .collect(Collectors.toList());
+  }
+
+  public Map<String, Stream<JsonNode>> dumpConfigsWithSecrets() throws IOException {
+    final Map<String, Stream<JsonNode>> dump = new HashMap<>(configRepository.dumpConfigsNoSecrets());
+    final String sourceKey = ConfigSchema.SOURCE_CONNECTION.name();
+    final String destinationKey = ConfigSchema.DESTINATION_CONNECTION.name();
+
+    hydrateValuesIfKeyPresent(sourceKey, dump);
+    hydrateValuesIfKeyPresent(destinationKey, dump);
+
+    return dump;
+  }
+
+  private SourceConnection hydrateSourcePartialConfig(final SourceConnection sourceWithPartialConfig) {
+    final JsonNode hydratedConfig = secretsHydrator.hydrate(sourceWithPartialConfig.getConfiguration());
+    return Jsons.clone(sourceWithPartialConfig).withConfiguration(hydratedConfig);
+  }
+
+  private DestinationConnection hydrateDestinationPartialConfig(final DestinationConnection destinationWithPartialConfig) {
+    final JsonNode hydratedConfig = secretsHydrator.hydrate(destinationWithPartialConfig.getConfiguration());
+    return Jsons.clone(destinationWithPartialConfig).withConfiguration(hydratedConfig);
+  }
+
+  private void hydrateValuesIfKeyPresent(final String key, final Map<String, Stream<JsonNode>> dump) {
+    if (dump.containsKey(key)) {
+      final Stream<JsonNode> augmentedValue = dump.get(key).map(secretsHydrator::hydrate);
+      dump.put(key, augmentedValue);
+    }
+  }
+
+}
diff --git a/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/SecretsRepositoryWriter.java b/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/SecretsRepositoryWriter.java
new file mode 100644
index 000000000000..8049312fe2d6
--- /dev/null
+++ b/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/SecretsRepositoryWriter.java
@@ -0,0 +1,267 @@
+/*
+ * Copyright (c) 2021 Airbyte, Inc., all rights reserved.
+ */
+
+package io.airbyte.config.persistence;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.google.common.annotations.VisibleForTesting;
+import io.airbyte.commons.json.Jsons;
+import io.airbyte.config.AirbyteConfig;
+import io.airbyte.config.ConfigSchema;
+import io.airbyte.config.DestinationConnection;
+import io.airbyte.config.SourceConnection;
+import io.airbyte.config.StandardDestinationDefinition;
+import io.airbyte.config.StandardSourceDefinition;
+import io.airbyte.config.persistence.split_secrets.SecretPersistence;
+import io.airbyte.config.persistence.split_secrets.SecretsHelpers;
+import io.airbyte.config.persistence.split_secrets.SplitSecretConfig;
+import io.airbyte.protocol.models.ConnectorSpecification;
+import io.airbyte.validation.json.JsonSchemaValidator;
+import io.airbyte.validation.json.JsonValidationException;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.UUID;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * This class takes secrets as arguments but never returns secrets as return values (even the ones
+ * that are passed in as arguments). It is responsible for writing connector secrets to the correct
+ * secrets store and then making sure the remainder of the configuration is written to the Config
+ * Database.
+ */
+@SuppressWarnings("OptionalUsedAsFieldOrParameterType")
+public class SecretsRepositoryWriter {
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(SecretsRepositoryWriter.class);
+
+  private static final UUID NO_WORKSPACE = UUID.fromString("00000000-0000-0000-0000-000000000000");
+
+  private final ConfigRepository configRepository;
+  private final JsonSchemaValidator validator;
+  private final Optional<SecretPersistence> longLivedSecretPersistence;
+  private final Optional<SecretPersistence> ephemeralSecretPersistence;
+
+  public SecretsRepositoryWriter(final ConfigRepository configRepository,
+                                 final Optional<SecretPersistence> longLivedSecretPersistence,
+                                 final Optional<SecretPersistence> ephemeralSecretPersistence) {
+    this(configRepository, new JsonSchemaValidator(), longLivedSecretPersistence, ephemeralSecretPersistence);
+  }
+
+  @VisibleForTesting
+  SecretsRepositoryWriter(final ConfigRepository configRepository,
+                          final JsonSchemaValidator validator,
+                          final Optional<SecretPersistence> longLivedSecretPersistence,
+                          final Optional<SecretPersistence> ephemeralSecretPersistence) {
+    this.configRepository = configRepository;
+    this.validator = validator;
+    this.longLivedSecretPersistence = longLivedSecretPersistence;
+    this.ephemeralSecretPersistence = ephemeralSecretPersistence;
+  }
+
+  private Optional<SourceConnection> getSourceIfExists(final UUID sourceId) throws JsonValidationException, IOException {
+    try {
+      return Optional.of(configRepository.getSourceConnection(sourceId));
+    } catch (final ConfigNotFoundException e) {
+      return Optional.empty();
+    }
+  }
+
+  // validates too!
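+  // Illustration (added for clarity, not part of the original patch): given a full config like
+  //   { "username": "airbyte", "password": "hunter2" }
+  // the write path below stores "hunter2" in the long-lived secret persistence and hands
+  // ConfigRepository a partial config of the form
+  //   { "username": "airbyte", "password": { "_secret": "<coordinate>" } }.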
+  public void writeSourceConnection(final SourceConnection source, final ConnectorSpecification connectorSpecification)
+      throws JsonValidationException, IOException {
+    final var previousSourceConnection = getSourceIfExists(source.getSourceId())
+        .map(SourceConnection::getConfiguration);
+
+    // strip secrets
+    final JsonNode partialConfig = statefulUpdateSecrets(
+        source.getWorkspaceId(),
+        previousSourceConnection,
+        source.getConfiguration(),
+        connectorSpecification);
+    final SourceConnection partialSource = Jsons.clone(source).withConfiguration(partialConfig);
+
+    configRepository.writeSourceConnectionNoSecrets(partialSource);
+  }
+
+  private Optional<DestinationConnection> getDestinationIfExists(final UUID destinationId) throws JsonValidationException, IOException {
+    try {
+      return Optional.of(configRepository.getDestinationConnection(destinationId));
+    } catch (final ConfigNotFoundException e) {
+      return Optional.empty();
+    }
+  }
+
+  public void writeDestinationConnection(final DestinationConnection destination, final ConnectorSpecification connectorSpecification)
+      throws JsonValidationException, IOException {
+    final var previousDestinationConnection = getDestinationIfExists(destination.getDestinationId())
+        .map(DestinationConnection::getConfiguration);
+
+    final JsonNode partialConfig = statefulUpdateSecrets(
+        destination.getWorkspaceId(),
+        previousDestinationConnection,
+        destination.getConfiguration(),
+        connectorSpecification);
+    final DestinationConnection partialDestination = Jsons.clone(destination).withConfiguration(partialConfig);
+
+    configRepository.writeDestinationConnectionNoSecrets(partialDestination);
+  }
+
+  /**
+   * Detects secrets in the configuration. Writes them to the secrets store. It returns the config
+   * stripped of secrets (replaced with pointers to the secrets store).
+   *
+   * @param workspaceId workspace id for the config
+   * @param fullConfig full config
+   * @param spec connector specification
+   * @return partial config
+   */
+  private JsonNode statefulSplitSecrets(final UUID workspaceId, final JsonNode fullConfig, final ConnectorSpecification spec) {
+    return splitSecretConfig(workspaceId, fullConfig, spec, longLivedSecretPersistence);
+  }
+
+  // todo (cgardens) - the contract on this method is hard to follow, because it sometimes returns
+  // secrets (i.e. when there is no longLivedSecretPersistence). If we treated all secrets the same
+  // (i.e. used a separate db for secrets when the user didn't provide a store), this would be easier
+  // to reason about.
+  /**
+   * If a secrets store is present, this method attempts to fetch the existing config and merge its
+   * secrets with the passed in config. If there is no secrets store, it just returns the passed in
+   * config. Also validates the config.
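+   *
+   * Worked example (added for illustration, not part of the original patch): if the old partial
+   * config points at an existing coordinate and the new full config changes that secret's value,
+   * the new value is written under a new version of the coordinate and the returned partial config
+   * points there; with no long-lived store configured, the full config, secrets included, is
+   * returned unchanged.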
+   *
+   * @param workspaceId workspace id for the config
+   * @param oldConfig old full config
+   * @param fullConfig new full config
+   * @param spec connector specification
+   * @return partial config
+   */
+  private JsonNode statefulUpdateSecrets(final UUID workspaceId,
+                                         final Optional<JsonNode> oldConfig,
+                                         final JsonNode fullConfig,
+                                         final ConnectorSpecification spec)
+      throws JsonValidationException {
+    validator.ensure(spec.getConnectionSpecification(), fullConfig);
+
+    if (longLivedSecretPersistence.isPresent()) {
+      if (oldConfig.isPresent()) {
+        final var splitSecretConfig = SecretsHelpers.splitAndUpdateConfig(
+            workspaceId,
+            oldConfig.get(),
+            fullConfig,
+            spec,
+            longLivedSecretPersistence.get());
+
+        splitSecretConfig.getCoordinateToPayload().forEach(longLivedSecretPersistence.get()::write);
+        return splitSecretConfig.getPartialConfig();
+      } else {
+        final var splitSecretConfig = SecretsHelpers.splitConfig(
+            workspaceId,
+            fullConfig,
+            spec);
+
+        splitSecretConfig.getCoordinateToPayload().forEach(longLivedSecretPersistence.get()::write);
+
+        return splitSecretConfig.getPartialConfig();
+      }
+    } else {
+      return fullConfig;
+    }
+  }
+
+  /**
+   * @param fullConfig full config
+   * @param spec connector specification
+   * @return partial config
+   */
+  public JsonNode statefulSplitEphemeralSecrets(final JsonNode fullConfig, final ConnectorSpecification spec) {
+    return splitSecretConfig(NO_WORKSPACE, fullConfig, spec, ephemeralSecretPersistence);
+  }
+
+  private JsonNode splitSecretConfig(final UUID workspaceId,
+                                     final JsonNode fullConfig,
+                                     final ConnectorSpecification spec,
+                                     final Optional<SecretPersistence> secretPersistence) {
+    if (secretPersistence.isPresent()) {
+      final SplitSecretConfig splitSecretConfig = SecretsHelpers.splitConfig(workspaceId, fullConfig, spec);
+      splitSecretConfig.getCoordinateToPayload().forEach(secretPersistence.get()::write);
+      return splitSecretConfig.getPartialConfig();
+    } else {
+      return fullConfig;
+    }
+  }
+
+  public void replaceAllConfigs(final Map<AirbyteConfig, Stream<?>> configs, final boolean dryRun) throws IOException {
+    if (longLivedSecretPersistence.isPresent()) {
+      final var augmentedMap = new HashMap<>(configs);
+
+      // get all source defs so that we can use their specs when storing secrets.
+      @SuppressWarnings("unchecked")
+      final List<StandardSourceDefinition> sourceDefs =
+          (List<StandardSourceDefinition>) augmentedMap.get(ConfigSchema.STANDARD_SOURCE_DEFINITION).collect(Collectors.toList());
+      // restore data in the map that gets consumed downstream.
+      augmentedMap.put(ConfigSchema.STANDARD_SOURCE_DEFINITION, sourceDefs.stream());
+      final Map<UUID, ConnectorSpecification> sourceDefIdToSpec = sourceDefs
+          .stream()
+          .collect(Collectors.toMap(StandardSourceDefinition::getSourceDefinitionId, StandardSourceDefinition::getSpec));
+
+      // get all destination defs so that we can use their specs when storing secrets.
+ @SuppressWarnings("unchecked") + final List destinationDefs = + (List) augmentedMap.get(ConfigSchema.STANDARD_DESTINATION_DEFINITION).collect(Collectors.toList()); + augmentedMap.put(ConfigSchema.STANDARD_DESTINATION_DEFINITION, destinationDefs.stream()); + final Map destinationDefIdToSpec = destinationDefs + .stream() + .collect(Collectors.toMap(StandardDestinationDefinition::getDestinationDefinitionId, StandardDestinationDefinition::getSpec)); + + if (augmentedMap.containsKey(ConfigSchema.SOURCE_CONNECTION)) { + final Stream augmentedValue = augmentedMap.get(ConfigSchema.SOURCE_CONNECTION) + .map(config -> { + final SourceConnection source = (SourceConnection) config; + + if (!sourceDefIdToSpec.containsKey(source.getSourceDefinitionId())) { + throw new RuntimeException(new ConfigNotFoundException(ConfigSchema.STANDARD_SOURCE_DEFINITION, source.getSourceDefinitionId())); + } + + final var partialConfig = statefulSplitSecrets( + source.getWorkspaceId(), + source.getConfiguration(), + sourceDefIdToSpec.get(source.getSourceDefinitionId())); + + return source.withConfiguration(partialConfig); + }); + augmentedMap.put(ConfigSchema.SOURCE_CONNECTION, augmentedValue); + } + + if (augmentedMap.containsKey(ConfigSchema.DESTINATION_CONNECTION)) { + final Stream augmentedValue = augmentedMap.get(ConfigSchema.DESTINATION_CONNECTION) + .map(config -> { + final DestinationConnection destination = (DestinationConnection) config; + + if (!destinationDefIdToSpec.containsKey(destination.getDestinationDefinitionId())) { + throw new RuntimeException( + new ConfigNotFoundException(ConfigSchema.STANDARD_DESTINATION_DEFINITION, destination.getDestinationDefinitionId())); + } + + final var partialConfig = statefulSplitSecrets( + destination.getWorkspaceId(), + destination.getConfiguration(), + destinationDefIdToSpec.get(destination.getDestinationDefinitionId())); + + return destination.withConfiguration(partialConfig); + }); + augmentedMap.put(ConfigSchema.DESTINATION_CONNECTION, augmentedValue); + } + + configRepository.replaceAllConfigsNoSecrets(augmentedMap, dryRun); + } else { + configRepository.replaceAllConfigsNoSecrets(configs, dryRun); + } + } + +} diff --git a/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/split_secrets/MemorySecretPersistence.java b/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/split_secrets/MemorySecretPersistence.java index 51dddfc7bf1d..161c2494363f 100644 --- a/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/split_secrets/MemorySecretPersistence.java +++ b/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/split_secrets/MemorySecretPersistence.java @@ -25,4 +25,8 @@ public void write(final SecretCoordinate coordinate, final String payload) { secretMap.put(coordinate, payload); } + public Map getMap() { + return new HashMap<>(secretMap); + } + } diff --git a/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/split_secrets/SecretsHelpers.java b/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/split_secrets/SecretsHelpers.java index 0e86485caf0d..579ec254dd65 100644 --- a/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/split_secrets/SecretsHelpers.java +++ b/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/split_secrets/SecretsHelpers.java @@ -60,7 +60,12 @@ public class SecretsHelpers { public static SplitSecretConfig splitConfig(final UUID workspaceId, final JsonNode fullConfig, final 
ConnectorSpecification spec) { - return internalSplitAndUpdateConfig(UUID::randomUUID, workspaceId, (coordinate) -> Optional.empty(), Jsons.emptyObject(), fullConfig, + return internalSplitAndUpdateConfig( + UUID::randomUUID, + workspaceId, + (coordinate) -> Optional.empty(), + Jsons.emptyObject(), + fullConfig, spec.getConnectionSpecification()); } @@ -87,7 +92,12 @@ public static SplitSecretConfig splitAndUpdateConfig(final UUID workspaceId, final JsonNode newFullConfig, final ConnectorSpecification spec, final ReadOnlySecretPersistence secretReader) { - return internalSplitAndUpdateConfig(UUID::randomUUID, workspaceId, secretReader, oldPartialConfig, newFullConfig, + return internalSplitAndUpdateConfig( + UUID::randomUUID, + workspaceId, + secretReader, + oldPartialConfig, + newFullConfig, spec.getConnectionSpecification()); } @@ -332,7 +342,7 @@ private static TextNode getOrThrowSecretValueNode(final ReadOnlySecretPersistenc final var secretValue = secretPersistence.read(coordinate); if (secretValue.isEmpty()) { - throw new RuntimeException("That secret was not found in the store!"); + throw new RuntimeException(String.format("That secret was not found in the store! Coordinate: %s", coordinate.getFullCoordinate())); } return new TextNode(secretValue.get()); diff --git a/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/ConfigRepositoryE2EReadWriteTest.java b/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/ConfigRepositoryE2EReadWriteTest.java index 93c0d71e21e0..a83dd8868054 100644 --- a/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/ConfigRepositoryE2EReadWriteTest.java +++ b/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/ConfigRepositoryE2EReadWriteTest.java @@ -18,8 +18,6 @@ import io.airbyte.config.StandardSync; import io.airbyte.config.StandardSyncOperation; import io.airbyte.config.StandardWorkspace; -import io.airbyte.config.persistence.split_secrets.MemorySecretPersistence; -import io.airbyte.config.persistence.split_secrets.NoOpSecretsHydrator; import io.airbyte.db.Database; import io.airbyte.db.instance.configs.ConfigsDatabaseInstance; import io.airbyte.db.instance.configs.ConfigsDatabaseMigrator; @@ -27,7 +25,6 @@ import io.airbyte.db.instance.development.MigrationDevHelper; import io.airbyte.protocol.models.AirbyteCatalog; import io.airbyte.protocol.models.CatalogHelpers; -import io.airbyte.protocol.models.ConnectorSpecification; import io.airbyte.protocol.models.Field; import io.airbyte.protocol.models.JsonSchemaType; import io.airbyte.validation.json.JsonValidationException; @@ -60,12 +57,9 @@ public static void dbSetup() { @BeforeEach void setup() throws IOException, JsonValidationException { - final var secretPersistence = new MemorySecretPersistence(); database = new ConfigsDatabaseInstance(container.getUsername(), container.getPassword(), container.getJdbcUrl()).getAndInitialize(); configPersistence = spy(new DatabaseConfigPersistence(database)); - configRepository = - spy(new ConfigRepository(configPersistence, new NoOpSecretsHydrator(), Optional.of(secretPersistence), Optional.of(secretPersistence), - database)); + configRepository = spy(new ConfigRepository(configPersistence, database)); final ConfigsDatabaseMigrator configsDatabaseMigrator = new ConfigsDatabaseMigrator(database, DatabaseConfigPersistenceLoadDataTest.class.getName()); final DevDatabaseMigrator devDatabaseMigrator = new DevDatabaseMigrator(configsDatabaseMigrator); @@ -79,13 +73,11 @@ void setup() throws 
IOException, JsonValidationException { for (final StandardDestinationDefinition destinationDefinition : MockData.standardDestinationDefinitions()) { configRepository.writeStandardDestinationDefinition(destinationDefinition); } - final ConnectorSpecification specification = new ConnectorSpecification() - .withConnectionSpecification(Jsons.deserialize("{}")); - for (final SourceConnection connection : MockData.sourceConnections()) { - configRepository.writeSourceConnection(connection, specification); + for (final SourceConnection source : MockData.sourceConnections()) { + configRepository.writeSourceConnectionNoSecrets(source); } - for (final DestinationConnection connection : MockData.destinationConnections()) { - configRepository.writeDestinationConnection(connection, specification); + for (final DestinationConnection destination : MockData.destinationConnections()) { + configRepository.writeDestinationConnectionNoSecrets(destination); } for (final StandardSyncOperation operation : MockData.standardSyncOperations()) { configRepository.writeStandardSyncOperation(operation); @@ -128,9 +120,7 @@ void testSimpleInsertActorCatalog() throws IOException, JsonValidationException, .withName("SomeConnector") .withWorkspaceId(workspace.getWorkspaceId()) .withConfiguration(Jsons.deserialize("{}")); - final ConnectorSpecification specification = new ConnectorSpecification() - .withConnectionSpecification(Jsons.deserialize("{}")); - configRepository.writeSourceConnection(source, specification); + configRepository.writeSourceConnectionNoSecrets(source); final AirbyteCatalog actorCatalog = CatalogHelpers.createAirbyteCatalog("clothes", Field.of("name", JsonSchemaType.STRING)); configRepository.writeActorCatalogFetchEvent( diff --git a/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/ConfigRepositoryTest.java b/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/ConfigRepositoryTest.java index 9a35b2f9eb31..48fb7a0cf81e 100644 --- a/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/ConfigRepositoryTest.java +++ b/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/ConfigRepositoryTest.java @@ -25,8 +25,6 @@ import io.airbyte.config.StandardSyncState; import io.airbyte.config.StandardWorkspace; import io.airbyte.config.State; -import io.airbyte.config.persistence.split_secrets.MemorySecretPersistence; -import io.airbyte.config.persistence.split_secrets.NoOpSecretsHydrator; import io.airbyte.db.Database; import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; @@ -54,10 +52,7 @@ class ConfigRepositoryTest { void setup() { configPersistence = mock(ConfigPersistence.class); database = mock(Database.class); - final var secretPersistence = new MemorySecretPersistence(); - configRepository = - spy(new ConfigRepository(configPersistence, new NoOpSecretsHydrator(), Optional.of(secretPersistence), Optional.of(secretPersistence), - database)); + configRepository = spy(new ConfigRepository(configPersistence, database)); } @AfterEach diff --git a/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/SecretsRepositoryReaderTest.java b/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/SecretsRepositoryReaderTest.java new file mode 100644 index 000000000000..4d9c2211ac37 --- /dev/null +++ b/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/SecretsRepositoryReaderTest.java @@ -0,0 +1,119 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.config.persistence; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import com.fasterxml.jackson.databind.JsonNode; +import io.airbyte.commons.json.Jsons; +import io.airbyte.config.ConfigSchema; +import io.airbyte.config.DestinationConnection; +import io.airbyte.config.SourceConnection; +import io.airbyte.config.StandardWorkspace; +import io.airbyte.config.persistence.split_secrets.MemorySecretPersistence; +import io.airbyte.config.persistence.split_secrets.RealSecretsHydrator; +import io.airbyte.config.persistence.split_secrets.SecretCoordinate; +import io.airbyte.config.persistence.split_secrets.SecretsHydrator; +import io.airbyte.validation.json.JsonValidationException; +import java.io.IOException; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.UUID; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +class SecretsRepositoryReaderTest { + + private static final UUID UUID1 = UUID.randomUUID(); + + private static final SecretCoordinate COORDINATE = new SecretCoordinate("pointer", 2); + private static final String SECRET = "abc"; + private static final JsonNode PARTIAL_CONFIG = + Jsons.deserialize(String.format("{ \"username\": \"airbyte\", \"password\": { \"_secret\": \"%s\" } }", COORDINATE.getFullCoordinate())); + private static final JsonNode FULL_CONFIG = Jsons.deserialize(String.format("{ \"username\": \"airbyte\", \"password\": \"%s\"}", SECRET)); + + private static final SourceConnection SOURCE_WITH_PARTIAL_CONFIG = new SourceConnection() + .withSourceId(UUID1) + .withConfiguration(PARTIAL_CONFIG); + private static final SourceConnection SOURCE_WITH_FULL_CONFIG = Jsons.clone(SOURCE_WITH_PARTIAL_CONFIG) + .withConfiguration(FULL_CONFIG); + + private static final DestinationConnection DESTINATION_WITH_PARTIAL_CONFIG = new DestinationConnection() + .withDestinationId(UUID1) + .withConfiguration(PARTIAL_CONFIG); + private static final DestinationConnection DESTINATION_WITH_FULL_CONFIG = Jsons.clone(DESTINATION_WITH_PARTIAL_CONFIG) + .withConfiguration(FULL_CONFIG); + + private ConfigRepository configRepository; + private SecretsRepositoryReader secretsRepositoryReader; + private MemorySecretPersistence secretPersistence; + + @BeforeEach + void setup() { + configRepository = mock(ConfigRepository.class); + secretPersistence = new MemorySecretPersistence(); + final SecretsHydrator secretsHydrator = new RealSecretsHydrator(secretPersistence); + secretsRepositoryReader = new SecretsRepositoryReader(configRepository, secretsHydrator); + } + + @Test + void testGetSourceWithSecrets() throws JsonValidationException, ConfigNotFoundException, IOException { + secretPersistence.write(COORDINATE, SECRET); + when(configRepository.getSourceConnection(UUID1)).thenReturn(SOURCE_WITH_PARTIAL_CONFIG); + assertEquals(SOURCE_WITH_FULL_CONFIG, secretsRepositoryReader.getSourceConnectionWithSecrets(UUID1)); + } + + @Test + void testListSourcesWithSecrets() throws JsonValidationException, IOException { + secretPersistence.write(COORDINATE, SECRET); + when(configRepository.listSourceConnection()).thenReturn(List.of(SOURCE_WITH_PARTIAL_CONFIG)); + assertEquals(List.of(SOURCE_WITH_FULL_CONFIG), secretsRepositoryReader.listSourceConnectionWithSecrets()); + } + + @Test + void testGetDestinationWithSecrets() throws 
JsonValidationException, ConfigNotFoundException, IOException { + secretPersistence.write(COORDINATE, SECRET); + when(configRepository.getDestinationConnection(UUID1)).thenReturn(DESTINATION_WITH_PARTIAL_CONFIG); + assertEquals(DESTINATION_WITH_FULL_CONFIG, secretsRepositoryReader.getDestinationConnectionWithSecrets(UUID1)); + } + + @Test + void testListDestinationsWithSecrets() throws JsonValidationException, IOException { + secretPersistence.write(COORDINATE, SECRET); + when(configRepository.listDestinationConnection()).thenReturn(List.of(DESTINATION_WITH_PARTIAL_CONFIG)); + assertEquals(List.of(DESTINATION_WITH_FULL_CONFIG), secretsRepositoryReader.listDestinationConnectionWithSecrets()); + } + + @Test + void testDumpConfigsWithSecrets() throws IOException { + secretPersistence.write(COORDINATE, SECRET); + final StandardWorkspace workspace = new StandardWorkspace().withWorkspaceId(UUID.randomUUID()); + + final Map> dumpFromConfigRepository = new HashMap<>(); + dumpFromConfigRepository.put(ConfigSchema.STANDARD_WORKSPACE.name(), Stream.of(Jsons.jsonNode(workspace))); + dumpFromConfigRepository.put(ConfigSchema.SOURCE_CONNECTION.name(), Stream.of(Jsons.jsonNode(SOURCE_WITH_PARTIAL_CONFIG))); + dumpFromConfigRepository.put(ConfigSchema.DESTINATION_CONNECTION.name(), Stream.of(Jsons.jsonNode(DESTINATION_WITH_PARTIAL_CONFIG))); + when(configRepository.dumpConfigsNoSecrets()).thenReturn(dumpFromConfigRepository); + + final Map> expected = new HashMap<>(); + expected.put(ConfigSchema.STANDARD_WORKSPACE.name(), List.of(Jsons.jsonNode(workspace))); + expected.put(ConfigSchema.SOURCE_CONNECTION.name(), List.of(Jsons.jsonNode(SOURCE_WITH_FULL_CONFIG))); + expected.put(ConfigSchema.DESTINATION_CONNECTION.name(), List.of(Jsons.jsonNode(DESTINATION_WITH_FULL_CONFIG))); + + final Map> actual = secretsRepositoryReader.dumpConfigsWithSecrets() + .entrySet() + .stream() + .collect(Collectors.toMap(Entry::getKey, e -> e.getValue().collect(Collectors.toList()))); + + assertEquals(expected, actual); + } + +} diff --git a/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/SecretsRepositoryWriterTest.java b/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/SecretsRepositoryWriterTest.java new file mode 100644 index 000000000000..5cac50b2552e --- /dev/null +++ b/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/SecretsRepositoryWriterTest.java @@ -0,0 +1,215 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.config.persistence; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.reset; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.ObjectNode; +import io.airbyte.commons.json.Jsons; +import io.airbyte.config.AirbyteConfig; +import io.airbyte.config.ConfigSchema; +import io.airbyte.config.DestinationConnection; +import io.airbyte.config.SourceConnection; +import io.airbyte.config.StandardDestinationDefinition; +import io.airbyte.config.StandardSourceDefinition; +import io.airbyte.config.persistence.split_secrets.MemorySecretPersistence; +import io.airbyte.config.persistence.split_secrets.RealSecretsHydrator; +import io.airbyte.config.persistence.split_secrets.SecretCoordinate; +import io.airbyte.protocol.models.ConnectorSpecification; +import io.airbyte.validation.json.JsonValidationException; +import java.io.IOException; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Optional; +import java.util.UUID; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.ArgumentCaptor; + +class SecretsRepositoryWriterTest { + + private static final UUID UUID1 = UUID.randomUUID(); + + private static final ConnectorSpecification SPEC = new ConnectorSpecification() + .withConnectionSpecification(Jsons.deserialize( + "{ \"properties\": { \"username\": { \"type\": \"string\" }, \"password\": { \"type\": \"string\", \"airbyte_secret\": true } } }")); + + private static final String SECRET = "abc"; + private static final JsonNode FULL_CONFIG = Jsons.deserialize(String.format("{ \"username\": \"airbyte\", \"password\": \"%s\"}", SECRET)); + + private static final SourceConnection SOURCE_WITH_FULL_CONFIG = new SourceConnection() + .withSourceId(UUID1) + .withSourceDefinitionId(UUID.randomUUID()) + .withConfiguration(FULL_CONFIG); + + private static final DestinationConnection DESTINATION_WITH_FULL_CONFIG = new DestinationConnection() + .withDestinationId(UUID1) + .withConfiguration(FULL_CONFIG); + + private static final StandardSourceDefinition SOURCE_DEF = new StandardSourceDefinition() + .withSourceDefinitionId(SOURCE_WITH_FULL_CONFIG.getSourceDefinitionId()) + .withSpec(SPEC); + + private static final StandardDestinationDefinition DEST_DEF = new StandardDestinationDefinition() + .withDestinationDefinitionId(DESTINATION_WITH_FULL_CONFIG.getDestinationDefinitionId()) + .withSpec(SPEC); + + private ConfigRepository configRepository; + private MemorySecretPersistence longLivedSecretPersistence; + private MemorySecretPersistence ephemeralSecretPersistence; + private SecretsRepositoryWriter secretsRepositoryWriter; + + private RealSecretsHydrator longLivedSecretsHydrator; + private SecretsRepositoryReader longLivedSecretsRepositoryReader; + private RealSecretsHydrator ephemeralSecretsHydrator; + private SecretsRepositoryReader ephemeralSecretsRepositoryReader; + + @BeforeEach + void setup() { + configRepository = 
spy(mock(ConfigRepository.class)); + longLivedSecretPersistence = new MemorySecretPersistence(); + ephemeralSecretPersistence = new MemorySecretPersistence(); + + secretsRepositoryWriter = new SecretsRepositoryWriter( + configRepository, + Optional.of(longLivedSecretPersistence), + Optional.of(ephemeralSecretPersistence)); + + longLivedSecretsHydrator = new RealSecretsHydrator(longLivedSecretPersistence); + longLivedSecretsRepositoryReader = new SecretsRepositoryReader(configRepository, longLivedSecretsHydrator); + + ephemeralSecretsHydrator = new RealSecretsHydrator(ephemeralSecretPersistence); + ephemeralSecretsRepositoryReader = new SecretsRepositoryReader(configRepository, ephemeralSecretsHydrator); + } + + @Test + void testWriteSourceConnection() throws JsonValidationException, IOException, ConfigNotFoundException { + doThrow(ConfigNotFoundException.class).when(configRepository).getSourceConnection(UUID1); + + secretsRepositoryWriter.writeSourceConnection(SOURCE_WITH_FULL_CONFIG, SPEC); + final SecretCoordinate coordinate = getCoordinateFromSecretsStore(longLivedSecretPersistence); + + assertNotNull(coordinate); + final SourceConnection partialSource = injectCoordinateIntoSource(coordinate.getFullCoordinate()); + verify(configRepository).writeSourceConnectionNoSecrets(partialSource); + final Optional persistedSecret = longLivedSecretPersistence.read(coordinate); + assertTrue(persistedSecret.isPresent()); + assertEquals(SECRET, persistedSecret.get()); + + // verify that the round trip works. + reset(configRepository); + when(configRepository.getSourceConnection(UUID1)).thenReturn(partialSource); + assertEquals(SOURCE_WITH_FULL_CONFIG, longLivedSecretsRepositoryReader.getSourceConnectionWithSecrets(UUID1)); + } + + @Test + void testWriteDestinationConnection() throws JsonValidationException, IOException, ConfigNotFoundException { + doThrow(ConfigNotFoundException.class).when(configRepository).getDestinationConnection(UUID1); + + secretsRepositoryWriter.writeDestinationConnection(DESTINATION_WITH_FULL_CONFIG, SPEC); + final SecretCoordinate coordinate = getCoordinateFromSecretsStore(longLivedSecretPersistence); + + assertNotNull(coordinate); + final DestinationConnection partialDestination = injectCoordinateIntoDestination(coordinate.getFullCoordinate()); + verify(configRepository).writeDestinationConnectionNoSecrets(partialDestination); + final Optional persistedSecret = longLivedSecretPersistence.read(coordinate); + assertTrue(persistedSecret.isPresent()); + assertEquals(SECRET, persistedSecret.get()); + + // verify that the round trip works. + reset(configRepository); + when(configRepository.getDestinationConnection(UUID1)).thenReturn(partialDestination); + assertEquals(DESTINATION_WITH_FULL_CONFIG, longLivedSecretsRepositoryReader.getDestinationConnectionWithSecrets(UUID1)); + } + + @Test + void testStatefulSplitEphemeralSecrets() throws JsonValidationException, IOException, ConfigNotFoundException { + final JsonNode split = secretsRepositoryWriter.statefulSplitEphemeralSecrets( + SOURCE_WITH_FULL_CONFIG.getConfiguration(), + SPEC); + final SecretCoordinate coordinate = getCoordinateFromSecretsStore(ephemeralSecretPersistence); + + assertNotNull(coordinate); + final Optional persistedSecret = ephemeralSecretPersistence.read(coordinate); + assertTrue(persistedSecret.isPresent()); + assertEquals(SECRET, persistedSecret.get()); + + // verify that the round trip works. 
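+    // (Clarifying note, not part of the original patch: the round trip is split-then-hydrate, so
+    // hydrating the partial config against the ephemeral store must reproduce the original full config.)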
+ assertEquals(SOURCE_WITH_FULL_CONFIG.getConfiguration(), ephemeralSecretsHydrator.hydrate(split)); + } + + @SuppressWarnings("unchecked") + @Test + void testReplaceAllConfigs() throws IOException { + final Map<AirbyteConfig, Stream<?>> configs = new HashMap<>(); + configs.put(ConfigSchema.STANDARD_SOURCE_DEFINITION, Stream.of(Jsons.clone(SOURCE_DEF))); + configs.put(ConfigSchema.STANDARD_DESTINATION_DEFINITION, Stream.of(Jsons.clone(DEST_DEF))); + configs.put(ConfigSchema.SOURCE_CONNECTION, Stream.of(Jsons.clone(SOURCE_WITH_FULL_CONFIG))); + configs.put(ConfigSchema.DESTINATION_CONNECTION, Stream.of(Jsons.clone(DESTINATION_WITH_FULL_CONFIG))); + + secretsRepositoryWriter.replaceAllConfigs(configs, false); + + final ArgumentCaptor<Map<AirbyteConfig, Stream<?>>> argument = ArgumentCaptor.forClass(Map.class); + verify(configRepository).replaceAllConfigsNoSecrets(argument.capture(), eq(false)); + final Map<AirbyteConfig, List<?>> actual = argument.getValue().entrySet() + .stream() + .collect(Collectors.toMap(Entry::getKey, e -> e.getValue().collect(Collectors.toList()))); + + assertEquals(SOURCE_DEF, actual.get(ConfigSchema.STANDARD_SOURCE_DEFINITION).get(0)); + assertEquals(DEST_DEF, actual.get(ConfigSchema.STANDARD_DESTINATION_DEFINITION).get(0)); + + // we can't easily get the pointer, so verify the secret has been stripped out and then make sure + // the rest of the object meets expectations. + final SourceConnection actualSource = (SourceConnection) actual.get(ConfigSchema.SOURCE_CONNECTION).get(0); + assertTrue(actualSource.getConfiguration().get("password").has("_secret")); + ((ObjectNode) actualSource.getConfiguration()).remove("password"); + final SourceConnection expectedSource = Jsons.clone(SOURCE_WITH_FULL_CONFIG); + ((ObjectNode) expectedSource.getConfiguration()).remove("password"); + assertEquals(expectedSource, actualSource); + + final DestinationConnection actualDest = (DestinationConnection) actual.get(ConfigSchema.DESTINATION_CONNECTION).get(0); + assertTrue(actualDest.getConfiguration().get("password").has("_secret")); + ((ObjectNode) actualDest.getConfiguration()).remove("password"); + final DestinationConnection expectedDest = Jsons.clone(DESTINATION_WITH_FULL_CONFIG); + ((ObjectNode) expectedDest.getConfiguration()).remove("password"); + assertEquals(expectedDest, actualDest); + } +
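Note on the storage contract these tests pin down: on write, each field marked airbyte_secret in the spec is persisted to the SecretPersistence, and the config that reaches the config database carries only a { "_secret": "<coordinate>" } pointer in its place. A minimal sketch of that split for a single top-level field, assuming the write(coordinate, value) counterpart to the read(coordinate) call used above; splitTopLevelSecret is a hypothetical helper, and the real logic in SecretsRepositoryWriter also handles nested objects and arrays:

    import com.fasterxml.jackson.databind.JsonNode;
    import com.fasterxml.jackson.databind.node.ObjectNode;
    import io.airbyte.config.persistence.split_secrets.SecretCoordinate;
    import io.airbyte.config.persistence.split_secrets.SecretPersistence;

    static JsonNode splitTopLevelSecret(final ObjectNode fullConfig,
                                        final String secretField,
                                        final SecretPersistence store,
                                        final SecretCoordinate coordinate) {
      // persist the raw value under the coordinate...
      store.write(coordinate, fullConfig.get(secretField).asText());
      // ...and return a config that holds only the pointer, the same
      // { "_secret": "<coordinate>" } shape injectCoordinate() builds below.
      final ObjectNode partialConfig = fullConfig.deepCopy();
      partialConfig.putObject(secretField).put("_secret", coordinate.getFullCoordinate());
      return partialConfig;
    }

Hydration is the inverse: a SecretsHydrator resolves each pointer through the persistence, which is why the readers built in setup() can reassemble SOURCE_WITH_FULL_CONFIG from the partial config plus the store.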
+ // this only works if the secrets store has one secret. + private SecretCoordinate getCoordinateFromSecretsStore(final MemorySecretPersistence secretPersistence) { + return secretPersistence.getMap() + .keySet() + .stream() + .findFirst() + .orElse(null); + } + + private static JsonNode injectCoordinate(final String coordinate) { + return Jsons.deserialize(String.format("{ \"username\": \"airbyte\", \"password\": { \"_secret\": \"%s\" } }", coordinate)); + } + + private static SourceConnection injectCoordinateIntoSource(final String coordinate) { + return Jsons.clone(SOURCE_WITH_FULL_CONFIG).withConfiguration(injectCoordinate(coordinate)); + } + + private static DestinationConnection injectCoordinateIntoDestination(final String coordinate) { + return Jsons.clone(DESTINATION_WITH_FULL_CONFIG).withConfiguration(injectCoordinate(coordinate)); + } + +} diff --git a/airbyte-scheduler/app/src/main/java/io/airbyte/scheduler/app/SchedulerApp.java b/airbyte-scheduler/app/src/main/java/io/airbyte/scheduler/app/SchedulerApp.java index b01064fbf64e..75b129079bda 100644 --- a/airbyte-scheduler/app/src/main/java/io/airbyte/scheduler/app/SchedulerApp.java +++ b/airbyte-scheduler/app/src/main/java/io/airbyte/scheduler/app/SchedulerApp.java @@ -24,8 +24,6 @@ import io.airbyte.config.persistence.ConfigPersistence; import io.airbyte.config.persistence.ConfigRepository; import io.airbyte.config.persistence.DatabaseConfigPersistence; -import io.airbyte.config.persistence.split_secrets.SecretPersistence; -import io.airbyte.config.persistence.split_secrets.SecretsHydrator; import io.airbyte.db.Database; import io.airbyte.db.instance.configs.ConfigsDatabaseInstance; import io.airbyte.db.instance.jobs.JobsDatabaseInstance; @@ -47,7 +45,6 @@ import java.time.Duration; import java.time.Instant; import java.util.Map; -import java.util.Optional; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; @@ -252,11 +249,7 @@ public static void main(final String[] args) throws IOException, InterruptedExce configs.getConfigDatabaseUrl()) .getInitialized(); final ConfigPersistence configPersistence = DatabaseConfigPersistence.createWithValidation(configDatabase); - final Optional<SecretPersistence> secretPersistence = SecretPersistence.getLongLived(configs); - final Optional<SecretPersistence> ephemeralSecretPersistence = SecretPersistence.getEphemeral(configs); - final SecretsHydrator secretsHydrator = SecretPersistence.getSecretsHydrator(configs); - final ConfigRepository configRepository = - new ConfigRepository(configPersistence, secretsHydrator, secretPersistence, ephemeralSecretPersistence, configDatabase); + final ConfigRepository configRepository = new ConfigRepository(configPersistence, configDatabase); final JobPersistence jobPersistence = new DefaultJobPersistence(jobDatabase); final JobCleaner jobCleaner = new JobCleaner( diff --git a/airbyte-scheduler/persistence/src/test/java/io/airbyte/scheduler/persistence/WorkspaceHelperTest.java b/airbyte-scheduler/persistence/src/test/java/io/airbyte/scheduler/persistence/WorkspaceHelperTest.java index 25c64bcf4c23..b54aaa796dcf 100644 --- a/airbyte-scheduler/persistence/src/test/java/io/airbyte/scheduler/persistence/WorkspaceHelperTest.java +++ b/airbyte-scheduler/persistence/src/test/java/io/airbyte/scheduler/persistence/WorkspaceHelperTest.java @@ -26,7 +26,6 @@ import io.airbyte.config.persistence.ConfigNotFoundException; import io.airbyte.config.persistence.ConfigRepository; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; -import
io.airbyte.protocol.models.ConnectorSpecification; import io.airbyte.scheduler.models.Job; import io.airbyte.scheduler.models.JobStatus; import io.airbyte.validation.json.JsonValidationException; @@ -77,7 +76,6 @@ class WorkspaceHelperTest { ConfigRepository configRepository; JobPersistence jobPersistence; WorkspaceHelper workspaceHelper; - ConnectorSpecification emptyConnectorSpec; @BeforeEach public void setup() throws IOException, JsonValidationException, ConfigNotFoundException { @@ -94,9 +92,6 @@ public void setup() throws IOException, JsonValidationException, ConfigNotFoundE when(configRepository.getStandardSyncOperation(not(eq(OPERATION_ID)))).thenThrow(ConfigNotFoundException.class); workspaceHelper = new WorkspaceHelper(configRepository, jobPersistence); - - emptyConnectorSpec = mock(ConnectorSpecification.class); - when(emptyConnectorSpec.getConnectionSpecification()).thenReturn(Jsons.emptyObject()); } @Test diff --git a/airbyte-server/src/main/java/io/airbyte/server/ConfigDumpExporter.java b/airbyte-server/src/main/java/io/airbyte/server/ConfigDumpExporter.java index 557d37a8ce9b..2f421ebcc7a4 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/ConfigDumpExporter.java +++ b/airbyte-server/src/main/java/io/airbyte/server/ConfigDumpExporter.java @@ -16,6 +16,7 @@ import io.airbyte.config.StandardSync; import io.airbyte.config.persistence.ConfigNotFoundException; import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.config.persistence.SecretsRepositoryReader; import io.airbyte.scheduler.persistence.JobPersistence; import io.airbyte.scheduler.persistence.WorkspaceHelper; import io.airbyte.validation.json.JsonValidationException; @@ -53,11 +54,16 @@ public class ConfigDumpExporter { private static final String CONFIG_FOLDER_NAME = "airbyte_config"; private static final String VERSION_FILE_NAME = "VERSION"; private final ConfigRepository configRepository; + private final SecretsRepositoryReader secretsRepositoryReader; private final JobPersistence jobPersistence; private final WorkspaceHelper workspaceHelper; - public ConfigDumpExporter(final ConfigRepository configRepository, final JobPersistence jobPersistence, final WorkspaceHelper workspaceHelper) { + public ConfigDumpExporter(final ConfigRepository configRepository, + final SecretsRepositoryReader secretsRepositoryReader, + final JobPersistence jobPersistence, + final WorkspaceHelper workspaceHelper) { this.configRepository = configRepository; + this.secretsRepositoryReader = secretsRepositoryReader; this.jobPersistence = jobPersistence; this.workspaceHelper = workspaceHelper; } @@ -83,7 +89,7 @@ private void exportVersionFile(final Path tempFolder) throws IOException { } private void dumpConfigsDatabase(final Path parentFolder) throws IOException { - for (final Map.Entry<String, Stream<JsonNode>> configEntry : configRepository.dumpConfigs().entrySet()) { + for (final Map.Entry<String, Stream<JsonNode>> configEntry : secretsRepositoryReader.dumpConfigsWithSecrets().entrySet()) { writeConfigsToArchive(parentFolder, configEntry.getKey(), configEntry.getValue()); } } @@ -132,7 +138,7 @@ private void exportConfigsDatabase(final Path parentFolder, final UUID workspace final Collection<SourceConnection> sourceConnections = writeConfigsToArchive( parentFolder, ConfigSchema.SOURCE_CONNECTION.name(), - configRepository::listSourceConnectionWithSecrets, + secretsRepositoryReader::listSourceConnectionWithSecrets, (sourceConnection) -> workspaceId.equals(sourceConnection.getWorkspaceId())); writeConfigsToArchive(parentFolder, ConfigSchema.STANDARD_SOURCE_DEFINITION.name(), () ->
listSourceDefinition(sourceConnections), @@ -141,7 +147,7 @@ private void exportConfigsDatabase(final Path parentFolder, final UUID workspace final Collection<DestinationConnection> destinationConnections = writeConfigsToArchive( parentFolder, ConfigSchema.DESTINATION_CONNECTION.name(), - configRepository::listDestinationConnectionWithSecrets, + secretsRepositoryReader::listDestinationConnectionWithSecrets, (destinationConnection) -> workspaceId.equals(destinationConnection.getWorkspaceId())); writeConfigsToArchive(parentFolder, ConfigSchema.STANDARD_DESTINATION_DEFINITION.name(), () -> listDestinationDefinition(destinationConnections), diff --git a/airbyte-server/src/main/java/io/airbyte/server/ConfigDumpImporter.java b/airbyte-server/src/main/java/io/airbyte/server/ConfigDumpImporter.java index ceffb05cc920..0d8627198df8 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/ConfigDumpImporter.java +++ b/airbyte-server/src/main/java/io/airbyte/server/ConfigDumpImporter.java @@ -26,6 +26,7 @@ import io.airbyte.config.persistence.ConfigNotFoundException; import io.airbyte.config.persistence.ConfigPersistence; import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.config.persistence.SecretsRepositoryWriter; import io.airbyte.scheduler.persistence.DefaultJobPersistence; import io.airbyte.scheduler.persistence.JobPersistence; import io.airbyte.scheduler.persistence.WorkspaceHelper; @@ -66,20 +67,23 @@ public class ConfigDumpImporter { private static final Path TMP_AIRBYTE_STAGED_RESOURCES = Path.of("/tmp/airbyte_staged_resources"); private final ConfigRepository configRepository; + private final SecretsRepositoryWriter secretsRepositoryWriter; private final WorkspaceHelper workspaceHelper; private final JsonSchemaValidator jsonSchemaValidator; private final JobPersistence jobPersistence; private final boolean importDefinitions; public ConfigDumpImporter(final ConfigRepository configRepository, + final SecretsRepositoryWriter secretsRepositoryWriter, final JobPersistence jobPersistence, final WorkspaceHelper workspaceHelper, final boolean importDefinitions) { - this(configRepository, jobPersistence, workspaceHelper, new JsonSchemaValidator(), importDefinitions); + this(configRepository, secretsRepositoryWriter, jobPersistence, workspaceHelper, new JsonSchemaValidator(), importDefinitions); } @VisibleForTesting public ConfigDumpImporter(final ConfigRepository configRepository, + final SecretsRepositoryWriter secretsRepositoryWriter, final JobPersistence jobPersistence, final WorkspaceHelper workspaceHelper, final JsonSchemaValidator jsonSchemaValidator, @@ -87,6 +91,7 @@ public ConfigDumpImporter(final ConfigRepository configRepository, this.jsonSchemaValidator = jsonSchemaValidator; this.jobPersistence = jobPersistence; this.configRepository = configRepository; + this.secretsRepositoryWriter = secretsRepositoryWriter; this.workspaceHelper = workspaceHelper; this.importDefinitions = importDefinitions; } @@ -128,7 +133,7 @@ public void importDataWithSeed(final AirbyteVersion targetVersion, final File ar // 4. Import Configs and update connector definitions importConfigsFromArchive(sourceRoot, false); - configRepository.loadData(seedPersistence); + configRepository.loadDataNoSecrets(seedPersistence); // 5.
Set DB version LOGGER.info("Setting the DB Airbyte version to : " + targetVersion); @@ -182,7 +187,7 @@ private void importConfigsFromArchive(final Path sourceRoot, final boolean dryRu final ConfigSchema configSchema = configSchemaOptional.get(); data.put(configSchema, readConfigsFromArchive(sourceRoot, configSchema)); } - configRepository.replaceAllConfigs(data, dryRun); + secretsRepositoryWriter.replaceAllConfigs(data, dryRun); } private Stream<JsonNode> readConfigsFromArchive(final Path storageRoot, final ConfigSchema schemaType) @@ -362,7 +367,7 @@ private void importConfigsIntoWorkspace(final Path sourceRoot, final UUID wo if (sourceDefinition.getTombstone() != null && sourceDefinition.getTombstone()) { return; } - configRepository.writeSourceConnection(sourceConnection, sourceDefinition.getSpec()); + secretsRepositoryWriter.writeSourceConnection(sourceConnection, sourceDefinition.getSpec()); } catch (final ConfigNotFoundException e) { return; } @@ -393,7 +398,7 @@ private void importConfigsIntoWorkspace(final Path sourceRoot, final UUID wo if (destinationDefinition.getTombstone() != null && destinationDefinition.getTombstone()) { return; } - configRepository.writeDestinationConnection(destinationConnection, destinationDefinition.getSpec()); + secretsRepositoryWriter.writeDestinationConnection(destinationConnection, destinationDefinition.getSpec()); } catch (final ConfigNotFoundException e) { return; } diff --git a/airbyte-server/src/main/java/io/airbyte/server/ConfigurationApiFactory.java b/airbyte-server/src/main/java/io/airbyte/server/ConfigurationApiFactory.java index 9139c8274b8e..53ab8a3ea48d 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/ConfigurationApiFactory.java +++ b/airbyte-server/src/main/java/io/airbyte/server/ConfigurationApiFactory.java @@ -12,6 +12,8 @@ import io.airbyte.config.helpers.LogConfigs; import io.airbyte.config.persistence.ConfigPersistence; import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.config.persistence.SecretsRepositoryReader; +import io.airbyte.config.persistence.SecretsRepositoryWriter; import io.airbyte.db.Database; import io.airbyte.scheduler.client.EventRunner; import io.airbyte.scheduler.client.SchedulerJobClient; @@ -32,6 +34,8 @@ public class ConfigurationApiFactory implements Factory<ConfigurationApi> { private static ConfigRepository configRepository; private static JobPersistence jobPersistence; private static ConfigPersistence seed; + private static SecretsRepositoryReader secretsRepositoryReader; + private static SecretsRepositoryWriter secretsRepositoryWriter; private static SchedulerJobClient schedulerJobClient; private static SynchronousSchedulerClient synchronousSchedulerClient; private static FileTtlManager archiveTtlManager; @@ -52,6 +56,8 @@ public static void setValues( final WorkflowServiceStubs temporalService, final ConfigRepository configRepository, + final SecretsRepositoryReader secretsRepositoryReader, + final SecretsRepositoryWriter secretsRepositoryWriter, final JobPersistence jobPersistence, final ConfigPersistence seed, final SchedulerJobClient schedulerJobClient, @@ -73,6 +79,8 @@ public static void setValues( ConfigurationApiFactory.configRepository = configRepository; ConfigurationApiFactory.jobPersistence = jobPersistence; ConfigurationApiFactory.seed = seed; + ConfigurationApiFactory.secretsRepositoryReader = secretsRepositoryReader; + ConfigurationApiFactory.secretsRepositoryWriter = secretsRepositoryWriter; ConfigurationApiFactory.schedulerJobClient = schedulerJobClient; ConfigurationApiFactory.synchronousSchedulerClient = synchronousSchedulerClient; ConfigurationApiFactory.archiveTtlManager = archiveTtlManager; @@ -100,6 +108,8 @@ public ConfigurationApi provide() { ConfigurationApiFactory.configRepository, ConfigurationApiFactory.jobPersistence, ConfigurationApiFactory.seed, + ConfigurationApiFactory.secretsRepositoryReader, + ConfigurationApiFactory.secretsRepositoryWriter, ConfigurationApiFactory.schedulerJobClient, ConfigurationApiFactory.synchronousSchedulerClient, ConfigurationApiFactory.archiveTtlManager,
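The factory change above is one half of the wiring; the ServerApp hunk below is the other. At startup the server now builds a single plain ConfigRepository and layers secret handling on top of it, then threads the pair through the factory into every handler. A condensed sketch using only constructors that appear in this patch (the real call sites pass many more collaborators, elided here; schemaValidator and connectionsHandler stand in for them):

    final ConfigRepository configRepository = new ConfigRepository(configPersistence, configDatabase);
    final SecretsRepositoryReader secretsRepositoryReader =
        new SecretsRepositoryReader(configRepository, secretsHydrator);
    final SecretsRepositoryWriter secretsRepositoryWriter =
        new SecretsRepositoryWriter(configRepository, secretPersistence, ephemeralSecretPersistence);
    // handlers receive both: reads that need raw secrets go through the reader,
    // writes that must strip secrets out go through the writer
    final SourceHandler sourceHandler = new SourceHandler(
        configRepository, secretsRepositoryReader, secretsRepositoryWriter, schemaValidator, connectionsHandler);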
diff --git a/airbyte-server/src/main/java/io/airbyte/server/ServerApp.java b/airbyte-server/src/main/java/io/airbyte/server/ServerApp.java index 3f7128e233c7..5839dd76719e 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/ServerApp.java +++ b/airbyte-server/src/main/java/io/airbyte/server/ServerApp.java @@ -20,6 +20,8 @@ import io.airbyte.config.persistence.ConfigPersistence; import io.airbyte.config.persistence.ConfigRepository; import io.airbyte.config.persistence.DatabaseConfigPersistence; +import io.airbyte.config.persistence.SecretsRepositoryReader; +import io.airbyte.config.persistence.SecretsRepositoryWriter; import io.airbyte.config.persistence.split_secrets.SecretPersistence; import io.airbyte.config.persistence.split_secrets.SecretsHydrator; import io.airbyte.db.Database; @@ -165,8 +167,10 @@ public static ServerRunnable getServer(final ServerFactory apiFactory, final Con final SecretsHydrator secretsHydrator = SecretPersistence.getSecretsHydrator(configs); final Optional<SecretPersistence> secretPersistence = SecretPersistence.getLongLived(configs); final Optional<SecretPersistence> ephemeralSecretPersistence = SecretPersistence.getEphemeral(configs); - final ConfigRepository configRepository = - new ConfigRepository(configPersistence, secretsHydrator, secretPersistence, ephemeralSecretPersistence, configDatabase); + final ConfigRepository configRepository = new ConfigRepository(configPersistence, configDatabase); + final SecretsRepositoryReader secretsRepositoryReader = new SecretsRepositoryReader(configRepository, secretsHydrator); + final SecretsRepositoryWriter secretsRepositoryWriter = + new SecretsRepositoryWriter(configRepository, secretPersistence, ephemeralSecretPersistence); LOGGER.info("Creating jobs persistence..."); final Database jobDatabase = jobsDatabaseInstance.getInitialized(); @@ -204,6 +208,8 @@ public static ServerRunnable getServer(final ServerFactory apiFactory, final Con syncSchedulerClient, temporalService, configRepository, + secretsRepositoryReader, + secretsRepositoryWriter, jobPersistence, seed, configDatabase, diff --git a/airbyte-server/src/main/java/io/airbyte/server/ServerFactory.java b/airbyte-server/src/main/java/io/airbyte/server/ServerFactory.java index 1990d3cc9e05..881e17f686c9 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/ServerFactory.java +++ b/airbyte-server/src/main/java/io/airbyte/server/ServerFactory.java @@ -12,6 +12,8 @@ import io.airbyte.config.helpers.LogConfigs; import io.airbyte.config.persistence.ConfigPersistence; import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.config.persistence.SecretsRepositoryReader; +import io.airbyte.config.persistence.SecretsRepositoryWriter; import io.airbyte.db.Database; import io.airbyte.scheduler.client.EventRunner; import io.airbyte.scheduler.client.SchedulerJobClient; @@ -32,6 +34,8 @@ ServerRunnable create(SchedulerJobClient schedulerJobClient, SynchronousSchedulerClient
cachingSchedulerClient, WorkflowServiceStubs temporalService, ConfigRepository configRepository, + SecretsRepositoryReader secretsRepositoryReader, + SecretsRepositoryWriter secretsRepositoryWriter, JobPersistence jobPersistence, ConfigPersistence seed, Database configsDatabase, @@ -54,6 +58,8 @@ public ServerRunnable create(final SchedulerJobClient schedulerJobClient, final SynchronousSchedulerClient synchronousSchedulerClient, final WorkflowServiceStubs temporalService, final ConfigRepository configRepository, + final SecretsRepositoryReader secretsRepositoryReader, + final SecretsRepositoryWriter secretsRepositoryWriter, final JobPersistence jobPersistence, final ConfigPersistence seed, final Database configsDatabase, @@ -72,6 +78,8 @@ public ServerRunnable create(final SchedulerJobClient schedulerJobClient, ConfigurationApiFactory.setValues( temporalService, configRepository, + secretsRepositoryReader, + secretsRepositoryWriter, jobPersistence, seed, schedulerJobClient, diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/ConfigurationApi.java b/airbyte-server/src/main/java/io/airbyte/server/apis/ConfigurationApi.java index fceb234a81e4..efaec668edd9 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/ConfigurationApi.java +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/ConfigurationApi.java @@ -91,6 +91,8 @@ import io.airbyte.config.persistence.ConfigNotFoundException; import io.airbyte.config.persistence.ConfigPersistence; import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.config.persistence.SecretsRepositoryReader; +import io.airbyte.config.persistence.SecretsRepositoryWriter; import io.airbyte.db.Database; import io.airbyte.scheduler.client.EventRunner; import io.airbyte.scheduler.client.SchedulerJobClient; @@ -153,6 +155,8 @@ public class ConfigurationApi implements io.airbyte.api.V1Api { public ConfigurationApi(final ConfigRepository configRepository, final JobPersistence jobPersistence, final ConfigPersistence seed, + final SecretsRepositoryReader secretsRepositoryReader, + final SecretsRepositoryWriter secretsRepositoryWriter, final SchedulerJobClient schedulerJobClient, final SynchronousSchedulerClient synchronousSchedulerClient, final FileTtlManager archiveTtlManager, @@ -184,6 +188,8 @@ public ConfigurationApi(final ConfigRepository configRepository, schedulerHandler = new SchedulerHandler( configRepository, + secretsRepositoryReader, + secretsRepositoryWriter, schedulerJobClient, synchronousSchedulerClient, jobPersistence, @@ -198,10 +204,20 @@ public ConfigurationApi(final ConfigRepository configRepository, eventRunner, featureFlags, workerConfigs); - sourceHandler = new SourceHandler(configRepository, schemaValidator, connectionsHandler); + sourceHandler = new SourceHandler( + configRepository, + secretsRepositoryReader, + secretsRepositoryWriter, + schemaValidator, + connectionsHandler); sourceDefinitionsHandler = new SourceDefinitionsHandler(configRepository, synchronousSchedulerClient, sourceHandler); operationsHandler = new OperationsHandler(configRepository); - destinationHandler = new DestinationHandler(configRepository, schemaValidator, connectionsHandler); + destinationHandler = new DestinationHandler( + configRepository, + secretsRepositoryReader, + secretsRepositoryWriter, + schemaValidator, + connectionsHandler); destinationDefinitionsHandler = new DestinationDefinitionsHandler(configRepository, synchronousSchedulerClient, destinationHandler); workspacesHandler = new WorkspacesHandler(configRepository, 
connectionsHandler, destinationHandler, sourceHandler); jobHistoryHandler = new JobHistoryHandler(jobPersistence, workerEnvironment, logConfigs, connectionsHandler, sourceHandler, @@ -220,6 +236,8 @@ public ConfigurationApi(final ConfigRepository configRepository, archiveHandler = new ArchiveHandler( airbyteVersion, configRepository, + secretsRepositoryReader, + secretsRepositoryWriter, jobPersistence, seed, workspaceHelper, diff --git a/airbyte-server/src/main/java/io/airbyte/server/converters/ConfigurationUpdate.java b/airbyte-server/src/main/java/io/airbyte/server/converters/ConfigurationUpdate.java index 99861c0d9529..501c1e01b6c6 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/converters/ConfigurationUpdate.java +++ b/airbyte-server/src/main/java/io/airbyte/server/converters/ConfigurationUpdate.java @@ -12,6 +12,7 @@ import io.airbyte.config.StandardSourceDefinition; import io.airbyte.config.persistence.ConfigNotFoundException; import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.config.persistence.SecretsRepositoryReader; import io.airbyte.config.persistence.split_secrets.JsonSecretsProcessor; import io.airbyte.protocol.models.ConnectorSpecification; import io.airbyte.validation.json.JsonValidationException; @@ -21,21 +22,25 @@ public class ConfigurationUpdate { private final ConfigRepository configRepository; + private final SecretsRepositoryReader secretsRepositoryReader; private final JsonSecretsProcessor secretsProcessor; - public ConfigurationUpdate(final ConfigRepository configRepository) { - this(configRepository, new JsonSecretsProcessor()); + public ConfigurationUpdate(final ConfigRepository configRepository, final SecretsRepositoryReader secretsRepositoryReader) { + this(configRepository, secretsRepositoryReader, new JsonSecretsProcessor()); } - public ConfigurationUpdate(final ConfigRepository configRepository, final JsonSecretsProcessor secretsProcessor) { + public ConfigurationUpdate(final ConfigRepository configRepository, + final SecretsRepositoryReader secretsRepositoryReader, + final JsonSecretsProcessor secretsProcessor) { this.configRepository = configRepository; + this.secretsRepositoryReader = secretsRepositoryReader; this.secretsProcessor = secretsProcessor; } public SourceConnection source(final UUID sourceId, final String sourceName, final JsonNode newConfiguration) throws ConfigNotFoundException, IOException, JsonValidationException { // get existing source - final SourceConnection persistedSource = configRepository.getSourceConnectionWithSecrets(sourceId); + final SourceConnection persistedSource = secretsRepositoryReader.getSourceConnectionWithSecrets(sourceId); persistedSource.setName(sourceName); // get spec final StandardSourceDefinition sourceDefinition = configRepository.getStandardSourceDefinition(persistedSource.getSourceDefinitionId()); @@ -52,7 +57,7 @@ public SourceConnection source(final UUID sourceId, final String sourceName, fin public DestinationConnection destination(final UUID destinationId, final String destName, final JsonNode newConfiguration) throws ConfigNotFoundException, IOException, JsonValidationException { // get existing destination - final DestinationConnection persistedDestination = configRepository.getDestinationConnectionWithSecrets(destinationId); + final DestinationConnection persistedDestination = secretsRepositoryReader.getDestinationConnectionWithSecrets(destinationId); persistedDestination.setName(destName); // get spec final StandardDestinationDefinition destinationDefinition = 
configRepository diff --git a/airbyte-server/src/main/java/io/airbyte/server/handlers/ArchiveHandler.java b/airbyte-server/src/main/java/io/airbyte/server/handlers/ArchiveHandler.java index 62b3c4a32725..b44ffb521da5 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/handlers/ArchiveHandler.java +++ b/airbyte-server/src/main/java/io/airbyte/server/handlers/ArchiveHandler.java @@ -14,6 +14,8 @@ import io.airbyte.config.persistence.ConfigNotFoundException; import io.airbyte.config.persistence.ConfigPersistence; import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.config.persistence.SecretsRepositoryReader; +import io.airbyte.config.persistence.SecretsRepositoryWriter; import io.airbyte.scheduler.persistence.JobPersistence; import io.airbyte.scheduler.persistence.WorkspaceHelper; import io.airbyte.server.ConfigDumpExporter; @@ -38,6 +40,8 @@ public class ArchiveHandler { public ArchiveHandler(final AirbyteVersion version, final ConfigRepository configRepository, + final SecretsRepositoryReader secretsRepositoryReader, + final SecretsRepositoryWriter secretsRepositoryWriter, final JobPersistence jobPersistence, final ConfigPersistence seed, final WorkspaceHelper workspaceHelper, @@ -46,8 +50,8 @@ public ArchiveHandler(final AirbyteVersion version, this( version, fileTtlManager, - new ConfigDumpExporter(configRepository, jobPersistence, workspaceHelper), - new ConfigDumpImporter(configRepository, jobPersistence, workspaceHelper, importDefinitions), + new ConfigDumpExporter(configRepository, secretsRepositoryReader, jobPersistence, workspaceHelper), + new ConfigDumpImporter(configRepository, secretsRepositoryWriter, jobPersistence, workspaceHelper, importDefinitions), seed); }
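The handler diffs from here on repeat one three-step pattern. A sketch with abbreviated names (destinationId, spec, and destinationConnection stand in for the surrounding method context in each handler):

    // 1. reads that need raw secrets go through the reader
    final DestinationConnection withSecrets =
        secretsRepositoryReader.getDestinationConnectionWithSecrets(destinationId);
    // 2. anything surfaced over the API is masked first
    final JsonNode maskedConfig = secretsProcessor.maskSecrets(
        withSecrets.getConfiguration(), spec.getConnectionSpecification());
    // 3. persistence goes through the writer, which splits secrets out
    secretsRepositoryWriter.writeDestinationConnection(destinationConnection, spec);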
diff --git a/airbyte-server/src/main/java/io/airbyte/server/handlers/DestinationHandler.java b/airbyte-server/src/main/java/io/airbyte/server/handlers/DestinationHandler.java index 9347e30d7351..7a97a76f5121 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/handlers/DestinationHandler.java +++ b/airbyte-server/src/main/java/io/airbyte/server/handlers/DestinationHandler.java @@ -21,6 +21,8 @@ import io.airbyte.config.StandardDestinationDefinition; import io.airbyte.config.persistence.ConfigNotFoundException; import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.config.persistence.SecretsRepositoryReader; +import io.airbyte.config.persistence.SecretsRepositoryWriter; import io.airbyte.config.persistence.split_secrets.JsonSecretsProcessor; import io.airbyte.protocol.models.ConnectorSpecification; import io.airbyte.server.converters.ConfigurationUpdate; @@ -40,18 +42,24 @@ public class DestinationHandler { private final ConnectionsHandler connectionsHandler; private final Supplier<UUID> uuidGenerator; private final ConfigRepository configRepository; + private final SecretsRepositoryReader secretsRepositoryReader; + private final SecretsRepositoryWriter secretsRepositoryWriter; private final JsonSchemaValidator validator; private final ConfigurationUpdate configurationUpdate; private final JsonSecretsProcessor secretsProcessor; @VisibleForTesting DestinationHandler(final ConfigRepository configRepository, + final SecretsRepositoryReader secretsRepositoryReader, + final SecretsRepositoryWriter secretsRepositoryWriter, final JsonSchemaValidator integrationSchemaValidation, final ConnectionsHandler connectionsHandler, final Supplier<UUID> uuidGenerator, final JsonSecretsProcessor secretsProcessor, final ConfigurationUpdate configurationUpdate) { this.configRepository = configRepository; + this.secretsRepositoryReader = secretsRepositoryReader; + this.secretsRepositoryWriter = secretsRepositoryWriter; this.validator = integrationSchemaValidation; this.connectionsHandler = connectionsHandler; this.uuidGenerator = uuidGenerator; @@ -60,15 +68,19 @@ public class DestinationHandler { public DestinationHandler(final ConfigRepository configRepository, + final SecretsRepositoryReader secretsRepositoryReader, + final SecretsRepositoryWriter secretsRepositoryWriter, final JsonSchemaValidator integrationSchemaValidation, final ConnectionsHandler connectionsHandler) { this( configRepository, + secretsRepositoryReader, + secretsRepositoryWriter, integrationSchemaValidation, connectionsHandler, UUID::randomUUID, new JsonSecretsProcessor(), - new ConfigurationUpdate(configRepository)); + new ConfigurationUpdate(configRepository, secretsRepositoryReader)); } public DestinationRead createDestination(final DestinationCreate destinationCreate) @@ -112,7 +124,7 @@ public void deleteDestination(final DestinationRead destination) connectionsHandler.deleteConnection(connectionRead.getConnectionId()); } - final var fullConfig = configRepository.getDestinationConnectionWithSecrets(destination.getDestinationId()).getConfiguration(); + final var fullConfig = secretsRepositoryReader.getDestinationConnectionWithSecrets(destination.getDestinationId()).getConfiguration(); // persist persistDestinationConnection( @@ -251,7 +263,7 @@ private void persistDestinationConnection(final String name, .withDestinationId(destinationId) .withConfiguration(configurationJson) .withTombstone(tombstone); - configRepository.writeDestinationConnection(destinationConnection, getSpec(destinationDefinitionId)); + secretsRepositoryWriter.writeDestinationConnection(destinationConnection, getSpec(destinationDefinitionId)); } private DestinationRead buildDestinationRead(final UUID destinationId) throws JsonValidationException, IOException, ConfigNotFoundException { @@ -275,7 +287,7 @@ private DestinationRead buildDestinationReadWithSecrets(final UUID destinationId throws ConfigNotFoundException, IOException, JsonValidationException { // remove secrets from config before returning the read - final DestinationConnection dci = Jsons.clone(configRepository.getDestinationConnectionWithSecrets(destinationId)); + final DestinationConnection dci = Jsons.clone(secretsRepositoryReader.getDestinationConnectionWithSecrets(destinationId)); final StandardDestinationDefinition standardDestinationDefinition = configRepository.getStandardDestinationDefinition(dci.getDestinationDefinitionId()); return toDestinationRead(dci, standardDestinationDefinition); diff --git a/airbyte-server/src/main/java/io/airbyte/server/handlers/SchedulerHandler.java b/airbyte-server/src/main/java/io/airbyte/server/handlers/SchedulerHandler.java index fda652ec0721..1ca24d3022e8 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/handlers/SchedulerHandler.java +++ b/airbyte-server/src/main/java/io/airbyte/server/handlers/SchedulerHandler.java @@ -43,6 +43,8 @@ import io.airbyte.config.helpers.LogConfigs; import io.airbyte.config.persistence.ConfigNotFoundException; import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.config.persistence.SecretsRepositoryReader; +import io.airbyte.config.persistence.SecretsRepositoryWriter; import io.airbyte.protocol.models.AirbyteCatalog; import io.airbyte.protocol.models.ConnectorSpecification; import io.airbyte.scheduler.client.EventRunner; @@ -77,6 +79,7 @@ public class
SchedulerHandler { private static final Logger LOGGER = LoggerFactory.getLogger(SchedulerHandler.class); private final ConfigRepository configRepository; + private final SecretsRepositoryWriter secretsRepositoryWriter; private final SchedulerJobClient schedulerJobClient; private final SynchronousSchedulerClient synchronousSchedulerClient; private final ConfigurationUpdate configurationUpdate; @@ -92,6 +95,8 @@ public class SchedulerHandler { private final FeatureFlags featureFlags; public SchedulerHandler(final ConfigRepository configRepository, + final SecretsRepositoryReader secretsRepositoryReader, + final SecretsRepositoryWriter secretsRepositoryWriter, final SchedulerJobClient schedulerJobClient, final SynchronousSchedulerClient synchronousSchedulerClient, final JobPersistence jobPersistence, @@ -104,9 +109,11 @@ public SchedulerHandler(final ConfigRepository configRepository, final FeatureFlags featureFlags) { this( configRepository, + secretsRepositoryWriter, + secretsRepositoryReader, schedulerJobClient, synchronousSchedulerClient, - new ConfigurationUpdate(configRepository), + new ConfigurationUpdate(configRepository, secretsRepositoryReader), new JsonSchemaValidator(), jobPersistence, jobNotifier, @@ -121,6 +128,8 @@ public SchedulerHandler(final ConfigRepository configRepository, @VisibleForTesting SchedulerHandler(final ConfigRepository configRepository, + final SecretsRepositoryWriter secretsRepositoryWriter, + final SecretsRepositoryReader secretsRepositoryReader, final SchedulerJobClient schedulerJobClient, final SynchronousSchedulerClient synchronousSchedulerClient, final ConfigurationUpdate configurationUpdate, @@ -135,6 +144,7 @@ public SchedulerHandler(final ConfigRepository configRepository, final JobConverter jobConverter) { this.configRepository = configRepository; + this.secretsRepositoryWriter = secretsRepositoryWriter; this.schedulerJobClient = schedulerJobClient; this.synchronousSchedulerClient = synchronousSchedulerClient; this.configurationUpdate = configurationUpdate; @@ -162,7 +172,7 @@ public CheckConnectionRead checkSourceConnectionFromSourceId(final SourceIdReque public CheckConnectionRead checkSourceConnectionFromSourceCreate(final SourceCoreConfig sourceConfig) throws ConfigNotFoundException, IOException, JsonValidationException { final StandardSourceDefinition sourceDef = configRepository.getStandardSourceDefinition(sourceConfig.getSourceDefinitionId()); - final var partialConfig = configRepository.statefulSplitEphemeralSecrets( + final var partialConfig = secretsRepositoryWriter.statefulSplitEphemeralSecrets( sourceConfig.getConnectionConfiguration(), sourceDef.getSpec()); @@ -202,7 +212,7 @@ public CheckConnectionRead checkDestinationConnectionFromDestinationId(final Des public CheckConnectionRead checkDestinationConnectionFromDestinationCreate(final DestinationCoreConfig destinationConfig) throws ConfigNotFoundException, IOException, JsonValidationException { final StandardDestinationDefinition destDef = configRepository.getStandardDestinationDefinition(destinationConfig.getDestinationDefinitionId()); - final var partialConfig = configRepository.statefulSplitEphemeralSecrets( + final var partialConfig = secretsRepositoryWriter.statefulSplitEphemeralSecrets( destinationConfig.getConnectionConfiguration(), destDef.getSpec()); @@ -243,7 +253,7 @@ public SourceDiscoverSchemaRead discoverSchemaForSourceFromSourceId(final Source public SourceDiscoverSchemaRead discoverSchemaForSourceFromSourceCreate(final SourceCoreConfig sourceCreate) throws ConfigNotFoundException, IOException, JsonValidationException { final StandardSourceDefinition sourceDef = configRepository.getStandardSourceDefinition(sourceCreate.getSourceDefinitionId()); - final var partialConfig = configRepository.statefulSplitEphemeralSecrets( + final var partialConfig = secretsRepositoryWriter.statefulSplitEphemeralSecrets( sourceCreate.getConnectionConfiguration(), sourceDef.getSpec());
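The three SchedulerHandler hunks above make the same substitution: configs submitted for check and discover may never be persisted, so their secrets are split into the ephemeral store rather than the long-lived one. Condensed from the checkSourceConnectionFromSourceCreate hunk (a sketch, with surrounding method context elided):

    final StandardSourceDefinition sourceDef =
        configRepository.getStandardSourceDefinition(sourceConfig.getSourceDefinitionId());
    // persists any airbyte_secret values to the ephemeral store and returns a
    // config that carries only coordinate pointers
    final JsonNode partialConfig = secretsRepositoryWriter.statefulSplitEphemeralSecrets(
        sourceConfig.getConnectionConfiguration(),
        sourceDef.getSpec());
    // partialConfig, not the raw config, is what travels with the job request;
    // testStatefulSplitEphemeralSecrets above asserts that a hydrator backed by
    // the same ephemeral persistence restores the original configuration.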
diff --git a/airbyte-server/src/main/java/io/airbyte/server/handlers/SourceHandler.java b/airbyte-server/src/main/java/io/airbyte/server/handlers/SourceHandler.java index 2bf2bd1b6cc8..d5f56b25c957 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/handlers/SourceHandler.java +++ b/airbyte-server/src/main/java/io/airbyte/server/handlers/SourceHandler.java @@ -20,6 +20,8 @@ import io.airbyte.config.StandardSourceDefinition; import io.airbyte.config.persistence.ConfigNotFoundException; import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.config.persistence.SecretsRepositoryReader; +import io.airbyte.config.persistence.SecretsRepositoryWriter; import io.airbyte.config.persistence.split_secrets.JsonSecretsProcessor; import io.airbyte.protocol.models.ConnectorSpecification; import io.airbyte.server.converters.ConfigurationUpdate; @@ -34,18 +36,24 @@ public class SourceHandler { private final Supplier<UUID> uuidGenerator; private final ConfigRepository configRepository; + private final SecretsRepositoryReader secretsRepositoryReader; + private final SecretsRepositoryWriter secretsRepositoryWriter; private final JsonSchemaValidator validator; private final ConnectionsHandler connectionsHandler; private final ConfigurationUpdate configurationUpdate; private final JsonSecretsProcessor secretsProcessor; SourceHandler(final ConfigRepository configRepository, + final SecretsRepositoryReader secretsRepositoryReader, + final SecretsRepositoryWriter secretsRepositoryWriter, final JsonSchemaValidator integrationSchemaValidation, final ConnectionsHandler connectionsHandler, final Supplier<UUID> uuidGenerator, final JsonSecretsProcessor secretsProcessor, final ConfigurationUpdate configurationUpdate) { this.configRepository = configRepository; + this.secretsRepositoryReader = secretsRepositoryReader; + this.secretsRepositoryWriter = secretsRepositoryWriter; this.validator = integrationSchemaValidation; this.connectionsHandler = connectionsHandler; this.uuidGenerator = uuidGenerator; @@ -54,15 +62,19 @@ public class SourceHandler { public SourceHandler(final ConfigRepository configRepository, + final SecretsRepositoryReader secretsRepositoryReader, + final SecretsRepositoryWriter secretsRepositoryWriter, final JsonSchemaValidator integrationSchemaValidation, final ConnectionsHandler connectionsHandler) { this( configRepository, + secretsRepositoryReader, + secretsRepositoryWriter, integrationSchemaValidation, connectionsHandler, UUID::randomUUID, new JsonSecretsProcessor(), - new ConfigurationUpdate(configRepository)); + new ConfigurationUpdate(configRepository, secretsRepositoryReader)); } public SourceRead createSource(final SourceCreate sourceCreate) @@ -211,7 +223,7 @@ public void deleteSource(final SourceRead source) } final ConnectorSpecification spec = getSpecFromSourceId(source.getSourceId()); - final var fullConfig = configRepository.getSourceConnectionWithSecrets(source.getSourceId()).getConfiguration(); + final var fullConfig = secretsRepositoryReader.getSourceConnectionWithSecrets(source.getSourceId()).getConfiguration(); // persist
persistSourceConnection( @@ -247,7 +259,7 @@ private SourceRead buildSourceRead(final UUID sourceId, final ConnectorSpecifica private SourceRead buildSourceReadWithSecrets(final UUID sourceId) throws ConfigNotFoundException, IOException, JsonValidationException { // read configuration from db - final SourceConnection sourceConnection = configRepository.getSourceConnectionWithSecrets(sourceId); + final SourceConnection sourceConnection = secretsRepositoryReader.getSourceConnectionWithSecrets(sourceId); final StandardSourceDefinition standardSourceDefinition = configRepository .getStandardSourceDefinition(sourceConnection.getSourceDefinitionId()); return toSourceRead(sourceConnection, standardSourceDefinition); @@ -286,7 +298,7 @@ private void persistSourceConnection(final String name, .withTombstone(tombstone) .withConfiguration(configurationJson); - configRepository.writeSourceConnection(sourceConnection, spec); + secretsRepositoryWriter.writeSourceConnection(sourceConnection, spec); } protected static SourceRead toSourceRead(final SourceConnection sourceConnection, diff --git a/airbyte-server/src/test/java/io/airbyte/server/ConfigDumpImporterTest.java b/airbyte-server/src/test/java/io/airbyte/server/ConfigDumpImporterTest.java index 171ce2d01ab8..27b4b962865c 100644 --- a/airbyte-server/src/test/java/io/airbyte/server/ConfigDumpImporterTest.java +++ b/airbyte-server/src/test/java/io/airbyte/server/ConfigDumpImporterTest.java @@ -25,6 +25,8 @@ import io.airbyte.config.StandardSyncOperation.OperatorType; import io.airbyte.config.persistence.ConfigNotFoundException; import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.config.persistence.SecretsRepositoryReader; +import io.airbyte.config.persistence.SecretsRepositoryWriter; import io.airbyte.protocol.models.ConnectorSpecification; import io.airbyte.scheduler.persistence.DefaultJobPersistence; import io.airbyte.scheduler.persistence.JobPersistence; @@ -46,6 +48,8 @@ class ConfigDumpImporterTest { public static final AirbyteVersion TEST_VERSION = new AirbyteVersion("0.0.1-test-version"); private ConfigRepository configRepository; + private SecretsRepositoryReader secretsRepositoryReader; + private SecretsRepositoryWriter secretsRepositoryWriter; private ConfigDumpImporter configDumpImporter; private ConfigDumpExporter configDumpExporter; @@ -59,14 +63,22 @@ class ConfigDumpImporterTest { @BeforeEach public void setup() throws IOException, JsonValidationException, ConfigNotFoundException { configRepository = mock(ConfigRepository.class); + secretsRepositoryReader = mock(SecretsRepositoryReader.class); + secretsRepositoryWriter = mock(SecretsRepositoryWriter.class); final JobPersistence jobPersistence = mock(JobPersistence.class); final WorkspaceHelper workspaceHelper = mock(WorkspaceHelper.class); emptyConnectorSpec = new ConnectorSpecification().withConnectionSpecification(Jsons.emptyObject()); - configDumpImporter = - new ConfigDumpImporter(configRepository, jobPersistence, workspaceHelper, mock(JsonSchemaValidator.class), true); - configDumpExporter = new ConfigDumpExporter(configRepository, jobPersistence, workspaceHelper); + configDumpImporter = new ConfigDumpImporter( + configRepository, + secretsRepositoryWriter, + jobPersistence, + workspaceHelper, + mock(JsonSchemaValidator.class), + true); + + configDumpExporter = new ConfigDumpExporter(configRepository, secretsRepositoryReader, jobPersistence, workspaceHelper); workspaceId = UUID.randomUUID(); 
when(jobPersistence.getVersion()).thenReturn(Optional.of(TEST_VERSION.serialize())); @@ -138,12 +150,12 @@ public void setup() throws IOException, JsonValidationException, ConfigNotFoundE @Test public void testImportIntoWorkspaceWithConflicts() throws JsonValidationException, ConfigNotFoundException, IOException { - when(configRepository.listSourceConnectionWithSecrets()) + when(secretsRepositoryReader.listSourceConnectionWithSecrets()) .thenReturn(List.of(sourceConnection, new SourceConnection() .withSourceId(UUID.randomUUID()) .withWorkspaceId(UUID.randomUUID()))); - when(configRepository.listDestinationConnectionWithSecrets()) + when(secretsRepositoryReader.listDestinationConnectionWithSecrets()) .thenReturn(List.of(destinationConnection, new DestinationConnection() .withDestinationId(UUID.randomUUID()) @@ -160,11 +172,11 @@ public void testImportIntoWorkspaceWithConflicts() throws JsonValidationExceptio final UUID newWorkspaceId = UUID.randomUUID(); configDumpImporter.importIntoWorkspace(TEST_VERSION, newWorkspaceId, archive); - verify(configRepository) + verify(secretsRepositoryWriter) .writeSourceConnection( Jsons.clone(sourceConnection).withWorkspaceId(newWorkspaceId).withSourceId(not(eq(sourceConnection.getSourceId()))), eq(emptyConnectorSpec)); - verify(configRepository).writeDestinationConnection( + verify(secretsRepositoryWriter).writeDestinationConnection( Jsons.clone(destinationConnection).withWorkspaceId(newWorkspaceId).withDestinationId(not(eq(destinationConnection.getDestinationId()))), eq(emptyConnectorSpec)); verify(configRepository) @@ -174,7 +186,7 @@ public void testImportIntoWorkspaceWithConflicts() throws JsonValidationExceptio @Test public void testImportIntoWorkspaceWithoutConflicts() throws JsonValidationException, ConfigNotFoundException, IOException { - when(configRepository.listSourceConnectionWithSecrets()) + when(secretsRepositoryReader.listSourceConnectionWithSecrets()) // First called for export .thenReturn(List.of(sourceConnection, new SourceConnection() @@ -184,7 +196,7 @@ public void testImportIntoWorkspaceWithoutConflicts() throws JsonValidationExcep .thenReturn(List.of(new SourceConnection() .withSourceId(UUID.randomUUID()) .withWorkspaceId(UUID.randomUUID()))); - when(configRepository.listDestinationConnectionWithSecrets()) + when(secretsRepositoryReader.listDestinationConnectionWithSecrets()) // First called for export .thenReturn(List.of(destinationConnection, new DestinationConnection() @@ -214,10 +226,10 @@ public void testImportIntoWorkspaceWithoutConflicts() throws JsonValidationExcep final UUID newWorkspaceId = UUID.randomUUID(); configDumpImporter.importIntoWorkspace(TEST_VERSION, newWorkspaceId, archive); - verify(configRepository).writeSourceConnection( - Jsons.clone(sourceConnection).withWorkspaceId(newWorkspaceId), - emptyConnectorSpec); - verify(configRepository).writeDestinationConnection(Jsons.clone(destinationConnection).withWorkspaceId(newWorkspaceId), emptyConnectorSpec); + verify(secretsRepositoryWriter) + .writeSourceConnection(Jsons.clone(sourceConnection).withWorkspaceId(newWorkspaceId), emptyConnectorSpec); + verify(secretsRepositoryWriter) + .writeDestinationConnection(Jsons.clone(destinationConnection).withWorkspaceId(newWorkspaceId), emptyConnectorSpec); verify(configRepository).writeStandardSyncOperation(Jsons.clone(operation).withWorkspaceId(newWorkspaceId)); verify(configRepository).writeStandardSync(connection); } diff --git a/airbyte-server/src/test/java/io/airbyte/server/apis/ConfigurationApiTest.java 
b/airbyte-server/src/test/java/io/airbyte/server/apis/ConfigurationApiTest.java index 6875d87f38d0..051db7701798 100644 --- a/airbyte-server/src/test/java/io/airbyte/server/apis/ConfigurationApiTest.java +++ b/airbyte-server/src/test/java/io/airbyte/server/apis/ConfigurationApiTest.java @@ -17,6 +17,8 @@ import io.airbyte.config.helpers.LogConfigs; import io.airbyte.config.persistence.ConfigPersistence; import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.config.persistence.SecretsRepositoryReader; +import io.airbyte.config.persistence.SecretsRepositoryWriter; import io.airbyte.db.Database; import io.airbyte.scheduler.client.EventRunner; import io.airbyte.scheduler.client.SchedulerJobClient; @@ -40,6 +42,8 @@ void testImportDefinitions() { mock(ConfigRepository.class), mock(JobPersistence.class), mock(ConfigPersistence.class), + mock(SecretsRepositoryReader.class), + mock(SecretsRepositoryWriter.class), mock(SchedulerJobClient.class), mock(SynchronousSchedulerClient.class), mock(FileTtlManager.class), diff --git a/airbyte-server/src/test/java/io/airbyte/server/converters/ConfigurationUpdateTest.java b/airbyte-server/src/test/java/io/airbyte/server/converters/ConfigurationUpdateTest.java index 664702d79d8a..a03b62d1e74d 100644 --- a/airbyte-server/src/test/java/io/airbyte/server/converters/ConfigurationUpdateTest.java +++ b/airbyte-server/src/test/java/io/airbyte/server/converters/ConfigurationUpdateTest.java @@ -17,6 +17,7 @@ import io.airbyte.config.StandardSourceDefinition; import io.airbyte.config.persistence.ConfigNotFoundException; import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.config.persistence.SecretsRepositoryReader; import io.airbyte.config.persistence.split_secrets.JsonSecretsProcessor; import io.airbyte.protocol.models.CatalogHelpers; import io.airbyte.protocol.models.ConnectorSpecification; @@ -73,20 +74,22 @@ class ConfigurationUpdateTest { .withConfiguration(NEW_CONFIGURATION); private ConfigRepository configRepository; + private SecretsRepositoryReader secretsRepositoryReader; private JsonSecretsProcessor secretsProcessor; private ConfigurationUpdate configurationUpdate; @BeforeEach void setup() { configRepository = mock(ConfigRepository.class); + secretsRepositoryReader = mock(SecretsRepositoryReader.class); secretsProcessor = mock(JsonSecretsProcessor.class); - configurationUpdate = new ConfigurationUpdate(configRepository, secretsProcessor); + configurationUpdate = new ConfigurationUpdate(configRepository, secretsRepositoryReader, secretsProcessor); } @Test void testSourceUpdate() throws JsonValidationException, IOException, ConfigNotFoundException { - when(configRepository.getSourceConnectionWithSecrets(UUID1)).thenReturn(ORIGINAL_SOURCE_CONNECTION); + when(secretsRepositoryReader.getSourceConnectionWithSecrets(UUID1)).thenReturn(ORIGINAL_SOURCE_CONNECTION); when(configRepository.getStandardSourceDefinition(UUID2)).thenReturn(SOURCE_DEFINITION); when(secretsProcessor.copySecrets(ORIGINAL_CONFIGURATION, NEW_CONFIGURATION, SPEC)).thenReturn(NEW_CONFIGURATION); @@ -97,7 +100,7 @@ void testSourceUpdate() throws JsonValidationException, IOException, ConfigNotFo @Test void testDestinationUpdate() throws JsonValidationException, IOException, ConfigNotFoundException { - when(configRepository.getDestinationConnectionWithSecrets(UUID1)).thenReturn(ORIGINAL_DESTINATION_CONNECTION); + when(secretsRepositoryReader.getDestinationConnectionWithSecrets(UUID1)).thenReturn(ORIGINAL_DESTINATION_CONNECTION); 
when(configRepository.getStandardDestinationDefinition(UUID2)).thenReturn(DESTINATION_DEFINITION); when(secretsProcessor.copySecrets(ORIGINAL_CONFIGURATION, NEW_CONFIGURATION, SPEC)).thenReturn(NEW_CONFIGURATION); diff --git a/airbyte-server/src/test/java/io/airbyte/server/handlers/ArchiveHandlerTest.java b/airbyte-server/src/test/java/io/airbyte/server/handlers/ArchiveHandlerTest.java index 7a7222d71412..543ce445452f 100644 --- a/airbyte-server/src/test/java/io/airbyte/server/handlers/ArchiveHandlerTest.java +++ b/airbyte-server/src/test/java/io/airbyte/server/handlers/ArchiveHandlerTest.java @@ -33,6 +33,8 @@ import io.airbyte.config.persistence.ConfigPersistence; import io.airbyte.config.persistence.ConfigRepository; import io.airbyte.config.persistence.DatabaseConfigPersistence; +import io.airbyte.config.persistence.SecretsRepositoryReader; +import io.airbyte.config.persistence.SecretsRepositoryWriter; import io.airbyte.config.persistence.split_secrets.NoOpSecretsHydrator; import io.airbyte.db.Database; import io.airbyte.db.instance.test.TestDatabaseProviders; @@ -74,6 +76,8 @@ public class ArchiveHandlerTest { private Database jobDatabase; private Database configDatabase; private JobPersistence jobPersistence; + private SecretsRepositoryReader secretsRepositoryReader; + private SecretsRepositoryWriter secretsRepositoryWriter; private DatabaseConfigPersistence configPersistence; private ConfigPersistence seedPersistence; @@ -114,13 +118,17 @@ public void setup() throws Exception { configPersistence = new DatabaseConfigPersistence(jobDatabase); configPersistence.replaceAllConfigs(Collections.emptyMap(), false); configPersistence.loadData(seedPersistence); - configRepository = new ConfigRepository(configPersistence, new NoOpSecretsHydrator(), Optional.empty(), Optional.empty(), configDatabase); + configRepository = new ConfigRepository(configPersistence, configDatabase); + secretsRepositoryReader = new SecretsRepositoryReader(configRepository, new NoOpSecretsHydrator()); + secretsRepositoryWriter = new SecretsRepositoryWriter(configRepository, Optional.empty(), Optional.empty()); jobPersistence.setVersion(VERSION.serialize()); archiveHandler = new ArchiveHandler( VERSION, configRepository, + secretsRepositoryReader, + secretsRepositoryWriter, jobPersistence, YamlSeedConfigPersistence.getDefault(), new WorkspaceHelper(configRepository, jobPersistence), @@ -139,21 +147,21 @@ void tearDown() throws Exception { */ @Test void testFullExportImportRoundTrip() throws Exception { - assertSameConfigDump(seedPersistence.dumpConfigs(), configRepository.dumpConfigs()); + assertSameConfigDump(seedPersistence.dumpConfigs(), secretsRepositoryReader.dumpConfigsWithSecrets()); // Export the configs. File archive = archiveHandler.exportData(); // After deleting the configs, the dump becomes empty. configPersistence.replaceAllConfigs(Collections.emptyMap(), false); - assertSameConfigDump(Collections.emptyMap(), configRepository.dumpConfigs()); + assertSameConfigDump(Collections.emptyMap(), secretsRepositoryReader.dumpConfigsWithSecrets()); // After importing the configs, the dump is restored. 
assertTrue(archive.exists()); final ImportRead importResult = archiveHandler.importData(archive); assertFalse(archive.exists()); assertEquals(StatusEnum.SUCCEEDED, importResult.getStatus()); - assertSameConfigDump(seedPersistence.dumpConfigs(), configRepository.dumpConfigs()); + assertSameConfigDump(seedPersistence.dumpConfigs(), secretsRepositoryReader.dumpConfigsWithSecrets()); // When a connector definition is in use, it will not be updated. final UUID sourceS3DefinitionId = UUID.fromString("69589781-7828-43c5-9f63-8925b1c1ccc2"); @@ -213,12 +221,12 @@ @Test void testLightWeightExportImportRoundTrip() throws Exception { - assertSameConfigDump(seedPersistence.dumpConfigs(), configRepository.dumpConfigs()); + assertSameConfigDump(seedPersistence.dumpConfigs(), secretsRepositoryReader.dumpConfigsWithSecrets()); // Insert some workspace data final UUID workspaceId = UUID.randomUUID(); setupTestData(workspaceId); - final Map<String, Stream<JsonNode>> workspaceDump = configRepository.dumpConfigs(); + final Map<String, Stream<JsonNode>> workspaceDump = secretsRepositoryReader.dumpConfigsWithSecrets(); // Insert some other workspace data setupTestData(UUID.randomUUID()); @@ -230,11 +238,11 @@ // After deleting all the configs, the dump becomes empty. configPersistence.replaceAllConfigs(Collections.emptyMap(), false); - assertSameConfigDump(Collections.emptyMap(), configRepository.dumpConfigs()); + assertSameConfigDump(Collections.emptyMap(), secretsRepositoryReader.dumpConfigsWithSecrets()); // Restore default seed data configPersistence.loadData(seedPersistence); - assertSameConfigDump(seedPersistence.dumpConfigs(), configRepository.dumpConfigs()); + assertSameConfigDump(seedPersistence.dumpConfigs(), secretsRepositoryReader.dumpConfigsWithSecrets()); setupWorkspaceData(workspaceId); @@ -247,11 +255,11 @@ .resourceId(uploadRead.getResourceId()) .workspaceId(workspaceId)); assertEquals(StatusEnum.SUCCEEDED, importResult.getStatus()); - assertSameConfigDump(workspaceDump, configRepository.dumpConfigs()); + assertSameConfigDump(workspaceDump, secretsRepositoryReader.dumpConfigsWithSecrets()); // we modify first workspace setupTestData(workspaceId); - final Map<String, Stream<JsonNode>> secondWorkspaceDump = configRepository.dumpConfigs(); + final Map<String, Stream<JsonNode>> secondWorkspaceDump = secretsRepositoryReader.dumpConfigsWithSecrets(); final UUID secondWorkspaceId = UUID.randomUUID(); setupWorkspaceData(secondWorkspaceId); @@ -264,7 +272,7 @@ .workspaceId(secondWorkspaceId)); assertEquals(StatusEnum.SUCCEEDED, secondImportResult.getStatus()); - final UUID secondSourceId = configRepository.listSourceConnectionWithSecrets() + final UUID secondSourceId = secretsRepositoryReader.listSourceConnectionWithSecrets() .stream() .filter(sourceConnection -> secondWorkspaceId.equals(sourceConnection.getWorkspaceId())) .map(SourceConnection::getSourceId) @@ -293,7 +301,7 @@ when(emptyConnectorSpec.getConnectionSpecification()).thenReturn(Jsons.emptyObject()); configRepository.writeStandardSourceDefinition(standardSourceDefinition); - configRepository.writeSourceConnection(sourceConnection, emptyConnectorSpec); + secretsRepositoryWriter.writeSourceConnection(sourceConnection, emptyConnectorSpec); // check that first workspace is unchanged even though modifications were made to second workspace // (that contains similar connections from importing the same archive) @@ -307,7 +315,7 @@ .resourceId(uploadRead.getResourceId()) .workspaceId(workspaceId)); assertEquals(StatusEnum.SUCCEEDED, importResult.getStatus()); - assertSameConfigDump(secondWorkspaceDump, configRepository.dumpConfigs()); + assertSameConfigDump(secondWorkspaceDump, secretsRepositoryReader.dumpConfigsWithSecrets()); } private void setupWorkspaceData(final UUID workspaceId) throws IOException, JsonValidationException {
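Every substitution in the test above preserves one invariant: an export, wipe, import cycle must reproduce the hydrated dump exactly, which only holds if the dump used for comparison resolves coordinate pointers back to real secret values. The invariant condensed from testFullExportImportRoundTrip (a sketch; status assertions and re-seeding elided):

    final Map<String, Stream<JsonNode>> before = secretsRepositoryReader.dumpConfigsWithSecrets();
    final File archive = archiveHandler.exportData();                   // export
    configPersistence.replaceAllConfigs(Collections.emptyMap(), false); // wipe
    archiveHandler.importData(archive);                                 // import
    assertSameConfigDump(before, secretsRepositoryReader.dumpConfigsWithSecrets());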
(that contains similar connections from importing the same archive) @@ -307,7 +315,7 @@ void testLightWeightExportImportRoundTrip() throws Exception { .resourceId(uploadRead.getResourceId()) .workspaceId(workspaceId)); assertEquals(StatusEnum.SUCCEEDED, importResult.getStatus()); - assertSameConfigDump(secondWorkspaceDump, configRepository.dumpConfigs()); + assertSameConfigDump(secondWorkspaceDump, secretsRepositoryReader.dumpConfigsWithSecrets()); } private void setupWorkspaceData(final UUID workspaceId) throws IOException, JsonValidationException { diff --git a/airbyte-server/src/test/java/io/airbyte/server/handlers/DestinationHandlerTest.java b/airbyte-server/src/test/java/io/airbyte/server/handlers/DestinationHandlerTest.java index ce5ee807f08b..ca044e6bc5cb 100644 --- a/airbyte-server/src/test/java/io/airbyte/server/handlers/DestinationHandlerTest.java +++ b/airbyte-server/src/test/java/io/airbyte/server/handlers/DestinationHandlerTest.java @@ -27,6 +27,8 @@ import io.airbyte.config.StandardDestinationDefinition; import io.airbyte.config.persistence.ConfigNotFoundException; import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.config.persistence.SecretsRepositoryReader; +import io.airbyte.config.persistence.SecretsRepositoryWriter; import io.airbyte.config.persistence.split_secrets.JsonSecretsProcessor; import io.airbyte.protocol.models.ConnectorSpecification; import io.airbyte.server.converters.ConfigurationUpdate; @@ -43,6 +45,8 @@ class DestinationHandlerTest { private ConfigRepository configRepository; + private SecretsRepositoryReader secretsRepositoryReader; + private SecretsRepositoryWriter secretsRepositoryWriter; private StandardDestinationDefinition standardDestinationDefinition; private DestinationDefinitionSpecificationRead destinationDefinitionSpecificationRead; private DestinationConnection destinationConnection; @@ -59,6 +63,8 @@ class DestinationHandlerTest { @BeforeEach void setUp() throws IOException { configRepository = mock(ConfigRepository.class); + secretsRepositoryReader = mock(SecretsRepositoryReader.class); + secretsRepositoryWriter = mock(SecretsRepositoryWriter.class); validator = mock(JsonSchemaValidator.class); uuidGenerator = mock(Supplier.class); connectionsHandler = mock(ConnectionsHandler.class); @@ -91,7 +97,14 @@ void setUp() throws IOException { destinationConnection = DestinationHelpers.generateDestination(standardDestinationDefinition.getDestinationDefinitionId()); destinationHandler = - new DestinationHandler(configRepository, validator, connectionsHandler, uuidGenerator, secretsProcessor, configurationUpdate); + new DestinationHandler(configRepository, + secretsRepositoryReader, + secretsRepositoryWriter, + validator, + connectionsHandler, + uuidGenerator, + secretsProcessor, + configurationUpdate); } @Test @@ -125,7 +138,7 @@ void testCreateDestination() throws JsonValidationException, ConfigNotFoundExcep assertEquals(expectedDestinationRead, actualDestinationRead); verify(validator).ensure(destinationDefinitionSpecificationRead.getConnectionSpecification(), destinationConnection.getConfiguration()); - verify(configRepository).writeDestinationConnection(destinationConnection, connectorSpecification); + verify(secretsRepositoryWriter).writeDestinationConnection(destinationConnection, connectorSpecification); verify(secretsProcessor) .maskSecrets(destinationConnection.getConfiguration(), destinationDefinitionSpecificationRead.getConnectionSpecification()); } @@ -168,7 +181,7 @@ void testUpdateDestination() throws 
JsonValidationException, ConfigNotFoundExcep assertEquals(expectedDestinationRead, actualDestinationRead); verify(secretsProcessor).maskSecrets(newConfiguration, destinationDefinitionSpecificationRead.getConnectionSpecification()); - verify(configRepository).writeDestinationConnection(expectedDestinationConnection, connectorSpecification); + verify(secretsRepositoryWriter).writeDestinationConnection(expectedDestinationConnection, connectorSpecification); verify(validator).ensure(destinationDefinitionSpecificationRead.getConnectionSpecification(), newConfiguration); } @@ -272,7 +285,7 @@ void testCloneDestination() throws JsonValidationException, ConfigNotFoundExcept final DestinationIdRequestBody destinationIdRequestBody = new DestinationIdRequestBody().destinationId(destinationRead.getDestinationId()); when(uuidGenerator.get()).thenReturn(clonedConnection.getDestinationId()); - when(configRepository.getDestinationConnectionWithSecrets(destinationConnection.getDestinationId())).thenReturn(destinationConnection); + when(secretsRepositoryReader.getDestinationConnectionWithSecrets(destinationConnection.getDestinationId())).thenReturn(destinationConnection); when(configRepository.getDestinationConnection(clonedConnection.getDestinationId())).thenReturn(clonedConnection); when(configRepository.getStandardDestinationDefinition(destinationDefinitionSpecificationRead.getDestinationDefinitionId())) diff --git a/airbyte-server/src/test/java/io/airbyte/server/handlers/SchedulerHandlerTest.java b/airbyte-server/src/test/java/io/airbyte/server/handlers/SchedulerHandlerTest.java index 4e63cf4f6c5b..b039e5cf645b 100644 --- a/airbyte-server/src/test/java/io/airbyte/server/handlers/SchedulerHandlerTest.java +++ b/airbyte-server/src/test/java/io/airbyte/server/handlers/SchedulerHandlerTest.java @@ -58,6 +58,8 @@ import io.airbyte.config.helpers.LogConfigs; import io.airbyte.config.persistence.ConfigNotFoundException; import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.config.persistence.SecretsRepositoryReader; +import io.airbyte.config.persistence.SecretsRepositoryWriter; import io.airbyte.protocol.models.AirbyteCatalog; import io.airbyte.protocol.models.CatalogHelpers; import io.airbyte.protocol.models.ConnectorSpecification; @@ -127,6 +129,8 @@ class SchedulerHandlerTest { private SchedulerHandler schedulerHandler; private ConfigRepository configRepository; + private SecretsRepositoryReader secretsRepositoryReader; + private SecretsRepositoryWriter secretsRepositoryWriter; private Job completedJob; private SchedulerJobClient schedulerJobClient; private SynchronousSchedulerClient synchronousSchedulerClient; @@ -151,6 +155,8 @@ void setup() { schedulerJobClient = spy(SchedulerJobClient.class); synchronousSchedulerClient = mock(SynchronousSchedulerClient.class); configRepository = mock(ConfigRepository.class); + secretsRepositoryReader = mock(SecretsRepositoryReader.class); + secretsRepositoryWriter = mock(SecretsRepositoryWriter.class); jobPersistence = mock(JobPersistence.class); final JobNotifier jobNotifier = mock(JobNotifier.class); eventRunner = mock(EventRunner.class); @@ -162,6 +168,8 @@ void setup() { schedulerHandler = new SchedulerHandler( configRepository, + secretsRepositoryWriter, + secretsRepositoryReader, schedulerJobClient, synchronousSchedulerClient, configurationUpdate, @@ -212,7 +220,7 @@ void testCheckSourceConnectionFromSourceCreate() throws JsonValidationException, .withDockerRepository(SOURCE_DOCKER_REPO) .withDockerImageTag(SOURCE_DOCKER_TAG) 
.withSourceDefinitionId(source.getSourceDefinitionId())); - when(configRepository.statefulSplitEphemeralSecrets( + when(secretsRepositoryWriter.statefulSplitEphemeralSecrets( eq(source.getConfiguration()), any())).thenReturn(source.getConfiguration()); when(synchronousSchedulerClient.createSourceCheckConnectionJob(source, SOURCE_DOCKER_IMAGE)) @@ -244,7 +252,7 @@ void testCheckSourceConnectionFromUpdate() throws IOException, JsonValidationExc .withConfiguration(source.getConfiguration()); when(synchronousSchedulerClient.createSourceCheckConnectionJob(submittedSource, DESTINATION_DOCKER_IMAGE)) .thenReturn((SynchronousResponse) jobResponse); - when(configRepository.statefulSplitEphemeralSecrets( + when(secretsRepositoryWriter.statefulSplitEphemeralSecrets( eq(source.getConfiguration()), any())).thenReturn(source.getConfiguration()); schedulerHandler.checkSourceConnectionFromSourceIdForUpdate(sourceUpdate); @@ -331,7 +339,7 @@ void testCheckDestinationConnectionFromDestinationCreate() throws JsonValidation when(synchronousSchedulerClient.createDestinationCheckConnectionJob(destination, DESTINATION_DOCKER_IMAGE)) .thenReturn((SynchronousResponse) jobResponse); - when(configRepository.statefulSplitEphemeralSecrets( + when(secretsRepositoryWriter.statefulSplitEphemeralSecrets( eq(destination.getConfiguration()), any())).thenReturn(destination.getConfiguration()); schedulerHandler.checkDestinationConnectionFromDestinationCreate(destinationCoreConfig); @@ -361,7 +369,7 @@ void testCheckDestinationConnectionFromUpdate() throws IOException, JsonValidati .withConfiguration(destination.getConfiguration()); when(synchronousSchedulerClient.createDestinationCheckConnectionJob(submittedDestination, DESTINATION_DOCKER_IMAGE)) .thenReturn((SynchronousResponse) jobResponse); - when(configRepository.statefulSplitEphemeralSecrets( + when(secretsRepositoryWriter.statefulSplitEphemeralSecrets( eq(destination.getConfiguration()), any())).thenReturn(destination.getConfiguration()); schedulerHandler.checkDestinationConnectionFromDestinationIdForUpdate(destinationUpdate); @@ -449,7 +457,7 @@ void testDiscoverSchemaForSourceFromSourceCreate() throws JsonValidationExceptio .withSourceDefinitionId(source.getSourceDefinitionId())); when(synchronousSchedulerClient.createDiscoverSchemaJob(source, SOURCE_DOCKER_IMAGE)) .thenReturn(discoverResponse); - when(configRepository.statefulSplitEphemeralSecrets( + when(secretsRepositoryWriter.statefulSplitEphemeralSecrets( eq(source.getConfiguration()), any())).thenReturn(source.getConfiguration()); @@ -478,7 +486,7 @@ void testDiscoverSchemaForSourceFromSourceCreateFailed() throws JsonValidationEx .withSourceDefinitionId(source.getSourceDefinitionId())); when(synchronousSchedulerClient.createDiscoverSchemaJob(source, SOURCE_DOCKER_IMAGE)) .thenReturn((SynchronousResponse) jobResponse); - when(configRepository.statefulSplitEphemeralSecrets( + when(secretsRepositoryWriter.statefulSplitEphemeralSecrets( eq(source.getConfiguration()), any())).thenReturn(source.getConfiguration()); when(completedJob.getSuccessOutput()).thenReturn(Optional.empty()); @@ -622,10 +630,10 @@ void testEnumConversion() { void testNewSchedulerSync() throws JsonValidationException, ConfigNotFoundException, IOException { when(featureFlags.usesNewScheduler()).thenReturn(true); - UUID connectionId = UUID.randomUUID(); + final UUID connectionId = UUID.randomUUID(); - long jobId = 123L; - ManualSyncSubmissionResult manualSyncSubmissionResult = ManualSyncSubmissionResult + final long jobId = 123L; + final 
ManualSyncSubmissionResult manualSyncSubmissionResult = ManualSyncSubmissionResult .builder() .failingReason(Optional.empty()) .jobId(Optional.of(jobId)) diff --git a/airbyte-server/src/test/java/io/airbyte/server/handlers/SourceHandlerTest.java b/airbyte-server/src/test/java/io/airbyte/server/handlers/SourceHandlerTest.java index 3e652d51c736..d699d009b201 100644 --- a/airbyte-server/src/test/java/io/airbyte/server/handlers/SourceHandlerTest.java +++ b/airbyte-server/src/test/java/io/airbyte/server/handlers/SourceHandlerTest.java @@ -31,6 +31,8 @@ import io.airbyte.config.StandardSync; import io.airbyte.config.persistence.ConfigNotFoundException; import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.config.persistence.SecretsRepositoryReader; +import io.airbyte.config.persistence.SecretsRepositoryWriter; import io.airbyte.config.persistence.split_secrets.JsonSecretsProcessor; import io.airbyte.protocol.models.ConnectorSpecification; import io.airbyte.server.converters.ConfigurationUpdate; @@ -49,6 +51,8 @@ class SourceHandlerTest { private ConfigRepository configRepository; + private SecretsRepositoryReader secretsRepositoryReader; + private SecretsRepositoryWriter secretsRepositoryWriter; private StandardSourceDefinition standardSourceDefinition; private SourceDefinitionSpecificationRead sourceDefinitionSpecificationRead; private SourceConnection sourceConnection; @@ -65,6 +69,8 @@ class SourceHandlerTest { @BeforeEach void setUp() throws IOException { configRepository = mock(ConfigRepository.class); + secretsRepositoryReader = mock(SecretsRepositoryReader.class); + secretsRepositoryWriter = mock(SecretsRepositoryWriter.class); validator = mock(JsonSchemaValidator.class); connectionsHandler = mock(ConnectionsHandler.class); configurationUpdate = mock(ConfigurationUpdate.class); @@ -90,7 +96,14 @@ void setUp() throws IOException { sourceConnection = SourceHelpers.generateSource(standardSourceDefinition.getSourceDefinitionId()); - sourceHandler = new SourceHandler(configRepository, validator, connectionsHandler, uuidGenerator, secretsProcessor, configurationUpdate); + sourceHandler = new SourceHandler(configRepository, + secretsRepositoryReader, + secretsRepositoryWriter, + validator, + connectionsHandler, + uuidGenerator, + secretsProcessor, + configurationUpdate); } @Test @@ -116,7 +129,7 @@ void testCreateSource() throws JsonValidationException, ConfigNotFoundException, assertEquals(expectedSourceRead, actualSourceRead); verify(secretsProcessor).maskSecrets(sourceCreate.getConnectionConfiguration(), sourceDefinitionSpecificationRead.getConnectionSpecification()); - verify(configRepository).writeSourceConnection(sourceConnection, connectorSpecification); + verify(secretsRepositoryWriter).writeSourceConnection(sourceConnection, connectorSpecification); verify(validator).ensure(sourceDefinitionSpecificationRead.getConnectionSpecification(), sourceConnection.getConfiguration()); } @@ -157,7 +170,7 @@ void testUpdateSource() throws JsonValidationException, ConfigNotFoundException, assertEquals(expectedSourceRead, actualSourceRead); verify(secretsProcessor).maskSecrets(newConfiguration, sourceDefinitionSpecificationRead.getConnectionSpecification()); - verify(configRepository).writeSourceConnection(expectedSourceConnection, connectorSpecification); + verify(secretsRepositoryWriter).writeSourceConnection(expectedSourceConnection, connectorSpecification); verify(validator).ensure(sourceDefinitionSpecificationRead.getConnectionSpecification(), newConfiguration); } @@ -188,7 
+201,7 @@ void testCloneSource() throws JsonValidationException, ConfigNotFoundException, final SourceIdRequestBody sourceIdRequestBody = new SourceIdRequestBody().sourceId(sourceRead.getSourceId()); when(uuidGenerator.get()).thenReturn(clonedConnection.getSourceId()); - when(configRepository.getSourceConnectionWithSecrets(sourceConnection.getSourceId())).thenReturn(sourceConnection); + when(secretsRepositoryReader.getSourceConnectionWithSecrets(sourceConnection.getSourceId())).thenReturn(sourceConnection); when(configRepository.getSourceConnection(clonedConnection.getSourceId())).thenReturn(clonedConnection); when(configRepository.getStandardSourceDefinition(sourceDefinitionSpecificationRead.getSourceDefinitionId())) @@ -281,7 +294,7 @@ void testDeleteSource() throws JsonValidationException, ConfigNotFoundException, when(configRepository.getSourceConnection(sourceConnection.getSourceId())) .thenReturn(sourceConnection) .thenReturn(expectedSourceConnection); - when(configRepository.getSourceConnectionWithSecrets(sourceConnection.getSourceId())) + when(secretsRepositoryReader.getSourceConnectionWithSecrets(sourceConnection.getSourceId())) .thenReturn(sourceConnection) .thenReturn(expectedSourceConnection); when(configRepository.getStandardSourceDefinition(sourceDefinitionSpecificationRead.getSourceDefinitionId())) @@ -293,7 +306,7 @@ void testDeleteSource() throws JsonValidationException, ConfigNotFoundException, sourceHandler.deleteSource(sourceIdRequestBody); - verify(configRepository).writeSourceConnection(expectedSourceConnection, connectorSpecification); + verify(secretsRepositoryWriter).writeSourceConnection(expectedSourceConnection, connectorSpecification); verify(connectionsHandler).listConnectionsForWorkspace(workspaceIdRequestBody); final ConnectionIdRequestBody connectionIdRequestBody = new ConnectionIdRequestBody() .connectionId(connectionRead.getConnectionId()); diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/WorkerApp.java b/airbyte-workers/src/main/java/io/airbyte/workers/WorkerApp.java index adfe820c7746..f47d5786f5b5 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/WorkerApp.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/WorkerApp.java @@ -369,10 +369,7 @@ private static void launchWorkerApp() throws IOException { configs.getConfigDatabaseUrl()) .getInitialized(); final ConfigPersistence configPersistence = DatabaseConfigPersistence.createWithValidation(configDatabase); - final Optional secretPersistence = SecretPersistence.getLongLived(configs); - final Optional ephemeralSecretPersistence = SecretPersistence.getEphemeral(configs); - final ConfigRepository configRepository = - new ConfigRepository(configPersistence, secretsHydrator, secretPersistence, ephemeralSecretPersistence, configDatabase); + final ConfigRepository configRepository = new ConfigRepository(configPersistence, configDatabase); final Database jobDatabase = new JobsDatabaseInstance( configs.getDatabaseUser(), From c807df2a210700d134348def37eeb6d26d62a04f Mon Sep 17 00:00:00 2001 From: Charles Date: Sun, 13 Mar 2022 14:45:36 -0700 Subject: [PATCH 34/38] Add readmes to all modules (#8893) --- airbyte-analytics/readme.md | 3 +++ airbyte-api/readme.md | 11 +++++++++++ airbyte-bootloader/readme.md | 6 ++++++ airbyte-cli/readme.md | 3 +++ airbyte-commons-cli/readme.md | 2 ++ airbyte-commons-docker/readme.md | 3 +++ airbyte-commons/build.gradle | 2 +- airbyte-commons/readme.md | 7 +++++++ airbyte-config/init/readme.md | 8 ++++++++ airbyte-config/persistence/readme.md | 7 
+++++++ airbyte-container-orchestrator/readme.md | 6 ++++++ airbyte-integrations/bases/readme.md | 6 ++++++ airbyte-json-validation/readme.md | 7 +++++++ airbyte-metrics/readme.md | 3 +++ airbyte-notification/readme.md | 6 ++++++ airbyte-oauth/readme.md | 7 +++++++ airbyte-protocol/readme.md | 7 +++++++ airbyte-queue/readme.md | 6 ++++++ airbyte-scheduler/app/readme.md | 7 +++++++ airbyte-scheduler/client/readme.md | 7 +++++++ airbyte-scheduler/models/readme.md | 8 ++++++++ airbyte-scheduler/persistence/readme.md | 7 +++++++ airbyte-server/readme.md | 5 +++++ airbyte-temporal/README.md | 4 ++++ airbyte-test-utils/readme.md | 3 +++ airbyte-tests/readme.md | 5 +++++ airbyte-webapp/README.md | 8 ++++++++ airbyte-workers/README.md | 10 +++++++--- buildSrc/readme.md | 3 +++ charts/airbyte/README.md | 2 ++ docs/readme.md | 3 +++ tools/README.md | 2 ++ 32 files changed, 170 insertions(+), 4 deletions(-) create mode 100644 airbyte-analytics/readme.md create mode 100644 airbyte-api/readme.md create mode 100644 airbyte-bootloader/readme.md create mode 100644 airbyte-cli/readme.md create mode 100644 airbyte-commons-docker/readme.md create mode 100644 airbyte-commons/readme.md create mode 100644 airbyte-config/init/readme.md create mode 100644 airbyte-config/persistence/readme.md create mode 100644 airbyte-container-orchestrator/readme.md create mode 100644 airbyte-integrations/bases/readme.md create mode 100644 airbyte-json-validation/readme.md create mode 100644 airbyte-metrics/readme.md create mode 100644 airbyte-notification/readme.md create mode 100644 airbyte-oauth/readme.md create mode 100644 airbyte-protocol/readme.md create mode 100644 airbyte-queue/readme.md create mode 100644 airbyte-scheduler/app/readme.md create mode 100644 airbyte-scheduler/client/readme.md create mode 100644 airbyte-scheduler/models/readme.md create mode 100644 airbyte-scheduler/persistence/readme.md create mode 100644 airbyte-server/readme.md create mode 100644 airbyte-test-utils/readme.md create mode 100644 airbyte-tests/readme.md create mode 100644 buildSrc/readme.md create mode 100644 docs/readme.md diff --git a/airbyte-analytics/readme.md b/airbyte-analytics/readme.md new file mode 100644 index 000000000000..4c66fb35c24d --- /dev/null +++ b/airbyte-analytics/readme.md @@ -0,0 +1,3 @@ +# airbyte-analytics + +Java library with shared code for telemetry tracking, including Segment. diff --git a/airbyte-api/readme.md b/airbyte-api/readme.md new file mode 100644 index 000000000000..33ffeeb918dd --- /dev/null +++ b/airbyte-api/readme.md @@ -0,0 +1,11 @@ +# airbyte-api + +Defines the OpenApi configuration for the Airbyte Configuration API. It is also responsible for generating the following from the API spec: +* Java API client +* Java API server - this generated code is used in `airbyte-server` to allow us to implement the Configuration API in a type-safe way. See `ConfigurationApi.java` in `airbyte-server` +* API docs + +## Key Files +* src/openapi/config.yaml - Defines the config API interface using OpenApi3 +* AirbyteApiClient.java - wraps all api clients so that they can be dependency injected together +* PatchedLogsApi.java - fixes generated code for log api. diff --git a/airbyte-bootloader/readme.md b/airbyte-bootloader/readme.md new file mode 100644 index 000000000000..c27261073832 --- /dev/null +++ b/airbyte-bootloader/readme.md @@ -0,0 +1,6 @@ +# airbyte-bootloader + +This application runs at startup for Airbyte. It is responsible for making sure that the environment is upgraded and in a good state, e.g.
it makes sure the database has been migrated to the correct version. + +## Entrypoint +* BootloaderApp.java - has the main method for running the bootloader. diff --git a/airbyte-cli/readme.md b/airbyte-cli/readme.md new file mode 100644 index 000000000000..c65791d7c440 --- /dev/null +++ b/airbyte-cli/readme.md @@ -0,0 +1,3 @@ +# airbyte-cli + +Thin CLI over the Airbyte Configuration API to make it easier to interact with the API from the command line. diff --git a/airbyte-commons-cli/readme.md b/airbyte-commons-cli/readme.md index bd4bd8aab272..81aa7feb0b33 100644 --- a/airbyte-commons-cli/readme.md +++ b/airbyte-commons-cli/readme.md @@ -1 +1,3 @@ +# airbyte-commons-cli + This module houses utility functions for the `commons-cli` library. It is separate from `commons`, because it depends on the external library `commons-cli`, which we do not want to introduce as a dependency to every module. diff --git a/airbyte-commons-docker/readme.md b/airbyte-commons-docker/readme.md new file mode 100644 index 000000000000..46ed14b70f14 --- /dev/null +++ b/airbyte-commons-docker/readme.md @@ -0,0 +1,3 @@ +# airbyte-commons-docker + +This module contains common helpers for interacting with Docker and Docker images from Java. diff --git a/airbyte-commons/build.gradle b/airbyte-commons/build.gradle index 1c8e4129e8c2..2e9147968aa9 100644 --- a/airbyte-commons/build.gradle +++ b/airbyte-commons/build.gradle @@ -3,5 +3,5 @@ plugins { } dependencies { - // Dependencies for this module should be specified in the top-level build.gradle. + // Dependencies for this module should be specified in the top-level build.gradle. See readme for more explanation. } diff --git a/airbyte-commons/readme.md b/airbyte-commons/readme.md new file mode 100644 index 000000000000..acbd8542866a --- /dev/null +++ b/airbyte-commons/readme.md @@ -0,0 +1,7 @@ +# airbyte-commons + +Common java helpers. + +This submodule is inherited by all other java modules in the monorepo! It is therefore important that we do not add dependencies to it, as those dependencies will also be added to every java module. The only dependencies that this module uses are the ones declared in the `build.gradle` at the root of the Airbyte monorepo. In other words, it only uses dependencies that are already shared across all modules. The `dependencies` section of the `build.gradle` of `airbyte-commons` should always be empty. + +For other common java code that needs to be shared across modules and requires additional dependencies, we follow this convention: `airbyte-commons-`. See for example `airbyte-commons-cli` and `airbyte-commons-docker`. diff --git a/airbyte-config/init/readme.md b/airbyte-config/init/readme.md new file mode 100644 index 000000000000..76ce39968158 --- /dev/null +++ b/airbyte-config/init/readme.md @@ -0,0 +1,8 @@ +# airbyte-config:init + +This module fulfills two responsibilities: +1. It is where we declare what connectors should ship with the Platform. See below for more instruction on how it works. +2. It contains the scripts and Dockerfile that allow the `docker-compose` version of Airbyte to mount the local filesystem. This is helpful in cases where a user wants to use a connector that interacts with (reads data from or writes data to) the local filesystem, e.g. `destination-local-json`. + +## Declaring connectors that ship with the Platform +In order to have a connector ship with the Platform, it must be present in the respective `source_definitions.yaml` or `destination_definitions.yaml` files in `src/main/resources/seed`.
If a connector is added there, the build system will handle fetching its spec and adding it to `source_specs.yaml` or `destination_specs.yaml`. See the gradle tasks to understand how this all works. The logic for fetching the specs is in `airbyte-config:specs`. diff --git a/airbyte-config/persistence/readme.md b/airbyte-config/persistence/readme.md new file mode 100644 index 000000000000..3e4a94f13fe0 --- /dev/null +++ b/airbyte-config/persistence/readme.md @@ -0,0 +1,7 @@ +# airbyte-config:persistence + +This module contains the logic for accessing the config database. This database is primarily used by the `airbyte-server` but is also accessed from `airbyte-scheduler` and `airbyte-workers`. It contains all configuration information for Airbyte. + +## Key files +* `ConfigPersistence.java` is the interface over "low-level" access to the db. The most commonly used implementation of it is `DatabaseConfigPersistence.java`. The only other one that is used is `YamlSeedConfigPersistence.java`, which is used for loading configs that ship with the app. +* `ConfigRepository.java` is what is most used for accessing the database. The `ConfigPersistence` interface was hard to work with. `ConfigRepository` builds on top of it and houses any database queries to keep them from proliferating throughout the codebase. diff --git a/airbyte-container-orchestrator/readme.md b/airbyte-container-orchestrator/readme.md new file mode 100644 index 000000000000..05f8a46120b4 --- /dev/null +++ b/airbyte-container-orchestrator/readme.md @@ -0,0 +1,6 @@ +# airbyte-container-orchestrator + +This module contains logic to handle launching connector containers. It is called from the temporal workflows in `airbyte-workers` and spins up in a separate pod so that sync workflows can be isolated from each other. + +## Entrypoint +* `ContainerOrchestratorApp.java` diff --git a/airbyte-integrations/bases/readme.md b/airbyte-integrations/bases/readme.md new file mode 100644 index 000000000000..46f0e1c3b87b --- /dev/null +++ b/airbyte-integrations/bases/readme.md @@ -0,0 +1,6 @@ +# airbyte-integrations:bases + +This directory contains modules that contain shared code or can be inherited when writing connectors. + +## Key Files +todo (cgardens) - each of these submodules in this directory should have their own readmes. diff --git a/airbyte-json-validation/readme.md b/airbyte-json-validation/readme.md new file mode 100644 index 000000000000..57eff20af41c --- /dev/null +++ b/airbyte-json-validation/readme.md @@ -0,0 +1,7 @@ +# airbyte-json-validation + +This module contains shared Java code for validating JSON objects. + +## Key Files +* `JsonSchemaValidator.java` is the main entrypoint into this library, defining convenience methods for validation. +* `ConfigSchemaValidator.java` is additional sugar to make it easy to validate objects whose schemas are defined in `ConfigSchema`. diff --git a/airbyte-metrics/readme.md b/airbyte-metrics/readme.md new file mode 100644 index 000000000000..332acbb701f3 --- /dev/null +++ b/airbyte-metrics/readme.md @@ -0,0 +1,3 @@ +# airbyte-metrics + +Responsible for logic related to pushing monitoring and performance metrics to external aggregators. This is only used if explicitly turned on by the user. diff --git a/airbyte-notification/readme.md b/airbyte-notification/readme.md new file mode 100644 index 000000000000..381afb511d4f --- /dev/null +++ b/airbyte-notification/readme.md @@ -0,0 +1,6 @@ +# airbyte-notification + +Logic for handling notifications (e.g.
success / failure) that are emitted from jobs. + +## Key Files +* `NotificationClient.java` wraps the clients for the different notification providers that we integrate with. Additional clients for each integration are housed in this module (e.g. SlackNotificationClient). diff --git a/airbyte-oauth/readme.md b/airbyte-oauth/readme.md new file mode 100644 index 000000000000..aef6d28357c3 --- /dev/null +++ b/airbyte-oauth/readme.md @@ -0,0 +1,7 @@ +# airbyte-oauth + +Library for request handling for OAuth Connectors. While Connectors define many OAuth attributes in their spec, the request sequence is executed in the `airbyte-server`. This module contains that logic. + +## Key Files +* `OAuthFlowImplementation.java` - interface that a source has to implement in order to do OAuth with Airbyte. +* `OAuthImplementationFactory.java` - catalog of the sources for which we support OAuth. diff --git a/airbyte-protocol/readme.md b/airbyte-protocol/readme.md new file mode 100644 index 000000000000..7842ed1f954c --- /dev/null +++ b/airbyte-protocol/readme.md @@ -0,0 +1,7 @@ +# airbyte-protocol + +Declares the Airbyte Protocol. + +## Key Files +* `airbyte_protocol.yaml` - declares the Airbyte Protocol (in JSONSchema) +* `io.airbyte.protocol.models` - this package contains various java helpers for working with the protocol. diff --git a/airbyte-queue/readme.md b/airbyte-queue/readme.md new file mode 100644 index 000000000000..16678592af58 --- /dev/null +++ b/airbyte-queue/readme.md @@ -0,0 +1,6 @@ +# airbyte-queue + +Wraps an external tool that provides an on-disk queue. + +## Entrypoint +* `OnDiskQueue.java` diff --git a/airbyte-scheduler/app/readme.md b/airbyte-scheduler/app/readme.md new file mode 100644 index 000000000000..b1709a58c823 --- /dev/null +++ b/airbyte-scheduler/app/readme.md @@ -0,0 +1,7 @@ +# airbyte-scheduler:app + +This module contains the Scheduler App. The main method can be found in `SchedulerApp.java`. The Scheduler is responsible for: +1. Determining if it is time to schedule a Sync Job for a Connection. +2. Submitting pending Jobs to the Workers. +3. Retrying failing Jobs. +4. Clearing out old Job History (so it does not become a space concern). diff --git a/airbyte-scheduler/client/readme.md b/airbyte-scheduler/client/readme.md new file mode 100644 index 000000000000..a74ab59ba7b5 --- /dev/null +++ b/airbyte-scheduler/client/readme.md @@ -0,0 +1,7 @@ +# airbyte-scheduler:client + +Java clients for submitting Jobs. + +## Key Files +* `SchedulerJobClient` - interface for scheduling _asynchronous_ jobs (i.e. sync and reset). +* `SynchronousSchedulerClient` - interface for scheduling _synchronous_ jobs. diff --git a/airbyte-scheduler/models/readme.md b/airbyte-scheduler/models/readme.md new file mode 100644 index 000000000000..e6d8f82740e7 --- /dev/null +++ b/airbyte-scheduler/models/readme.md @@ -0,0 +1,8 @@ +# airbyte-scheduler:models + +This module declares models that belong to the Scheduler. + +These models are generated in the same way as `airbyte-config:models`. See that module for reference. + +## Key Files +* `Job.java` and `Attempt.java` are the top-level models in this schema. diff --git a/airbyte-scheduler/persistence/readme.md b/airbyte-scheduler/persistence/readme.md new file mode 100644 index 000000000000..2d98dbad8a88 --- /dev/null +++ b/airbyte-scheduler/persistence/readme.md @@ -0,0 +1,7 @@ +# airbyte-scheduler:persistence + +This module encapsulates the logic for the Jobs Database.
This Database is primarily used by the `airbyte-scheduler` and `airbyte-workers` but it is also accessed from the `airbyte-server`. + +## Key Files +* `DefaultJobPersistence` is where all queries for interacting with the Jobs Database live. +* everything else is an abstraction on top of that to make it easier to create / interact with / test jobs. diff --git a/airbyte-server/readme.md b/airbyte-server/readme.md new file mode 100644 index 000000000000..a6817ac3af7b --- /dev/null +++ b/airbyte-server/readme.md @@ -0,0 +1,5 @@ +# airbyte-server + +This module contains the actual app that runs the Airbyte Configuration API. The main method can be found in `ServerApp.java`. + +The external API interface that it implements is declared in `airbyte-api`. The class that actually implements that interface is called `ConfigurationApi`. You will notice that class is very large, because it generates a method for every endpoint. To keep it manageable, that class just delegates all requests to more tightly-scoped, resource-based handlers. For example, the `workspace/get` endpoint is present in `ConfigurationApi`, but all it does is delegate the call to the `WorkspaceHandler`, which contains all Workspace-specific logic. Unit tests for the server happen at the Handler-level, not for the `ConfigurationApi`. diff --git a/airbyte-temporal/README.md b/airbyte-temporal/README.md index 9f399c0c0980..d085652ea733 100644 --- a/airbyte-temporal/README.md +++ b/airbyte-temporal/README.md @@ -1,3 +1,7 @@ +# airbyte-temporal + +This module implements a custom version of what the Temporal autosetup image does. Because Temporal does not recommend that autosetup be used in production, we had to add some modifications. It ensures that the temporalDB schema will get upgraded if the temporal version is updated. + ## Testing a temporal migration `tools/bin/test_temporal_migration.sh` is available to test that a bump of the temporal version won't break the docker compose build. Here is what diff --git a/airbyte-test-utils/readme.md b/airbyte-test-utils/readme.md new file mode 100644 index 000000000000..f75ba4e74b0b --- /dev/null +++ b/airbyte-test-utils/readme.md @@ -0,0 +1,3 @@ +# airbyte-test-utils + +Shared Java code for executing TestContainers and other helpers. diff --git a/airbyte-tests/readme.md b/airbyte-tests/readme.md new file mode 100644 index 000000000000..eb75f7da3fc1 --- /dev/null +++ b/airbyte-tests/readme.md @@ -0,0 +1,5 @@ +# airbyte-tests + +This module contains two major test suites: +1. Acceptance Tests - These are feature-level tests that run as part of the build. They spin up Airbyte and test functionality by executing commands against the Airbyte Configuration API. It is possible to run them both on `docker-compose` and `kubernetes`. We do both in the build. These tests are designed to verify that large features work in broad strokes. More detailed testing should happen in unit tests. +2. Auto Migration Acceptance Tests - These tests verify that it is possible to upgrade from older versions of Airbyte (as far back as 0.17.0) all the way up to the current version. diff --git a/airbyte-webapp/README.md b/airbyte-webapp/README.md index 9ffba1459f27..39565a8e4fde 100644 --- a/airbyte-webapp/README.md +++ b/airbyte-webapp/README.md @@ -1,3 +1,7 @@ +# airbyte-webapp + +This module contains the Airbyte Webapp. It is a React app written in TypeScript. It runs in a Docker container, where a very lightweight nginx server serves the webapp.
+ This project was bootstrapped with [Create React App](https://github.com/facebook/create-react-app). ## Available Scripts @@ -21,3 +25,7 @@ Builds the app for production to the `build` folder.
Builds the app and Docker image and tags the image with `yourtag`. Note: this needs to be run from the root directory of the Airbyte project. + +## Entrypoints +* `airbyte-webapp/src/App.tsx` is the entrypoint into the OSS version of the webapp. +* `airbyte-webapp/src/packages/cloud/App.tsx` is the entrypoint into the Cloud version of the webapp. diff --git a/airbyte-workers/README.md b/airbyte-workers/README.md index 2195b1864819..221343a42631 100644 --- a/airbyte-workers/README.md +++ b/airbyte-workers/README.md @@ -1,6 +1,10 @@ -# Temporal Development +# airbyte-workers -## Versioning +This module contains the logic for how Jobs are executed. Jobs are executed using a tool called Temporal. + +## Temporal Development + +### Versioning Temporal maintains an internal history of the activities it runs. This history is based on a specific order. If we restart a temporal workflow with a new implementation that has a different order, the workflow will be stuck and will need manual action to be properly restarted. Temporal provides @@ -38,7 +42,7 @@ if (version <= 4 && version >= MINIMAL_VERSION) { } ``` -## Removing a version +### Removing a version Removing a version is a potential breaking change and should be done very carefully. We should maintain a MINIMAL_VERSION to keep track of the current minimal version. Both MINIMAL_VERSION and CURRENT_VERSION need to be present on the workflow file even if they are unused (if they have been diff --git a/buildSrc/readme.md b/buildSrc/readme.md new file mode 100644 index 000000000000..1dea0b8d527f --- /dev/null +++ b/buildSrc/readme.md @@ -0,0 +1,3 @@ +# buildSrc + +This module contains custom Gradle modules that we have written to improve our build. diff --git a/charts/airbyte/README.md b/charts/airbyte/README.md index 4b3975fc6540..3a6e76411ab9 100644 --- a/charts/airbyte/README.md +++ b/charts/airbyte/README.md @@ -1,5 +1,7 @@ # airbyte +Helm charts for Airbyte. + ## Parameters ### Global Parameters diff --git a/docs/readme.md b/docs/readme.md new file mode 100644 index 000000000000..68f26f6353a1 --- /dev/null +++ b/docs/readme.md @@ -0,0 +1,3 @@ +# docs (by Gitbook) + +This directory contains our docs that are hosted at docs.airbyte.io. We leverage a tool called Gitbook. For instructions on how to work with our docs, check out this [article](https://docs.airbyte.io/contributing-to-airbyte/updating-documentation#workflow-for-updating-docs). diff --git a/tools/README.md b/tools/README.md index 98ac0b70fb0d..fd241e669dd4 100644 --- a/tools/README.md +++ b/tools/README.md @@ -1,5 +1,7 @@ # Tools +Contains various tools (usually bash scripts) to improve quality of life or the build system.
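To ground the Temporal versioning notes in the `airbyte-workers` README above, here is a minimal sketch of the `Workflow.getVersion` pattern they describe. The change id, class name, and version bound are invented for illustration; only `Workflow.getVersion` and `Workflow.DEFAULT_VERSION` are real Temporal Java SDK symbols, and the call only behaves as described when executed inside workflow code.

```
import io.temporal.workflow.Workflow;

public class VersionedWorkflowSketch {

  // Hypothetical body of a workflow method; "my-change" is an invented change id.
  void runStep() {
    // Returns DEFAULT_VERSION when replaying a history recorded before the change
    // existed, and 1 for new executions (which record a version marker in the history).
    final int version = Workflow.getVersion("my-change", Workflow.DEFAULT_VERSION, 1);

    if (version == Workflow.DEFAULT_VERSION) {
      // Old code path, kept so existing histories keep replaying deterministically.
    } else {
      // New code path for version 1 of this change.
    }
  }

}
```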
+ ## Releasing a new version ``` Trigger the Github Action Release Open Source Airbyte (https://github.com/airbytehq/airbyte/actions/workflows/release-airbyte-os.yml) From f6bd217de1ddc1ee81cb8b2f98454e721918d115 Mon Sep 17 00:00:00 2001 From: Charles Date: Sun, 13 Mar 2022 14:49:29 -0700 Subject: [PATCH 35/38] Document where creds can be found for logging integration tests (#8733) --- .github/workflows/gradle.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.github/workflows/gradle.yml b/.github/workflows/gradle.yml index 0b3ed9d47136..16d98ab7875c 100644 --- a/.github/workflows/gradle.yml +++ b/.github/workflows/gradle.yml @@ -655,13 +655,16 @@ jobs: - name: Run Logging Tests run: ./tools/bin/cloud_storage_logging_test.sh env: + # AWS_S3_INTEGRATION_TEST_CREDS can be found in LastPass as AWS_S3_INTEGRATION_TEST_CREDS AWS_S3_INTEGRATION_TEST_CREDS: ${{ secrets.AWS_S3_INTEGRATION_TEST_CREDS }} + # GOOGLE_CLOUD_STORAGE_TEST_CREDS can be found in LastPass as "google cloud storage ( gcs ) test creds" GOOGLE_CLOUD_STORAGE_TEST_CREDS: ${{ secrets.GOOGLE_CLOUD_STORAGE_TEST_CREDS }} - name: Run Kubernetes End-to-End Acceptance Tests env: USER: root HOME: /home/runner + # AWS_S3_INTEGRATION_TEST_CREDS can be found in LastPass as AWS_S3_INTEGRATION_TEST_CREDS AWS_S3_INTEGRATION_TEST_CREDS: ${{ secrets.AWS_S3_INTEGRATION_TEST_CREDS }} SECRET_STORE_GCP_CREDENTIALS: ${{ secrets.SECRET_STORE_GCP_CREDENTIALS }} SECRET_STORE_GCP_PROJECT_ID: ${{ secrets.SECRET_STORE_GCP_PROJECT_ID }} @@ -683,6 +686,7 @@ jobs: env: USER: root HOME: /home/runner + # AWS_S3_INTEGRATION_TEST_CREDS can be found in LastPass as AWS_S3_INTEGRATION_TEST_CREDS AWS_S3_INTEGRATION_TEST_CREDS: ${{ secrets.AWS_S3_INTEGRATION_TEST_CREDS }} SECRET_STORE_GCP_CREDENTIALS: ${{ secrets.SECRET_STORE_GCP_CREDENTIALS }} SECRET_STORE_GCP_PROJECT_ID: ${{ secrets.SECRET_STORE_GCP_PROJECT_ID }} @@ -805,6 +809,7 @@ jobs: env: USER: root HOME: /home/runner + # AWS_S3_INTEGRATION_TEST_CREDS can be found in LastPass as AWS_S3_INTEGRATION_TEST_CREDS AWS_S3_INTEGRATION_TEST_CREDS: ${{ secrets.AWS_S3_INTEGRATION_TEST_CREDS }} SECRET_STORE_GCP_CREDENTIALS: ${{ secrets.SECRET_STORE_GCP_CREDENTIALS }} SECRET_STORE_GCP_PROJECT_ID: ${{ secrets.SECRET_STORE_GCP_PROJECT_ID }} From 3563452383233a534ecf4604e462feb6676c5367 Mon Sep 17 00:00:00 2001 From: Charles Date: Sun, 13 Mar 2022 14:54:44 -0700 Subject: [PATCH 36/38] add helper method for creating postgres db (#6244) --- airbyte-db/lib/src/main/java/io/airbyte/db/Databases.java | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/airbyte-db/lib/src/main/java/io/airbyte/db/Databases.java b/airbyte-db/lib/src/main/java/io/airbyte/db/Databases.java index 3fb65f85f73b..28dd36cde362 100644 --- a/airbyte-db/lib/src/main/java/io/airbyte/db/Databases.java +++ b/airbyte-db/lib/src/main/java/io/airbyte/db/Databases.java @@ -28,6 +28,14 @@ public class Databases { private static final Logger LOGGER = LoggerFactory.getLogger(Databases.class); private static final long DEFAULT_WAIT_MS = 5 * 1000; + public static Database createPostgresDatabase(final String username, + final String password, + final String host, + final int port, + final String database) { + return createPostgresDatabase(username, password, String.format("jdbc:postgresql://%s:%s/%s", host, port, database)); + } + public static Database createPostgresDatabase(final String username, final String password, final String jdbcConnectionString) { return createDatabase(username, password, jdbcConnectionString, "org.postgresql.Driver", 
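// The argument below is jOOQ's Postgres dialect, which controls how SQL is rendered for this connection; the new host/port overload above just formats a JDBC URL and delegates to this method.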
SQLDialect.POSTGRES); } From 3dacb6d9b0f2330eda9ef34d075920671e9528d8 Mon Sep 17 00:00:00 2001 From: terencecho Date: Fri, 4 Mar 2022 16:54:52 -0800 Subject: [PATCH 37/38] Add Disable Failing Connections feature --- .../ConnectionManagerWorkflowImpl.java | 6 + .../activities/DisableActivity.java | 35 +++++ .../activities/DisableActivityImpl.java | 72 ++++++++++ .../activities/DisableActivityTest.java | 127 ++++++++++++++++++ docker-compose.yaml | 1 + 5 files changed, 241 insertions(+) create mode 100644 airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/DisableActivity.java create mode 100644 airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/DisableActivityImpl.java create mode 100644 airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/DisableActivityTest.java diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java index c654f034880b..f7db620aa813 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java @@ -4,6 +4,8 @@ package io.airbyte.workers.temporal.scheduling; +import io.airbyte.commons.features.EnvVariableFeatureFlags; +import io.airbyte.commons.features.FeatureFlags; import io.airbyte.config.FailureReason; import io.airbyte.config.StandardSyncOutput; import io.airbyte.config.StandardSyncSummary; @@ -18,6 +20,8 @@ import io.airbyte.workers.temporal.scheduling.activities.ConfigFetchActivity.ScheduleRetrieverOutput; import io.airbyte.workers.temporal.scheduling.activities.ConnectionDeletionActivity; import io.airbyte.workers.temporal.scheduling.activities.ConnectionDeletionActivity.ConnectionDeletionInput; +import io.airbyte.workers.temporal.scheduling.activities.DisableActivity; +import io.airbyte.workers.temporal.scheduling.activities.DisableActivity.DisableActivityInput; import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity; import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity.GeneratedJobInput; import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity.SyncInput; @@ -77,6 +81,8 @@ public class ConnectionManagerWorkflowImpl implements ConnectionManagerWorkflow private CancellationScope cancellableSyncWorkflow; + final FeatureFlags featureFlags = new EnvVariableFeatureFlags(); + private UUID connectionId; public ConnectionManagerWorkflowImpl() {} diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/DisableActivity.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/DisableActivity.java new file mode 100644 index 000000000000..2835f3ff37db --- /dev/null +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/DisableActivity.java @@ -0,0 +1,35 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.workers.temporal.scheduling.activities; + +import io.temporal.activity.ActivityInterface; +import io.temporal.activity.ActivityMethod; +import java.time.Instant; +import java.util.UUID; +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.NoArgsConstructor; + +@ActivityInterface +public interface DisableActivity { + + @Data + @NoArgsConstructor + @AllArgsConstructor + class DisableActivityInput { + + private UUID connectionId; + + private Instant currTimestamp; + + } + + /** + * Delete a connection + */ + @ActivityMethod + void disableConnection(DisableActivityInput input); + +} diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/DisableActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/DisableActivityImpl.java new file mode 100644 index 000000000000..52ce1581b075 --- /dev/null +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/DisableActivityImpl.java @@ -0,0 +1,72 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.workers.temporal.scheduling.activities; + +import com.google.common.annotations.VisibleForTesting; +import io.airbyte.config.JobConfig.ConfigType; +import io.airbyte.config.StandardSync; +import io.airbyte.config.StandardSync.Status; +import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.scheduler.models.Job; +import io.airbyte.scheduler.models.JobStatus; +import io.airbyte.scheduler.persistence.JobPersistence; +import io.airbyte.workers.temporal.exception.RetryableException; +import java.time.temporal.ChronoUnit; +import java.util.List; +import lombok.AllArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +@AllArgsConstructor +@Slf4j +public class DisableActivityImpl implements DisableActivity { + + @VisibleForTesting + public static final int MAX_FAILURE_JOBS_IN_A_ROW = 100; + @VisibleForTesting + public static final int MAX_DAYS_OF_STRAIGHT_FAILURE = 14; + + private final ConfigRepository configRepository; + private JobPersistence jobPersistence; + + // if no successful sync jobs in the last MAX_FAILURE_JOBS_IN_A_ROW job attempts or the last + // MAX_DAYS_OF_STRAIGHT_FAILURE days (minimum 1 job attempt): disable connection to prevent wasting + // resources + + @Override + public void disableConnection(final DisableActivityInput input) { + try { + // lists job in descending order by created_at + final List jobs = jobPersistence.listJobs(ConfigType.SYNC, + input.getCurrTimestamp().minus(MAX_DAYS_OF_STRAIGHT_FAILURE, ChronoUnit.DAYS)); + if (jobs.size() == 0) + return; + + boolean shouldDisable = true; + int numFailures = 0; + + // jobs are sorted by jobs' createdAt in ascending order + for (int i = jobs.size() - 1; i >= 0; i--) { + final JobStatus jobStatus = jobs.get(i).getStatus(); + if (jobStatus == JobStatus.FAILED) { + numFailures++; + if (numFailures == MAX_FAILURE_JOBS_IN_A_ROW) + break; + } else if (jobStatus == JobStatus.SUCCEEDED) { + shouldDisable = false; + break; + } + } + + if (shouldDisable) { + final StandardSync standardSync = configRepository.getStandardSync(input.getConnectionId()); + standardSync.setStatus(Status.INACTIVE); + configRepository.writeStandardSync(standardSync); + } + } catch (final Exception e) { + throw new RetryableException(e); + } + } + +} diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/DisableActivityTest.java 
b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/DisableActivityTest.java new file mode 100644 index 000000000000..d1e7afab3a8f --- /dev/null +++ b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/DisableActivityTest.java @@ -0,0 +1,127 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.workers.temporal.scheduling.activities; + +import static io.airbyte.workers.temporal.scheduling.activities.DisableActivityImpl.MAX_DAYS_OF_STRAIGHT_FAILURE; +import static io.airbyte.workers.temporal.scheduling.activities.DisableActivityImpl.MAX_FAILURE_JOBS_IN_A_ROW; + +import io.airbyte.config.JobConfig.ConfigType; +import io.airbyte.config.StandardSync; +import io.airbyte.config.StandardSync.Status; +import io.airbyte.config.persistence.ConfigNotFoundException; +import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.scheduler.models.Job; +import io.airbyte.scheduler.models.JobStatus; +import io.airbyte.scheduler.persistence.JobPersistence; +import io.airbyte.validation.json.JsonValidationException; +import io.airbyte.workers.temporal.scheduling.activities.DisableActivity.DisableActivityInput; +import java.io.IOException; +import java.time.Instant; +import java.time.temporal.ChronoUnit; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.UUID; +import org.assertj.core.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Nested; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +class DisableActivityTest { + + @Mock + private ConfigRepository mConfigRepository; + + @Mock + private JobPersistence mJobPersistence; + + @Mock + private Job mJobFailure; + + @Mock + private Job mJobSuccess; + + @InjectMocks + private DisableActivityImpl disableActivity; + + private static final UUID connectionId = UUID.randomUUID(); + private static final StandardSync standardSync = new StandardSync(); + private static final Instant currInstant = Instant.now(); + private static final DisableActivityInput input = new DisableActivityInput(connectionId, currInstant); + + @BeforeEach + void setUp() { + standardSync.setStatus(Status.ACTIVE); + } + + @Nested + class DisableConnectionTest { + + @Test + @DisplayName("Test that the connection is disabled after MAX_FAILURE_JOBS_IN_A_ROW straight failures") + public void testMaxFailuresInARow() throws IOException, JsonValidationException, ConfigNotFoundException { + // 1 success followed by MAX_FAILURE_JOBS_IN_A_ROW failures + final List<Job> jobs = new ArrayList<>(Collections.singletonList(mJobSuccess)); + jobs.addAll(Collections.nCopies(MAX_FAILURE_JOBS_IN_A_ROW, mJobFailure)); + + Mockito.when(mJobPersistence.listJobs(ConfigType.SYNC, currInstant.minus(MAX_DAYS_OF_STRAIGHT_FAILURE, ChronoUnit.DAYS))) + .thenReturn(jobs); + Mockito.when(mJobFailure.getStatus()).thenReturn(JobStatus.FAILED); + Mockito.when(mConfigRepository.getStandardSync(connectionId)) + .thenReturn(standardSync); + + disableActivity.disableConnection(input); + Assertions.assertThat(standardSync.getStatus()).isEqualTo(Status.INACTIVE); + } + + @Test + @DisplayName("Test that the connection is _not_ disabled after MAX_FAILURE_JOBS_IN_A_ROW - 1 straight failures")
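+ // Expectation sketch: the single success sits inside the lookback window, so the failure streak stays below MAX_FAILURE_JOBS_IN_A_ROW and the connection must remain ACTIVE.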
public void testLessThanMaxFailuresInARow() throws IOException { + // 1 success followed by MAX_FAILURE_JOBS_IN_A_ROW-1 failures + final List<Job> jobs = new ArrayList<>(Collections.singletonList(mJobSuccess)); + jobs.addAll(Collections.nCopies(MAX_FAILURE_JOBS_IN_A_ROW - 1, mJobFailure)); + + Mockito.when(mJobPersistence.listJobs(ConfigType.SYNC, currInstant.minus(MAX_DAYS_OF_STRAIGHT_FAILURE, ChronoUnit.DAYS))) + .thenReturn(jobs); + Mockito.when(mJobFailure.getStatus()).thenReturn(JobStatus.FAILED); + Mockito.when(mJobSuccess.getStatus()).thenReturn(JobStatus.SUCCEEDED); + + disableActivity.disableConnection(input); + Assertions.assertThat(standardSync.getStatus()).isEqualTo(Status.ACTIVE); + } + + @Test + @DisplayName("Test that the connection is _not_ disabled after 0 jobs in last MAX_DAYS_OF_STRAIGHT_FAILURE days") + public void testNoRuns() throws IOException { + Mockito.when(mJobPersistence.listJobs(ConfigType.SYNC, currInstant.minus(MAX_DAYS_OF_STRAIGHT_FAILURE, ChronoUnit.DAYS))) + .thenReturn(Collections.emptyList()); + + disableActivity.disableConnection(input); + Assertions.assertThat(standardSync.getStatus()).isEqualTo(Status.ACTIVE); + } + + @Test + @DisplayName("Test that the connection is disabled after only failed jobs in last MAX_DAYS_OF_STRAIGHT_FAILURE days") + public void testOnlyFailuresInMaxDays() throws IOException, JsonValidationException, ConfigNotFoundException { + Mockito.when(mJobPersistence.listJobs(ConfigType.SYNC, currInstant.minus(MAX_DAYS_OF_STRAIGHT_FAILURE, ChronoUnit.DAYS))) + .thenReturn(Collections.singletonList(mJobFailure)); + Mockito.when(mJobFailure.getStatus()).thenReturn(JobStatus.FAILED); + Mockito.when(mConfigRepository.getStandardSync(connectionId)) + .thenReturn(standardSync); + + disableActivity.disableConnection(input); + Assertions.assertThat(standardSync.getStatus()).isEqualTo(Status.INACTIVE); + } + + } + +} diff --git a/docker-compose.yaml b/docker-compose.yaml index 303954df5893..7c4ea3451cf4 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -101,6 +101,7 @@ services: - DATABASE_PASSWORD=${DATABASE_PASSWORD} - DATABASE_URL=${DATABASE_URL} - DATABASE_USER=${DATABASE_USER} + - DISABLE_FAILING_CONNECTIONS=${DISABLE_FAILING_CONNECTIONS} - JOB_MAIN_CONTAINER_CPU_LIMIT=${JOB_MAIN_CONTAINER_CPU_LIMIT} - JOB_MAIN_CONTAINER_CPU_REQUEST=${JOB_MAIN_CONTAINER_CPU_REQUEST} - JOB_MAIN_CONTAINER_MEMORY_LIMIT=${JOB_MAIN_CONTAINER_MEMORY_LIMIT} From 3dacb6d9b0f2330eda9ef34d075920671e9528d8 Mon Sep 17 00:00:00 2001 From: terencecho Date: Mon, 7 Mar 2022 14:09:40 -0800 Subject: [PATCH 38/38] Rename and cleanup --- .../ConnectionManagerWorkflowImpl.java | 2 - .../activities/DisableActivity.java | 35 ----- .../activities/DisableActivityImpl.java | 72 ---------- .../activities/DisableActivityTest.java | 127 ------------------ docker-compose.yaml | 1 - 5 files changed, 237 deletions(-) delete mode 100644 airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/DisableActivity.java delete mode 100644 airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/DisableActivityImpl.java delete mode 100644 airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/DisableActivityTest.java diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java index f7db620aa813..64aad7422471 100644 ---
a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java @@ -20,8 +20,6 @@ import io.airbyte.workers.temporal.scheduling.activities.ConfigFetchActivity.ScheduleRetrieverOutput; import io.airbyte.workers.temporal.scheduling.activities.ConnectionDeletionActivity; import io.airbyte.workers.temporal.scheduling.activities.ConnectionDeletionActivity.ConnectionDeletionInput; -import io.airbyte.workers.temporal.scheduling.activities.DisableActivity; -import io.airbyte.workers.temporal.scheduling.activities.DisableActivity.DisableActivityInput; import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity; import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity.GeneratedJobInput; import io.airbyte.workers.temporal.scheduling.activities.GenerateInputActivity.SyncInput; diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/DisableActivity.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/DisableActivity.java deleted file mode 100644 index 2835f3ff37db..000000000000 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/DisableActivity.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright (c) 2021 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.temporal.scheduling.activities; - -import io.temporal.activity.ActivityInterface; -import io.temporal.activity.ActivityMethod; -import java.time.Instant; -import java.util.UUID; -import lombok.AllArgsConstructor; -import lombok.Data; -import lombok.NoArgsConstructor; - -@ActivityInterface -public interface DisableActivity { - - @Data - @NoArgsConstructor - @AllArgsConstructor - class DisableActivityInput { - - private UUID connectionId; - - private Instant currTimestamp; - - } - - /** - * Delete a connection - */ - @ActivityMethod - void disableConnection(DisableActivityInput input); - -} diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/DisableActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/DisableActivityImpl.java deleted file mode 100644 index 52ce1581b075..000000000000 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/DisableActivityImpl.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
- */
-
-package io.airbyte.workers.temporal.scheduling.activities;
-
-import com.google.common.annotations.VisibleForTesting;
-import io.airbyte.config.JobConfig.ConfigType;
-import io.airbyte.config.StandardSync;
-import io.airbyte.config.StandardSync.Status;
-import io.airbyte.config.persistence.ConfigRepository;
-import io.airbyte.scheduler.models.Job;
-import io.airbyte.scheduler.models.JobStatus;
-import io.airbyte.scheduler.persistence.JobPersistence;
-import io.airbyte.workers.temporal.exception.RetryableException;
-import java.time.temporal.ChronoUnit;
-import java.util.List;
-import lombok.AllArgsConstructor;
-import lombok.extern.slf4j.Slf4j;
-
-@AllArgsConstructor
-@Slf4j
-public class DisableActivityImpl implements DisableActivity {
-
-  @VisibleForTesting
-  public static final int MAX_FAILURE_JOBS_IN_A_ROW = 100;
-  @VisibleForTesting
-  public static final int MAX_DAYS_OF_STRAIGHT_FAILURE = 14;
-
-  private final ConfigRepository configRepository;
-  private final JobPersistence jobPersistence;
-
-  // If there is no successful sync job in the last MAX_FAILURE_JOBS_IN_A_ROW attempts or in the
-  // last MAX_DAYS_OF_STRAIGHT_FAILURE days (with at least one attempt), disable the connection
-  // to prevent wasting resources on a connection that keeps failing.
-
-  @Override
-  public void disableConnection(final DisableActivityInput input) {
-    try {
-      // jobs are listed in ascending order by created_at (oldest first)
-      final List<Job> jobs = jobPersistence.listJobs(ConfigType.SYNC,
-          input.getCurrTimestamp().minus(MAX_DAYS_OF_STRAIGHT_FAILURE, ChronoUnit.DAYS));
-      if (jobs.isEmpty())
-        return;
-
-      boolean shouldDisable = true;
-      int numFailures = 0;
-
-      // walk backwards from the most recent job, counting consecutive failures
-      for (int i = jobs.size() - 1; i >= 0; i--) {
-        final JobStatus jobStatus = jobs.get(i).getStatus();
-        if (jobStatus == JobStatus.FAILED) {
-          numFailures++;
-          if (numFailures == MAX_FAILURE_JOBS_IN_A_ROW)
-            break;
-        } else if (jobStatus == JobStatus.SUCCEEDED) {
-          shouldDisable = false;
-          break;
-        }
-      }
-
-      if (shouldDisable) {
-        final StandardSync standardSync = configRepository.getStandardSync(input.getConnectionId());
-        standardSync.setStatus(Status.INACTIVE);
-        configRepository.writeStandardSync(standardSync);
-      }
-    } catch (final Exception e) {
-      throw new RetryableException(e);
-    }
-  }
-
-}
diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/DisableActivityTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/DisableActivityTest.java
deleted file mode 100644
index d1e7afab3a8f..000000000000
--- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/DisableActivityTest.java
+++ /dev/null
@@ -1,127 +0,0 @@
-/*
- * Copyright (c) 2021 Airbyte, Inc., all rights reserved.
- */
-
-package io.airbyte.workers.temporal.scheduling.activities;
-
-import static io.airbyte.workers.temporal.scheduling.activities.DisableActivityImpl.MAX_DAYS_OF_STRAIGHT_FAILURE;
-import static io.airbyte.workers.temporal.scheduling.activities.DisableActivityImpl.MAX_FAILURE_JOBS_IN_A_ROW;
-
-import io.airbyte.config.JobConfig.ConfigType;
-import io.airbyte.config.StandardSync;
-import io.airbyte.config.StandardSync.Status;
-import io.airbyte.config.persistence.ConfigNotFoundException;
-import io.airbyte.config.persistence.ConfigRepository;
-import io.airbyte.scheduler.models.Job;
-import io.airbyte.scheduler.models.JobStatus;
-import io.airbyte.scheduler.persistence.JobPersistence;
-import io.airbyte.validation.json.JsonValidationException;
-import io.airbyte.workers.temporal.scheduling.activities.DisableActivity.DisableActivityInput;
-import java.io.IOException;
-import java.time.Instant;
-import java.time.temporal.ChronoUnit;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.UUID;
-import org.assertj.core.api.Assertions;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.DisplayName;
-import org.junit.jupiter.api.Nested;
-import org.junit.jupiter.api.Test;
-import org.junit.jupiter.api.extension.ExtendWith;
-import org.mockito.InjectMocks;
-import org.mockito.Mock;
-import org.mockito.Mockito;
-import org.mockito.junit.jupiter.MockitoExtension;
-
-@ExtendWith(MockitoExtension.class)
-class DisableActivityTest {
-
-  @Mock
-  private ConfigRepository mConfigRepository;
-
-  @Mock
-  private JobPersistence mJobPersistence;
-
-  @Mock
-  private Job mJobFailure;
-
-  @Mock
-  private Job mJobSuccess;
-
-  @InjectMocks
-  private DisableActivityImpl disableActivity;
-
-  private static final UUID connectionId = UUID.randomUUID();
-  private static final StandardSync standardSync = new StandardSync();
-  private static final Instant currInstant = Instant.now();
-  private static final DisableActivityInput input = new DisableActivityInput(connectionId, currInstant);
-
-  @BeforeEach
-  void setUp() {
-    standardSync.setStatus(Status.ACTIVE);
-  }
-
-  @Nested
-  class DisableConnectionTest {
-
-    @Test
-    @DisplayName("Test that the connection is disabled after MAX_FAILURE_JOBS_IN_A_ROW straight failures")
-    public void testMaxFailuresInARow() throws IOException, JsonValidationException, ConfigNotFoundException {
-      // 1 success followed by MAX_FAILURE_JOBS_IN_A_ROW failures
-      final List<Job> jobs = new ArrayList<>(Collections.singletonList(mJobSuccess));
-      jobs.addAll(Collections.nCopies(MAX_FAILURE_JOBS_IN_A_ROW, mJobFailure));
-
-      Mockito.when(mJobPersistence.listJobs(ConfigType.SYNC, currInstant.minus(MAX_DAYS_OF_STRAIGHT_FAILURE, ChronoUnit.DAYS)))
-          .thenReturn(jobs);
-      Mockito.when(mJobFailure.getStatus()).thenReturn(JobStatus.FAILED);
-      Mockito.when(mConfigRepository.getStandardSync(connectionId))
-          .thenReturn(standardSync);
-
-      disableActivity.disableConnection(input);
-      Assertions.assertThat(standardSync.getStatus()).isEqualTo(Status.INACTIVE);
-    }
-
-    @Test
-    @DisplayName("Test that the connection is _not_ disabled after MAX_FAILURE_JOBS_IN_A_ROW - 1 straight failures")
-    public void testLessThanMaxFailuresInARow() throws IOException {
-      // 1 success followed by MAX_FAILURE_JOBS_IN_A_ROW - 1 failures
-      final List<Job> jobs = new ArrayList<>(Collections.singletonList(mJobSuccess));
-      jobs.addAll(Collections.nCopies(MAX_FAILURE_JOBS_IN_A_ROW - 1, mJobFailure));
-
-      Mockito.when(mJobPersistence.listJobs(ConfigType.SYNC, currInstant.minus(MAX_DAYS_OF_STRAIGHT_FAILURE, ChronoUnit.DAYS)))
-          .thenReturn(jobs);
-      Mockito.when(mJobFailure.getStatus()).thenReturn(JobStatus.FAILED);
-      Mockito.when(mJobSuccess.getStatus()).thenReturn(JobStatus.SUCCEEDED);
-
-      disableActivity.disableConnection(input);
-      Assertions.assertThat(standardSync.getStatus()).isEqualTo(Status.ACTIVE);
-    }
-
-    @Test
-    @DisplayName("Test that the connection is _not_ disabled after 0 jobs in last MAX_DAYS_OF_STRAIGHT_FAILURE days")
-    public void testNoRuns() throws IOException {
-      Mockito.when(mJobPersistence.listJobs(ConfigType.SYNC, currInstant.minus(MAX_DAYS_OF_STRAIGHT_FAILURE, ChronoUnit.DAYS)))
-          .thenReturn(Collections.emptyList());
-
-      disableActivity.disableConnection(input);
-      Assertions.assertThat(standardSync.getStatus()).isEqualTo(Status.ACTIVE);
-    }
-
-    @Test
-    @DisplayName("Test that the connection is disabled after only failed jobs in last MAX_DAYS_OF_STRAIGHT_FAILURE days")
-    public void testOnlyFailuresInMaxDays() throws IOException, JsonValidationException, ConfigNotFoundException {
-      Mockito.when(mJobPersistence.listJobs(ConfigType.SYNC, currInstant.minus(MAX_DAYS_OF_STRAIGHT_FAILURE, ChronoUnit.DAYS)))
-          .thenReturn(Collections.singletonList(mJobFailure));
-      Mockito.when(mJobFailure.getStatus()).thenReturn(JobStatus.FAILED);
-      Mockito.when(mConfigRepository.getStandardSync(connectionId))
-          .thenReturn(standardSync);
-
-      disableActivity.disableConnection(input);
-      Assertions.assertThat(standardSync.getStatus()).isEqualTo(Status.INACTIVE);
    }
-
-  }
-
-}
diff --git a/docker-compose.yaml b/docker-compose.yaml
index 7c4ea3451cf4..303954df5893 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -101,7 +101,6 @@ services:
       - DATABASE_PASSWORD=${DATABASE_PASSWORD}
       - DATABASE_URL=${DATABASE_URL}
       - DATABASE_USER=${DATABASE_USER}
-      - DISABLE_FAILING_CONNECTIONS=${DISABLE_FAILING_CONNECTIONS}
       - JOB_MAIN_CONTAINER_CPU_LIMIT=${JOB_MAIN_CONTAINER_CPU_LIMIT}
      - JOB_MAIN_CONTAINER_CPU_REQUEST=${JOB_MAIN_CONTAINER_CPU_REQUEST}
       - JOB_MAIN_CONTAINER_MEMORY_LIMIT=${JOB_MAIN_CONTAINER_MEMORY_LIMIT}
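
Reviewer note (not part of the patches): the decision logic that patch 38 removes ahead of the rename is a single backwards scan over the sync jobs of the last MAX_DAYS_OF_STRAIGHT_FAILURE days. Below is a minimal, self-contained sketch of that scan for reference, under two assumptions: jobs arrive sorted ascending by created_at (oldest first), which is the ordering the tests in DisableActivityTest encode, and JobOutcome is a hypothetical stand-in for io.airbyte.scheduler.models.JobStatus. Everything else mirrors DisableActivityImpl above; this is an illustration, not the implementation.

  import java.util.List;

  // Sketch of the auto-disable check in DisableActivityImpl; illustration only.
  public class AutoDisableSketch {

    // Hypothetical stand-in for io.airbyte.scheduler.models.JobStatus.
    enum JobOutcome { SUCCEEDED, FAILED, CANCELLED }

    static final int MAX_FAILURE_JOBS_IN_A_ROW = 100;

    // jobs must be sorted ascending by created_at (oldest first).
    static boolean shouldDisable(final List<JobOutcome> jobs) {
      if (jobs.isEmpty()) {
        return false; // no attempts in the window: leave the connection active
      }
      int numFailures = 0;
      // Walk backwards from the most recent job.
      for (int i = jobs.size() - 1; i >= 0; i--) {
        final JobOutcome outcome = jobs.get(i);
        if (outcome == JobOutcome.FAILED) {
          numFailures++;
          if (numFailures == MAX_FAILURE_JOBS_IN_A_ROW) {
            return true; // MAX_FAILURE_JOBS_IN_A_ROW consecutive failures: disable
          }
        } else if (outcome == JobOutcome.SUCCEEDED) {
          return false; // a success inside the window: keep the connection active
        }
        // Other outcomes (e.g. CANCELLED) neither count as failures nor reset
        // the streak, matching the patch.
      }
      // No success anywhere in the window (even if fewer than the max failures):
      // disable, since nothing has synced for MAX_DAYS_OF_STRAIGHT_FAILURE days.
      return true;
    }

  }

Under these assumptions, shouldDisable(List.of(JobOutcome.FAILED)) is true, matching testOnlyFailuresInMaxDays, and shouldDisable(List.of()) is false, matching testNoRuns.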