Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

java postgres destination #527

Closed
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -22,19 +22,26 @@
* SOFTWARE.
*/

package io.airbyte.scheduler;
package io.airbyte.commons.concurrency;

import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class SchedulerShutdownHandler extends Thread {
public class GracefulShutdownHandler extends Thread {

private static final Logger LOGGER = LoggerFactory.getLogger(SchedulerShutdownHandler.class);
private static final Logger LOGGER = LoggerFactory.getLogger(GracefulShutdownHandler.class);
private final long terminationWaitTime;
private final TimeUnit terminateWaitTimeUnits;
private final ExecutorService[] threadPools;

public SchedulerShutdownHandler(final ExecutorService... threadPools) {
public GracefulShutdownHandler(
long terminationWaitTime,
TimeUnit terminateWaitTimeUnits,
final ExecutorService... threadPools) {
this.terminationWaitTime = terminationWaitTime;
this.terminateWaitTimeUnits = terminateWaitTimeUnits;
this.threadPools = threadPools;
}

Expand All @@ -44,11 +51,11 @@ public void run() {
threadPool.shutdown();

try {
if (!threadPool.awaitTermination(30, TimeUnit.SECONDS)) {
LOGGER.error("Unable to kill worker threads by shutdown timeout.");
if (!threadPool.awaitTermination(terminationWaitTime, terminateWaitTimeUnits)) {
LOGGER.error("Unable to kill threads by shutdown timeout.");
}
} catch (InterruptedException e) {
LOGGER.error("Wait for graceful worker thread shutdown interrupted.", e);
LOGGER.error("Wait for graceful thread shutdown interrupted.", e);
}
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Charsets;
import java.io.IOException;
import java.util.Optional;

Expand Down Expand Up @@ -109,4 +110,8 @@ public static <T> T clone(final T object) {
return (T) deserialize(serialize(object), object.getClass());
}

public static byte[] toBytes(JsonNode jsonNode) {
return serialize(jsonNode).getBytes(Charsets.UTF_8);
}

}
Original file line number Diff line number Diff line change
Expand Up @@ -22,23 +22,23 @@
* SOFTWARE.
*/

package io.airbyte.scheduler;
package io.airbyte.commons.concurrency;

import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;

import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
import org.junit.jupiter.api.Test;

class SchedulerShutdownHandlerTest {
class GracefulShutdownHandlerTest {

@Test
public void testRun() throws InterruptedException {
final ExecutorService executorService = mock(ExecutorService.class);
final SchedulerShutdownHandler schedulerShutdownHandler = new SchedulerShutdownHandler(executorService);
schedulerShutdownHandler.start();
schedulerShutdownHandler.join();
final GracefulShutdownHandler gracefulShutdownHandler = new GracefulShutdownHandler(30, TimeUnit.SECONDS, executorService);
gracefulShutdownHandler.start();
gracefulShutdownHandler.join();

verify(executorService).shutdown();
}
Expand Down
20 changes: 16 additions & 4 deletions airbyte-db/src/main/java/io/airbyte/db/DatabaseHelper.java
Original file line number Diff line number Diff line change
Expand Up @@ -30,12 +30,14 @@
import org.jooq.DSLContext;
import org.jooq.SQLDialect;
import org.jooq.impl.DSL;
import org.jooq.*;
import org.jooq.impl.*;

public class DatabaseHelper {

public static BasicDataSource getConnectionPool(String username,
String password,
String jdbcConnectionString) {
String password,
String jdbcConnectionString) {

BasicDataSource connectionPool = new BasicDataSource();
connectionPool.setDriverClassName("org.postgresql.Driver");
Expand All @@ -46,14 +48,24 @@ public static BasicDataSource getConnectionPool(String username,
return connectionPool;
}

public static <T> T query(BasicDataSource connectionPool, ContextQueryFunction<T> transform)
throws SQLException {
public static <T> T query(BasicDataSource connectionPool, ContextQueryFunction<T> transform) throws SQLException {

try (Connection connection = connectionPool.getConnection()) {
DSLContext context = getContext(connection);
return transform.apply(context);
}
}

public static <T> T transaction(BasicDataSource connectionPool, ContextQueryFunction<T> transform) throws SQLException {
try (Connection connection = connectionPool.getConnection()) {
DSLContext context = getContext(connection);
return context.transactionResult(configuration -> {
DSLContext transactionContext = DSL.using(configuration);
return transform.apply(transactionContext);
});
}
}

  // Wraps a raw JDBC connection in a jOOQ DSL context. Dialect is hard-coded to Postgres,
  // matching the Postgres driver configured in getConnectionPool.
  private static DSLContext getContext(Connection connection) {
    return DSL.using(connection, SQLDialect.POSTGRES);
  }
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,215 @@
/*
* MIT License
*
* Copyright (c) 2020 Airbyte
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/

package io.airbyte.integrations.destination.postgres;

import com.fasterxml.jackson.databind.JsonNode;
import com.google.common.base.Preconditions;
import io.airbyte.commons.json.Jsons;
import io.airbyte.commons.resources.MoreResources;
import io.airbyte.config.DestinationConnectionSpecification;
import io.airbyte.config.Schema;
import io.airbyte.config.StandardCheckConnectionOutput;
import io.airbyte.config.StandardCheckConnectionOutput.Status;
import io.airbyte.config.StandardDiscoverSchemaOutput;
import io.airbyte.config.Stream;
import io.airbyte.integrations.base.Destination;
import io.airbyte.integrations.base.DestinationConsumer;
import io.airbyte.integrations.base.FailureTrackingConsumer;
import io.airbyte.integrations.base.IntegrationRunner;
import io.airbyte.singer.SingerMessage;
import io.airbyte.singer.SingerMessage.Type;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.time.Instant;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVPrinter;
import org.apache.commons.io.FileUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class CsvDestination implements Destination {

private static final Logger LOGGER = LoggerFactory.getLogger(CsvDestination.class);

static final String COLUMN_NAME = "data"; // we output all data as a blob to a single column.
static final String DESTINATION_PATH_FIELD = "destination_path";

@Override
public DestinationConnectionSpecification spec() throws IOException {
final String resourceString = MoreResources.readResource("spec.json");
return Jsons.deserialize(resourceString, DestinationConnectionSpecification.class);
}

@Override
public StandardCheckConnectionOutput check(JsonNode config) {
try {
FileUtils.forceMkdir(getDestinationPath(config).toFile());
} catch (Exception e) {
return new StandardCheckConnectionOutput().withStatus(Status.FAILURE).withMessage(e.getMessage());
}
return new StandardCheckConnectionOutput().withStatus(Status.SUCCESS);
}

// todo (cgardens) - we currently don't leverage discover in our destinations, so skipping
// implementing it... for now.
@Override
public StandardDiscoverSchemaOutput discover(JsonNode config) {
throw new RuntimeException("Not Implemented");
}

/**
* @param config - csv destination config.
* @param schema - schema of the incoming messages.
* @return - a consumer to handle writing records to the filesystem.
* @throws IOException - exception throw in manipulating the filesytem.
*/
@Override
public DestinationConsumer<SingerMessage> write(JsonNode config, Schema schema) throws IOException {
final Path destinationDir = getDestinationPath(config);

FileUtils.forceMkdir(destinationDir.toFile());

final long now = Instant.now().toEpochMilli();
final Map<String, WriteConfig> writeConfigs = new HashMap<>();
for (final Stream stream : schema.getStreams()) {
final Path tmpPath = destinationDir.resolve(stream.getName() + "_" + now + ".csv");
final Path finalPath = destinationDir.resolve(stream.getName() + ".csv");
final FileWriter fileWriter = new FileWriter(tmpPath.toFile());
final CSVPrinter printer = new CSVPrinter(fileWriter, CSVFormat.DEFAULT.withHeader(COLUMN_NAME));
writeConfigs.put(stream.getName(), new WriteConfig(printer, tmpPath, finalPath));
}

return new CsvConsumer(writeConfigs, schema);
}

/**
* Extract provided relative path from csv config object and append to local mount path.
*
* @param config - csv config object
* @return absolute path with the relative path appended to the local volume mount.
*/
private Path getDestinationPath(JsonNode config) {
final String destinationRelativePath = config.get(DESTINATION_PATH_FIELD).asText();
Preconditions.checkNotNull(destinationRelativePath);

return Path.of(destinationRelativePath);
}

/**
* This consumer writes individual records to temporary files. If all of the messages are written
* successfully, it moves the tmp files to files named by their respective stream. If there are any
* failures, nothing is written.
*/
private static class CsvConsumer extends FailureTrackingConsumer<SingerMessage> {

private final Map<String, WriteConfig> writeConfigs;
private final Schema schema;

public CsvConsumer(Map<String, WriteConfig> writeConfigs, Schema schema) {
this.schema = schema;
LOGGER.info("initializing consumer.");

this.writeConfigs = writeConfigs;
}

@Override
protected void acceptTracked(SingerMessage singerMessage) throws Exception {

// ignore other message types.
if (singerMessage.getType() == Type.RECORD) {
if (!writeConfigs.containsKey(singerMessage.getStream())) {
throw new IllegalArgumentException(
String.format("Message contained record from a stream that was not in the catalog. \ncatalog: %s , \nmessage: %s",
Jsons.serialize(schema), Jsons.serialize(singerMessage)));
}

writeConfigs.get(singerMessage.getStream()).getWriter().printRecord(Jsons.serialize(singerMessage.getRecord()));
}
}

@Override
protected void close(boolean hasFailed) throws IOException {
LOGGER.info("finalizing consumer.");

for (final Map.Entry<String, WriteConfig> entries : writeConfigs.entrySet()) {
try {
entries.getValue().getWriter().flush();
entries.getValue().getWriter().close();
} catch (Exception e) {
hasFailed = true;
LOGGER.error("failed to close writer for: {}.", entries.getKey());
}
}
// do not persist the data, if there are any failures.
if (!hasFailed) {
for (final WriteConfig writeConfig : writeConfigs.values()) {
Files.move(writeConfig.getTmpPath(), writeConfig.getFinalPath(), StandardCopyOption.REPLACE_EXISTING);
}
}
// clean up tmp files.
for (final WriteConfig writeConfig : writeConfigs.values()) {
Files.deleteIfExists(writeConfig.getTmpPath());
}

}

}

private static class WriteConfig {

private final CSVPrinter writer;
private final Path tmpPath;
private final Path finalPath;

public WriteConfig(CSVPrinter writer, Path tmpPath, Path finalPath) {
this.writer = writer;
this.tmpPath = tmpPath;
this.finalPath = finalPath;
}

public CSVPrinter getWriter() {
return writer;
}

public Path getTmpPath() {
return tmpPath;
}

public Path getFinalPath() {
return finalPath;
}

}

public static void main(String[] args) throws Exception {
new IntegrationRunner(new CsvDestination()).run(args);
}

}
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@ protected JsonNode getInvalidConfig() {
}

@Override
protected List<JsonNode> recordRetriever(TestDestinationEnv testEnv) throws Exception {
protected List<JsonNode> recordRetriever(TestDestinationEnv testEnv, String streamName) throws Exception {
final List<Path> list = Files.list(testEnv.getLocalRoot().resolve(RELATIVE_PATH)).collect(Collectors.toList());
assertEquals(1, list.size());

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -100,7 +100,7 @@ public abstract class TestDestination {
* @return All of the records in the destination at the time this method is invoked.
* @throws Exception - can throw any exception, test framework will handle.
*/
protected abstract List<JsonNode> recordRetriever(TestDestinationEnv testEnv) throws Exception;
protected abstract List<JsonNode> recordRetriever(TestDestinationEnv testEnv, String streamName) throws Exception;

/**
* Function that performs any setup of external resources required for the test. e.g. instantiate a
Expand Down Expand Up @@ -182,7 +182,7 @@ void testSync(String messagesFilename, String catalogFilename) throws Exception
.map(record -> Jsons.deserialize(record, SingerMessage.class)).collect(Collectors.toList());
runSync(messages, catalog);

assertSameMessages(messages, recordRetriever(testEnv));
assertSameMessages(messages, recordRetriever(testEnv, catalog.getStreams().get(0).getName()));
}

/**
Expand All @@ -202,7 +202,7 @@ void testSecondSync() throws Exception {
.put("HKD", 10)
.put("NZD", 700)));
runSync(secondSyncMessages, catalog);
assertSameMessages(secondSyncMessages, recordRetriever(testEnv));
assertSameMessages(secondSyncMessages, recordRetriever(testEnv, catalog.getStreams().get(0).getName()));
}

private void runSync(List<SingerMessage> messages, Schema catalog) throws IOException, WorkerException {
Expand Down
4 changes: 2 additions & 2 deletions airbyte-integrations/java-template-destination/readme.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,8 @@
1. e.g.
```
mkdir -p airbyte-integrations/bigquery-destination/src/main/java/io/airbyte/integrations/destination/bigquery
mv airbyte-integrations/java-template-destination/src/main/java/io/airbyte/integrations/destination/template/DestinationTemplate.java airbyte-integrations/bigquery-destination/src/main/java/io/airbyte/integrations/destination/bigquery/DestinationTemplate.java
rm -r airbyte-integrations/java-template-destination/src/main/java/io/airbyte/integrations/destination/template
mv airbyte-integrations/bigquery-destination/src/main/java/io/airbyte/integrations/destination/template/DestinationTemplate.java airbyte-integrations/bigquery-destination/src/main/java/io/airbyte/integrations/destination/bigquery/DestinationTemplate.java
rm -r airbyte-integrations/bigquery-destination/src/main/java/io/airbyte/integrations/destination/template
```
1. Rename the template class to an appropriate name for your integration.
1. e.g. `DestinationTemplate.java` to `BigQueryDestination.java`.
Expand Down
Loading