🐞 Snowflake destination: use pooled connections (#10342)
* Use data source conn supplier for snowflake database
* Format code
* Reuse the same database in integration tests
* Close query stream
* Refactor snowflake staging sql operations
* Close result set
* Add annotations
* Bump version
* Bump version in seed
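In short: the connector previously opened a fresh JDBC connection per operation through `DriverManager`; it now routes everything through a pooled `javax.sql.DataSource` (an Apache Commons DBCP2 `BasicDataSource`) that is closed when the sync ends. A minimal, self-contained sketch of that pattern, assuming DBCP2 and the Snowflake JDBC driver are on the classpath; the URL and credentials are placeholders, not values from this PR:

```java
import java.sql.Connection;
import java.sql.SQLException;
import org.apache.commons.dbcp2.BasicDataSource;

public final class PooledConnectionSketch {

  public static void main(final String[] args) throws SQLException {
    // One pool per destination instance, instead of DriverManager.getConnection per query.
    final BasicDataSource pool = new BasicDataSource();
    pool.setDriverClassName("net.snowflake.client.jdbc.SnowflakeDriver");
    pool.setUrl("jdbc:snowflake://myaccount.snowflakecomputing.com/?"); // placeholder account
    pool.setUsername("placeholder_user");
    pool.setPassword("placeholder_password");
    pool.setInitialSize(0); // open connections lazily
    pool.setMaxTotal(5);    // cap on physical connections, matching the PR's setting

    try (final Connection connection = pool.getConnection()) {
      // close() here returns the connection to the pool instead of dropping it.
      connection.createStatement().execute("SELECT 1;");
    }

    // Closing the pool itself releases the underlying physical connections.
    pool.close();
  }

}
```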
@@ -185,7 +185,7 @@
 - name: Snowflake
   destinationDefinitionId: 424892c4-daac-4491-b35d-c6688ba547ba
   dockerRepository: airbyte/destination-snowflake
-  dockerImageTag: 0.4.11
+  dockerImageTag: 0.4.12
   documentationUrl: https://docs.airbyte.io/integrations/destinations/snowflake
   icon: snowflake.svg
 - name: MariaDB ColumnStore
@@ -3817,7 +3817,7 @@
     supported_destination_sync_modes:
       - "overwrite"
       - "append"
-- dockerImage: "airbyte/destination-snowflake:0.4.11"
+- dockerImage: "airbyte/destination-snowflake:0.4.12"
   spec:
     documentationUrl: "https://docs.airbyte.io/integrations/destinations/snowflake"
     connectionSpecification:
@@ -14,6 +14,7 @@ import io.airbyte.db.jdbc.JdbcUtils;
 import io.airbyte.db.jdbc.StreamingJdbcDatabase;
 import io.airbyte.db.mongodb.MongoDatabase;
 import java.io.IOException;
+import java.util.Map;
 import java.util.Optional;
 import java.util.function.Function;
 import lombok.val;
@@ -198,6 +199,10 @@ public class Databases {
         Optional.empty());
   }
 
+  /**
+   * Prefer to use the method that takes in the connection properties as a map.
+   */
+  @Deprecated
   private static BasicDataSource createBasicDataSource(final String username,
                                                        final String password,
                                                        final String jdbcConnectionString,
@@ -214,6 +219,22 @@ public class Databases {
     return connectionPool;
   }
 
+  public static BasicDataSource createBasicDataSource(final String username,
+                                                      final String password,
+                                                      final String jdbcConnectionString,
+                                                      final String driverClassName,
+                                                      final Map<String, String> connectionProperties) {
+    final BasicDataSource connectionPool = new BasicDataSource();
+    connectionPool.setDriverClassName(driverClassName);
+    connectionPool.setUsername(username);
+    connectionPool.setPassword(password);
+    connectionPool.setInitialSize(0);
+    connectionPool.setMaxTotal(5);
+    connectionPool.setUrl(jdbcConnectionString);
+    connectionProperties.forEach(connectionPool::addConnectionProperty);
+    return connectionPool;
+  }
+
   public static BigQueryDatabase createBigQueryDatabase(final String projectId, final String jsonCreds) {
     return new BigQueryDatabase(projectId, jsonCreds);
   }
@@ -0,0 +1,14 @@
+/*
+ * Copyright (c) 2021 Airbyte, Inc., all rights reserved.
+ */
+
+package io.airbyte.db.jdbc;
+
+import java.sql.Connection;
+import java.sql.SQLException;
+
+public interface CloseableConnectionSupplier extends AutoCloseable {
+
+  Connection getConnection() throws SQLException;
+
+}
@@ -0,0 +1,38 @@
+/*
+ * Copyright (c) 2021 Airbyte, Inc., all rights reserved.
+ */
+
+package io.airbyte.db.jdbc;
+
+import java.io.Closeable;
+import java.sql.Connection;
+import java.sql.SQLException;
+import javax.sql.DataSource;
+
+public class DataSourceConnectionSupplier implements CloseableConnectionSupplier {
+
+  private final DataSource dataSource;
+
+  public DataSourceConnectionSupplier(final DataSource dataSource) {
+    this.dataSource = dataSource;
+  }
+
+  @Override
+  public Connection getConnection() throws SQLException {
+    return dataSource.getConnection();
+  }
+
+  @Override
+  public void close() throws Exception {
+    // Just a safety in case we are using a datasource implementation that requires closing.
+    // BasicDataSource from apache does since it also provides a pooling mechanism to reuse connections.
+    if (dataSource instanceof AutoCloseable) {
+      ((AutoCloseable) dataSource).close();
+    }
+    if (dataSource instanceof Closeable) {
+      ((Closeable) dataSource).close();
+    }
+  }
+
+}
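The two new files above split the connection-supplier seam out of `DefaultJdbcDatabase`. The value of the seam is that the database wrapper depends only on "something that hands out a `Connection` and can be closed"; a pooled `DataSource` and a fresh-connection-per-call supplier are interchangeable behind it. A compile-ready sketch (the interface mirrors the new file above; the `DriverManager`-backed variant is hypothetical, shown only for contrast with the pooled one):

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;

interface CloseableConnectionSupplier extends AutoCloseable {

  Connection getConnection() throws SQLException;

}

// Hypothetical non-pooled counterpart: a fresh physical connection on every call,
// i.e. exactly the per-query overhead the pooled DataSourceConnectionSupplier avoids.
final class DriverManagerConnectionSupplier implements CloseableConnectionSupplier {

  private final String jdbcUrl;

  DriverManagerConnectionSupplier(final String jdbcUrl) {
    this.jdbcUrl = jdbcUrl;
  }

  @Override
  public Connection getConnection() throws SQLException {
    return DriverManager.getConnection(jdbcUrl);
  }

  @Override
  public void close() {
    // No shared resource to release; each Connection is closed by its caller.
  }

}
```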
@@ -4,10 +4,10 @@
 
 package io.airbyte.db.jdbc;
 
+import com.google.errorprone.annotations.MustBeClosed;
 import io.airbyte.commons.functional.CheckedConsumer;
 import io.airbyte.commons.functional.CheckedFunction;
 import io.airbyte.db.JdbcCompatibleSourceOperations;
 import java.io.Closeable;
 import java.sql.Connection;
 import java.sql.DatabaseMetaData;
 import java.sql.PreparedStatement;
@@ -42,11 +42,6 @@ public class DefaultJdbcDatabase extends JdbcDatabase {
     this.connectionSupplier = connectionSupplier;
   }
 
-  public DefaultJdbcDatabase(final CloseableConnectionSupplier connectionSupplier) {
-    super(JdbcUtils.getDefaultSourceOperations());
-    this.connectionSupplier = connectionSupplier;
-  }
-
   @Override
   public void execute(final CheckedConsumer<Connection, SQLException> query) throws SQLException {
     try (final Connection connection = connectionSupplier.getConnection()) {
@@ -58,12 +53,14 @@ public class DefaultJdbcDatabase extends JdbcDatabase {
   public <T> List<T> bufferedResultSetQuery(final CheckedFunction<Connection, ResultSet, SQLException> query,
                                             final CheckedFunction<ResultSet, T, SQLException> recordTransform)
       throws SQLException {
-    try (final Connection connection = connectionSupplier.getConnection()) {
-      return toStream(query.apply(connection), recordTransform).collect(Collectors.toList());
+    try (final Connection connection = connectionSupplier.getConnection();
+        final Stream<T> results = toStream(query.apply(connection), recordTransform)) {
+      return results.collect(Collectors.toList());
     }
   }
 
   @Override
+  @MustBeClosed
   public <T> Stream<T> resultSetQuery(final CheckedFunction<Connection, ResultSet, SQLException> query,
                                       final CheckedFunction<ResultSet, T, SQLException> recordTransform)
       throws SQLException {
@@ -101,6 +98,7 @@ public class DefaultJdbcDatabase extends JdbcDatabase {
    * @throws SQLException SQL related exceptions.
    */
   @Override
+  @MustBeClosed
   public <T> Stream<T> query(final CheckedFunction<Connection, PreparedStatement, SQLException> statementCreator,
                              final CheckedFunction<ResultSet, T, SQLException> recordTransform)
       throws SQLException {
@@ -121,38 +119,4 @@ public class DefaultJdbcDatabase extends JdbcDatabase {
     connectionSupplier.close();
   }
 
-  public interface CloseableConnectionSupplier extends AutoCloseable {
-
-    Connection getConnection() throws SQLException;
-
-  }
-
-  public static final class DataSourceConnectionSupplier implements CloseableConnectionSupplier {
-
-    private final DataSource dataSource;
-
-    public DataSourceConnectionSupplier(final DataSource dataSource) {
-      this.dataSource = dataSource;
-    }
-
-    @Override
-    public Connection getConnection() throws SQLException {
-      return dataSource.getConnection();
-    }
-
-    @Override
-    public void close() throws Exception {
-      // Just a safety in case we are using a datasource implementation that requires closing.
-      // BasicDataSource from apache does since it also provides a pooling mechanism to reuse connections.
-      if (dataSource instanceof AutoCloseable) {
-        ((AutoCloseable) dataSource).close();
-      }
-      if (dataSource instanceof Closeable) {
-        ((Closeable) dataSource).close();
-      }
-    }
-
-  }
-
 }
@@ -5,6 +5,7 @@
 package io.airbyte.db.jdbc;
 
 import com.fasterxml.jackson.databind.JsonNode;
+import com.google.errorprone.annotations.MustBeClosed;
 import io.airbyte.commons.functional.CheckedConsumer;
 import io.airbyte.commons.functional.CheckedFunction;
 import io.airbyte.db.JdbcCompatibleSourceOperations;
@@ -65,13 +66,15 @@ public abstract class JdbcDatabase extends SqlDatabase {
    * @param <T> type that each record will be mapped to
    * @return stream of records that the result set is mapped to.
    */
-  public static <T> Stream<T> toStream(final ResultSet resultSet, final CheckedFunction<ResultSet, T, SQLException> mapper) {
+  @MustBeClosed
+  protected static <T> Stream<T> toStream(final ResultSet resultSet, final CheckedFunction<ResultSet, T, SQLException> mapper) {
     return StreamSupport.stream(new Spliterators.AbstractSpliterator<>(Long.MAX_VALUE, Spliterator.ORDERED) {
 
       @Override
       public boolean tryAdvance(final Consumer<? super T> action) {
         try {
           if (!resultSet.next()) {
+            resultSet.close();
             return false;
           }
           action.accept(mapper.apply(resultSet));
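`@MustBeClosed` comes from Error Prone and turns "caller forgot to close the stream" into a compile-time warning; together with the `resultSet.close()` added in `tryAdvance`, a fully drained stream now releases its `ResultSet` automatically, and an abandoned one is caught by the annotation. A hedged sketch of the intended calling convention (the helper class and table name are illustrative, not from the PR):

```java
import com.fasterxml.jackson.databind.JsonNode;
import io.airbyte.db.jdbc.JdbcDatabase;
import java.sql.SQLException;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

final class MustBeClosedUsageSketch {

  // `database` would come from e.g. SnowflakeDatabase.getDatabase(config).
  static List<JsonNode> readAll(final JdbcDatabase database) throws SQLException {
    // try-with-resources closes the ResultSet (and frees the pooled connection)
    // even if collect() throws or the stream is only partially consumed.
    try (final Stream<JsonNode> records = database.query("SELECT * FROM id_and_name;")) {
      return records.collect(Collectors.toList());
    }
  }

}
```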
@@ -116,6 +119,7 @@ public abstract class JdbcDatabase extends SqlDatabase {
    * @return Result of the query mapped to a stream.
    * @throws SQLException SQL related exceptions.
    */
+  @MustBeClosed
   public abstract <T> Stream<T> resultSetQuery(CheckedFunction<Connection, ResultSet, SQLException> query,
                                                CheckedFunction<ResultSet, T, SQLException> recordTransform)
       throws SQLException;
@@ -135,6 +139,7 @@ public abstract class JdbcDatabase extends SqlDatabase {
    * @return Result of the query mapped to a stream.void execute(String sql)
    * @throws SQLException SQL related exceptions.
    */
+  @MustBeClosed
   public abstract <T> Stream<T> query(CheckedFunction<Connection, PreparedStatement, SQLException> statementCreator,
                                       CheckedFunction<ResultSet, T, SQLException> recordTransform)
       throws SQLException;
@@ -154,6 +159,7 @@ public abstract class JdbcDatabase extends SqlDatabase {
     }
   }
 
+  @MustBeClosed
   @Override
   public Stream<JsonNode> query(final String sql, final String... params) throws SQLException {
     return query(connection -> {
@@ -4,6 +4,7 @@
 
 package io.airbyte.db.jdbc;
 
+import com.google.errorprone.annotations.MustBeClosed;
 import io.airbyte.commons.functional.CheckedConsumer;
 import io.airbyte.commons.functional.CheckedFunction;
 import io.airbyte.db.JdbcCompatibleSourceOperations;
@@ -61,6 +62,7 @@ public class StreamingJdbcDatabase extends JdbcDatabase {
   }
 
   @Override
+  @MustBeClosed
   public <T> Stream<T> resultSetQuery(final CheckedFunction<Connection, ResultSet, SQLException> query,
                                       final CheckedFunction<ResultSet, T, SQLException> recordTransform)
       throws SQLException {
@@ -84,6 +86,7 @@ public class StreamingJdbcDatabase extends JdbcDatabase {
    * @throws SQLException SQL related exceptions.
    */
   @Override
+  @MustBeClosed
   public <T> Stream<T> query(final CheckedFunction<Connection, PreparedStatement, SQLException> statementCreator,
                              final CheckedFunction<ResultSet, T, SQLException> recordTransform)
       throws SQLException {
@@ -19,6 +19,7 @@ import java.util.List;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.AfterEach;
 import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
@@ -34,31 +35,33 @@ public class TestDefaultJdbcDatabase {
 
   private static PostgreSQLContainer<?> PSQL_DB;
 
-  private JsonNode config;
+  private JdbcDatabase database;
   private final JdbcSourceOperations sourceOperations = JdbcUtils.getDefaultSourceOperations();
 
   @BeforeAll
   static void init() {
     PSQL_DB = new PostgreSQLContainer<>("postgres:13-alpine");
     PSQL_DB.start();
-
   }
 
   @BeforeEach
   void setup() throws Exception {
     final String dbName = Strings.addRandomSuffix("db", "_", 10);
-
-    config = getConfig(PSQL_DB, dbName);
-
+    final JsonNode config = getConfig(PSQL_DB, dbName);
     final String initScriptName = "init_" + dbName.concat(".sql");
     final String tmpFilePath = IOs.writeFileToRandomTmpDir(initScriptName, "CREATE DATABASE " + dbName + ";");
     PostgreSQLContainerHelper.runSqlScript(MountableFile.forHostPath(tmpFilePath), PSQL_DB);
 
-    final JdbcDatabase database = getDatabaseFromConfig(config);
+    database = getDatabaseFromConfig(config);
     database.execute(connection -> {
       connection.createStatement().execute("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200));");
       connection.createStatement().execute("INSERT INTO id_and_name (id, name) VALUES (1,'picard'), (2, 'crusher'), (3, 'vash');");
     });
   }
 
+  @AfterEach
+  void close() throws Exception {
+    database.close();
+  }
@@ -69,7 +72,7 @@ public class TestDefaultJdbcDatabase {
 
   @Test
   void testBufferedResultQuery() throws SQLException {
-    final List<JsonNode> actual = getDatabaseFromConfig(config).bufferedResultSetQuery(
+    final List<JsonNode> actual = database.bufferedResultSetQuery(
         connection -> connection.createStatement().executeQuery("SELECT * FROM id_and_name;"),
         sourceOperations::rowToJson);
@@ -78,7 +81,7 @@ public class TestDefaultJdbcDatabase {
 
   @Test
   void testResultSetQuery() throws SQLException {
-    final Stream<JsonNode> actual = getDatabaseFromConfig(config).resultSetQuery(
+    final Stream<JsonNode> actual = database.resultSetQuery(
         connection -> connection.createStatement().executeQuery("SELECT * FROM id_and_name;"),
         sourceOperations::rowToJson);
     final List<JsonNode> actualAsList = actual.collect(Collectors.toList());
@@ -89,7 +92,7 @@ public class TestDefaultJdbcDatabase {
 
   @Test
   void testQuery() throws SQLException {
-    final Stream<JsonNode> actual = getDatabaseFromConfig(config).query(
+    final Stream<JsonNode> actual = database.query(
        connection -> connection.prepareStatement("SELECT * FROM id_and_name;"),
        sourceOperations::rowToJson);
@@ -45,15 +45,11 @@ public abstract class JdbcSqlOperations implements SqlOperations {
   public void createSchemaIfNotExists(final JdbcDatabase database, final String schemaName) throws Exception {
     if (!isSchemaExists(database, schemaName)) {
       AirbyteSentry.executeWithTracing("CreateSchema",
-          () -> database.execute(createSchemaQuery(schemaName)),
+          () -> database.execute(String.format("CREATE SCHEMA IF NOT EXISTS %s;", schemaName)),
           Map.of("schema", schemaName));
     }
   }
 
-  private String createSchemaQuery(final String schemaName) {
-    return String.format("CREATE SCHEMA IF NOT EXISTS %s;\n", schemaName);
-  }
-
   @Override
   public void createTableIfNotExists(final JdbcDatabase database, final String schemaName, final String tableName) throws SQLException {
     AirbyteSentry.executeWithTracing("CreateTableIfNotExists",
@@ -18,8 +18,8 @@ COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar
 
 RUN tar xf ${APPLICATION}.tar --strip-components=1
 
-ENV APPLICATION_VERSION 0.4.11
+ENV APPLICATION_VERSION 0.4.12
 ENV ENABLE_SENTRY true
 
-LABEL io.airbyte.version=0.4.11
+LABEL io.airbyte.version=0.4.12
 LABEL io.airbyte.name=airbyte/destination-snowflake
@@ -5,16 +5,15 @@
 package io.airbyte.integrations.destination.snowflake;
 
 import com.fasterxml.jackson.databind.JsonNode;
+import com.google.common.collect.ImmutableMap;
+import io.airbyte.db.Databases;
+import io.airbyte.db.jdbc.DataSourceConnectionSupplier;
 import io.airbyte.db.jdbc.DefaultJdbcDatabase;
-import io.airbyte.db.jdbc.DefaultJdbcDatabase.CloseableConnectionSupplier;
 import io.airbyte.db.jdbc.JdbcDatabase;
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.SQLException;
+import io.airbyte.db.jdbc.JdbcUtils;
 import java.time.Duration;
-import java.util.Properties;
+import java.util.Map;
+import javax.sql.DataSource;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /**
  * SnowflakeDatabase contains helpers to create connections to and run queries on Snowflake.
@@ -24,66 +23,41 @@ public class SnowflakeDatabase {
 
   private static final Duration NETWORK_TIMEOUT = Duration.ofMinutes(1);
   private static final Duration QUERY_TIMEOUT = Duration.ofHours(3);
   private static final SnowflakeSQLNameTransformer nameTransformer = new SnowflakeSQLNameTransformer();
   private static final Logger LOGGER = LoggerFactory.getLogger(SnowflakeDatabase.class);
+  private static final String DRIVER_CLASS_NAME = "net.snowflake.client.jdbc.SnowflakeDriver";
 
-  public static Connection getConnection(final JsonNode config) throws SQLException {
-    final StringBuilder jdbcUrl = new StringBuilder(String.format("jdbc:snowflake://%s/?",
-        config.get("host").asText()));
-
-    final Properties properties = new Properties();
-
-    properties.put("user", config.get("username").asText());
-    properties.put("password", config.get("password").asText());
-    properties.put("warehouse", config.get("warehouse").asText());
-    properties.put("database", config.get("database").asText());
-    properties.put("role", config.get("role").asText());
-    properties.put("schema", nameTransformer.getIdentifier(config.get("schema").asText()));
-
-    properties.put("networkTimeout", Math.toIntExact(NETWORK_TIMEOUT.toSeconds()));
-    properties.put("queryTimeout", Math.toIntExact(QUERY_TIMEOUT.toSeconds()));
-    // allows queries to contain any number of statements.
-    properties.put("MULTI_STATEMENT_COUNT", 0);
-
-    // https://docs.snowflake.com/en/user-guide/jdbc-parameters.html#application
-    // identify airbyte traffic to snowflake to enable partnership & optimization opportunities
-    properties.put("application", "airbyte");
-    // Needed for JDK17 - see
-    // https://stackoverflow.com/questions/67409650/snowflake-jdbc-driver-internal-error-fail-to-retrieve-row-count-for-first-arrow
-    properties.put("JDBC_QUERY_RESULT_FORMAT", "JSON");
+  private static DataSource createDataSource(final JsonNode config) {
+    final String host = config.get("host").asText();
+    final String username = config.get("username").asText();
+    final String password = config.get("password").asText();
 
+    // https://docs.snowflake.com/en/user-guide/jdbc-configure.html#jdbc-driver-connection-string
+    final StringBuilder jdbcUrl = new StringBuilder(String.format("jdbc:snowflake://%s/?", host));
     if (config.has("jdbc_url_params")) {
       jdbcUrl.append(config.get("jdbc_url_params").asText());
     }
 
     LOGGER.info(jdbcUrl.toString());
-    return DriverManager.getConnection(jdbcUrl.toString(), properties);
+    final Map<String, String> properties = new ImmutableMap.Builder<String, String>()
+        .put("warehouse", config.get("warehouse").asText())
+        .put("database", config.get("database").asText())
+        .put("role", config.get("role").asText())
+        .put("schema", nameTransformer.getIdentifier(config.get("schema").asText()))
+        .put("networkTimeout", String.valueOf(Math.toIntExact(NETWORK_TIMEOUT.toSeconds())))
+        .put("queryTimeout", String.valueOf(Math.toIntExact(QUERY_TIMEOUT.toSeconds())))
+        // allows queries to contain any number of statements
+        .put("MULTI_STATEMENT_COUNT", "0")
+        // https://docs.snowflake.com/en/user-guide/jdbc-parameters.html#application
+        // identify airbyte traffic to snowflake to enable partnership & optimization opportunities
+        .put("application", "airbyte")
+        // Needed for JDK17 - see
+        // https://stackoverflow.com/questions/67409650/snowflake-jdbc-driver-internal-error-fail-to-retrieve-row-count-for-first-arrow
+        .put("JDBC_QUERY_RESULT_FORMAT", "JSON")
+        .build();
 
+    return Databases.createBasicDataSource(username, password, jdbcUrl.toString(), DRIVER_CLASS_NAME, properties);
   }
 
   public static JdbcDatabase getDatabase(final JsonNode config) {
-    return new DefaultJdbcDatabase(new SnowflakeConnectionSupplier(config));
+    final DataSource dataSource = createDataSource(config);
+    return new DefaultJdbcDatabase(new DataSourceConnectionSupplier(dataSource), JdbcUtils.getDefaultSourceOperations());
   }
 
-  private static final class SnowflakeConnectionSupplier implements CloseableConnectionSupplier {
-
-    private final JsonNode config;
-
-    public SnowflakeConnectionSupplier(final JsonNode config) {
-      this.config = config;
-    }
-
-    @Override
-    public Connection getConnection() throws SQLException {
-      return SnowflakeDatabase.getConnection(config);
-    }
-
-    @Override
-    public void close() {
-      // no op.
-    }
-
-  }
-
 }
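With this rewrite, `getDatabase` returns a `JdbcDatabase` that owns the connection pool, so the caller is now responsible for closing it: `DefaultJdbcDatabase.close()` closes the `DataSourceConnectionSupplier`, which in turn closes the `BasicDataSource` and its physical connections. A hedged usage sketch mirroring the integration-test change further down (the class and schema statement are placeholders):

```java
import com.fasterxml.jackson.databind.JsonNode;
import io.airbyte.db.jdbc.JdbcDatabase;
import io.airbyte.integrations.destination.snowflake.SnowflakeDatabase;

final class SnowflakeLifecycleSketch {

  static void runOnce(final JsonNode config) throws Exception {
    // try-with-resources: close() cascades down to the pooled BasicDataSource,
    // so no physical connection outlives the sync.
    try (final JdbcDatabase database = SnowflakeDatabase.getDatabase(config)) {
      database.execute("CREATE SCHEMA IF NOT EXISTS placeholder_schema;");
    }
  }

}
```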
@@ -4,6 +4,7 @@
 
 package io.airbyte.integrations.destination.snowflake;
 
+import com.fasterxml.jackson.databind.JsonNode;
 import io.airbyte.db.jdbc.JdbcDatabase;
 import io.airbyte.integrations.base.JavaBaseConstants;
 import io.airbyte.integrations.destination.jdbc.JdbcSqlOperations;
@@ -12,6 +13,7 @@ import io.airbyte.integrations.destination.jdbc.SqlOperationsUtils;
 import io.airbyte.protocol.models.AirbyteRecordMessage;
 import java.sql.SQLException;
 import java.util.List;
+import java.util.stream.Stream;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -20,19 +22,21 @@ class SnowflakeSqlOperations extends JdbcSqlOperations implements SqlOperations
 
   private static final Logger LOGGER = LoggerFactory.getLogger(SnowflakeSqlOperations.class);
 
   @Override
-  public void createTableIfNotExists(final JdbcDatabase database, final String schemaName, final String tableName) throws SQLException {
-    final String createTableQuery = String.format(
+  public String createTableQuery(final JdbcDatabase database, final String schemaName, final String tableName) {
+    return String.format(
         "CREATE TABLE IF NOT EXISTS %s.%s ( \n"
             + "%s VARCHAR PRIMARY KEY,\n"
             + "%s VARIANT,\n"
             + "%s TIMESTAMP WITH TIME ZONE DEFAULT current_timestamp()\n"
             + ") data_retention_time_in_days = 0;",
         schemaName, tableName, JavaBaseConstants.COLUMN_NAME_AB_ID, JavaBaseConstants.COLUMN_NAME_DATA, JavaBaseConstants.COLUMN_NAME_EMITTED_AT);
-    database.execute(createTableQuery);
   }
 
   @Override
   public boolean isSchemaExists(JdbcDatabase database, String outputSchema) throws Exception {
-    return database.query(SHOW_SCHEMAS).map(schemas -> schemas.get(NAME).asText()).anyMatch(outputSchema::equalsIgnoreCase);
+    try (final Stream<JsonNode> results = database.query(SHOW_SCHEMAS)) {
+      return results.map(schemas -> schemas.get(NAME).asText()).anyMatch(outputSchema::equalsIgnoreCase);
+    }
   }
 
   @Override
@@ -5,9 +5,7 @@
 package io.airbyte.integrations.destination.snowflake;
 
 import io.airbyte.db.jdbc.JdbcDatabase;
-import io.airbyte.integrations.base.JavaBaseConstants;
 import io.airbyte.integrations.base.sentry.AirbyteSentry;
-import io.airbyte.integrations.destination.jdbc.JdbcSqlOperations;
 import io.airbyte.integrations.destination.jdbc.SqlOperations;
 import io.airbyte.protocol.models.AirbyteRecordMessage;
 import java.io.File;
@@ -19,15 +17,15 @@ import java.util.UUID;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-public class SnowflakeStagingSqlOperations extends JdbcSqlOperations implements SqlOperations {
+public class SnowflakeStagingSqlOperations extends SnowflakeSqlOperations implements SqlOperations {
 
   private static final Logger LOGGER = LoggerFactory.getLogger(SnowflakeSqlOperations.class);
 
   @Override
-  protected void insertRecordsInternal(final JdbcDatabase database,
-                                       final List<AirbyteRecordMessage> records,
-                                       final String schemaName,
-                                       final String stage) {
+  public void insertRecordsInternal(final JdbcDatabase database,
+                                    final List<AirbyteRecordMessage> records,
+                                    final String schemaName,
+                                    final String stage) {
     LOGGER.info("actual size of batch for staging: {}", records.size());
 
     if (records.isEmpty()) {
@@ -70,33 +68,10 @@ public class SnowflakeStagingSqlOperations extends JdbcSqlOperations implements
         Map.of("stage", stageName));
   }
 
-  @Override
-  public void createTableIfNotExists(final JdbcDatabase database, final String schemaName, final String tableName) throws SQLException {
-    AirbyteSentry.executeWithTracing("CreateTableIfNotExists",
-        () -> database.execute(createTableQuery(database, schemaName, tableName)),
-        Map.of("schema", schemaName, "table", tableName));
-  }
-
-  @Override
-  public String createTableQuery(final JdbcDatabase database, final String schemaName, final String tableName) {
-    return String.format(
-        "CREATE TABLE IF NOT EXISTS %s.%s ( \n"
-            + "%s VARCHAR PRIMARY KEY,\n"
-            + "%s VARIANT,\n"
-            + "%s TIMESTAMP WITH TIME ZONE DEFAULT current_timestamp()\n"
-            + ") data_retention_time_in_days = 0;",
-        schemaName, tableName, JavaBaseConstants.COLUMN_NAME_AB_ID, JavaBaseConstants.COLUMN_NAME_DATA, JavaBaseConstants.COLUMN_NAME_EMITTED_AT);
-  }
-
   public void cleanUpStage(final JdbcDatabase database, final String path) throws SQLException {
     AirbyteSentry.executeWithTracing("CleanStage",
         () -> database.execute(String.format("REMOVE @%s;", path)),
         Map.of("path", path));
   }
 
-  @Override
-  public boolean isSchemaExists(final JdbcDatabase database, final String outputSchema) throws Exception {
-    return database.query(SHOW_SCHEMAS).map(schemas -> schemas.get(NAME).asText()).anyMatch(outputSchema::equalsIgnoreCase);
-  }
-
 }
@@ -4,21 +4,20 @@
 
 package io.airbyte.integrations.destination.snowflake;
 
-import static org.junit.jupiter.api.Assertions.*;
+import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertThrows;
 
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import io.airbyte.commons.json.Jsons;
 import io.airbyte.commons.string.Strings;
+import io.airbyte.db.jdbc.JdbcDatabase;
 import io.airbyte.protocol.models.AirbyteConnectionStatus;
 import java.io.IOException;
 import java.nio.file.Files;
 import java.nio.file.Paths;
-import java.sql.Connection;
-import java.sql.DriverManager;
 import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.Properties;
 import org.junit.jupiter.api.Test;
 
 class SnowflakeDestinationIntegrationTest {
@@ -37,59 +36,32 @@ class SnowflakeDestinationIntegrationTest {
   }
 
   @Test
-  public void testInvalidSchemaName() {
-    assertDoesNotThrow(this::syncWithNamingResolver);
-    assertThrows(SQLException.class, this::syncWithoutNamingResolver);
-  }
-
-  public void syncWithNamingResolver() throws IOException, SQLException {
+  public void testInvalidSchemaName() throws Exception {
     final JsonNode config = getConfig();
-    final String createSchemaQuery = String.format("CREATE SCHEMA %s", namingResolver.getIdentifier(config.get("schema").asText()));
-    Connection connection = null;
-    try {
-      connection = SnowflakeDatabase.getConnection(config);
-      connection.createStatement().execute(createSchemaQuery);
-    } finally {
-      if (connection != null) {
-        final String dropSchemaQuery = String.format("DROP SCHEMA IF EXISTS %s", namingResolver.getIdentifier(config.get("schema").asText()));
-        connection.createStatement().execute(dropSchemaQuery);
-        connection.close();
-      }
-    }
-  }
+    final String schema = config.get("schema").asText();
+    try (final JdbcDatabase database = SnowflakeDatabase.getDatabase(config)) {
+      assertDoesNotThrow(() -> syncWithNamingResolver(database, schema));
+      assertThrows(SQLException.class, () -> syncWithoutNamingResolver(database, schema));
+    }
+  }
 
-  private void syncWithoutNamingResolver() throws SQLException, IOException {
-    JsonNode config = getConfig();
-    String schemaName = config.get("schema").asText();
-    final String createSchemaQuery = String.format("CREATE SCHEMA %s", schemaName);
-
-    try (Connection connection = getConnection(config, false)) {
-      Statement statement = connection.createStatement();
-      statement.execute(createSchemaQuery);
-    }
-  }
+  public void syncWithNamingResolver(final JdbcDatabase database, final String schema) throws SQLException {
+    final String normalizedSchemaName = namingResolver.getIdentifier(schema);
+    try {
+      database.execute(String.format("CREATE SCHEMA %s", normalizedSchemaName));
+    } finally {
+      database.execute(String.format("DROP SCHEMA IF EXISTS %s", normalizedSchemaName));
+    }
+  }
 
-  public Connection getConnection(JsonNode config, boolean useNameTransformer) throws SQLException {
-    final String connectUrl = String.format("jdbc:snowflake://%s", config.get("host").asText());
-
-    final Properties properties = new Properties();
-
-    properties.put("user", config.get("username").asText());
-    properties.put("password", config.get("password").asText());
-    properties.put("warehouse", config.get("warehouse").asText());
-    properties.put("database", config.get("database").asText());
-    properties.put("role", config.get("role").asText());
-    properties.put("schema", useNameTransformer
-        ? namingResolver.getIdentifier(config.get("schema").asText())
-        : config.get("schema").asText());
-
-    properties.put("JDBC_QUERY_RESULT_FORMAT", "JSON");
-
-    return DriverManager.getConnection(connectUrl, properties);
-  }
+  private void syncWithoutNamingResolver(final JdbcDatabase database, final String schema) throws SQLException {
+    try {
+      database.execute(String.format("CREATE SCHEMA %s", schema));
+    } finally {
+      database.execute(String.format("DROP SCHEMA IF EXISTS %s", schema));
    }
+  }
 
   private JsonNode getConfig() throws IOException {
     final JsonNode config = Jsons.deserialize(new String(Files.readAllBytes(Paths.get("secrets/insert_config.json"))));
     final String schemaName = "schemaName with whitespace " + Strings.addRandomSuffix("integration_test", "_", 5);
@@ -14,6 +14,7 @@ import io.airbyte.commons.io.IOs;
 import io.airbyte.commons.json.Jsons;
 import io.airbyte.commons.resources.MoreResources;
 import io.airbyte.commons.string.Strings;
+import io.airbyte.db.jdbc.JdbcDatabase;
 import io.airbyte.db.jdbc.JdbcUtils;
 import io.airbyte.integrations.base.JavaBaseConstants;
 import io.airbyte.integrations.destination.ExtendedNameTransformer;
@@ -37,10 +38,10 @@ import org.junit.jupiter.params.provider.ArgumentsSource;
 
 public class SnowflakeInsertDestinationAcceptanceTest extends DestinationAcceptanceTest {
 
   // config from which to create / delete schemas.
   private JsonNode baseConfig;
   // config which refers to the schema that the test is being run in.
   // this config is based on the static config, and it contains a random
   // schema name that is different for each test run
   private JsonNode config;
+  private JdbcDatabase database;
   private final ExtendedNameTransformer namingResolver = new ExtendedNameTransformer();
 
   @Override
@@ -122,7 +123,7 @@ public class SnowflakeInsertDestinationAcceptanceTest extends DestinationAccepta
   }
 
   private List<JsonNode> retrieveRecordsFromTable(final String tableName, final String schema) throws SQLException {
-    return SnowflakeDatabase.getDatabase(getConfig()).bufferedResultSetQuery(
+    return database.bufferedResultSetQuery(
         connection -> {
           final ResultSet tableInfo = connection.createStatement()
               .executeQuery(String.format("SHOW TABLES LIKE '%s' IN SCHEMA %s;", tableName, schema));
@@ -142,18 +143,18 @@ public class SnowflakeInsertDestinationAcceptanceTest extends DestinationAccepta
     final String schemaName = Strings.addRandomSuffix("integration_test", "_", 5);
     final String createSchemaQuery = String.format("CREATE SCHEMA %s", schemaName);
 
     baseConfig = getStaticConfig();
-    SnowflakeDatabase.getDatabase(baseConfig).execute(createSchemaQuery);
-    this.config = Jsons.clone(getStaticConfig());
-    ((ObjectNode) config).put("schema", schemaName);
+    final JsonNode configForSchema = Jsons.clone(baseConfig);
+    ((ObjectNode) configForSchema).put("schema", schemaName);
+    config = configForSchema;
+    database = SnowflakeDatabase.getDatabase(config);
+    database.execute(createSchemaQuery);
   }
 
   @Override
   protected void tearDown(final TestDestinationEnv testEnv) throws Exception {
     final String createSchemaQuery = String.format("DROP SCHEMA IF EXISTS %s", config.get("schema").asText());
-    SnowflakeDatabase.getDatabase(baseConfig).execute(createSchemaQuery);
+    database.execute(createSchemaQuery);
+    database.close();
   }
 
   /**
@@ -217,6 +217,7 @@ Finally, you need to add read/write permissions to your bucket with that email.
 
 | Version | Date       | Pull Request                                               | Subject |
 |:--------|:-----------|:-----------------------------------------------------------|:--------|
+| 0.4.12  | 2022-02-15 | [\#10342](https://github.com/airbytehq/airbyte/pull/10342) | Use connection pool, and fix connection leak. |
 | 0.4.11  | 2022-02-14 | [\#9920](https://github.com/airbytehq/airbyte/pull/9920)   | Updated the size of staging files for S3 staging. Also, added closure of S3 writers to staging files when data has been written to a staging file. |
 | 0.4.10  | 2022-02-14 | [\#10297](https://github.com/airbytehq/airbyte/pull/10297) | Halve the record buffer size to reduce memory consumption. |
 | 0.4.9   | 2022-02-14 | [\#10256](https://github.com/airbytehq/airbyte/pull/10256) | Add `ExitOnOutOfMemoryError` JVM flag. |