
release v0.8.0-alpha (#1339)

Authored by Charles, 2020-12-16 14:01:57 -08:00; committed by GitHub
parent f5f58e2f64
commit 01cdeafdb1
17 changed files with 41 additions and 17 deletions

.env
View File

@@ -1,4 +1,4 @@
-VERSION=0.7.2-alpha
+VERSION=0.8.0-alpha
 DATABASE_USER=docker
 DATABASE_PASSWORD=docker
 DATABASE_DB=airbyte

View File

@@ -2,6 +2,6 @@
"destinationDefinitionId": "22f6c74f-5699-40ff-833c-4a879ea40133",
"name": "BigQuery",
"dockerRepository": "airbyte/destination-bigquery",
"dockerImageTag": "0.1.10",
"dockerImageTag": "0.1.11",
"documentationUrl": "https://hub.docker.com/r/airbyte/integration-singer-bigquery-destination"
}

View File

@@ -11,7 +11,7 @@
 - destinationDefinitionId: 22f6c74f-5699-40ff-833c-4a879ea40133
   name: BigQuery
   dockerRepository: airbyte/destination-bigquery
-  dockerImageTag: 0.1.10
+  dockerImageTag: 0.1.11
   documentationUrl: https://hub.docker.com/r/airbyte/integration-singer-bigquery-destination
 - destinationDefinitionId: 424892c4-daac-4491-b35d-c6688ba547ba
   name: Snowflake

View File

@@ -8,5 +8,5 @@ COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar
 RUN tar xf ${APPLICATION}.tar --strip-components=1
-LABEL io.airbyte.version=0.1.10
+LABEL io.airbyte.version=0.1.11
 LABEL io.airbyte.name=airbyte/destination-bigquery

View File

@@ -28,9 +28,10 @@ import io.airbyte.config.JobConfig;
 import io.airbyte.scheduler.persistence.JobPersistence;
 import java.io.IOException;
 import java.time.Instant;
+import java.util.List;
 import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
 import java.util.function.Supplier;
-import java.util.stream.Stream;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -52,24 +53,35 @@ public class JobRetrier implements Runnable {
   public void run() {
     LOGGER.info("Running job-retrier...");
-    listFailedJobs()
+    final AtomicInteger failedJobs = new AtomicInteger();
+    final AtomicInteger retriedJobs = new AtomicInteger();
+    final List<Job> incompleteJobs = incompleteJobs();
+    incompleteJobs
         .forEach(job -> {
-          LOGGER.info("weeee");
           if (hasReachedMaxAttempt(job)) {
             failJob(job);
+            failedJobs.incrementAndGet();
             return;
           }
           if (shouldRetry(job)) {
+            retriedJobs.incrementAndGet();
             resetJob(job);
           }
         });
-    LOGGER.info("Completed job-retrier...");
+    LOGGER.info("Completed Job-Retrier...");
+    LOGGER.info("Job-Retrier Summary. Incomplete jobs: {}, Jobs set to retry: {}, Jobs set to failed: {}",
+        incompleteJobs.size(),
+        retriedJobs.get(),
+        failedJobs.get());
   }
 
-  private Stream<Job> listFailedJobs() {
+  private List<Job> incompleteJobs() {
     try {
-      return persistence.listJobsWithStatus(JobConfig.ConfigType.SYNC, JobStatus.INCOMPLETE).stream();
+      return persistence.listJobsWithStatus(JobConfig.ConfigType.SYNC, JobStatus.INCOMPLETE);
     } catch (IOException e) {
       throw new RuntimeException("failed to fetch failed jobs", e);
     }
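
The counters introduced above are AtomicInteger rather than plain int because a Java lambda can only capture effectively final locals, so a mutable holder is needed to count inside forEach. A minimal, self-contained sketch of the same pattern (the attempt list, threshold, and all names here are made up for illustration):

import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;

public class CounterSketch {

  public static void main(String[] args) {
    // Stand-in for the incomplete-job list; JobRetrier gets its list from
    // persistence.listJobsWithStatus(...) instead.
    final List<Integer> attempts = List.of(1, 2, 5, 3, 6);
    final int maxAttempts = 4;

    // A plain int local could not be reassigned inside the lambda below,
    // so AtomicInteger serves as a mutable counter.
    final AtomicInteger failed = new AtomicInteger();
    final AtomicInteger retried = new AtomicInteger();

    attempts.forEach(attempt -> {
      if (attempt > maxAttempts) {
        failed.incrementAndGet(); // analogous to failJob(job)
        return;
      }
      retried.incrementAndGet(); // analogous to resetJob(job)
    });

    // Prints: incomplete: 5, retried: 3, failed: 2
    System.out.printf("incomplete: %d, retried: %d, failed: %d%n",
        attempts.size(), retried.get(), failed.get());
  }
}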

View File

@@ -39,6 +39,7 @@ import java.io.IOException;
 import java.time.Instant;
 import java.util.List;
 import java.util.Optional;
+import java.util.concurrent.atomic.AtomicInteger;
 import java.util.function.BiPredicate;
 import java.util.stream.Collectors;
 import org.slf4j.Logger;
@@ -80,14 +81,17 @@ public class JobScheduler implements Runnable {
       scheduleSyncJobs();
-      LOGGER.info("Completed job-scheduler...");
+      LOGGER.info("Completed Job-Scheduler...");
     } catch (Throwable e) {
       LOGGER.error("Job Scheduler Error", e);
     }
   }
 
   private void scheduleSyncJobs() throws IOException {
-    for (StandardSync connection : getAllActiveConnections()) {
+    final AtomicInteger jobsScheduled = new AtomicInteger();
+    final List<StandardSync> activeConnections = getAllActiveConnections();
+    for (StandardSync connection : activeConnections) {
       final Optional<Job> previousJobOptional = jobPersistence.getLastSyncJob(connection.getConnectionId());
       final StandardSyncSchedule standardSyncSchedule = getStandardSyncSchedule(connection);
@@ -95,6 +99,7 @@ public class JobScheduler implements Runnable {
         jobFactory.create(connection.getConnectionId());
       }
     }
+    LOGGER.info("Job-Scheduler Summary. Active connections: {}, Jobs scheduled: {}", activeConnections.size(), jobsScheduled.get());
   }
 
   private StandardSyncSchedule getStandardSyncSchedule(StandardSync connection) {
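
The new summary lines use SLF4J's {} placeholders, which substitute the trailing arguments in order and skip message formatting when the level is disabled. A runnable sketch of the pattern (class name and values are arbitrary; assumes slf4j-api plus a binding such as slf4j-simple on the classpath):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggingSketch {

  private static final Logger LOGGER = LoggerFactory.getLogger(LoggingSketch.class);

  public static void main(String[] args) {
    final int activeConnections = 3;
    final int jobsScheduled = 2;
    // Each {} is replaced by the corresponding argument; no string
    // concatenation happens if INFO is turned off.
    LOGGER.info("Job-Scheduler Summary. Active connections: {}, Jobs scheduled: {}",
        activeConnections, jobsScheduled);
  }
}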

View File

@@ -80,9 +80,10 @@ public class JobSubmitter implements Runnable {
       oldestPendingJob.ifPresent(job -> {
         trackSubmission(job);
         submitJob(job);
+        LOGGER.info("Job-Submitter Summary. Submitted job with scope {}", job.getScope());
       });
-      LOGGER.info("Completed job-submitter...");
+      LOGGER.info("Completed Job-Submitter...");
     } catch (Throwable e) {
       LOGGER.error("Job Submitter Error", e);
     }
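
Judging from this hunk, the submitter hands off at most the oldest pending job per run, guarded by Optional.ifPresent. A self-contained sketch of that guard (the String scope and the hard-coded Optional stand in for the real pending-job lookup):

import java.util.Optional;

public class SubmitSketch {

  public static void main(String[] args) {
    // Stand-in for the oldest-pending-job query; empty once the queue is drained.
    final Optional<String> oldestPendingJob = Optional.of("connection-123");

    // The lambda runs only when a job is present, so an empty queue
    // submits nothing and the loop simply sleeps until the next tick.
    oldestPendingJob.ifPresent(scope ->
        System.out.println("Job-Submitter Summary. Submitted job with scope " + scope));
  }
}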

View File

@@ -25,6 +25,7 @@ Each stream will be output into its own table in BigQuery. Each table will conta
 | Feature | Supported?\(Yes/No\) | Notes |
 | :--- | :--- | :--- |
 | Full Refresh Sync | Yes | |
+| Incremental - Append Sync | Yes | |
 
 ## Getting started

View File

@@ -19,6 +19,7 @@ Each stream will be output into its own file. Each file will contain 3 columns:
 | Feature | Supported |
 | :--- | :--- |
 | Full Refresh Sync | Yes |
+| Incremental - Append Sync | Yes |
 
 #### Performance considerations

View File

@@ -21,6 +21,7 @@ Each stream will be output into its own table in Postgres. Each table will conta
 | Feature | Supported?\(Yes/No\) | Notes |
 | :--- | :--- | :--- |
 | Full Refresh Sync | Yes | |
+| Incremental - Append Sync | Yes | |
 
 ## Getting started

View File

@@ -21,6 +21,7 @@ Each stream will be output into its own raw table in Redshift. Each table will c
 | Feature | Supported?\(Yes/No\) | Notes |
 | :--- | :--- | :--- |
 | Full Refresh Sync | Yes | |
+| Incremental - Append Sync | Yes | |
 
 #### Target Database

View File

@@ -19,6 +19,8 @@ Each stream will be output into its own table in Snowflake. Each table will cont
 | Feature | Supported?\(Yes/No\) | Notes |
 | :--- | :--- | :--- |
 | Full Refresh Sync | Yes | |
+| Incremental - Append Sync | Yes | |
 
 ## Getting started

View File

@@ -31,7 +31,7 @@ If there are more endpoints you'd like Airbyte to support, please [create an iss
 | Feature | Supported? |
 | :--- | :--- |
 | Full Refresh Sync | Yes |
-| Incremental Sync | No |
+| Incremental - Append Sync | Yes |
 | Replicate Incremental Deletes | No |
 | SSL connection | Yes |

View File

@@ -25,7 +25,7 @@ If there are more endpoints you'd like Airbyte to support, please [create an iss
 | Feature | Supported? |
 | :--- | :--- |
 | Full Refresh Sync | Yes |
-| Incremental Sync | No |
+| Incremental - Append Sync | Yes |
 | Replicate Incremental Deletes | No |
 | SSL connection | Yes |

View File

@@ -33,7 +33,7 @@ If you do not see a type in this list, assume that it is coerced into a string.
 | Feature | Supported |
 | :--- | :--- |
 | Full Refresh Sync | Yes |
-| Incremental Sync | No |
+| Incremental - Append Sync | Yes |
 | Replicate Incremental Deletes | No |
 | Logical Replication \(WAL\) | No |
 | SSL Support | Yes |

View File

@@ -52,7 +52,7 @@ Postgres data types are mapped to the following data types when synchronizing da
 | Feature | Supported |
 | :--- | :--- |
 | Full Refresh Sync | Yes |
-| Incremental Sync | No |
+| Incremental - Append Sync | Yes |
 | Replicate Incremental Deletes | No |
 | Logical Replication \(WAL\) | No |
 | SSL Support | Yes |

View File

@@ -46,7 +46,7 @@ This Source is capable of syncing the following core Streams:
 | Feature | Supported?\(Yes/No\) | Notes |
 | :--- | :--- | :--- |
 | Full Refresh Sync | yes | |
-| Incremental Sync | no | |
+| Incremental - Append Sync | Yes | |
 
 ### Performance considerations