Feat: Metadata Service: remove old catalog system (#26013)
* Remove Definition File References from Python files (#25590)
* Remove check_images_exist.sh
* Update definitions.py
* Update build_report.py
* Update tools/bin/ci_integration_workflow_launcher.py
* Update tools/bin/ci_check_dependency.py
* Update tools/bin/scan_all_spec_airbyte_secret.py
* Remove qa engine references
* Revert "Remove check_images_exist.sh"
This reverts commit 7675162789.
* Improve get url function
* Add test
* Remove scan_all_spec_airbyte_secret.py
* Add additional test
* Remove check_images_exist.sh (#25593)
* Remove Definition File References from Java files (LocalDefinitionProvider), shell scripts and docs (#25592)
* Remove CombinedConnectorCatalogGenerator.java
* Update local definition provider
* Update local def test
* Add spec mask downloader
* Make downloader work
* Delete generators and add tests
* Remove the YAML files
* Roughly update docs
* Update shell scripts
* Remove unused
* Add connector metadata file doc
* Apply suggestions from code review
Co-authored-by: Augustin <augustin@airbyte.io>
* Additional PR comments
* Run format tasks
---------
Co-authored-by: Augustin <augustin@airbyte.io>
* Remove unused import
* Bundle registry
* Ignore future updates
* Update registry
* New registry
* Re-add MaskedDataInterceptor
* Automated Change
* Remove icon validation
* Automated Change
* Automated Change
* Source Amazon Ads: get rid of `fail_on_extra_columns: false` in SAT (#25913)
* Source Amazon Ads: small schema fixes
* Source Amazon Ads: update changelog
* Source Amazon Ads: update unittest
* Source Amazon Ads: unittest additional property is boolean
* Source Amazon Ads: bump version
* auto-bump connector version
---------
Co-authored-by: Octavia Squidington III <octavia-squidington-iii@users.noreply.github.com>
* connectors-ci: make spec-cache / metadata bucket and creds not required for pre-release (#26119)
* Automated Change
---------
Co-authored-by: Augustin <augustin@airbyte.io>
Co-authored-by: bnchrch <bnchrch@users.noreply.github.com>
Co-authored-by: Roman Yermilov [GL] <86300758+roman-yermilov-gl@users.noreply.github.com>
Co-authored-by: Octavia Squidington III <octavia-squidington-iii@users.noreply.github.com>
45  .github/workflows/deploy-oss-catalog.yml (vendored)
@@ -1,45 +0,0 @@
-name: Deploy OSS Connector Catalog to GCS
-
-on:
-  push:
-    branches:
-      - master
-    paths:
-      - airbyte-config-oss/init-oss/src/main/resources/seed/**
-
-  workflow_dispatch:
-
-jobs:
-  deploy-catalog:
-    name: "Deploy Catalog"
-    if: github.ref == 'refs/heads/master'
-    runs-on: ubuntu-latest
-    concurrency: deploy-oss-connector-catalog
-    steps:
-      - name: Checkout Airbyte
-        uses: actions/checkout@v3
-      - name: Setup Cloud SDK
-        uses: google-github-actions/setup-gcloud@v0
-        with:
-          service_account_key: ${{ secrets.PROD_SPEC_CACHE_SA_KEY }}
-          export_default_credentials: true
-      - name: Install Java
-        uses: actions/setup-java@v3
-        with:
-          distribution: "zulu"
-          java-version: "17"
-      - name: Generate catalog
-        run: SUB_BUILD=ALL_CONNECTORS ./gradlew :airbyte-config-oss:specs-oss:generateOssConnectorCatalog
-      - name: Upload catalog to GCS
-        shell: bash
-        # TODO remove once new registry in use
-        run: |
-          gcs_bucket_name="prod-airbyte-cloud-connector-metadata-service"
-          catalog_path="airbyte-config-oss/init-oss/src/main/resources/seed/oss_catalog.json"
-          gsutil -h "Cache-Control:public, max-age=10" cp "$catalog_path" "gs://$gcs_bucket_name/oss_catalog.json"
-      - name: Trigger Cloud catalog generation
-        uses: peter-evans/repository-dispatch@v2
-        with:
-          token: ${{ secrets.GH_PAT_MAINTENANCE_OCTAVIA }}
-          repository: airbytehq/airbyte-cloud
-          event-type: generate-cloud-catalog
11  .github/workflows/gradle.yml (vendored)
@@ -28,17 +28,6 @@ on:
 permissions: write-all
 
 jobs:
-  # COMMON TASKS
-  ensure-images-exist:
-    name: "Ensure all required Docker images exist on Dockerhub"
-    timeout-minutes: 10
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout Airbyte
-        uses: actions/checkout@v3
-
-      - name: Check images exist
-        run: ./tools/bin/check_images_exist.sh all
   # The output of this job is used to trigger the following builds.
   changes:
     name: "Detect Modified Files"
21  .github/workflows/publish-command.yml (vendored)
@@ -340,27 +340,6 @@ jobs:
           SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_CONNECTOR_RELEASE_AUTH_TOKEN }}
          SENTRY_ORG: airbytehq
           SENTRY_PROJECT: connector-incident-management
-      - name: Check if connector in definitions yaml
-        if: github.event.inputs.auto-bump-version == 'true' && github.event.inputs.pre-release != 'true' && success()
-        run: |
-          connector="airbyte/${{ env.IMAGE_NAME }}"
-          definitionpath=./airbyte-config-oss/init-oss/src/main/resources/seed/
-          sourcecheck=$(yq e ".. | select(has(\"dockerRepository\")) | select(.dockerRepository == \"$connector\")" "$definitionpath"source_definitions.yaml)
-          destcheck=$(yq e ".. | select(has(\"dockerRepository\")) | select(.dockerRepository == \"$connector\")" "$definitionpath"destination_definitions.yaml)
-          if [[ (-z "$sourcecheck" && -z "$destcheck") ]]
-          then exit 1
-          fi
-      - name: Bump version in definitions yaml
-        if: github.event.inputs.auto-bump-version == 'true' && github.event.inputs.pre-release != 'true' && success()
-        run: |
-          connector="airbyte/${{ env.IMAGE_NAME }}"
-          definitionpath=./airbyte-config-oss/init-oss/src/main/resources/seed/
-          sourcename=$(yq e ".[] | select(has(\"dockerRepository\")) | select(.dockerRepository == \"$connector\") | .name" "$definitionpath"source_definitions.yaml)
-          destname=$(yq e ".[] | select(has(\"dockerRepository\")) | select(.dockerRepository == \"$connector\") | .name" "$definitionpath"destination_definitions.yaml)
-          if [ -z "$sourcename" ]
-          then yq e "(.[] | select(.name == \"$destname\").dockerImageTag)|=\"${{ env.IMAGE_VERSION }}\"" -i "$definitionpath"destination_definitions.yaml
-          else yq e "(.[] | select(.name == \"$sourcename\").dockerImageTag)|=\"${{ env.IMAGE_VERSION }}\"" -i "$definitionpath"source_definitions.yaml
-          fi
       - name: Run gradle process changes
         if: github.event.inputs.auto-bump-version == 'true' && github.event.inputs.pre-release != 'true' && success()
         uses: Wandalen/wretry.action@master
@@ -33,8 +33,6 @@ repos:
         exclude: |
           (?x)^.*(
             .github/|
-            source_specs.yaml|
-            destination_specs.yaml|
             .gitlab-ci.yml
           ).?$
@@ -39,7 +39,7 @@ public interface FeatureFlags {
 
   /**
    * Get the workspaces allow-listed for strict incremental comparison in normalization. This takes
-   * precedence over the normalization version in destination_definitions.yaml.
+   * precedence over the normalization version in oss_registry.json.
    *
    * @return a comma-separated list of workspace ids where strict incremental comparison should be
    *         enabled in normalization.
@@ -0,0 +1,142 @@
+/*
+ * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+ */
+
+package io.airbyte.commons.logging;
+
+import com.fasterxml.jackson.core.type.TypeReference;
+import io.airbyte.commons.constants.AirbyteSecretConstants;
+import io.airbyte.commons.json.Jsons;
+import io.airbyte.commons.yaml.Yamls;
+import java.nio.charset.Charset;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
+import java.util.stream.Collectors;
+import org.apache.commons.io.IOUtils;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.core.LogEvent;
+import org.apache.logging.log4j.core.appender.rewrite.RewritePolicy;
+import org.apache.logging.log4j.core.config.plugins.Plugin;
+import org.apache.logging.log4j.core.config.plugins.PluginAttribute;
+import org.apache.logging.log4j.core.config.plugins.PluginFactory;
+import org.apache.logging.log4j.core.impl.Log4jLogEvent;
+import org.apache.logging.log4j.message.SimpleMessage;
+import org.apache.logging.log4j.status.StatusLogger;
+
+/**
+ * Custom Log4j2 {@link RewritePolicy} used to intercept all log messages and mask any JSON
+ * properties in the message that match the list of maskable properties.
+ * <p>
+ * The maskable properties file is generated by a Gradle task in the {@code :airbyte-config:specs}
+ * project. The file is named {@code specs_secrets_mask.yaml} and is located in the
+ * {@code src/main/resources/seed} directory of the {@link :airbyte-config:init} project.
+ */
+@Plugin(name = "MaskedDataInterceptor",
+        category = "Core",
+        elementType = "rewritePolicy",
+        printObject = true)
+public class MaskedDataInterceptor implements RewritePolicy {
+
+  protected static final Logger logger = StatusLogger.getLogger();
+
+  /**
+   * The pattern used to determine if a message contains sensitive data.
+   */
+  private final Optional<String> pattern;
+
+  @PluginFactory
+  public static MaskedDataInterceptor createPolicy(
+                                                   @PluginAttribute(value = "specMaskFile",
+                                                                    defaultString = "/seed/specs_secrets_mask.yaml") final String specMaskFile) {
+    return new MaskedDataInterceptor(specMaskFile);
+  }
+
+  private MaskedDataInterceptor(final String specMaskFile) {
+    this.pattern = buildPattern(specMaskFile);
+  }
+
+  @Override
+  public LogEvent rewrite(final LogEvent source) {
+    return Log4jLogEvent.newBuilder()
+        .setLoggerName(source.getLoggerName())
+        .setMarker(source.getMarker())
+        .setLoggerFqcn(source.getLoggerFqcn())
+        .setLevel(source.getLevel())
+        .setMessage(new SimpleMessage(applyMask(source.getMessage().getFormattedMessage())))
+        .setThrown(source.getThrown())
+        .setContextMap(source.getContextMap())
+        .setContextStack(source.getContextStack())
+        .setThreadName(source.getThreadName())
+        .setSource(source.getSource())
+        .setTimeMillis(source.getTimeMillis())
+        .build();
+  }
+
+  /**
+   * Applies the mask to the message, if necessary.
+   *
+   * @param message The log message.
+   * @return The possibly masked log message.
+   */
+  private String applyMask(final String message) {
+    if (pattern.isPresent()) {
+      return message.replaceAll(pattern.get(), "\"$1\":\"" + AirbyteSecretConstants.SECRETS_MASK + "\"");
+    } else {
+      return message;
+    }
+  }
+
+  /**
+   * Loads the maskable properties from the provided file.
+   *
+   * @param specMaskFile The spec mask file.
+   * @return The set of maskable properties.
+   */
+  private Set<String> getMaskableProperties(final String specMaskFile) {
+    // URL url = MaskedDataInterceptor.class.getResource("/");
+    // final URL url2 = Resources.getResource("");
+    // try {
+    // logger.info("Loading mask data from {} in class {}", url2, Paths.get(url.toURI()).toFile());
+    // } catch (final Exception e) {
+    // logger.error("Unable to load mask data from '{}': {}.", specMaskFile, e.getMessage());
+    // }
+    try {
+      final String maskFileContents = IOUtils.toString(getClass().getResourceAsStream(specMaskFile), Charset.defaultCharset());
+      final Map<String, Set<String>> properties = Jsons.object(Yamls.deserialize(maskFileContents), new TypeReference<>() {});
+      return properties.getOrDefault("properties", Set.of());
+    } catch (final Exception e) {
+      logger.error("Unable to load mask data from '{}': {}.", specMaskFile, e.getMessage());
+      return Set.of();
+    }
+  }
+
+  /**
+   * Builds the maskable property matching pattern.
+   *
+   * @param specMaskFile The spec mask file.
+   * @return The regular expression pattern used to find maskable properties.
+   */
+  private Optional<String> buildPattern(final String specMaskFile) {
+    final Set<String> maskableProperties = getMaskableProperties(specMaskFile);
+    return !maskableProperties.isEmpty() ? Optional.of(generatePattern(maskableProperties)) : Optional.empty();
+  }
+
+  /**
+   * Generates the property matching pattern string from the provided set of properties.
+   *
+   * @param properties The set of properties to match.
+   * @return The generated regular expression pattern used to match the maskable properties.
+   */
+  private String generatePattern(final Set<String> properties) {
+    final StringBuilder builder = new StringBuilder();
+    builder.append("(?i)"); // case insensitive
+    builder.append("\"(");
+    builder.append(properties.stream().collect(Collectors.joining("|")));
+    builder.append(")\"\\s*:\\s*(\"(?:[^\"\\\\]|\\\\.)*\"|\\[[^]\\[]*]|\\d+)");
+    return builder.toString();
+  }
+
+}
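To make the masking behavior concrete, here is a minimal, self-contained sketch that rebuilds the same pattern shape for two illustrative property names and applies it to a sample log line. The property names and the mask literal are stand-ins; in the real interceptor they come from specs_secrets_mask.yaml and AirbyteSecretConstants.SECRETS_MASK.

```java
import java.util.Set;

public class MaskingSketch {

  // Illustrative stand-in for AirbyteSecretConstants.SECRETS_MASK.
  private static final String MASK = "*****";

  public static void main(String[] args) {
    // Illustrative stand-ins for the properties loaded from specs_secrets_mask.yaml.
    final Set<String> properties = Set.of("api_key", "password");

    // Same pattern shape as MaskedDataInterceptor#generatePattern: case-insensitive
    // key match; the value may be a quoted string, an array, or a bare number.
    final String pattern = "(?i)\"(" + String.join("|", properties)
        + ")\"\\s*:\\s*(\"(?:[^\"\\\\]|\\\\.)*\"|\\[[^]\\[]*]|\\d+)";

    final String logLine = "{\"host\":\"db.example.com\",\"password\":\"hunter2\",\"api_key\":\"abc123\"}";
    // Keeps the key ($1) and replaces the value with the mask, as applyMask does.
    System.out.println(logLine.replaceAll(pattern, "\"$1\":\"" + MASK + "\""));
    // -> {"host":"db.example.com","password":"*****","api_key":"*****"}
  }
}
```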
@@ -10,7 +10,7 @@ public class CatalogDefinitionsConfig {
 
   private static final String SEED_SUBDIRECTORY = "seed/";
   private static final String ICON_SUBDIRECTORY = "icons/";
-  private static final String LOCAL_CONNECTOR_CATALOG_FILE_NAME = "oss_catalog.json";
+  private static final String LOCAL_CONNECTOR_CATALOG_FILE_NAME = "oss_registry.json";
   private static final String DEFAULT_LOCAL_CONNECTOR_CATALOG_PATH =
       SEED_SUBDIRECTORY + LOCAL_CONNECTOR_CATALOG_FILE_NAME;
@@ -18,10 +18,3 @@ dependencies {
 }
 
 Task publishArtifactsTask = getPublishArtifactsTask("$rootProject.ext.version", project)
-
-task validateIcons(type: JavaExec, dependsOn: [compileJava]) {
-    classpath = sourceSets.main.runtimeClasspath
-    mainClass = 'io.airbyte.configoss.init.IconValidationTask'
-}
-
-validateIcons.shouldRunAfter processResources
@@ -1,94 +0,0 @@
-/*
- * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- */
-
-package io.airbyte.configoss.init;
-
-import com.google.common.io.Resources;
-import io.airbyte.configoss.CatalogDefinitionsConfig;
-import java.io.IOException;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.stream.Collectors;
-
-/**
- * Simple task that checks if all icons in the seed definition files exist as well as that no icon
- * in the icons folder is unused.
- */
-public class IconValidationTask {
-
-  private static final String ICON_SUB_DIRECTORY = CatalogDefinitionsConfig.getIconSubdirectory();
-
-  private static Path getIconDirectoryPath() {
-    try {
-      final URI localIconsUri = Resources.getResource(ICON_SUB_DIRECTORY).toURI();
-      return Path.of(localIconsUri);
-    } catch (final URISyntaxException e) {
-      throw new RuntimeException("Failed to fetch local icon directory path", e);
-    }
-  }
-
-  private static List<String> getLocalIconFileNames() {
-    try {
-      final Path iconDirectoryPath = getIconDirectoryPath();
-      return Files.list(iconDirectoryPath).map(path -> path.getFileName().toString()).toList();
-    } catch (final IOException e) {
-      throw new RuntimeException("Failed to fetch local icon files", e);
-    }
-  }
-
-  private static List<String> getIconFileNamesFromCatalog() {
-    final LocalDefinitionsProvider localDefinitionsProvider = new LocalDefinitionsProvider();
-    final List<String> sourceIcons = localDefinitionsProvider
-        .getSourceDefinitions()
-        .stream().map(s -> s.getIcon())
-        .collect(Collectors.toList());
-
-    final List<String> destinationIcons = localDefinitionsProvider
-        .getDestinationDefinitions()
-        .stream().map(s -> s.getIcon())
-        .collect(Collectors.toList());
-
-    // concat the two lists one
-    sourceIcons.addAll(destinationIcons);
-
-    // remove all null values
-    sourceIcons.removeAll(Collections.singleton(null));
-
-    return sourceIcons;
-  }
-
-  private static List<String> difference(final List<String> list1, final List<String> list2) {
-    final List<String> difference = new ArrayList<>(list1);
-    difference.removeAll(list2);
-    return difference;
-  }
-
-  public static void main(final String[] args) throws Exception {
-    final List<String> catalogIconFileNames = getIconFileNamesFromCatalog();
-    final List<String> localIconFileNames = getLocalIconFileNames();
-
-    final List<String> missingIcons = difference(catalogIconFileNames, localIconFileNames);
-    final List<String> unusedIcons = difference(localIconFileNames, catalogIconFileNames);
-
-    final List<String> errorMessages = List.of();
-    if (!missingIcons.isEmpty()) {
-      errorMessages
-          .add("The following icon files have been referenced inside the seed files, but don't exist:\n\n" + String.join(", ", missingIcons));
-    }
-
-    if (!unusedIcons.isEmpty()) {
-      errorMessages.add("The following icons are not used in the seed files and should be removed:\n\n" + String.join(", ", unusedIcons));
-    }
-
-    if (!errorMessages.isEmpty()) {
-      throw new RuntimeException(String.join("\n\n", errorMessages));
-    }
-  }
-
-}
@@ -24,11 +24,11 @@ import java.util.stream.Collectors;
  */
 final public class LocalDefinitionsProvider implements DefinitionsProvider {
 
-  private static final String LOCAL_CONNECTOR_CATALOG_PATH = CatalogDefinitionsConfig.getLocalConnectorCatalogPath();
+  private static final String LOCAL_CONNECTOR_REGISTRY_PATH = CatalogDefinitionsConfig.getLocalConnectorCatalogPath();
 
   public CombinedConnectorCatalog getLocalDefinitionCatalog() {
     try {
-      final URL url = Resources.getResource(LOCAL_CONNECTOR_CATALOG_PATH);
+      final URL url = Resources.getResource(LOCAL_CONNECTOR_REGISTRY_PATH);
       final String jsonString = Resources.toString(url, StandardCharsets.UTF_8);
       final CombinedConnectorCatalog catalog = Jsons.deserialize(jsonString, CombinedConnectorCatalog.class);
       return catalog;
@@ -63,7 +63,7 @@ final public class LocalDefinitionsProvider implements DefinitionsProvider {
   public StandardSourceDefinition getSourceDefinition(final UUID definitionId) throws ConfigNotFoundException {
     final StandardSourceDefinition definition = getSourceDefinitionsMap().get(definitionId);
     if (definition == null) {
-      throw new ConfigNotFoundException(SeedType.STANDARD_SOURCE_DEFINITION.name(), definitionId.toString());
+      throw new ConfigNotFoundException("local_registry:source_def", definitionId.toString());
     }
     return definition;
   }
@@ -77,8 +77,7 @@ final public class LocalDefinitionsProvider implements DefinitionsProvider {
   public StandardDestinationDefinition getDestinationDefinition(final UUID definitionId) throws ConfigNotFoundException {
     final StandardDestinationDefinition definition = getDestinationDefinitionsMap().get(definitionId);
     if (definition == null) {
-      // TODO remove the reference to the enum
-      throw new ConfigNotFoundException(SeedType.STANDARD_DESTINATION_DEFINITION.name(), definitionId.toString());
+      throw new ConfigNotFoundException("local_registry:destination_def", definitionId.toString());
     }
     return definition;
   }
@@ -72,7 +72,7 @@ public class RemoteDefinitionsProvider implements DefinitionsProvider {
   public StandardSourceDefinition getSourceDefinition(final UUID definitionId) throws ConfigNotFoundException {
     final StandardSourceDefinition definition = getSourceDefinitionsMap().get(definitionId);
     if (definition == null) {
-      throw new ConfigNotFoundException(SeedType.STANDARD_SOURCE_DEFINITION.name(), definitionId.toString());
+      throw new ConfigNotFoundException("remote_registry:source_def", definitionId.toString());
     }
     return definition;
   }
@@ -86,7 +86,7 @@ public class RemoteDefinitionsProvider implements DefinitionsProvider {
   public StandardDestinationDefinition getDestinationDefinition(final UUID definitionId) throws ConfigNotFoundException {
     final StandardDestinationDefinition definition = getDestinationDefinitionsMap().get(definitionId);
    if (definition == null) {
-      throw new ConfigNotFoundException(SeedType.STANDARD_DESTINATION_DEFINITION.name(), definitionId.toString());
+      throw new ConfigNotFoundException("remote_registry:destination_def", definitionId.toString());
     }
     return definition;
   }
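For context, a minimal usage sketch of the providers after this change, under assumed wiring (the no-arg constructor and the io.airbyte.configoss.init package follow IconValidationTask above). A lookup miss now reports the registry-style type string instead of a SeedType enum name:

```java
import io.airbyte.configoss.StandardSourceDefinition;
import io.airbyte.configoss.init.LocalDefinitionsProvider;
import java.util.UUID;

public class ProviderUsageSketch {

  public static void main(String[] args) throws Exception {
    final LocalDefinitionsProvider provider = new LocalDefinitionsProvider();
    try {
      // A random id is used here purely to demonstrate the not-found path.
      final StandardSourceDefinition def = provider.getSourceDefinition(UUID.randomUUID());
      System.out.println(def);
    } catch (final Exception e) {
      // After this change the error identifies the registry ("local_registry:source_def")
      // rather than the removed SeedType enum constant.
      System.out.println(e.getMessage());
    }
  }
}
```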
@@ -1,31 +0,0 @@
-/*
- * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- */
-
-package io.airbyte.configoss.init;
-
-public enum SeedType {
-
-  STANDARD_SOURCE_DEFINITION("/seed/source_definitions.yaml", "sourceDefinitionId"),
-  STANDARD_DESTINATION_DEFINITION("/seed/destination_definitions.yaml", "destinationDefinitionId"),
-  SOURCE_SPEC("/seed/source_specs.yaml", "dockerImage"),
-  DESTINATION_SPEC("/seed/destination_specs.yaml", "dockerImage");
-
-  final String resourcePath;
-  // ID field name
-  final String idName;
-
-  SeedType(final String resourcePath, final String idName) {
-    this.resourcePath = resourcePath;
-    this.idName = idName;
-  }
-
-  public String getResourcePath() {
-    return resourcePath;
-  }
-
-  public String getIdName() {
-    return idName;
-  }
-
-}
@@ -1,483 +0,0 @@
-- name: Azure Blob Storage
-  destinationDefinitionId: b4c5d105-31fd-4817-96b6-cb923bfc04cb
-  dockerRepository: airbyte/destination-azure-blob-storage
-  dockerImageTag: 0.2.0
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/azureblobstorage
-  icon: azureblobstorage.svg
-  resourceRequirements:
-    jobSpecific:
-      - jobType: sync
-        resourceRequirements:
-          memory_limit: "1Gi"
-          memory_request: "1Gi"
-  releaseStage: alpha
-- name: Amazon SQS
-  destinationDefinitionId: 0eeee7fb-518f-4045-bacc-9619e31c43ea
-  dockerRepository: airbyte/destination-amazon-sqs
-  dockerImageTag: 0.1.0
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/amazon-sqs
-  icon: awssqs.svg
-  releaseStage: alpha
-- name: Apache Doris
-  destinationDefinitionId: 05c161bf-ca73-4d48-b524-d392be417002
-  dockerRepository: airbyte/destination-doris
-  dockerImageTag: 0.1.0
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/doris
-  icon: apachedoris.svg
-  releaseStage: alpha
-- name: Apache Iceberg
-  destinationDefinitionId: df65a8f3-9908-451b-aa9b-445462803560
-  dockerRepository: airbyte/destination-iceberg
-  dockerImageTag: 0.1.0
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/iceberg
-  releaseStage: alpha
-- name: AWS Datalake
-  destinationDefinitionId: 99878c90-0fbd-46d3-9d98-ffde879d17fc
-  dockerRepository: airbyte/destination-aws-datalake
-  dockerImageTag: 0.1.3
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/aws-datalake
-  icon: awsdatalake.svg
-  releaseStage: alpha
-- name: BigQuery
-  destinationDefinitionId: 22f6c74f-5699-40ff-833c-4a879ea40133
-  dockerRepository: airbyte/destination-bigquery
-  dockerImageTag: 1.4.1
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/bigquery
-  icon: bigquery.svg
-  normalizationConfig:
-    normalizationRepository: airbyte/normalization
-    normalizationTag: 0.4.3
-    normalizationIntegrationType: bigquery
-  supportsDbt: true
-  resourceRequirements:
-    jobSpecific:
-      - jobType: sync
-        resourceRequirements:
-          memory_limit: "1Gi"
-          memory_request: "1Gi"
-  releaseStage: generally_available
-- name: BigQuery (denormalized typed struct)
-  destinationDefinitionId: 079d5540-f236-4294-ba7c-ade8fd918496
-  dockerRepository: airbyte/destination-bigquery-denormalized
-  dockerImageTag: 1.4.0
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/bigquery
-  icon: bigquery.svg
-  resourceRequirements:
-    jobSpecific:
-      - jobType: sync
-        resourceRequirements:
-          memory_limit: "1Gi"
-          memory_request: "1Gi"
-  releaseStage: beta
-- name: Cassandra
-  destinationDefinitionId: 707456df-6f4f-4ced-b5c6-03f73bcad1c5
-  dockerRepository: airbyte/destination-cassandra
-  dockerImageTag: 0.1.4
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/cassandra
-  icon: cassandra.svg
-  releaseStage: alpha
-- name: Cumul.io
-  destinationDefinitionId: e088acb6-9780-4568-880c-54c2dd7f431b
-  dockerRepository: airbyte/destination-cumulio
-  dockerImageTag: 0.1.0
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/cumulio
-  icon: cumulio.svg
-  releaseStage: alpha
-- name: Chargify (Keen)
-  destinationDefinitionId: 81740ce8-d764-4ea7-94df-16bb41de36ae
-  dockerRepository: airbyte/destination-keen
-  dockerImageTag: 0.2.4
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/keen
-  icon: chargify.svg
-  releaseStage: alpha
-- name: Clickhouse
-  destinationDefinitionId: ce0d828e-1dc4-496c-b122-2da42e637e48
-  dockerRepository: airbyte/destination-clickhouse
-  dockerImageTag: 0.2.3
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/clickhouse
-  icon: clickhouse.svg
-  releaseStage: alpha
-  normalizationConfig:
-    normalizationRepository: airbyte/normalization-clickhouse
-    normalizationTag: 0.4.3
-    normalizationIntegrationType: clickhouse
-  supportsDbt: false
-- name: Cloudflare R2
-  destinationDefinitionId: 0fb07be9-7c3b-4336-850d-5efc006152ee
-  dockerRepository: airbyte/destination-r2
-  dockerImageTag: 0.1.0
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/r2
-  icon: cloudflare-r2.svg
-  releaseStage: alpha
-- name: Convex
-  destinationDefinitionId: 3eb4d99c-11fa-4561-a259-fc88e0c2f8f4
-  dockerRepository: airbyte/destination-convex
-  dockerImageTag: 0.1.0
-  documentationUrl: https://docs.airbyte.io/integrations/destinations/convex
-  icon: convex.svg
-  releaseStage: alpha
-- name: Starburst Galaxy
-  destinationDefinitionId: 4528e960-6f7b-4412-8555-7e0097e1da17
-  dockerRepository: airbyte/destination-starburst-galaxy
-  dockerImageTag: 0.0.1
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/starburst-galaxy
-  icon: starburst-galaxy.svg
-  releaseStage: alpha
-- name: Databricks Lakehouse
-  destinationDefinitionId: 072d5540-f236-4294-ba7c-ade8fd918496
-  dockerRepository: airbyte/destination-databricks
-  dockerImageTag: 1.0.2
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/databricks
-  icon: databricks.svg
-  releaseStage: alpha
-- name: DynamoDB
-  destinationDefinitionId: 8ccd8909-4e99-4141-b48d-4984b70b2d89
-  dockerRepository: airbyte/destination-dynamodb
-  dockerImageTag: 0.1.7
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/dynamodb
-  icon: dynamodb.svg
-  releaseStage: alpha
-- name: E2E Testing
-  destinationDefinitionId: 2eb65e87-983a-4fd7-b3e3-9d9dc6eb8537
-  dockerRepository: airbyte/destination-e2e-test
-  dockerImageTag: 0.3.0
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/e2e-test
-  icon: airbyte.svg
-- destinationDefinitionId: 68f351a7-2745-4bef-ad7f-996b8e51bb8c
-  name: ElasticSearch
-  dockerRepository: airbyte/destination-elasticsearch
-  dockerImageTag: 0.1.6
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/elasticsearch
-  icon: elasticsearch.svg
-  releaseStage: alpha
-- name: Exasol
-  destinationDefinitionId: bb6071d9-6f34-4766-bec2-d1d4ed81a653
-  dockerRepository: airbyte/destination-exasol
-  dockerImageTag: 0.1.1
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/exasol
-  releaseStage: alpha
-- name: Firebolt
-  destinationDefinitionId: 18081484-02a5-4662-8dba-b270b582f321
-  dockerRepository: airbyte/destination-firebolt
-  dockerImageTag: 0.1.0
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/firebolt
-  icon: firebolt.svg
-  releaseStage: alpha
-  supportsDbt: true
-- name: Google Cloud Storage (GCS)
-  destinationDefinitionId: ca8f6566-e555-4b40-943a-545bf123117a
-  dockerRepository: airbyte/destination-gcs
-  dockerImageTag: 0.3.0
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/gcs
-  icon: googlecloudstorage.svg
-  resourceRequirements:
-    jobSpecific:
-      - jobType: sync
-        resourceRequirements:
-          memory_limit: "1Gi"
-          memory_request: "1Gi"
-  releaseStage: beta
-- name: Google Firestore
-  destinationDefinitionId: 27dc7500-6d1b-40b1-8b07-e2f2aea3c9f4
-  dockerRepository: airbyte/destination-firestore
-  dockerImageTag: 0.1.1
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/firestore
-  icon: firestore.svg
-  releaseStage: alpha
-- name: Google PubSub
-  destinationDefinitionId: 356668e2-7e34-47f3-a3b0-67a8a481b692
-  dockerRepository: airbyte/destination-pubsub
-  dockerImageTag: 0.2.0
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/pubsub
-  icon: googlepubsub.svg
-  releaseStage: alpha
-- name: Kafka
-  destinationDefinitionId: 9f760101-60ae-462f-9ee6-b7a9dafd454d
-  dockerRepository: airbyte/destination-kafka
-  dockerImageTag: 0.1.10
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/kafka
-  icon: kafka.svg
-  releaseStage: alpha
-- name: Kinesis
-  destinationDefinitionId: 6d1d66d4-26ab-4602-8d32-f85894b04955
-  dockerRepository: airbyte/destination-kinesis
-  dockerImageTag: 0.1.5
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/kinesis
-  icon: kinesis.svg
-  releaseStage: alpha
-- name: Local CSV
-  destinationDefinitionId: 8be1cf83-fde1-477f-a4ad-318d23c9f3c6
-  dockerRepository: airbyte/destination-csv
-  dockerImageTag: 1.0.0
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/local-csv
-  icon: file-csv.svg
-  releaseStage: alpha
-- name: Local JSON
-  destinationDefinitionId: a625d593-bba5-4a1c-a53d-2d246268a816
-  dockerRepository: airbyte/destination-local-json
-  dockerImageTag: 0.2.11
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/local-json
-  icon: file-json.svg
-  releaseStage: alpha
-- name: MQTT
-  destinationDefinitionId: f3802bc4-5406-4752-9e8d-01e504ca8194
-  dockerRepository: airbyte/destination-mqtt
-  dockerImageTag: 0.1.3
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/mqtt
-  icon: mqtt.svg
-  releaseStage: alpha
-- name: MS SQL Server
-  destinationDefinitionId: d4353156-9217-4cad-8dd7-c108fd4f74cf
-  dockerRepository: airbyte/destination-mssql
-  dockerImageTag: 0.1.23
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/mssql
-  icon: mssql.svg
-  releaseStage: alpha
-  normalizationConfig:
-    normalizationRepository: airbyte/normalization-mssql
-    normalizationTag: 0.4.3
-    normalizationIntegrationType: mssql
-  supportsDbt: true
-- name: MeiliSearch
-  destinationDefinitionId: af7c921e-5892-4ff2-b6c1-4a5ab258fb7e
-  dockerRepository: airbyte/destination-meilisearch
-  dockerImageTag: 1.0.0
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/meilisearch
-  icon: meilisearch.svg
-  releaseStage: alpha
-- name: MongoDB
-  destinationDefinitionId: 8b746512-8c2e-6ac1-4adc-b59faafd473c
-  dockerRepository: airbyte/destination-mongodb
-  dockerImageTag: 0.1.9
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/mongodb
-  icon: mongodb.svg
-  releaseStage: alpha
-- name: MySQL
-  destinationDefinitionId: ca81ee7c-3163-4246-af40-094cc31e5e42
-  dockerRepository: airbyte/destination-mysql
-  dockerImageTag: 0.1.20
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/mysql
-  icon: mysql.svg
-  releaseStage: alpha
-  normalizationConfig:
-    normalizationRepository: airbyte/normalization-mysql
-    normalizationTag: 0.4.3
-    normalizationIntegrationType: mysql
-  supportsDbt: true
-- name: Oracle
-  destinationDefinitionId: 3986776d-2319-4de9-8af8-db14c0996e72
-  dockerRepository: airbyte/destination-oracle
-  dockerImageTag: 0.1.19
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/oracle
-  icon: oracle.svg
-  releaseStage: alpha
-  normalizationConfig:
-    normalizationRepository: airbyte/normalization-oracle
-    normalizationTag: 0.4.3
-    normalizationIntegrationType: oracle
-  supportsDbt: true
-- name: Postgres
-  destinationDefinitionId: 25c5221d-dce2-4163-ade9-739ef790f503
-  dockerRepository: airbyte/destination-postgres
-  dockerImageTag: 0.3.27
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/postgres
-  icon: postgresql.svg
-  releaseStage: alpha
-  normalizationConfig:
-    normalizationRepository: airbyte/normalization
-    normalizationTag: 0.4.3
-    normalizationIntegrationType: postgres
-  supportsDbt: true
-- name: Pulsar
-  destinationDefinitionId: 2340cbba-358e-11ec-8d3d-0242ac130203
-  dockerRepository: airbyte/destination-pulsar
-  dockerImageTag: 0.1.3
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/pulsar
-  icon: pulsar.svg
-  releaseStage: alpha
-- name: RabbitMQ
-  destinationDefinitionId: e06ad785-ad6f-4647-b2e8-3027a5c59454
-  dockerRepository: airbyte/destination-rabbitmq
-  dockerImageTag: 0.1.1
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/rabbitmq
-  icon: pulsar.svg
-  releaseStage: alpha
-- name: Redis
-  destinationDefinitionId: d4d3fef9-e319-45c2-881a-bd02ce44cc9f
-  dockerRepository: airbyte/destination-redis
-  dockerImageTag: 0.1.4
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/redis
-  icon: redis.svg
-  releaseStage: alpha
-- name: Redshift
-  destinationDefinitionId: f7a7d195-377f-cf5b-70a5-be6b819019dc
-  dockerRepository: airbyte/destination-redshift
-  dockerImageTag: 0.4.7
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/redshift
-  icon: redshift.svg
-  normalizationConfig:
-    normalizationRepository: airbyte/normalization-redshift
-    normalizationTag: 0.4.3
-    normalizationIntegrationType: redshift
-  supportsDbt: true
-  resourceRequirements:
-    jobSpecific:
-      - jobType: sync
-        resourceRequirements:
-          memory_limit: "1Gi"
-          memory_request: "1Gi"
-  releaseStage: beta
-- name: Redpanda
-  destinationDefinitionId: 825c5ee3-ed9a-4dd1-a2b6-79ed722f7b13
-  dockerRepository: airbyte/destination-redpanda
-  dockerImageTag: 0.1.0
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/redpanda
-  icon: redpanda.svg
-  releaseStage: alpha
-- name: Rockset
-  destinationDefinitionId: 2c9d93a7-9a17-4789-9de9-f46f0097eb70
-  dockerRepository: airbyte/destination-rockset
-  dockerImageTag: 0.1.4
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/rockset
-  releaseStage: alpha
-- name: S3
-  destinationDefinitionId: 4816b78f-1489-44c1-9060-4b19d5fa9362
-  dockerRepository: airbyte/destination-s3
-  dockerImageTag: 0.4.0
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/s3
-  icon: s3.svg
-  resourceRequirements:
-    jobSpecific:
-      - jobType: sync
-        resourceRequirements:
-          memory_limit: "1Gi"
-          memory_request: "1Gi"
-  releaseStage: generally_available
-- name: S3 Glue
-  destinationDefinitionId: 471e5cab-8ed1-49f3-ba11-79c687784737
-  dockerRepository: airbyte/destination-s3-glue
-  dockerImageTag: 0.1.7
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/s3-glue
-  icon: s3-glue.svg
-  releaseStage: alpha
-- name: SelectDB
-  destinationDefinitionId: 50a559a7-6323-4e33-8aa0-51dfd9dfadac
-  dockerRepository: airbyte/destination-selectdb
-  dockerImageTag: 0.1.0
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/selectdb
-  icon: select.db
-  releaseStage: alpha
-- name: SFTP-JSON
-  destinationDefinitionId: e9810f61-4bab-46d2-bb22-edfc902e0644
-  dockerRepository: airbyte/destination-sftp-json
-  dockerImageTag: 0.1.0
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/sftp-json
-  icon: sftp.svg
-  releaseStage: alpha
-- name: Snowflake
-  destinationDefinitionId: 424892c4-daac-4491-b35d-c6688ba547ba
-  dockerRepository: airbyte/destination-snowflake
-  dockerImageTag: 1.0.1
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/snowflake
-  icon: snowflake.svg
-  normalizationConfig:
-    normalizationRepository: airbyte/normalization-snowflake
-    normalizationTag: 0.4.3
-    normalizationIntegrationType: snowflake
-  supportsDbt: true
-  resourceRequirements:
-    jobSpecific:
-      - jobType: sync
-        resourceRequirements:
-          memory_limit: "1Gi"
-          memory_request: "1Gi"
-  releaseStage: generally_available
-- name: MariaDB ColumnStore
-  destinationDefinitionId: 294a4790-429b-40ae-9516-49826b9702e1
-  dockerRepository: airbyte/destination-mariadb-columnstore
-  dockerImageTag: 0.1.7
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/mariadb-columnstore
-  icon: mariadb.svg
-  releaseStage: alpha
-- name: Streamr
-  destinationDefinitionId: eebd85cf-60b2-4af6-9ba0-edeca01437b0
-  dockerRepository: ghcr.io/devmate-cloud/streamr-airbyte-connectors
-  dockerImageTag: 0.0.1
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/streamr
-  icon: streamr.svg
-  releaseStage: alpha
-- name: Scylla
-  destinationDefinitionId: 3dc6f384-cd6b-4be3-ad16-a41450899bf0
-  dockerRepository: airbyte/destination-scylla
-  dockerImageTag: 0.1.3
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/scylla
-  icon: scylla.svg
-  releaseStage: alpha
-- name: Google Sheets
-  destinationDefinitionId: a4cbd2d1-8dbe-4818-b8bc-b90ad782d12a
-  dockerRepository: airbyte/destination-google-sheets
-  dockerImageTag: 0.1.2
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/google-sheets
-  icon: google-sheets.svg
-  releaseStage: alpha
-- name: Local SQLite
-  destinationDefinitionId: b76be0a6-27dc-4560-95f6-2623da0bd7b6
-  dockerRepository: airbyte/destination-sqlite
-  dockerImageTag: 0.1.0
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/local-sqlite
-  icon: sqlite.svg
-  releaseStage: alpha
-- name: TiDB
-  destinationDefinitionId: 06ec60c7-7468-45c0-91ac-174f6e1a788b
-  dockerRepository: airbyte/destination-tidb
-  dockerImageTag: 0.1.1
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/tidb
-  icon: tidb.svg
-  releaseStage: alpha
-  normalizationConfig:
-    normalizationRepository: airbyte/normalization-tidb
-    normalizationTag: 0.4.3
-    normalizationIntegrationType: tidb
-  supportsDbt: true
-- name: Typesense
-  destinationDefinitionId: 36be8dc6-9851-49af-b776-9d4c30e4ab6a
-  dockerRepository: airbyte/destination-typesense
-  dockerImageTag: 0.1.0
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/typesense
-  icon: typesense.svg
-  releaseStage: alpha
-- name: YugabyteDB
-  destinationDefinitionId: 2300fdcf-a532-419f-9f24-a014336e7966
-  dockerRepository: airbyte/destination-yugabytedb
-  dockerImageTag: 0.1.1
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/yugabytedb
-  icon: yugabytedb.svg
-  releaseStage: alpha
-- name: Databend
-  destinationDefinitionId: 302e4d8e-08d3-4098-acd4-ac67ca365b88
-  dockerRepository: airbyte/destination-databend
-  dockerImageTag: 0.1.2
-  icon: databend.svg
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/databend
-  releaseStage: alpha
-- name: Teradata Vantage
-  destinationDefinitionId: 58e6f9da-904e-11ed-a1eb-0242ac120002
-  dockerRepository: airbyte/destination-teradata
-  dockerImageTag: 0.1.1
-  icon: teradata.svg
-  documentationUrl: https://docs.airbyte.io/integrations/destinations/teradata
-  releaseStage: alpha
-- name: Weaviate
-  destinationDefinitionId: 7b7d7a0d-954c-45a0-bcfc-39a634b97736
-  dockerRepository: airbyte/destination-weaviate
-  dockerImageTag: 0.1.1
-  documentationUrl: https://docs.airbyte.com/integrations/destinations/weaviate
-  releaseStage: alpha
-  icon: weaviate.svg
-- name: DuckDB
-  destinationDefinitionId: 94bd199c-2ff0-4aa2-b98e-17f0acb72610
-  dockerRepository: airbyte/destination-duckdb
-  dockerImageTag: 0.1.0
-  documentationUrl: https://docs.airbyte.io/integrations/destinations/duckdb
-  icon: duckdb.svg
-  releaseStage: alpha
(Five additional file diffs suppressed by the viewer: four too large to display, one with overly long lines.)
@@ -5,19 +5,14 @@
 package io.airbyte.configoss.init;
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertFalse;
 import static org.junit.jupiter.api.Assertions.assertThrows;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 
 import com.fasterxml.jackson.databind.JsonNode;
 import com.google.common.io.Resources;
 import io.airbyte.commons.util.MoreIterators;
 import io.airbyte.commons.yaml.Yamls;
 import io.airbyte.configoss.StandardDestinationDefinition;
 import io.airbyte.configoss.StandardSourceDefinition;
 import java.io.IOException;
 import java.net.URI;
 import java.net.URL;
 import java.nio.charset.StandardCharsets;
 import java.util.List;
 import java.util.UUID;
 import org.junit.jupiter.api.BeforeAll;
@@ -75,26 +70,17 @@ class LocalDefinitionsProviderTest {
   }
 
   @Test
-  void testGetSourceDefinitions() throws IOException {
-    final URL url = Resources.getResource(LocalDefinitionsProvider.class, "/seed/source_definitions.yaml");
-    final String yamlString = Resources.toString(url, StandardCharsets.UTF_8);
-    final JsonNode configList = Yamls.deserialize(yamlString);
-    final int expectedNumberOfSources = MoreIterators.toList(configList.elements()).size();
-
+  void testGetSourceDefinitions() {
     final List<StandardSourceDefinition> sourceDefinitions = localDefinitionsProvider.getSourceDefinitions();
-    assertEquals(expectedNumberOfSources, sourceDefinitions.size());
+    assertFalse(sourceDefinitions.isEmpty());
     assertTrue(sourceDefinitions.stream().allMatch(sourceDef -> sourceDef.getProtocolVersion().length() > 0));
   }
 
   @Test
-  void testGetDestinationDefinitions() throws IOException {
-    final URL url = Resources.getResource(LocalDefinitionsProvider.class, "/seed/destination_definitions.yaml");
-    final String yamlString = Resources.toString(url, StandardCharsets.UTF_8);
-    final JsonNode configList = Yamls.deserialize(yamlString);
-    final int expectedNumberOfDestinations = MoreIterators.toList(configList.elements()).size();
+  void testGetDestinationDefinitions() {
     final List<StandardDestinationDefinition> destinationDefinitions = localDefinitionsProvider.getDestinationDefinitions();
-    assertEquals(expectedNumberOfDestinations, destinationDefinitions.size());
-    assertTrue(destinationDefinitions.stream().allMatch(destDef -> destDef.getProtocolVersion().length() > 0));
+    assertFalse(destinationDefinitions.isEmpty());
+    assertTrue(destinationDefinitions.stream().allMatch(sourceDef -> sourceDef.getProtocolVersion().length() > 0));
   }
 
 }
@@ -12,40 +12,39 @@ dependencies {
     implementation project(':airbyte-json-validation')
 }
 
-task generateSeedConnectorSpecs(type: JavaExec, dependsOn: compileJava) {
+task downloadConnectorRegistry(type: JavaExec, dependsOn: compileJava) {
+    /**
+     * run this once a day. if you want to force this task to run do so with --rerun
+     * e.g. ./gradlew :airbyte-config-oss:specs:downloadConnectorRegistry --info --rerun
+     */
+    inputs.property("todaysDate", new Date().clearTime() )
+    outputs.upToDateWhen { true }
     classpath = sourceSets.main.runtimeClasspath
 
-    mainClass = 'io.airbyte.configoss.specs.SeedConnectorSpecGenerator'
-
-    args '--seed-root'
-    args new File(project(":airbyte-config-oss:init-oss").projectDir, '/src/main/resources/seed')
+    mainClass = 'io.airbyte.configoss.specs.ConnectorRegistryDownloader'
+    args project(":airbyte-config-oss:init-oss").projectDir
 }
 
+project(":airbyte-config-oss:init-oss").tasks.processResources.dependsOn(downloadConnectorRegistry)
+project(":airbyte-config-oss:init-oss").tasks.processTestResources.dependsOn(downloadConnectorRegistry)
+project(":airbyte-config-oss:init-oss").tasks.test.dependsOn(downloadConnectorRegistry)
 
-task generateConnectorSpecsMask(type: JavaExec, dependsOn: generateSeedConnectorSpecs) {
+task downloadConnectorSpecsMask(type: JavaExec, dependsOn: compileJava) {
+    /**
+     * run this once a day. if you want to force this task to run do so with --rerun
+     * e.g. ./gradlew :airbyte-config-oss:specs:downloadConnectorRegistry --info --rerun
+     */
+    inputs.property("todaysDate", new Date().clearTime() )
+    outputs.upToDateWhen { true }
     classpath = sourceSets.main.runtimeClasspath
 
-    mainClass = 'io.airbyte.configoss.specs.ConnectorSpecMaskGenerator'
+    mainClass = 'io.airbyte.configoss.specs.ConnectorSpecMaskDownloader'
 
    args '--specs-root'
     args new File(project(":airbyte-config-oss:init-oss").projectDir, '/src/main/resources/seed')
 }
 
-project(":airbyte-config-oss:init-oss").tasks.processResources.dependsOn(generateConnectorSpecsMask)
-
-task generateOssConnectorCatalog(type: JavaExec, dependsOn: generateSeedConnectorSpecs) {
-    classpath = sourceSets.main.runtimeClasspath
-
-    mainClass = 'io.airbyte.configoss.specs.CombinedConnectorCatalogGenerator'
-
-    args '--seed-root'
-    args new File(project(":airbyte-config-oss:init-oss").projectDir, '/src/main/resources/seed')
-
-    args '--output-filename'
-    args 'oss_catalog.json'
-}
-
-project(":airbyte-config-oss:init-oss").tasks.processResources.dependsOn(generateOssConnectorCatalog)
+project(":airbyte-config-oss:init-oss").tasks.processResources.dependsOn(downloadConnectorSpecsMask)
 
 Task publishArtifactsTask = getPublishArtifactsTask("$rootProject.ext.version", project)
@@ -1,106 +0,0 @@
-/*
- * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- */
-
-package io.airbyte.configoss.specs;
-
-import com.fasterxml.jackson.databind.JsonNode;
-import com.fasterxml.jackson.databind.node.BooleanNode;
-import com.fasterxml.jackson.databind.node.ObjectNode;
-import com.google.common.annotations.VisibleForTesting;
-import io.airbyte.commons.cli.Clis;
-import io.airbyte.commons.io.IOs;
-import io.airbyte.commons.json.Jsons;
-import io.airbyte.commons.util.MoreIterators;
-import io.airbyte.commons.yaml.Yamls;
-import io.airbyte.configoss.AirbyteConfigValidator;
-import io.airbyte.configoss.CombinedConnectorCatalog;
-import io.airbyte.configoss.ConfigSchema;
-import io.airbyte.configoss.DockerImageSpec;
-import io.airbyte.configoss.StandardDestinationDefinition;
-import io.airbyte.configoss.StandardSourceDefinition;
-import java.nio.file.Path;
-import java.util.List;
-import java.util.Map;
-import java.util.stream.Collectors;
-import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.Option;
-import org.apache.commons.cli.Options;
-
-/**
- * Generates a combined representation of the connector catalog that includes Sources, Destinations
- * and their specs all in one. This connector catalog can then be served and loaded from a
- * RemoteDefinitionsProvider.
- */
-public class CombinedConnectorCatalogGenerator {
-
-  private static final Option SEED_ROOT_OPTION = Option.builder("s").longOpt("seed-root").hasArg(true).required(true)
-      .desc("path to where seed resource files are stored").build();
-  private static final Option OUTPUT_FILENAME_OPTION = Option.builder("o").longOpt("output-filename").hasArg(true).required(true)
-      .desc("name for the generated catalog json file").build();
-  private static final Options OPTIONS = new Options().addOption(SEED_ROOT_OPTION).addOption(OUTPUT_FILENAME_OPTION);
-
-  public static void main(final String[] args) throws Exception {
-    final CommandLine parsed = Clis.parse(args, OPTIONS);
-    final Path outputRoot = Path.of(parsed.getOptionValue(SEED_ROOT_OPTION.getOpt()));
-    final String outputFileName = parsed.getOptionValue(OUTPUT_FILENAME_OPTION.getOpt());
-
-    final CombinedConnectorCatalogGenerator combinedConnectorCatalogGenerator = new CombinedConnectorCatalogGenerator();
-    combinedConnectorCatalogGenerator.run(outputRoot, outputFileName);
-  }
-
-  public void run(final Path outputRoot, final String outputFileName) {
-    final List<JsonNode> destinationDefinitionsJson = getSeedJson(outputRoot, SeedConnectorType.DESTINATION.getDefinitionFileName());
-    final List<JsonNode> destinationSpecsJson = getSeedJson(outputRoot, SeedConnectorType.DESTINATION.getSpecFileName());
-    final List<JsonNode> sourceDefinitionsJson = getSeedJson(outputRoot, SeedConnectorType.SOURCE.getDefinitionFileName());
-    final List<JsonNode> sourceSpecsJson = getSeedJson(outputRoot, SeedConnectorType.SOURCE.getSpecFileName());
-
-    mergeSpecsIntoDefinitions(destinationDefinitionsJson, destinationSpecsJson, ConfigSchema.STANDARD_DESTINATION_DEFINITION);
-    mergeSpecsIntoDefinitions(sourceDefinitionsJson, sourceSpecsJson, ConfigSchema.STANDARD_SOURCE_DEFINITION);
-
-    final CombinedConnectorCatalog combinedCatalog = new CombinedConnectorCatalog()
-        .withDestinations(destinationDefinitionsJson.stream().map(j -> Jsons.object(j, StandardDestinationDefinition.class)).toList())
-        .withSources(sourceDefinitionsJson.stream().map(j -> Jsons.object(j, StandardSourceDefinition.class)).toList());
-
-    IOs.writeFile(outputRoot.resolve(outputFileName), Jsons.toPrettyString(Jsons.jsonNode(combinedCatalog)));
-  }
-
-  private List<JsonNode> getSeedJson(final Path root, final String fileName) {
-    final String jsonString = IOs.readFile(root, fileName);
-    return MoreIterators.toList(Yamls.deserialize(jsonString).elements());
-  }
-
-  /**
-   * Updates all connector definitions with provided specs.
-   *
-   * @param definitions - List of Source or Destination Definitions as generated in the seed files
-   * @param specs - List of connector specs as generated in the seed files (see
-   *        {@link DockerImageSpec})
-   */
-  @VisibleForTesting
-  void mergeSpecsIntoDefinitions(final List<JsonNode> definitions, final List<JsonNode> specs, final ConfigSchema configSchema) {
-    final Map<String, JsonNode> specsByImage = specs.stream().collect(Collectors.toMap(
-        json -> json.get("dockerImage").asText(),
-        json -> json.get("spec")));
-
-    for (final JsonNode definition : definitions) {
-      final String dockerImage = definition.get("dockerRepository").asText() + ":" + definition.get("dockerImageTag").asText();
-      final JsonNode specConfigJson = specsByImage.get(dockerImage);
-
-      if (specConfigJson == null) {
-        throw new UnsupportedOperationException(String.format("A spec for docker image %s was not found", dockerImage));
-      }
-
-      ((ObjectNode) definition).set("spec", specConfigJson);
-
-      if (!definition.hasNonNull("public")) {
-        // All definitions in the catalog are public by default
-        ((ObjectNode) definition).set("public", BooleanNode.TRUE);
-      }
-
-      AirbyteConfigValidator.AIRBYTE_CONFIG_VALIDATOR.ensureAsRuntime(configSchema, definition);
-    }
-  }
-
-}
@@ -0,0 +1,46 @@
/*
 * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
 */

package io.airbyte.configoss.specs;

import io.airbyte.configoss.CatalogDefinitionsConfig;
import java.net.URL;
import java.nio.file.Path;
import org.apache.commons.io.FileUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Downloads the OSS connector registry from the remote connector metadata service.
 */
public class ConnectorRegistryDownloader {

  private static final Logger LOGGER = LoggerFactory.getLogger(ConnectorRegistryDownloader.class);
  private static final String REMOTE_OSS_REGISTRY_URL =
      "https://connectors.airbyte.com/files/registries/v0/oss_registry.json";

  /**
   * Creates a path to the project's resource folder so that the downloaded file is available at
   * runtime via the getResource method.
   */
  public static Path getResourcePath(final String projectPath, final String relativePath) {
    return Path.of(projectPath, "src/main/resources/", relativePath);
  }

  /**
   * Downloads the OSS registry from the remote URL and saves it to the local resource folder.
   */
  public static void main(final String[] args) throws Exception {
    final String projectPath = args[0];
    final String relativeWritePath = CatalogDefinitionsConfig.getLocalCatalogWritePath();
    final Path writePath = getResourcePath(projectPath, relativeWritePath);

    LOGGER.info("Downloading OSS connector registry from {} to {}", REMOTE_OSS_REGISTRY_URL, writePath);

    final int timeout = 10000;
    FileUtils.copyURLToFile(new URL(REMOTE_OSS_REGISTRY_URL), writePath.toFile(), timeout, timeout);
  }

}
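A hedged sketch of the consuming side: once the build has run the downloader, the bundled registry can be read back from the classpath. The resource name below is an assumption for illustration only; the real relative path comes from CatalogDefinitionsConfig.getLocalCatalogWritePath().

// Illustrative sketch only: reading the registry that ConnectorRegistryDownloader
// bundled under src/main/resources. The resource name is an assumption.
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.InputStream;

public class RegistryReadSketch {

  public static void main(final String[] args) throws Exception {
    try (InputStream in = RegistryReadSketch.class.getClassLoader()
        .getResourceAsStream("seed/oss_registry.json")) { // assumed write path
      if (in == null) {
        throw new IllegalStateException("Registry resource missing; run the downloader first.");
      }
      final JsonNode registry = new ObjectMapper().readTree(in);
      // The registry document carries top-level "sources" and "destinations" arrays.
      System.out.println("sources: " + registry.get("sources").size());
      System.out.println("destinations: " + registry.get("destinations").size());
    }
  }

}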
@@ -0,0 +1,42 @@
/*
 * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
 */

package io.airbyte.configoss.specs;

import io.airbyte.commons.cli.Clis;
import java.net.URL;
import java.nio.file.Path;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.io.FileUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ConnectorSpecMaskDownloader {

  private static final Logger LOGGER = LoggerFactory.getLogger(ConnectorSpecMaskDownloader.class);

  private static final String REMOTE_SPEC_SECRET_MASK_URL =
      "https://connectors.airbyte.com/files/registries/v0/specs_secrets_mask.yaml";
  public static final String MASK_FILE = "specs_secrets_mask.yaml";
  private static final Option SPEC_ROOT_OPTION = Option.builder("s").longOpt("specs-root").hasArg(true).required(true)
      .desc("path to where spec files are stored").build();
  private static final Options OPTIONS = new Options().addOption(SPEC_ROOT_OPTION);

  /**
   * Downloads the spec secret mask file from the remote URL and saves it to the local resource
   * folder.
   */
  public static void main(final String[] args) throws Exception {
    final CommandLine parsed = Clis.parse(args, OPTIONS);
    final Path specRoot = Path.of(parsed.getOptionValue(SPEC_ROOT_OPTION.getOpt()));
    final Path writePath = specRoot.resolve(MASK_FILE);
    LOGGER.info("Downloading Spec Secret Mask from {} to {}", REMOTE_SPEC_SECRET_MASK_URL, writePath);

    final int timeout = 10000;
    FileUtils.copyURLToFile(new URL(REMOTE_SPEC_SECRET_MASK_URL), writePath.toFile(), timeout, timeout);
  }

}
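Usage mirrors the generator it replaces: the class is a CLI entry point driven by --specs-root, as the new test later in this commit demonstrates. A minimal hedged invocation sketch, where the scratch directory is an assumption (real builds pass the seed resource folder):

// Illustrative sketch only: driving the downloader programmatically, assuming the
// same io.airbyte.configoss.specs package. The directory is an assumption.
public class MaskDownloadSketch {

  public static void main(final String[] args) throws Exception {
    final String specsRoot = "build/tmp/seed"; // assumed scratch directory
    ConnectorSpecMaskDownloader.main(new String[] {"--specs-root", specsRoot});
    // On success, <specsRoot>/specs_secrets_mask.yaml now exists.
  }

}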
@@ -1,103 +0,0 @@
/*
 * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
 */

package io.airbyte.configoss.specs;

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.JsonNode;
import io.airbyte.commons.cli.Clis;
import io.airbyte.commons.io.IOs;
import io.airbyte.commons.yaml.Yamls;
import io.airbyte.configoss.DockerImageSpec;
import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.file.Path;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.io.FileUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * This script is responsible for generating a set of connection configuration properties that have
 * been marked as <code>secret</code> and therefore should be automatically masked if/when the
 * configuration object is logged.
 * <p>
 * Specs are stored in a separate file from the definitions in an effort to keep the definitions
 * yaml files human-readable and easily-editable, as specs can be rather large.
 * <p>
 * The generated mask file is created in the same location as the spec files provided to this
 * script.
 */
public class ConnectorSpecMaskGenerator {

  private static final Logger LOGGER = LoggerFactory.getLogger(ConnectorSpecMaskGenerator.class);

  public static final String MASK_FILE = "specs_secrets_mask.yaml";
  private static final Option SPEC_ROOT_OPTION = Option.builder("s").longOpt("specs-root").hasArg(true).required(true)
      .desc("path to where spec files are stored").build();
  private static final Options OPTIONS = new Options().addOption(SPEC_ROOT_OPTION);

  public static void main(final String[] args) {
    final CommandLine parsed = Clis.parse(args, OPTIONS);
    final Path specRoot = Path.of(parsed.getOptionValue(SPEC_ROOT_OPTION.getOpt()));

    LOGGER.info("Looking for spec files in '{}'...", specRoot);

    final File[] inputFiles = specRoot.toFile().listFiles();

    if (inputFiles != null) {
      final Set<File> specFiles = Stream.of(inputFiles)
          .filter(file -> file.getName().endsWith("specs.yaml"))
          .collect(Collectors.toSet());

      LOGGER.info("Found {} spec files for processing.", specFiles.size());

      final Set<String> secretPropertyNames = specFiles.stream().map(ConnectorSpecMaskGenerator::readFile)
          .filter(file -> file != null)
          .map(ConnectorSpecMaskGenerator::deserializeYaml)
          .flatMap(List::stream)
          .map(ConnectorSpecMaskGenerator::findSecrets)
          .flatMap(Set::stream)
          .collect(Collectors.toCollection(TreeSet::new));

      final String outputString = String.format("# This file is generated by %s.\n", ConnectorSpecMaskGenerator.class.getName())
          + "# Do NOT edit this file directly. See generator class for more details.\n"
          + Yamls.serialize(Map.of("properties", secretPropertyNames));
      final Path outputPath = IOs.writeFile(specRoot.resolve(MASK_FILE), outputString);
      LOGGER.info("Finished generating spec mask file '{}'.", outputPath);
    } else {
      LOGGER.info("No spec files found in '{}'. Nothing to generate.", specRoot);
    }
  }

  private static List<DockerImageSpec> deserializeYaml(final String yaml) {
    return Yamls.deserialize(yaml, new TypeReference<>() {});
  }

  private static Set<String> findSecrets(final DockerImageSpec spec) {
    final SpecMaskPropertyGenerator specMaskPropertyGenerator = new SpecMaskPropertyGenerator();
    final JsonNode properties = spec.getSpec().getConnectionSpecification().get("properties");
    return specMaskPropertyGenerator.getSecretFieldNames(properties);
  }

  private static String readFile(final File file) {
    try {
      LOGGER.info("Reading spec file '{}'...", file.getAbsolutePath());
      return FileUtils.readFileToString(file, Charset.defaultCharset());
    } catch (final IOException e) {
      LOGGER.error("Unable to read contents of '{}'.", file.getAbsolutePath(), e);
      return null;
    }
  }

}
@@ -1,128 +0,0 @@
/*
 * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
 */

package io.airbyte.configoss.specs;

import com.fasterxml.jackson.databind.JsonNode;
import com.google.cloud.storage.StorageOptions;
import com.google.common.annotations.VisibleForTesting;
import io.airbyte.commons.cli.Clis;
import io.airbyte.commons.io.IOs;
import io.airbyte.commons.json.Jsons;
import io.airbyte.commons.util.MoreIterators;
import io.airbyte.commons.yaml.Yamls;
import io.airbyte.configoss.DockerImageSpec;
import io.airbyte.configoss.EnvConfigs;
import io.airbyte.protocol.models.ConnectorSpecification;
import java.io.IOException;
import java.nio.file.Path;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * This script is responsible for ensuring that up-to-date {@link ConnectorSpecification}s for every
 * connector definition in the seed are stored in a corresponding resource file, for the purpose of
 * seeding the specs into the config database on server startup. See
 * ./airbyte-config-oss/specs-oss/readme.md for more details on how this class is run and how it
 * fits into the project.
 * <p>
 * Specs are stored in a separate file from the definitions in an effort to keep the definitions
 * yaml files human-readable and easily-editable, as specs can be rather large.
 * <p>
 * Specs are fetched from the GCS spec cache bucket, so if any specs are missing from the bucket
 * then this will fail. Note that this script only pulls specs from the bucket cache; it never
 * pushes specs to the bucket. Since this script runs at build time, the decision was to depend on
 * the bucket cache rather than running a docker container to fetch the spec during the build,
 * which could be slow and unwieldy. If there is a failure, check the bucket cache and figure out
 * how to get the correct spec in there.
 */
@SuppressWarnings("PMD.SignatureDeclareThrowsException")
public class SeedConnectorSpecGenerator {

  private static final String DOCKER_REPOSITORY_FIELD = "dockerRepository";
  private static final String DOCKER_IMAGE_TAG_FIELD = "dockerImageTag";
  private static final String DOCKER_IMAGE_FIELD = "dockerImage";
  private static final String SPEC_FIELD = "spec";
  private static final String SPEC_BUCKET_NAME = new EnvConfigs().getSpecCacheBucket();

  private static final Logger LOGGER = LoggerFactory.getLogger(SeedConnectorSpecGenerator.class);

  private static final Option SEED_ROOT_OPTION = Option.builder("s").longOpt("seed-root").hasArg(true).required(true)
      .desc("path to where seed resource files are stored").build();
  private static final Options OPTIONS = new Options().addOption(SEED_ROOT_OPTION);

  private final GcsBucketSpecFetcher bucketSpecFetcher;

  public SeedConnectorSpecGenerator(final GcsBucketSpecFetcher bucketSpecFetcher) {
    this.bucketSpecFetcher = bucketSpecFetcher;
  }

  public static void main(final String[] args) throws Exception {
    final CommandLine parsed = Clis.parse(args, OPTIONS);
    final Path outputRoot = Path.of(parsed.getOptionValue(SEED_ROOT_OPTION.getOpt()));

    final GcsBucketSpecFetcher bucketSpecFetcher = new GcsBucketSpecFetcher(StorageOptions.getDefaultInstance().getService(), SPEC_BUCKET_NAME);
    final SeedConnectorSpecGenerator seedConnectorSpecGenerator = new SeedConnectorSpecGenerator(bucketSpecFetcher);
    seedConnectorSpecGenerator.run(outputRoot, SeedConnectorType.SOURCE);
    seedConnectorSpecGenerator.run(outputRoot, SeedConnectorType.DESTINATION);
  }

  public void run(final Path seedRoot, final SeedConnectorType seedConnectorType) throws IOException {
    LOGGER.info("Updating seeded {} definition specs if necessary...", seedConnectorType.name());

    final JsonNode seedDefinitionsJson = yamlToJson(seedRoot, seedConnectorType.getDefinitionFileName());
    final JsonNode seedSpecsJson = yamlToJson(seedRoot, seedConnectorType.getSpecFileName());

    final List<DockerImageSpec> updatedSeedSpecs = fetchUpdatedSeedSpecs(seedDefinitionsJson, seedSpecsJson);

    final String outputString = String.format("# This file is generated by %s.\n", this.getClass().getName())
        + "# Do NOT edit this file directly. See generator class for more details.\n"
        + Yamls.serialize(updatedSeedSpecs);
    final Path outputPath = IOs.writeFile(seedRoot.resolve(seedConnectorType.getSpecFileName()), outputString);

    LOGGER.info("Finished updating {}", outputPath);
  }

  private JsonNode yamlToJson(final Path root, final String fileName) {
    final String yamlString = IOs.readFile(root, fileName);
    return Yamls.deserialize(yamlString);
  }

  @VisibleForTesting
  final List<DockerImageSpec> fetchUpdatedSeedSpecs(final JsonNode seedDefinitions, final JsonNode currentSeedSpecs) {
    final List<String> seedDefinitionsDockerImages = MoreIterators.toList(seedDefinitions.elements())
        .stream()
        .map(json -> String.format("%s:%s", json.get(DOCKER_REPOSITORY_FIELD).asText(), json.get(DOCKER_IMAGE_TAG_FIELD).asText()))
        .collect(Collectors.toList());

    final Map<String, DockerImageSpec> currentSeedImageToSpec = MoreIterators.toList(currentSeedSpecs.elements())
        .stream()
        .collect(Collectors.toMap(
            json -> json.get(DOCKER_IMAGE_FIELD).asText(),
            json -> new DockerImageSpec().withDockerImage(json.get(DOCKER_IMAGE_FIELD).asText())
                .withSpec(Jsons.object(json.get(SPEC_FIELD), ConnectorSpecification.class))));

    return seedDefinitionsDockerImages
        .stream()
        .map(dockerImage -> currentSeedImageToSpec.containsKey(dockerImage) ? currentSeedImageToSpec.get(dockerImage) : fetchSpecFromGCS(dockerImage))
        .collect(Collectors.toList());
  }

  private DockerImageSpec fetchSpecFromGCS(final String dockerImage) {
    LOGGER.info("Seeded spec not found for docker image {} - fetching from GCS bucket {}...", dockerImage, bucketSpecFetcher.getBucketName());
    final ConnectorSpecification spec = bucketSpecFetcher.attemptFetch(dockerImage)
        .orElseThrow(() -> new RuntimeException(String.format(
            "Failed to fetch valid spec file for docker image %s from GCS bucket %s. This will continue to fail until the connector change has been approved and published. See https://github.com/airbytehq/airbyte/tree/master/docs/connector-development#publishing-a-connector for more details.",
            dockerImage,
            bucketSpecFetcher.getBucketName())));
    return new DockerImageSpec().withDockerImage(dockerImage).withSpec(spec);
  }

}
@@ -1,33 +0,0 @@
/*
 * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
 */

package io.airbyte.configoss.specs;

public enum SeedConnectorType {

  SOURCE(
      "source_definitions.yaml",
      "source_specs.yaml"),
  DESTINATION(
      "destination_definitions.yaml",
      "destination_specs.yaml");

  private final String definitionFileName;
  private final String specFileName;

  SeedConnectorType(final String definitionFileName,
                    final String specFileName) {
    this.definitionFileName = definitionFileName;
    this.specFileName = specFileName;
  }

  public String getDefinitionFileName() {
    return definitionFileName;
  }

  public String getSpecFileName() {
    return specFileName;
  }

}
@@ -1,61 +0,0 @@
/*
 * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
 */

package io.airbyte.configoss.specs;

import com.fasterxml.jackson.databind.JsonNode;
import io.airbyte.commons.constants.AirbyteSecretConstants;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map.Entry;
import java.util.Set;

/**
 * Generates a set of property names from the provided connection spec properties object that are
 * marked as secret.
 */
public class SpecMaskPropertyGenerator {

  /**
   * Builds a set of property names from the provided connection spec properties object that are
   * marked as secret.
   *
   * @param properties The connection spec properties.
   * @return A set of property names that have been marked as secret.
   */
  public Set<String> getSecretFieldNames(final JsonNode properties) {
    final Set<String> secretPropertyNames = new HashSet<>();
    if (properties != null && properties.isObject()) {
      final Iterator<Entry<String, JsonNode>> fields = properties.fields();
      while (fields.hasNext()) {
        final Entry<String, JsonNode> field = fields.next();

        /*
         * If the current field is an object, check if it represents a secret. If it does, add it to the set
         * of property names. If not, recursively call this method again with the field value to see if it
         * contains any secrets.
         *
         * If the current field is an array, recursively call this method for each field within the value to
         * see if any of those contain any secrets.
         */
        if (field.getValue().isObject()) {
          if (field.getValue().has(AirbyteSecretConstants.AIRBYTE_SECRET_FIELD)) {
            if (field.getValue().get(AirbyteSecretConstants.AIRBYTE_SECRET_FIELD).asBoolean()) {
              secretPropertyNames.add(field.getKey());
            }
          } else {
            secretPropertyNames.addAll(getSecretFieldNames(field.getValue()));
          }
        } else if (field.getValue().isArray()) {
          for (int i = 0; i < field.getValue().size(); i++) {
            secretPropertyNames.addAll(getSecretFieldNames(field.getValue().get(i)));
          }
        }
      }
    }

    return secretPropertyNames;
  }

}
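To make the recursion concrete, a small hedged example (not from the commit, and assuming the same io.airbyte.configoss.specs package): for the properties object below, the walk returns the name of every object carrying airbyte_secret: true, including one nested inside a child object.

// Illustrative sketch only: a tiny properties object fed through the generator.
// "password" sits one level down and is still found by the recursive walk.
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.Set;

public class SecretWalkSketch {

  public static void main(final String[] args) throws Exception {
    final JsonNode properties = new ObjectMapper().readTree("""
        {
          "api_key": {"type": "string", "airbyte_secret": true},
          "host": {"type": "string"},
          "tunnel": {"properties": {"password": {"type": "string", "airbyte_secret": true}}}
        }
        """);
    final Set<String> secrets = new SpecMaskPropertyGenerator().getSecretFieldNames(properties);
    System.out.println(secrets); // e.g. [api_key, password]
  }

}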
@@ -1,122 +0,0 @@
/*
 * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
 */

package io.airbyte.configoss.specs;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;

import com.fasterxml.jackson.databind.JsonNode;
import com.google.common.collect.ImmutableMap;
import io.airbyte.commons.json.Jsons;
import io.airbyte.configoss.ConfigSchema;
import io.airbyte.configoss.DockerImageSpec;
import io.airbyte.configoss.StandardDestinationDefinition;
import io.airbyte.protocol.models.ConnectorSpecification;
import java.util.List;
import java.util.UUID;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

class CombinedConnectorCatalogGeneratorTest {

  private static final UUID DEF_ID1 = UUID.randomUUID();
  private static final UUID DEF_ID2 = UUID.randomUUID();
  private static final String CONNECTOR_NAME1 = "connector1";
  private static final String CONNECTOR_NAME2 = "connector2";
  private static final String DOCUMENTATION_URL = "https://www.example.com";
  private static final String DOCKER_REPOSITORY1 = "airbyte/connector1";
  private static final String DOCKER_REPOSITORY2 = "airbyte/connector2";
  private static final String DOCKER_TAG1 = "0.1.0";
  private static final String DOCKER_TAG2 = "0.2.0";

  private CombinedConnectorCatalogGenerator catalogGenerator;

  @BeforeEach
  void setup() {
    catalogGenerator = new CombinedConnectorCatalogGenerator();
  }

  @Test
  void testMergeSpecsIntoDefinitions() {
    final StandardDestinationDefinition destinationDefinition1 = new StandardDestinationDefinition()
        .withDestinationDefinitionId(DEF_ID1)
        .withDockerRepository(DOCKER_REPOSITORY1)
        .withDockerImageTag(DOCKER_TAG1)
        .withName(CONNECTOR_NAME1)
        .withDocumentationUrl(DOCUMENTATION_URL)
        .withSpec(new ConnectorSpecification());
    final StandardDestinationDefinition destinationDefinition2 = new StandardDestinationDefinition()
        .withDestinationDefinitionId(DEF_ID2)
        .withDockerRepository(DOCKER_REPOSITORY2)
        .withDockerImageTag(DOCKER_TAG2)
        .withName(CONNECTOR_NAME2)
        .withDocumentationUrl(DOCUMENTATION_URL)
        .withSpec(new ConnectorSpecification());
    final DockerImageSpec destinationSpec1 = new DockerImageSpec().withDockerImage(DOCKER_REPOSITORY1 + ":" + DOCKER_TAG1)
        .withSpec(new ConnectorSpecification().withConnectionSpecification(Jsons.jsonNode(ImmutableMap.of(
            "foo1",
            "bar1"))));
    final DockerImageSpec destinationSpec2 = new DockerImageSpec().withDockerImage(DOCKER_REPOSITORY2 + ":" + DOCKER_TAG2)
        .withSpec(new ConnectorSpecification().withConnectionSpecification(Jsons.jsonNode(ImmutableMap.of(
            "foo2",
            "bar2"))));

    final List<JsonNode> definitions = List.of(Jsons.jsonNode(destinationDefinition1), Jsons.jsonNode(destinationDefinition2));
    final List<JsonNode> specs = List.of(Jsons.jsonNode(destinationSpec1), Jsons.jsonNode(destinationSpec2));

    catalogGenerator.mergeSpecsIntoDefinitions(definitions, specs, ConfigSchema.STANDARD_DESTINATION_DEFINITION);

    final StandardDestinationDefinition expectedDefinition1 = new StandardDestinationDefinition()
        .withDestinationDefinitionId(DEF_ID1)
        .withDockerRepository(DOCKER_REPOSITORY1)
        .withDockerImageTag(DOCKER_TAG1)
        .withName(CONNECTOR_NAME1)
        .withDocumentationUrl(DOCUMENTATION_URL)
        .withSpec(destinationSpec1.getSpec());

    final StandardDestinationDefinition expectedDefinition2 = new StandardDestinationDefinition()
        .withDestinationDefinitionId(DEF_ID2)
        .withDockerRepository(DOCKER_REPOSITORY2)
        .withDockerImageTag(DOCKER_TAG2)
        .withName(CONNECTOR_NAME2)
        .withDocumentationUrl(DOCUMENTATION_URL)
        .withSpec(destinationSpec2.getSpec());

    assertEquals(Jsons.jsonNode(expectedDefinition1), definitions.get(0));
    assertEquals(Jsons.jsonNode(expectedDefinition2), definitions.get(1));
  }

  @Test
  void testMergeSpecsIntoDefinitionsThrowsOnMissingSpec() {
    final StandardDestinationDefinition destinationDefinition1 = new StandardDestinationDefinition()
        .withDestinationDefinitionId(DEF_ID1)
        .withDockerRepository(DOCKER_REPOSITORY1)
        .withDockerImageTag(DOCKER_TAG1)
        .withName(CONNECTOR_NAME1)
        .withDocumentationUrl(DOCUMENTATION_URL)
        .withSpec(new ConnectorSpecification());
    final List<JsonNode> definitions = List.of(Jsons.jsonNode(destinationDefinition1));
    final List<JsonNode> specs = List.of();

    assertThrows(UnsupportedOperationException.class,
        () -> catalogGenerator.mergeSpecsIntoDefinitions(definitions, specs, ConfigSchema.STANDARD_DESTINATION_DEFINITION));
  }

  @Test
  void testMergeSpecsIntoDefinitionsThrowsOnInvalidFormat() {
    final JsonNode invalidDefinition = Jsons.jsonNode(ImmutableMap.of("dockerRepository", DOCKER_REPOSITORY1, "dockerImageTag", DOCKER_TAG1));
    final DockerImageSpec destinationSpec = new DockerImageSpec().withDockerImage(DOCKER_REPOSITORY1 + ":" + DOCKER_TAG1)
        .withSpec(new ConnectorSpecification().withConnectionSpecification(Jsons.jsonNode(ImmutableMap.of(
            "foo1",
            "bar1"))));

    final List<JsonNode> definitions = List.of(Jsons.jsonNode(invalidDefinition));
    final List<JsonNode> specs = List.of(Jsons.jsonNode(destinationSpec));

    assertThrows(RuntimeException.class,
        () -> catalogGenerator.mergeSpecsIntoDefinitions(definitions, specs, ConfigSchema.STANDARD_DESTINATION_DEFINITION));
  }

}
@@ -0,0 +1,43 @@
/*
 * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
 */

package io.airbyte.configoss.specs;

import static io.airbyte.configoss.specs.ConnectorSpecMaskDownloader.MASK_FILE;
import static org.junit.jupiter.api.Assertions.assertTrue;

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.JsonNode;
import io.airbyte.commons.json.Jsons;
import io.airbyte.commons.yaml.Yamls;
import java.io.File;
import java.nio.charset.Charset;
import java.util.List;
import org.apache.commons.io.FileUtils;
import org.junit.jupiter.api.Test;

/**
 * Test suite for the {@link ConnectorSpecMaskDownloader} class.
 */
class ConnectorSpecMaskDownloaderTest {

  @Test
  void testConnectorSpecMaskDownloader() throws Exception {
    final String directory = "src/test/resources/seed";
    final File outputFile = new File(directory, MASK_FILE);
    final String[] args = {"--specs-root", directory};
    ConnectorSpecMaskDownloader.main(args);
    assertTrue(outputFile.exists());

    final JsonNode maskContents = Yamls.deserialize(FileUtils.readFileToString(outputFile, Charset.defaultCharset()));
    final JsonNode propertiesNode = maskContents.get("properties");
    final List<String> propertiesList = Jsons.object(propertiesNode, new TypeReference<>() {});

    // Assert that the properties list contains more than 50 entries.
    // This is a rough sanity check to ensure that the mask file is not empty.
    assertTrue(propertiesList.size() > 50);
  }

}
@@ -1,49 +0,0 @@
/*
 * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
 */

package io.airbyte.configoss.specs;

import static io.airbyte.configoss.specs.ConnectorSpecMaskGenerator.MASK_FILE;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.JsonNode;
import io.airbyte.commons.json.Jsons;
import io.airbyte.commons.yaml.Yamls;
import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.Set;
import org.apache.commons.io.FileUtils;
import org.junit.jupiter.api.Test;

/**
 * Test suite for the {@link ConnectorSpecMaskGenerator} class.
 */
class ConnectorSpecMaskGeneratorTest {

  @Test
  void testConnectorSpecMaskGenerator() throws IOException {
    final String directory = "src/test/resources/valid_specs";
    final File outputFile = new File(directory, MASK_FILE);
    final String[] args = {"--specs-root", directory};
    ConnectorSpecMaskGenerator.main(args);
    assertTrue(outputFile.exists());

    final JsonNode maskContents = Yamls.deserialize(FileUtils.readFileToString(outputFile, Charset.defaultCharset()));
    assertEquals(Set.of("api_token", "auth_user_password"), Jsons.object(maskContents.get("properties"), new TypeReference<Set<String>>() {}));
  }

  @Test
  void testConnectorSpecMaskGeneratorNoSpecs() throws IOException {
    final String directory = "src/test/resources/no_specs";
    final File outputFile = new File(directory, MASK_FILE);
    final String[] args = {"--specs-root", directory};
    ConnectorSpecMaskGenerator.main(args);
    assertFalse(outputFile.exists());
  }

}
@@ -1,154 +0,0 @@
/*
 * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
 */

package io.airbyte.configoss.specs;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

import com.fasterxml.jackson.databind.JsonNode;
import com.google.common.collect.ImmutableMap;
import io.airbyte.commons.json.Jsons;
import io.airbyte.configoss.DockerImageSpec;
import io.airbyte.configoss.StandardDestinationDefinition;
import io.airbyte.protocol.models.ConnectorSpecification;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.UUID;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

class SeedConnectorSpecGeneratorTest {

  private static final UUID DEF_ID1 = UUID.randomUUID();
  private static final UUID DEF_ID2 = UUID.randomUUID();
  private static final String CONNECTOR_NAME1 = "connector1";
  private static final String CONNECTOR_NAME2 = "connector2";
  private static final String DOCUMENTATION_URL = "https://www.example.com";
  private static final String DOCKER_REPOSITORY1 = "airbyte/connector1";
  private static final String DOCKER_REPOSITORY2 = "airbyte/connector2";
  private static final String DOCKER_TAG1 = "0.1.0";
  private static final String DOCKER_TAG2 = "0.2.0";
  private static final String BUCKET_NAME = "bucket";

  private SeedConnectorSpecGenerator seedConnectorSpecGenerator;
  private GcsBucketSpecFetcher bucketSpecFetcherMock;

  @BeforeEach
  void setup() {
    bucketSpecFetcherMock = mock(GcsBucketSpecFetcher.class);
    when(bucketSpecFetcherMock.getBucketName()).thenReturn(BUCKET_NAME);

    seedConnectorSpecGenerator = new SeedConnectorSpecGenerator(bucketSpecFetcherMock);
  }

  @Test
  void testMissingSpecIsFetched() {
    final StandardDestinationDefinition sourceDefinition1 = new StandardDestinationDefinition()
        .withDestinationDefinitionId(DEF_ID1)
        .withDockerRepository(DOCKER_REPOSITORY1)
        .withDockerImageTag(DOCKER_TAG1)
        .withName(CONNECTOR_NAME1)
        .withDocumentationUrl(DOCUMENTATION_URL);
    final ConnectorSpecification spec1 = new ConnectorSpecification().withConnectionSpecification(Jsons.jsonNode(ImmutableMap.of("foo1", "bar1")));
    final DockerImageSpec dockerImageSpec1 = new DockerImageSpec().withDockerImage(DOCKER_REPOSITORY1 + ":" + DOCKER_TAG1).withSpec(spec1);

    final StandardDestinationDefinition sourceDefinition2 = new StandardDestinationDefinition()
        .withDestinationDefinitionId(DEF_ID2)
        .withDockerRepository(DOCKER_REPOSITORY2)
        .withDockerImageTag(DOCKER_TAG2)
        .withName(CONNECTOR_NAME2)
        .withDocumentationUrl(DOCUMENTATION_URL);
    final ConnectorSpecification spec2 = new ConnectorSpecification().withConnectionSpecification(Jsons.jsonNode(ImmutableMap.of("foo2", "bar2")));
    final DockerImageSpec dockerImageSpec2 = new DockerImageSpec().withDockerImage(DOCKER_REPOSITORY2 + ":" + DOCKER_TAG2).withSpec(spec2);

    final JsonNode seedDefinitions = Jsons.jsonNode(Arrays.asList(sourceDefinition1, sourceDefinition2));
    final JsonNode seedSpecs = Jsons.jsonNode(List.of(dockerImageSpec1));

    when(bucketSpecFetcherMock.attemptFetch(DOCKER_REPOSITORY2 + ":" + DOCKER_TAG2)).thenReturn(Optional.of(spec2));

    final List<DockerImageSpec> actualSeedSpecs = seedConnectorSpecGenerator.fetchUpdatedSeedSpecs(seedDefinitions, seedSpecs);
    final List<DockerImageSpec> expectedSeedSpecs = Arrays.asList(dockerImageSpec1, dockerImageSpec2);

    assertEquals(expectedSeedSpecs, actualSeedSpecs);
  }

  @Test
  void testOutdatedSpecIsFetched() {
    final StandardDestinationDefinition sourceDefinition = new StandardDestinationDefinition()
        .withDestinationDefinitionId(DEF_ID1)
        .withDockerRepository(DOCKER_REPOSITORY1)
        .withDockerImageTag(DOCKER_TAG2)
        .withName(CONNECTOR_NAME1)
        .withDocumentationUrl(DOCUMENTATION_URL);
    final ConnectorSpecification outdatedSpec = new ConnectorSpecification().withConnectionSpecification(Jsons.jsonNode(ImmutableMap.of(
        "foo1",
        "bar1")));
    final DockerImageSpec outdatedDockerImageSpec = new DockerImageSpec().withDockerImage(DOCKER_REPOSITORY1 + ":" + DOCKER_TAG1)
        .withSpec(outdatedSpec);

    final JsonNode seedDefinitions = Jsons.jsonNode(List.of(sourceDefinition));
    final JsonNode seedSpecs = Jsons.jsonNode(List.of(outdatedDockerImageSpec));

    final ConnectorSpecification newSpec = new ConnectorSpecification().withConnectionSpecification(Jsons.jsonNode(ImmutableMap.of("foo2", "bar2")));
    final DockerImageSpec newDockerImageSpec = new DockerImageSpec().withDockerImage(DOCKER_REPOSITORY1 + ":" + DOCKER_TAG2).withSpec(newSpec);

    when(bucketSpecFetcherMock.attemptFetch(DOCKER_REPOSITORY1 + ":" + DOCKER_TAG2)).thenReturn(Optional.of(newSpec));

    final List<DockerImageSpec> actualSeedSpecs = seedConnectorSpecGenerator.fetchUpdatedSeedSpecs(seedDefinitions, seedSpecs);
    final List<DockerImageSpec> expectedSeedSpecs = List.of(newDockerImageSpec);

    assertEquals(expectedSeedSpecs, actualSeedSpecs);
  }

  @Test
  void testExtraneousSpecIsRemoved() {
    final StandardDestinationDefinition sourceDefinition = new StandardDestinationDefinition()
        .withDestinationDefinitionId(DEF_ID1)
        .withDockerRepository(DOCKER_REPOSITORY1)
        .withDockerImageTag(DOCKER_TAG1)
        .withName(CONNECTOR_NAME1)
        .withDocumentationUrl(DOCUMENTATION_URL);
    final ConnectorSpecification spec1 = new ConnectorSpecification().withConnectionSpecification(Jsons.jsonNode(ImmutableMap.of("foo1", "bar1")));
    final DockerImageSpec dockerImageSpec1 = new DockerImageSpec().withDockerImage(DOCKER_REPOSITORY1 + ":" + DOCKER_TAG1).withSpec(spec1);

    final ConnectorSpecification spec2 = new ConnectorSpecification().withConnectionSpecification(Jsons.jsonNode(ImmutableMap.of("foo2", "bar2")));
    final DockerImageSpec dockerImageSpec2 = new DockerImageSpec().withDockerImage(DOCKER_REPOSITORY2 + ":" + DOCKER_TAG2).withSpec(spec2);

    final JsonNode seedDefinitions = Jsons.jsonNode(List.of(sourceDefinition));
    final JsonNode seedSpecs = Jsons.jsonNode(Arrays.asList(dockerImageSpec1, dockerImageSpec2));

    final List<DockerImageSpec> actualSeedSpecs = seedConnectorSpecGenerator.fetchUpdatedSeedSpecs(seedDefinitions, seedSpecs);
    final List<DockerImageSpec> expectedSeedSpecs = List.of(dockerImageSpec1);

    assertEquals(expectedSeedSpecs, actualSeedSpecs);
  }

  @Test
  void testNoFetchIsPerformedIfAllSpecsUpToDate() {
    final StandardDestinationDefinition sourceDefinition = new StandardDestinationDefinition()
        .withDestinationDefinitionId(DEF_ID1)
        .withDockerRepository(DOCKER_REPOSITORY1)
        .withDockerImageTag(DOCKER_TAG1)
        .withName(CONNECTOR_NAME1)
        .withDocumentationUrl(DOCUMENTATION_URL);
    final ConnectorSpecification spec = new ConnectorSpecification().withConnectionSpecification(Jsons.jsonNode(ImmutableMap.of("foo", "bar")));
    final DockerImageSpec dockerImageSpec = new DockerImageSpec().withDockerImage(DOCKER_REPOSITORY1 + ":" + DOCKER_TAG1).withSpec(spec);

    final JsonNode seedDefinitions = Jsons.jsonNode(List.of(sourceDefinition));
    final JsonNode seedSpecs = Jsons.jsonNode(List.of(dockerImageSpec));

    final List<DockerImageSpec> actualSeedSpecs = seedConnectorSpecGenerator.fetchUpdatedSeedSpecs(seedDefinitions, seedSpecs);
    final List<DockerImageSpec> expectedSeedSpecs = List.of(dockerImageSpec);

    assertEquals(expectedSeedSpecs, actualSeedSpecs);
    verify(bucketSpecFetcherMock, never()).attemptFetch(any());
  }

}
@@ -1,64 +0,0 @@
/*
 * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
 */

package io.airbyte.configoss.specs;

import static org.junit.jupiter.api.Assertions.assertEquals;

import com.fasterxml.jackson.databind.JsonNode;
import io.airbyte.commons.json.Jsons;
import java.util.Set;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

/**
 * Test suite for the {@link SpecMaskPropertyGenerator} class.
 */
class SpecMaskPropertyGeneratorTest {

  private SpecMaskPropertyGenerator specMaskPropertyGenerator;

  @BeforeEach
  void setup() {
    specMaskPropertyGenerator = new SpecMaskPropertyGenerator();
  }

  @Test
  void testSecretProperties() {
    final JsonNode json = Jsons.deserialize(
        "{\"api_key\":{\"type\":\"string\",\"description\":\"The API Key for the Airtable account.\",\"title\":\"API Key\",\"airbyte_secret\":true,\"examples\":[\"key1234567890\"],\"base_id\":{\"type\":\"string\",\"description\":\"The Base ID to integrate the data from.\",\"title\":\"Base ID\",\"examples\":[\"app1234567890\"]},\"tables\":{\"type\":\"array\",\"items\":[{\"type\":\"string\"}],\"description\":\"The list of Tables to integrate.\",\"title\":\"Tables\",\"examples\":[\"table 1\",\"table 2\"]}}}");
    final Set<String> propertyNames = specMaskPropertyGenerator.getSecretFieldNames(json);
    assertEquals(Set.of("api_key"), propertyNames);
  }

  @Test
  void testSecretPropertiesFalse() {
    final JsonNode json = Jsons.deserialize(
        "{\"api_key\":{\"type\":\"string\",\"description\":\"The API Key for the Airtable account.\",\"title\":\"API Key\",\"airbyte_secret\":false,\"examples\":[\"key1234567890\"],\"base_id\":{\"type\":\"string\",\"description\":\"The Base ID to integrate the data from.\",\"title\":\"Base ID\",\"examples\":[\"app1234567890\"]},\"tables\":{\"type\":\"array\",\"items\":[{\"type\":\"string\"}],\"description\":\"The list of Tables to integrate.\",\"title\":\"Tables\",\"examples\":[\"table 1\",\"table 2\"]}}}");
    final Set<String> propertyNames = specMaskPropertyGenerator.getSecretFieldNames(json);
    assertEquals(0, propertyNames.size());
  }

  @Test
  void testNestedSecretProperties() {
    final JsonNode json = Jsons.deserialize(
        "{\"title\":\"Authentication Method\",\"type\":\"object\",\"description\":\"The type of authentication to be used\",\"oneOf\":[{\"title\":\"None\",\"additionalProperties\":false,\"description\":\"No authentication will be used\",\"required\":[\"method\"],\"properties\":{\"method\":{\"type\":\"string\",\"const\":\"none\"}}},{\"title\":\"Api Key/Secret\",\"additionalProperties\":false,\"description\":\"Use a api key and secret combination to authenticate\",\"required\":[\"method\",\"apiKeyId\",\"apiKeySecret\"],\"properties\":{\"method\":{\"type\":\"string\",\"const\":\"secret\"},\"apiKeyId\":{\"title\":\"API Key ID\",\"description\":\"The Key ID to used when accessing an enterprise Elasticsearch instance.\",\"type\":\"string\"},\"apiKeySecret\":{\"title\":\"API Key Secret\",\"description\":\"The secret associated with the API Key ID.\",\"type\":\"string\",\"airbyte_secret\":true}}},{\"title\":\"Username/Password\",\"additionalProperties\":false,\"description\":\"Basic auth header with a username and password\",\"required\":[\"method\",\"username\",\"password\"],\"properties\":{\"method\":{\"type\":\"string\",\"const\":\"basic\"},\"username\":{\"title\":\"Username\",\"description\":\"Basic auth username to access a secure Elasticsearch server\",\"type\":\"string\"},\"password\":{\"title\":\"Password\",\"description\":\"Basic auth password to access a secure Elasticsearch server\",\"type\":\"string\",\"airbyte_secret\":true}}}]}");
    final Set<String> propertyNames = specMaskPropertyGenerator.getSecretFieldNames(json);
    assertEquals(Set.of("apiKeySecret", "password"), propertyNames);
  }

  @Test
  void testNullProperties() {
    final Set<String> propertyNames = specMaskPropertyGenerator.getSecretFieldNames(null);
    assertEquals(0, propertyNames.size());
  }

  @Test
  void testNonObjectProperties() {
    final JsonNode json = Jsons.deserialize("{\"array\":[\"foo\",\"bar\"]}");
    final Set<String> propertyNames = specMaskPropertyGenerator.getSecretFieldNames(json.get("array"));
    assertEquals(0, propertyNames.size());
  }

}
@@ -1,58 +0,0 @@
---
- dockerImage: "airbyte/test-image:1.0.0"
  spec:
    documentationUrl: "https://github.com/airbytehq"
    connectionSpecification:
      $schema: "http://json-schema.org/draft-07/schema#"
      description: "A description for a connector."
      properties:
        additional_metrics:
          description:
            "Metrics names that are not pre-defined, such as cohort metrics\
            \ or app specific metrics."
          items:
            type: "string"
          order: 2
          title: "Additional metrics for the connector"
          type: "array"
        api_token:
          airbyte_secret: true
          description: "Adjust API key"
          order: 3
          title: "API Token"
          type: "string"
        host:
          title: "Host"
          description: "Hostname of the database."
          type: "string"
          order: 0
        port:
          title: "Port"
          description: "Port of the database."
          type: "integer"
          minimum: 0
          maximum: 65536
          default: 5432
          examples:
            - "5432"
          order: 1
        oneOf:
          - title: "Password Authentication"
            required:
              - "auth_method"
              - "auth_user_password"
            properties:
              auth_method:
                description: "Connect through password authentication"
                type: "string"
                const: "SSH_PASSWORD_AUTH"
                order: 0
              auth_user_password:
                title: "Password"
                description: "OS-level password for logging into the jump server host"
                type: "string"
                airbyte_secret: true
                order: 1
    supportsNormalization: false
    supportsDBT: false
    supported_destination_sync_modes: []
@@ -5,17 +5,21 @@
 import logging
 from typing import List
 
-import yaml
-
-SOURCE_DEFINITIONS_FILE_PATH = "../../../../../airbyte-config-oss/init-oss/src/main/resources/seed/source_definitions.yaml"
-
+import requests
 
 logging.basicConfig(level=logging.DEBUG)
 
+CONNECTOR_REGISTRY_URL = "https://connectors.airbyte.com/files/registries/v0/oss_registry.json"
+
+
+def download_and_parse_registry_json():
+    response = requests.get(CONNECTOR_REGISTRY_URL)
+    response.raise_for_status()
+    return response.json()
+
 
 def read_source_definitions():
-    with open(SOURCE_DEFINITIONS_FILE_PATH, "r") as source_definitions_file:
-        return yaml.safe_load(source_definitions_file)
+    return download_and_parse_registry_json()["sources"]
 
 
 def find_by_release_stage(source_definitions, release_stage):
@@ -1,5 +0,0 @@
- destinationDefinitionId: {{uuid}}
  name: {{capitalCase name}}
  dockerRepository: airbyte/destination-{{dashCase name}}
  dockerImageTag: 0.1.0
  documentationUrl: https://docs.airbyte.com/integrations/destinations/{{dashCase name}}
@@ -362,13 +362,6 @@ module.exports = function (plop) {
       templateFile: `${javaDestinationInput}/spec.json.hbs`,
       path: `${javaDestinationOutputRoot}/src/main/resources/spec.json`
     },
-    {
-      type: 'append',
-      abortOnFail: true,
-      path: `${definitionRoot}/seed/destination_definitions.yaml`,
-      pattern: '# DESTINATION DEFINITION BY CODE GENERATOR',
-      templateFile: `${javaDestinationInput}/definition.yaml.hbs`,
-    },
     {
       type: 'emitSuccess',
      outputPath: javaDestinationOutputRoot,
@@ -126,8 +126,8 @@ def createSpotlessTarget = { pattern ->
     'tools',
     'secrets',
     'charts', // Helm charts often have injected template strings that will fail general linting. Helm linting is done separately.
     'resources/seed/*_specs.yaml', // Do not remove - this is necessary to prevent diffs in our github workflows, as the file diff check runs between the Format step and the Build step, the latter of which generates the file.
-    'resources/seed/*_catalog.json', // Do not remove - this is also necessary to prevent diffs in our github workflows
+    'resources/seed/*_registry.json', // Do not remove - this is also necessary to prevent diffs in our github workflows
     'airbyte-integrations/connectors/source-amplitude/unit_tests/api_data/zipped.json', // Zipped file presents as non-UTF-8 making spotless sad
     'airbyte-webapp', // The webapp module uses its own auto-formatter, so spotless is not necessary here
     'airbyte-webapp-e2e-tests', // This module also uses its own auto-formatter
@@ -1,120 +0,0 @@
#
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
#

import filecmp
import os

import pytest
import yaml
from airbyte_api_client.model.airbyte_catalog import AirbyteCatalog
from airbyte_api_client.model.airbyte_stream import AirbyteStream
from airbyte_api_client.model.airbyte_stream_and_configuration import AirbyteStreamAndConfiguration
from airbyte_api_client.model.airbyte_stream_configuration import AirbyteStreamConfiguration
from airbyte_api_client.model.destination_sync_mode import DestinationSyncMode
from airbyte_api_client.model.sync_mode import SyncMode
from octavia_cli.generate.renderers import ConnectionRenderer, ConnectorSpecificationRenderer

pytestmark = pytest.mark.integration

SOURCE_SPECS = "../airbyte-config-oss/init-oss/src/main/resources/seed/source_specs.yaml"
DESTINATION_SPECS = "../airbyte-config-oss/init-oss/src/main/resources/seed/destination_specs.yaml"


def get_all_specs_params():
    with open(SOURCE_SPECS, "r") as f:
        source_specs = yaml.safe_load(f)
    with open(DESTINATION_SPECS, "r") as f:
        destination_specs = yaml.safe_load(f)
    return [pytest.param("source", spec, id=spec["dockerImage"]) for spec in source_specs] + [
        pytest.param("destination", spec, id=spec["dockerImage"]) for spec in destination_specs
    ]


@pytest.mark.parametrize("spec_type, spec", get_all_specs_params())
def test_render_spec(spec_type, spec, octavia_tmp_project_directory, mocker):
    renderer = ConnectorSpecificationRenderer(
        resource_name=f"resource-{spec['dockerImage']}",
        definition=mocker.Mock(
            type=spec_type,
            id="foo",
            docker_repository=spec["dockerImage"].split(":")[0],
            docker_image_tag=spec["dockerImage"].split(":")[-1],
            documentation_url=spec["spec"]["documentationUrl"],
            specification=mocker.Mock(connection_specification=spec["spec"]["connectionSpecification"]),
        ),
    )
    output_path = renderer.write_yaml(octavia_tmp_project_directory)
    with open(output_path, "r") as f:
        parsed_yaml = yaml.safe_load(f)
    assert all(
        [
            expected_field in parsed_yaml
            for expected_field in [
                "resource_name",
                "definition_type",
                "definition_id",
                "definition_image",
                "definition_version",
                "configuration",
            ]
        ]
    )


EXPECTED_RENDERED_YAML_PATH = f"{os.path.dirname(__file__)}/expected_rendered_yaml"


@pytest.mark.parametrize(
    "resource_name, spec_type, input_spec_path, expected_yaml_path",
    [
        ("my_postgres_source", "source", "source_postgres/input_spec.yaml", "source_postgres/expected.yaml"),
        ("my_postgres_destination", "destination", "destination_postgres/input_spec.yaml", "destination_postgres/expected.yaml"),
        ("my_s3_destination", "destination", "destination_s3/input_spec.yaml", "destination_s3/expected.yaml"),
    ],
)
def test_expected_output_connector_specification_renderer(
    resource_name, spec_type, input_spec_path, expected_yaml_path, octavia_tmp_project_directory, mocker
):
    with open(os.path.join(EXPECTED_RENDERED_YAML_PATH, input_spec_path), "r") as f:
        input_spec = yaml.safe_load(f)
    renderer = ConnectorSpecificationRenderer(
        resource_name=resource_name,
        definition=mocker.Mock(
            type=spec_type,
            id="foobar",
            docker_repository=input_spec["dockerImage"].split(":")[0],
            docker_image_tag=input_spec["dockerImage"].split(":")[-1],
            documentation_url=input_spec["spec"]["documentationUrl"],
            specification=mocker.Mock(connection_specification=input_spec["spec"]["connectionSpecification"]),
        ),
    )
    output_path = renderer.write_yaml(octavia_tmp_project_directory)
    expect_output_path = os.path.join(EXPECTED_RENDERED_YAML_PATH, expected_yaml_path)
    assert filecmp.cmp(output_path, expect_output_path)


@pytest.mark.parametrize(
    "with_normalization, expected_yaml_path",
    [
        (False, "connection/expected_without_normalization.yaml"),
        (True, "connection/expected_with_normalization.yaml"),
    ],
)
def test_expected_output_connection_renderer(octavia_tmp_project_directory, mocker, with_normalization, expected_yaml_path):
    stream = AirbyteStream(default_cursor_field=["foo"], json_schema={}, name="my_stream", supported_sync_modes=[SyncMode("full_refresh")])
    config = AirbyteStreamConfiguration(
        alias_name="pokemon", selected=True, destination_sync_mode=DestinationSyncMode("append"), sync_mode=SyncMode("full_refresh")
    )
    catalog = AirbyteCatalog([AirbyteStreamAndConfiguration(stream=stream, config=config)])
    mock_source = mocker.Mock(resource_id="my_source_id", configuration_path="source_configuration_path", catalog=catalog)
    mock_destination = mocker.Mock(
        resource_id="my_destination_id",
        configuration_path="destination_configuration_path",
        definition=mocker.Mock(supports_dbt=with_normalization, normalization_config=mocker.Mock(supported=with_normalization)),
    )

    renderer = ConnectionRenderer("my_new_connection", mock_source, mock_destination)
    output_path = renderer.write_yaml(octavia_tmp_project_directory)
    expect_output_path = os.path.join(EXPECTED_RENDERED_YAML_PATH, expected_yaml_path)
    assert filecmp.cmp(output_path, expect_output_path)
@@ -50,7 +50,7 @@ setup(
         "pyyaml~=6.0",
         "analytics-python~=1.4.0",
         "python-slugify~=6.1.2",
-        "urllib3<2"
+        "urllib3<2",
    ],
    python_requires=">=3.9.11",
    extras_require={
@@ -25,14 +25,12 @@ import sys
 from typing import Dict, List, Optional
 
 import requests
-import yaml
 from slack_sdk import WebhookClient
 from slack_sdk.errors import SlackApiError
 
 
 # Global statics
-CONNECTOR_DEFINITIONS_DIR = "./airbyte-config-oss/init-oss/src/main/resources/seed"
-SOURCE_DEFINITIONS_YAML = f"{CONNECTOR_DEFINITIONS_DIR}/source_definitions.yaml"
-DESTINATION_DEFINITIONS_YAML = f"{CONNECTOR_DEFINITIONS_DIR}/destination_definitions.yaml"
+CONNECTOR_REGISTRY_URL = "https://connectors.airbyte.com/files/registries/v0/oss_registry.json"
 CONNECTORS_ROOT_PATH = "./airbyte-integrations/connectors"
 RELEVANT_BASE_MODULES = ["base-normalization", "connector-acceptance-test"]
 CONNECTOR_BUILD_OUTPUT_URL = "https://dnsgjos7lj2fu.cloudfront.net/tests/summary/connectors"
@@ -47,6 +45,12 @@ FAILED_LAST = []
 FAILED_2_LAST = []
 
 
+def download_and_parse_registry_json():
+    response = requests.get(CONNECTOR_REGISTRY_URL)
+    response.raise_for_status()
+    return response.json()
+
+
 def get_status_page(connector) -> str:
     response = requests.get(f"{CONNECTOR_BUILD_OUTPUT_URL}/{connector}/index.html")
     if response.status_code == 200:
@@ -194,19 +198,16 @@ def get_connectors_with_release_stage(definitions_yaml: List, stages: List[str])
     return [definition["dockerRepository"] for definition in definitions_yaml if definition.get("releaseStage", "alpha") in stages]
 
 
-def read_definitions_yaml(path: str):
-    with open(path, "r") as file:
-        return yaml.safe_load(file)
-
-
 def get_connectors_with_release_stages(base_directory: str, connectors: List[str], relevant_stages=["beta", "generally_available"]):
     # TODO currently this also excludes shared libs like source-jdbc, we probably shouldn't do that, so we can get the build status of those
     # modules as well.
     connector_label_to_connector_directory = get_docker_label_to_connector_directory(base_directory, connectors)
 
+    registry_data = download_and_parse_registry_json()
+
     connectors_with_desired_status = get_connectors_with_release_stage(
-        read_definitions_yaml(SOURCE_DEFINITIONS_YAML), relevant_stages
-    ) + get_connectors_with_release_stage(read_definitions_yaml(DESTINATION_DEFINITIONS_YAML), relevant_stages)
+        registry_data["sources"], relevant_stages
+    ) + get_connectors_with_release_stage(registry_data["destinations"], relevant_stages)
     # return appropriate directory names
     return [
         connector_label_to_connector_directory[label]
@@ -258,3 +259,12 @@ ENTRYPOINT ["python", "/airbyte/integration_code/main.py"]
 LABEL io.airbyte.version=1.0.8
 LABEL io.airbyte.name=airbyte/source-salesforce"""
         assert "airbyte/source-salesforce" == parse_dockerfile_repository_label(mock_dockerfile)
+
+    def test_download_and_parse_registry_json(self):
+        registry_data = download_and_parse_registry_json()
+        assert len(registry_data["sources"]) > 20
+        assert len(registry_data["destinations"]) > 20
+
+        # Assert that the dockerRepository is not empty
+        assert registry_data["sources"][0]["dockerRepository"]
+        assert registry_data["destinations"][0]["dockerRepository"]
@@ -1,124 +0,0 @@
#!/usr/bin/env bash

# ------------- Import some defaults for the shell

# Source shell defaults
# $0 is the currently running program (this file)
this_file_directory=$(dirname "$0")
relative_path_to_defaults=$this_file_directory/../shell_defaults

# if a file exists there, source it. otherwise complain
if test -f "$relative_path_to_defaults"; then
  # source and '.' are the same program
  source "$relative_path_to_defaults"
else
  echo -e "\033[31m\nFAILED TO SOURCE TEST RUNNING OPTIONS.\033[39m"
  echo -e "\033[31mTried $relative_path_to_defaults\033[39m"
  exit 1
fi

set +o xtrace # +x easier human reading here

. tools/lib/lib.sh

function check_compose_image_exist() {
  local compose_file=$1
  local tag=$2
  for img in $(grep "image:" "${compose_file}" | tr -d ' ' | cut -d ':' -f2); do
    printf "\t%s: %s\n" "${img}" "${tag}"
    if docker_tag_exists "$img" "$tag"; then
      printf "\tSTATUS: found\n\n"
    else
      printf "\tERROR: not found!\n\n" && exit 1
    fi
  done
}

function docker_tag_exists() {
  # Is true for images stored in the GitHub Container Registry
  repo=$1
  tag=$2
  # we use [[ here because test doesn't support globbing well
  if [[ $repo == ghcr* ]]
  then
    TOKEN_URL=https://ghcr.io/token\?scope\="repository:$1:pull"
    # note: the token must be captured, not discarded to /dev/null
    token=$(curl --silent "$TOKEN_URL" | jq -r '.token')
    URL=https://ghcr.io/v2/$1/manifests/$2
    echo -e "$blue_text""\tURL: $URL""$default_text"
    curl -H "Authorization: Bearer $token" --location --silent --show-error --dump-header header.txt "$URL" > /dev/null
    curl_success=$?
  else
    URL=https://hub.docker.com/v2/repositories/"$1"/tags/"$2"
    echo -e "$blue_text""\tURL: $URL""$default_text"
    curl --silent --show-error --location --dump-header header.txt "$URL" > /dev/null
    curl_success=$?
    # pull the remaining-request count out of a header block that looks like this:
    # < content-length: 1039
    # < x-ratelimit-limit: 180
    # < x-ratelimit-reset: 1665683196
    # < x-ratelimit-remaining: 180
    docker_rate_limit_remaining=$(grep 'x-ratelimit-remaining: ' header.txt | grep --only-matching --extended-regexp "[0-9]+")
    # too noisy when set to < 1. Dockerhub starts complaining somewhere around 10
    if test "$docker_rate_limit_remaining" -lt 20; then
      echo -e "$red_text""We are close to a sensitive dockerhub rate limit!""$default_text"
      echo -e "$red_text""SLEEPING 60s sad times""$default_text"
      sleep 60
      docker_tag_exists "$1" "$2"
    elif test "$docker_rate_limit_remaining" -lt 50; then
      echo -e "$red_text""Rate limit reported as $docker_rate_limit_remaining""$default_text"
    fi
  fi
  if test "$curl_success" -ne 0; then
    echo -e "$red_text""Curl said this didn't work. Please investigate""$default_text"
    exit 1
  fi
}

checkNormalizationImages() {
  echo -e "$blue_text""Checking Normalization images exist...""$default_text"
  local image_version;
  definition_file_path=airbyte-config-oss/init-oss/src/main/resources/seed/destination_definitions.yaml
  # -f True if file exists and is a regular file
  if ! test -f "$definition_file_path"; then
    echo -e "$red_text""Destination definition file not found at path! H4LP!!!""$default_text"
    exit 1
  fi
  normalization_image_versions=$(grep 'normalizationTag:' "$definition_file_path" | cut -d":" -f2 | sed 's:;::' | sed -e 's:"::g' | sed -e 's:[[:space:]]::g')
  IFS=' ' read -r -a array <<< "$normalization_image_versions"
  # Get the first value of the normalization tag
  normalization_image=${array[0]}
  echo -e "$blue_text""Checking normalization images with version $normalization_image exist...""$default_text"
  VERSION=$normalization_image
  check_compose_image_exist airbyte-integrations/bases/base-normalization/docker-compose.yaml "$VERSION"
}

checkConnectorImages() {
  echo -e "$blue_text""Checking connector images exist...""$default_text"
  CONNECTOR_DEFINITIONS=$(grep "dockerRepository" -h -A1 airbyte-config-oss/init-oss/src/main/resources/seed/*.yaml | grep -v -- "^--$" | tr -d ' ')
  [ -z "$CONNECTOR_DEFINITIONS" ] && echo "ERROR: Could not find any connector definition." && exit 1

  # Each definition is a dockerRepository line followed by a dockerImageTag line,
  # so read two lines per iteration.
  while IFS=":" read -r _ REPO; do
    IFS=":" read -r _ TAG
    printf "\t%s: %s\n" "${REPO}" "${TAG}"
    if docker_tag_exists "$REPO" "$TAG"; then
      printf "\tSTATUS: found\n\n"
    else
      printf "\tERROR: not found!\n\n" && exit 1
    fi
  done <<< "${CONNECTOR_DEFINITIONS}"
  echo -e "$blue_text""Success! All connector images exist!""$default_text"
}

main() {
  assert_root

  SUBSET=${1:-all} # default to all.
  [[ ! "$SUBSET" =~ ^(all|connectors)$ ]] && echo "Usage ./tools/bin/check_images_exist.sh [all|connectors]" && exit 1
  echo -e "$blue_text""checking images for: $SUBSET""$default_text"

  [[ "$SUBSET" =~ ^(all|connectors)$ ]] && checkNormalizationImages
  [[ "$SUBSET" =~ ^(all|connectors)$ ]] && checkConnectorImages
  echo -e "$blue_text""Image check complete.""$default_text"
  test -f header.txt && rm header.txt
}

main "$@"
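The Docker Hub probe in the deleted script translates directly; a minimal Python sketch of the non-ghcr branch of `docker_tag_exists` (same public endpoint as above; rate-limit handling omitted):

import requests

def docker_tag_exists(repo: str, tag: str) -> bool:
    # Docker Hub returns 200 for an existing tag and 404 otherwise.
    url = f"https://hub.docker.com/v2/repositories/{repo}/tags/{tag}"
    return requests.get(url, timeout=30).status_code == 200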
@@ -1,15 +1,14 @@
import sys
import os
import os.path
import yaml
import re
from typing import Any, Dict, Text, List
import requests

CONNECTOR_REGISTRY_URL = "https://connectors.airbyte.com/files/registries/v0/oss_registry.json"
CONNECTORS_PATH = "./airbyte-integrations/connectors/"
NORMALIZATION_PATH = "./airbyte-integrations/bases/base-normalization/"
DOC_PATH = "docs/integrations/"
SOURCE_DEFINITIONS_PATH = "./airbyte-config-oss/init-oss/src/main/resources/seed/source_definitions.yaml"
DESTINATION_DEFINITIONS_PATH = "./airbyte-config-oss/init-oss/src/main/resources/seed/destination_definitions.yaml"
IGNORE_LIST = [
    # Java
    "/src/test/", "/src/test-integration/", "/src/testFixtures/",
@@ -36,6 +35,12 @@ IGNORED_DESTINATIONS = [
COMMENT_TEMPLATE_PATH = ".github/comment_templates/connector_dependency_template.md"


def download_and_parse_registry_json():
    response = requests.get(CONNECTOR_REGISTRY_URL)
    response.raise_for_status()
    return response.json()
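A brief usage sketch of the fetch above (the printed fields are taken from the registry assertions elsewhere in this change):

registry_data = download_and_parse_registry_json()
print(len(registry_data["sources"]), "sources in the OSS registry")
print(registry_data["sources"][0]["dockerRepository"])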
def main():
    # The git diff used here checks only the airbyte-integrations/ folder
    # See the .github/workflows/report-connectors-dependency.yml file
@@ -211,10 +216,9 @@ def write_report(depended_connectors):
    with open(COMMENT_TEMPLATE_PATH, "r") as f:
        template = f.read()

    with open(SOURCE_DEFINITIONS_PATH, 'r') as stream:
        source_definitions = yaml.safe_load(stream)
    with open(DESTINATION_DEFINITIONS_PATH, 'r') as stream:
        destination_definitions = yaml.safe_load(stream)
    registry_data = download_and_parse_registry_json()
    source_definitions = registry_data["sources"]
    destination_definitions = registry_data["destinations"]

    affected_sources.sort()
    affected_destinations.sort()
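With the YAML seed reads replaced by the registry payload, per-connector lookups reduce to a scan over the two lists; a hedged helper sketch (the function name is invented for illustration):

def find_definition(registry_data, docker_repository):
    # Scan both connector lists in the registry payload for a matching docker repository.
    for definition in registry_data["sources"] + registry_data["destinations"]:
        if definition["dockerRepository"] == docker_repository:
            return definition
    return None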
@@ -15,7 +15,6 @@ from functools import lru_cache
from urllib.parse import parse_qsl, urljoin, urlparse

import requests
import yaml

ORGANIZATION = "airbytehq"
REPOSITORY = "airbyte"
@@ -25,8 +24,7 @@ BRANCH = "master"
WORKFLOW_PATH = ".github/workflows/test-command.yml"
RUN_UUID_REGEX = re.compile("^UUID ([0-9a-f-]+)$")
SLEEP = 1200
SOURCE_DEFINITIONS = "airbyte-config-oss/init-oss/src/main/resources/seed/source_definitions.yaml"
DESTINATION_DEFINITIONS = "./airbyte-config-oss/init-oss/src/main/resources/seed/destination_definitions.yaml"
CONNECTOR_REGISTRY_URL = "https://connectors.airbyte.com/files/registries/v0/oss_registry.json"
STAGES = ["alpha", "beta", "generally_available"]


@@ -36,6 +34,12 @@ if not GITHUB_TOKEN:
    sys.exit(1)


def download_and_parse_registry_json():
    response = requests.get(CONNECTOR_REGISTRY_URL)
    response.raise_for_status()
    return response.json()

def check_start_aws_runner_failed(jobs):
    """
    !!! WARNING !!! WARNING !!! WARNING !!!
@@ -115,11 +119,12 @@ def get_gradlew_integrations():
@lru_cache
def get_definitions(definition_type):
    assert definition_type in ["source", "destination"]
    filename = SOURCE_DEFINITIONS
    if definition_type == "destination":
        filename = DESTINATION_DEFINITIONS
    with open(filename) as fp:
        return yaml.safe_load(fp)

    plural_key = definition_type + "s"

    registry_data = download_and_parse_registry_json()
    return registry_data[plural_key]
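A short usage sketch for the registry-backed `get_definitions` (the `releaseStage` field name is an assumption based on the STAGES list above):

# @lru_cache means repeated calls reuse the downloaded registry payload.
sources = get_definitions("source")
alpha_sources = [s for s in sources if s.get("releaseStage") == "alpha"]
print(f"{len(alpha_sources)} alpha sources")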


def normalize_stage(stage):

@@ -12,8 +12,8 @@ assert_root

unset SUB_BUILD

LATEST_POSTGRES_SOURCE=$(grep -A1 'airbyte/source-postgres' ./airbyte-config-oss/init-oss/src/main/resources/seed/source_definitions.yaml | grep -v postgres | cut -d ' ' -f 4)
LATEST_POSTGRES_DESTINATION=$(grep -A1 'airbyte/destination-postgres' ./airbyte-config-oss/init-oss/src/main/resources/seed/destination_definitions.yaml | grep -v postgres | cut -d ' ' -f 4)
LATEST_POSTGRES_SOURCE=$(grep -A0 'dockerImageTag' ./airbyte-integrations/connectors/source-postgres/metadata.yaml | cut -d ' ' -f 4)
LATEST_POSTGRES_DESTINATION=$(grep -A0 'dockerImageTag' ./airbyte-integrations/connectors/destination-postgres/metadata.yaml | cut -d ' ' -f 4)

git checkout master && ./gradlew clean :airbyte-integrations:connectors:source-postgres:build -x test && docker tag airbyte/source-postgres:dev airbyte/source-postgres:"$LATEST_POSTGRES_SOURCE"
git checkout master && ./gradlew clean :airbyte-integrations:connectors:destination-postgres:build -x test && docker tag airbyte/destination-postgres:dev airbyte/destination-postgres:"$LATEST_POSTGRES_DESTINATION"

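Grepping metadata.yaml for `dockerImageTag` depends on whitespace; a sturdier sketch with a YAML parser (assumes PyYAML and the usual `data.dockerImageTag` layout of connector metadata files):

import yaml

def get_docker_image_tag(metadata_path: str) -> str:
    # Connector metadata.yaml files nest connector fields under a top-level `data` key.
    with open(metadata_path) as fp:
        return yaml.safe_load(fp)["data"]["dockerImageTag"]

print(get_docker_image_tag("./airbyte-integrations/connectors/source-postgres/metadata.yaml"))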
@@ -1,139 +0,0 @@
#!/usr/bin/env python3
#
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
#

# pip3 install docker
# pip3 install PyYAML


import json
import logging
import os
import re
import subprocess

import docker
import yaml

SOURCE_DEFINITIONS = "airbyte-config-oss/init-oss/src/main/resources/seed/source_definitions.yaml"
DESTINATION_DEFINITIONS = "airbyte-config-oss/init-oss/src/main/resources/seed/destination_definitions.yaml"
SPECS_DIR = "specs"
SPEC_FILE = "spec.json"
PATTERNS = [
    "token",
    "secret",
    "password",
    "key",
    "client_id",
    "service_account",
    "tenant_id",
    "certificate",
    "jwt",
    "credentials",
    "app_id",
    "appid",
]


def git_toplevel():
    process = subprocess.run(
        ["git", "-C", os.path.dirname(__file__), "rev-parse", "--show-toplevel"], check=True, capture_output=True, universal_newlines=True
    )
    return process.stdout.strip()


def get_connectors(filename):
    toplevel = git_toplevel()
    with open(os.path.join(toplevel, filename)) as fp:
        definitions = yaml.safe_load(fp)
    res = {}
    for item in definitions:
        connector_name = item["dockerRepository"][len("airbyte/") :]
        docker_image = item["dockerRepository"] + ":" + item["dockerImageTag"]
        res[connector_name] = docker_image
    return res


def docker_run(client, docker_image):
    try:
        res = client.containers.run(image=docker_image, command=["spec"], detach=False, remove=True)
        return res.decode("utf-8")
    except docker.errors.ContainerError as e:
        logging.exception(e)


def get_spec(output):
    for line in output.splitlines():
        try:
            obj = json.loads(line)
        except ValueError:
            continue
        else:
            if obj.get("type") == "SPEC":
                return obj


def generate_all_specs():
    client = docker.from_env()
    connectors = get_connectors(SOURCE_DEFINITIONS) | get_connectors(DESTINATION_DEFINITIONS)
    for connector_name, docker_image in connectors.items():
        logging.info(f"docker run -ti --rm {docker_image} spec")
        output = docker_run(client, docker_image)
        if output:
            spec = get_spec(output)
            if spec:
                dirname = os.path.join(SPECS_DIR, connector_name)
                os.makedirs(dirname, exist_ok=True)
                with open(os.path.join(dirname, SPEC_FILE), "w") as fp:
                    fp.write(json.dumps(spec, indent=2))


def iter_all_specs(dirname):
    for root, dirs, files in os.walk(dirname):
        if SPEC_FILE in files:
            filename = os.path.join(root, SPEC_FILE)
            with open(filename) as fp:
                try:
                    obj = json.load(fp)
                except ValueError:
                    continue
            if obj.get("type") == "SPEC":
                yield filename, obj


def find_properties(properties, path=None):
    "find all properties recursively"
    if path is None:
        path = []

    for prop_name, prop_obj in properties.items():
        if isinstance(prop_obj, dict):
            if prop_obj.get("type") == "object":
                if "properties" in prop_obj:
                    yield from find_properties(prop_obj["properties"], path=path + [prop_name])
                elif "oneOf" in prop_obj:
                    for n, oneof in enumerate(prop_obj["oneOf"]):
                        yield from find_properties(oneof["properties"], path=path + [prop_name, f"[{n}]"])
            elif prop_obj.get("type", "string") == "array" and prop_obj["items"].get("type") == "object":
                yield from find_properties(prop_obj["items"]["properties"], path=path + [prop_name])
            else:
                yield path, prop_name, prop_obj


def main():
    if not os.path.exists(SPECS_DIR):
        generate_all_specs()

    PATTERN = re.compile("|".join(PATTERNS), re.I)

    for filename, obj in iter_all_specs(SPECS_DIR):
        spec = obj["spec"]
        for prop_path, prop_name, prop_obj in find_properties(spec):
            if prop_obj.get("type") != "boolean" and not prop_obj.get("airbyte_secret") and PATTERN.search(prop_name):
                print(filename, ".".join(prop_path + [prop_name]))


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    main()
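For reference, a self-contained run of the deleted script's `find_properties` traversal on a toy spec (property names invented for illustration):

sample_properties = {
    "host": {"type": "string"},
    "credentials": {
        "type": "object",
        "properties": {"api_key": {"type": "string"}},
    },
}

for path, name, obj in find_properties(sample_properties):
    print(".".join(path + [name]))
# Prints:
# host
# credentials.api_key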
@@ -16,7 +16,7 @@ from ci_connector_ops.pipelines.actions import environments
from ci_connector_ops.pipelines.bases import PytestStep, Step, StepResult, StepStatus
from ci_connector_ops.pipelines.contexts import CIContext
from ci_connector_ops.pipelines.utils import METADATA_FILE_NAME
from ci_connector_ops.utils import DESTINATION_DEFINITIONS_FILE_PATH, SOURCE_DEFINITIONS_FILE_PATH, Connector
from ci_connector_ops.utils import Connector
from dagger import File


@@ -141,8 +141,6 @@ class QaChecks(Step):
            str(self.context.connector.code_directory),
            str(self.context.connector.documentation_file_path),
            str(self.context.connector.icon_path),
            SOURCE_DEFINITIONS_FILE_PATH,
            DESTINATION_DEFINITIONS_FILE_PATH,
        ]
        if (
            self.context.connector.technical_name.endswith("strict-encrypt")

@@ -9,7 +9,7 @@ from enum import Enum
from functools import cached_property
from glob import glob
from pathlib import Path
from typing import Dict, List, Optional, Set, Tuple
from typing import List, Optional, Set, Tuple

import git
import requests
@@ -17,12 +17,6 @@ import yaml
from ci_credentials import SecretsManager
from rich.console import Console

try:
    from yaml import CLoader as Loader
# Some environments do not have a system C YAML loader
except ImportError:
    from yaml import Loader

console = Console()

DIFFED_BRANCH = os.environ.get("DIFFED_BRANCH", "origin/master")
@@ -32,9 +26,6 @@ SOURCE_CONNECTOR_PATH_PREFIX = CONNECTOR_PATH_PREFIX + "/source-"
DESTINATION_CONNECTOR_PATH_PREFIX = CONNECTOR_PATH_PREFIX + "/destination-"
ACCEPTANCE_TEST_CONFIG_FILE_NAME = "acceptance-test-config.yml"
AIRBYTE_DOCKER_REPO = "airbyte"
SOURCE_DEFINITIONS_FILE_PATH = "airbyte-config-oss/init-oss/src/main/resources/seed/source_definitions.yaml"
DESTINATION_DEFINITIONS_FILE_PATH = "airbyte-config-oss/init-oss/src/main/resources/seed/destination_definitions.yaml"
DEFINITIONS_FILE_PATH = {"source": SOURCE_DEFINITIONS_FILE_PATH, "destination": DESTINATION_DEFINITIONS_FILE_PATH}


def download_catalog(catalog_url):
@@ -54,11 +45,6 @@ class ConnectorVersionNotFound(Exception):
    pass


def read_definitions(definitions_file_path: str) -> Dict:
    with open(definitions_file_path) as definitions_file:
        return yaml.load(definitions_file, Loader=Loader)


def get_connector_name_from_path(path):
    return path.split("/")[2]

@@ -109,86 +109,6 @@ cmd_test() {
  ./gradlew --no-daemon --scan "$(_to_gradle_path "$path" integrationTest)"
}

# Bumps connector version in Dockerfile, definitions.yaml file, and updates seeds with gradle.
# This does not build or test, it solely manages the versions of connectors to be +1'd.
#
# NOTE: this does NOT update changelogs because the changelog markdown files do not have a reliable machine-readable
# format to automatically handle this. Someday it could though: https://github.com/airbytehq/airbyte/issues/12031
cmd_bump_version() {
  # Take params
  local connector_path
  local bump_version
  connector_path="$1" # Should look like airbyte-integrations/connectors/source-X
  bump_version="${2:-patch}" # default to a patch bump

  # Set local constants
  connector=${connector_path#airbyte-integrations/connectors/}
  if [[ "$connector" =~ "source-" ]]; then
    connector_type="source"
  elif [[ "$connector" =~ "destination-" ]]; then
    connector_type="destination"
  else
    echo "Invalid connector_type from $connector"
    exit 1
  fi
  definitions_path="./airbyte-config-oss/init-oss/src/main/resources/seed/${connector_type}_definitions.yaml"
  dockerfile="$connector_path/Dockerfile"
  master_dockerfile="/tmp/master_${connector}_dockerfile"
  # This allows getting the contents of a file without checking it out
  git --no-pager show "origin/master:$dockerfile" > "$master_dockerfile"

  # Current version always comes from master, this way we can always bump correctly relative to master
  # versus a potentially stale local branch
  current_version=$(_get_docker_image_version "$master_dockerfile")
  local image_name; image_name=$(_get_docker_image_name "$dockerfile")
  rm "$master_dockerfile"

  ## Create bumped version
  IFS=. read -r major_version minor_version patch_version <<<"${current_version##*-}"
  case "$bump_version" in
    "major")
      ((major_version++))
      minor_version=0
      patch_version=0
      ;;
    "minor")
      ((minor_version++))
      patch_version=0
      ;;
    "patch")
      ((patch_version++))
      ;;
    *)
      echo "Invalid bump_version option: $bump_version. Valid options are major, minor, patch"
      exit 1
  esac

  bumped_version="$major_version.$minor_version.$patch_version"
  # This image should not already exist, if it does, something weird happened
  _error_if_tag_exists "$image_name:$bumped_version"
  echo "$connector:$current_version will be bumped to $connector:$bumped_version"

  ## Write new version to files
  # 1) Dockerfile
  sed -i "s/$current_version/$bumped_version/g" "$dockerfile"

  # 2) Definitions YAML file
  definitions_check=$(yq e ".. | select(has(\"dockerRepository\")) | select(.dockerRepository == \"$connector\")" "$definitions_path")

  if [[ -z "$definitions_check" ]]; then
    echo "Could not find $connector in $definitions_path, exiting 1"
    exit 1
  fi

  connector_name=$(yq e ".[] | select(has(\"dockerRepository\")) | select(.dockerRepository == \"$connector\") | .name" "$definitions_path")
  yq e "(.[] | select(.name == \"$connector_name\").dockerImageTag)|=\"$bumped_version\"" -i "$definitions_path"

  # 3) Seed files
  ./gradlew :airbyte-config:init:processResources

  echo "Woohoo! Successfully bumped $connector:$current_version to $connector:$bumped_version"
}
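The version arithmetic in `cmd_bump_version` is plain semver; a Python sketch of the same bump rules (the function name is illustrative):

def bump_semver(version: str, part: str = "patch") -> str:
    # Mirrors the case statement above: major resets minor and patch, minor resets patch.
    major, minor, patch = (int(x) for x in version.split("."))
    if part == "major":
        return f"{major + 1}.0.0"
    if part == "minor":
        return f"{major}.{minor + 1}.0"
    if part == "patch":
        return f"{major}.{minor}.{patch + 1}"
    raise ValueError(f"Invalid bump option: {part}. Valid options are major, minor, patch")

assert bump_semver("1.0.8", "minor") == "1.1.0"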

cmd_publish() {
  local path=$1; shift || error "Missing target (root path of integration) $USAGE"
  [ -d "$path" ] || error "Path must be the root path of the integration"