1
0
mirror of synced 2025-12-25 02:09:19 -05:00

HACKDAY: Icon CDN (#26158)

* Move icons to connector folder

* Delete old icons

* Update upload logic

* Add icon url to definitions

* Update registry model

* Populate cdn url

* DNC butcher the pipeline

* Low hanging fruit fixes

* Fix bucket name

* Merge old and new approaches

* Fix metadata upload step

* Format

* Fix test
This commit is contained in:
Ben Church
2023-05-24 17:25:41 -07:00
committed by GitHub
parent 5707e477ad
commit 248bbf94c1
370 changed files with 255 additions and 88 deletions

View File

@@ -9,8 +9,8 @@ import dagger
from ci_connector_ops.pipelines.actions import run_steps
from ci_connector_ops.pipelines.actions.environments import with_pip_packages, with_poetry_module, with_python_base
from ci_connector_ops.pipelines.bases import Report, Step, StepResult
from ci_connector_ops.pipelines.contexts import PipelineContext, PublishConnectorContext
from ci_connector_ops.pipelines.utils import DAGGER_CONFIG, METADATA_FILE_NAME, execute_concurrently
from ci_connector_ops.pipelines.contexts import PipelineContext
from ci_connector_ops.pipelines.utils import DAGGER_CONFIG, METADATA_FILE_NAME, execute_concurrently, METADATA_ICON_FILE_NAME
# Repo-relative path to the metadata_service project; passed to PoetryRun
# steps (together with METADATA_LIB_MODULE_PATH) to locate the poetry module.
METADATA_DIR = "airbyte-ci/connectors/metadata_service"
# Subdirectory of METADATA_DIR containing the "lib" poetry module that
# provides the `metadata_service` CLI used by the upload step.
METADATA_LIB_MODULE_PATH = "lib"
@@ -29,6 +29,10 @@ def get_metadata_file_from_path(context: PipelineContext, metadata_path: Path) -
return context.get_repo_dir(str(metadata_path.parent), include=[METADATA_FILE_NAME]).file(METADATA_FILE_NAME)
def get_metadata_icon_file_from_path(context: PipelineContext, metadata_icon_path: Path) -> dagger.File:
    """Load the connector's icon file as a dagger.File.

    Resolves the repo directory that contains *metadata_icon_path* (filtered to
    just the icon file) and returns the icon file handle from it.
    """
    icon_dir = context.get_repo_dir(str(metadata_icon_path.parent), include=[METADATA_ICON_FILE_NAME])
    return icon_dir.file(METADATA_ICON_FILE_NAME)
# STEPS
@@ -58,15 +62,31 @@ class MetadataValidation(PoetryRun):
class MetadataUpload(PoetryRun):
def __init__(self, context: PublishConnectorContext):
title = f"Upload {context.metadata_path}"
def __init__(
self,
context: PipelineContext,
metadata_path: Path,
metadata_bucket_name: str,
metadata_service_gcs_credentials_secret: dagger.Secret,
docker_hub_username_secret: dagger.Secret,
docker_hub_password_secret: dagger.Secret,
):
title = f"Upload {metadata_path}"
self.gcs_bucket_name = metadata_bucket_name
super().__init__(context, title, METADATA_DIR, METADATA_LIB_MODULE_PATH)
# Ensure the icon file is included in the upload
base_container = self.poetry_run_container.with_file(METADATA_FILE_NAME, get_metadata_file_from_path(context, metadata_path))
metadata_icon_path = metadata_path.parent / METADATA_ICON_FILE_NAME
if metadata_icon_path.exists():
base_container = base_container.with_file(
METADATA_ICON_FILE_NAME, get_metadata_icon_file_from_path(context, metadata_icon_path)
)
self.poetry_run_container = (
self.poetry_run_container.with_file(METADATA_FILE_NAME, get_metadata_file_from_path(context, context.metadata_path))
.with_secret_variable("DOCKER_HUB_USERNAME", self.context.docker_hub_username_secret)
.with_secret_variable("DOCKER_HUB_PASSWORD", self.context.docker_hub_password_secret)
.with_secret_variable("GCS_CREDENTIALS", self.context.metadata_service_gcs_credentials_secret)
base_container.with_secret_variable("DOCKER_HUB_USERNAME", docker_hub_username_secret)
.with_secret_variable("DOCKER_HUB_PASSWORD", docker_hub_password_secret)
.with_secret_variable("GCS_CREDENTIALS", metadata_service_gcs_credentials_secret)
# The cache buster ensures we always run the upload command (in case of remote bucket change)
.with_env_variable("CACHEBUSTER", str(uuid.uuid4()))
)
@@ -77,7 +97,7 @@ class MetadataUpload(PoetryRun):
"metadata_service",
"upload",
METADATA_FILE_NAME,
self.context.metadata_bucket_name,
self.gcs_bucket_name,
]
)
@@ -254,8 +274,22 @@ async def run_metadata_upload_pipeline(
async with dagger.Connection(DAGGER_CONFIG) as dagger_client:
pipeline_context.dagger_client = dagger_client.pipeline(pipeline_context.pipeline_name)
async with pipeline_context:
gcs_credentials_secret: dagger.Secret = pipeline_context.dagger_client.host().env_variable("GCS_CREDENTIALS").secret()
docker_hub_username_secret: dagger.Secret = pipeline_context.dagger_client.host().env_variable("DOCKER_HUB_USERNAME").secret()
docker_hub_password_secret: dagger.Secret = pipeline_context.dagger_client.host().env_variable("DOCKER_HUB_PASSWORD").secret()
results = await execute_concurrently(
[MetadataUpload(pipeline_context, metadata_path, gcs_bucket_name).run for metadata_path in metadata_to_upload]
[
MetadataUpload(
context=pipeline_context,
metadata_service_gcs_credentials_secret=gcs_credentials_secret,
docker_hub_username_secret=docker_hub_username_secret,
docker_hub_password_secret=docker_hub_password_secret,
metadata_bucket_name=gcs_bucket_name,
metadata_path=metadata_path,
).run
for metadata_path in metadata_to_upload
]
)
pipeline_context.report = Report(pipeline_context, results, name="METADATA UPLOAD RESULTS")

View File

@@ -217,6 +217,15 @@ async def run_connector_publish_pipeline(context: PublishConnectorContext, semap
ConnectorReport: The reports holding publish results.
"""
metadata_upload_step = metadata.MetadataUpload(
context=context,
metadata_service_gcs_credentials_secret=context.metadata_service_gcs_credentials_secret,
docker_hub_username_secret=context.docker_hub_username_secret,
docker_hub_password_secret=context.docker_hub_password_secret,
metadata_bucket_name=context.metadata_bucket_name,
metadata_path=context.metadata_path,
)
def create_connector_report(results: List[StepResult]) -> ConnectorReport:
report = ConnectorReport(context, results, name="PUBLISH RESULTS")
context.report = report
@@ -236,7 +245,7 @@ async def run_connector_publish_pipeline(context: PublishConnectorContext, semap
context.logger.info(
"The connector version is already published. Let's upload metadata.yaml to GCS even if no version bump happened."
)
metadata_upload_results = await metadata.MetadataUpload(context).run()
metadata_upload_results = await metadata_upload_step.run()
results.append(metadata_upload_results)
if check_connector_image_results.status is not StepStatus.SUCCESS:
@@ -268,7 +277,7 @@ async def run_connector_publish_pipeline(context: PublishConnectorContext, semap
return create_connector_report(results)
# Only upload to metadata service bucket if the connector is not a pre-release.
metadata_upload_results = await metadata.MetadataUpload(context).run()
metadata_upload_results = await metadata_upload_step.run()
results.append(metadata_upload_results)
return create_connector_report(results)

View File

@@ -28,6 +28,7 @@ if TYPE_CHECKING:
DAGGER_CONFIG = Config(log_output=sys.stderr)
AIRBYTE_REPO_URL = "https://github.com/airbytehq/airbyte.git"
METADATA_FILE_NAME = "metadata.yaml"
METADATA_ICON_FILE_NAME = "icon.svg"
# This utils will probably be redundant once https://github.com/dagger/dagger/issues/3764 is implemented

View File

@@ -35,6 +35,7 @@ def download_catalog(catalog_url):
OSS_CATALOG = download_catalog(OSS_CATALOG_URL)
METADATA_FILE_NAME = "metadata.yaml"
ICON_FILE_NAME = "icon.svg"
class ConnectorInvalidNameError(Exception):
@@ -110,9 +111,8 @@ class Connector:
@property
def icon_path(self) -> Path:
if self.metadata and self.metadata.get("icon"):
return Path(f"./airbyte-config-oss/init-oss/src/main/resources/icons/{self.metadata['icon']}")
return Path(f"./airbyte-config-oss/init-oss/src/main/resources/icons/{self.name}.svg")
file_path = self.code_directory / ICON_FILE_NAME
return file_path
@property
def code_directory(self) -> Path: