## What

Currently the connector image publishing flow uses the project name (i.e. the connector directory) to construct the Docker Hub repository name and ignores the `dockerRepository` defined in the connector's `metadata.yaml`. This creates an inconsistency between the publishing flow and the image pulling flow. This PR fixes that.

## How

A Gradle convention plugin (below) reads `metadata.yaml` and, when `dockerRepository` is present, uses its last path segment as the default Docker image name, falling back to the project name only when the file or the field is missing. It also falls back to a shared Dockerfile when a connector does not provide its own, and generates Docker build args from `metadata.yaml`.

## Review guide

<!--
1. `x.py`
2. `y.py`
-->

## User Impact

<!--
* What is the end result perceived by the user?
* If there are negative side effects, please list them.
-->

## Can this PR be safely reverted and rolled back?

<!--
* If unsure, leave it blank.
-->

- [ ] YES 💚
- [ ] NO ❌
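For illustration, here is a hedged sketch of what a connector module's `build.gradle` could look like with this change in place. The connector name `source-postgres` and the way the convention plugin gets applied to connector modules are assumptions made for the example; the `io.airbyte.gradle.docker` plugin id and the `airbyte.docker` extension come from the plugin code below.

```groovy
// Hypothetical connector build.gradle (e.g. airbyte-integrations/connectors/source-postgres).
// Assumes the convention plugin below is applied to connector modules; that wiring is not
// part of this excerpt.
plugins {
    id 'io.airbyte.gradle.docker'
}

airbyte {
    docker {
        // imageName is deliberately left unset: the convention plugin defaults it to the
        // last path segment of dockerRepository in metadata.yaml, e.g.
        //   dockerRepository: airbyte/source-postgres  ->  imageName: source-postgres
        // so the published image matches the repository consumers pull from.
    }
}
```

Previously the default image name was always `project.name` (the connector directory), which could silently diverge from `dockerRepository`.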
The convention plugin, implemented in `buildSrc` (Groovy):

```groovy
import io.airbyte.gradle.DockerGenerateConnectorBuildArgs
import org.gradle.api.Plugin
import org.gradle.api.Project
import org.gradle.api.Task
import org.gradle.api.file.RegularFile
import org.gradle.api.plugins.ExtensionAware
import org.gradle.api.provider.Provider
import org.yaml.snakeyaml.Yaml

class AirbyteConnectorDockerConventionPlugin implements Plugin<Project> {
    private static final String SHARED_DOCKERFILE = 'docker-images/Dockerfile.java-connector-non-airbyte-ci'

    @Override
    void apply(Project project) {
        /*
         We deliberately avoid importing classes from the published Airbyte Docker
         plugin. That keeps buildSrc free of a compile-time dependency on a specific
         plugin version and lets this convention plugin compile before external
         plugins are resolved.
        */

        project.plugins.withId('io.airbyte.gradle.docker') {
            def dockerExt = ((project.extensions.findByName('airbyte') as ExtensionAware)
                    ?.extensions?.findByName('docker'))

            if (!dockerExt) {
                project.logger.warn('airbyte.docker extension not found; skipping convention plugin.')
                return
            }

            /* ------------------------------------------------------------
             * 1. imageName default = dockerRepository from metadata.yaml (if not already set).
             *    Falls back to project.name if dockerRepository is not in metadata.yaml.
             * ------------------------------------------------------------ */
            if (!dockerExt.imageName.present) {
                def metadataFile = project.layout.projectDirectory.file('metadata.yaml').asFile
                if (metadataFile.exists()) {
                    def yaml = new Yaml()
                    def metadata = metadataFile.withInputStream { yaml.load(it) }
                    def dockerRepo = metadata?.data?.dockerRepository

                    if (dockerRepo) {
                        // Extract the image name from the repository (everything after the last '/').
                        def imageName = dockerRepo.split('/').last()
                        dockerExt.imageName.convention(imageName)
                        project.logger.info("Using dockerRepository from metadata.yaml: ${dockerRepo} -> imageName: ${imageName}")
                    } else {
                        dockerExt.imageName.convention(project.name)
                    }
                } else {
                    dockerExt.imageName.convention(project.name)
                }
            }

            /* ------------------------------------------------------------
             * 2. Use the shared Dockerfile if not overridden. This allows
             *    users to specify a Dockerfile per connector.
             *    The core plugin's dockerCopyDockerfile task will copy
             *    it into build/airbyte/docker/Dockerfile automatically.
             * ------------------------------------------------------------ */
            def userFile = dockerExt.dockerfile.get().asFile
            if (!userFile.exists()) {
                dockerExt.dockerfile.set(project.rootProject.file(SHARED_DOCKERFILE))
            }

            /* ------------------------------------------------------------
             * 3. Task: generate buildArgs.properties from metadata.yaml
             * ------------------------------------------------------------ */
            def genProps = project.tasks.register(
                    'generateConnectorDockerBuildArgs',
                    DockerGenerateConnectorBuildArgs
            ) {
                metadata.set(project.layout.projectDirectory.file('metadata.yaml'))
                output.set(project.layout.buildDirectory.file('docker/buildArgs.properties'))
            }

            /* ------------------------------------------------------------
             * 4. Lazy provider: read the file generated by the
             *    DockerGenerateConnectorBuildArgs task and inject it as buildArgs
             * ------------------------------------------------------------ */
            def propsMapProvider = project.providers
                    .fileContents(genProps.flatMap { it.output } as Provider<RegularFile>)
                    .asText
                    .map { txt ->
                        txt.readLines()
                                .findAll { it.contains('=') }
                                .collectEntries { ln ->
                                    def (k, v) = ln.split('=', 2)
                                    [(k.trim()): v.trim()]
                                }
                    }
            dockerExt.buildArgs.putAll(propsMapProvider)

            /* ------------------------------------------------------------
             * 5. Make dockerBuildx depend on the generator, and register
             *    the properties file as an input (incremental build cache)
             * ------------------------------------------------------------ */
            project.tasks.named('dockerBuildx').configure { Task t ->
                t.dependsOn(genProps)
                t.inputs.file(genProps.flatMap { it.output })
            }
        }
    }
}
```
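The implementation of `DockerGenerateConnectorBuildArgs` is not part of this excerpt. The convention plugin above only assumes that the task writes a flat file of `key=value` lines, which is what step 4 parses with `split('=', 2)`. Below is a minimal sketch of a task with the same shape, using illustrative (assumed) metadata fields and build-arg names rather than the real ones.

```groovy
// NOT the real DockerGenerateConnectorBuildArgs: a sketch of a task with the same shape
// (metadata.yaml in, key=value properties file out). The chosen fields (dockerImageTag,
// dockerRepository) and the arg names are assumptions for illustration only.
import org.gradle.api.DefaultTask
import org.gradle.api.file.RegularFileProperty
import org.gradle.api.tasks.InputFile
import org.gradle.api.tasks.OutputFile
import org.gradle.api.tasks.TaskAction
import org.yaml.snakeyaml.Yaml

abstract class GenerateConnectorBuildArgsSketch extends DefaultTask {
    @InputFile
    abstract RegularFileProperty getMetadata()

    @OutputFile
    abstract RegularFileProperty getOutput()

    @TaskAction
    void generate() {
        def yaml = new Yaml()
        def data = metadata.get().asFile.withInputStream { yaml.load(it) }?.data ?: [:]
        def args = [
                CONNECTOR_VERSION   : data.dockerImageTag ?: 'dev',
                CONNECTOR_REPOSITORY: data.dockerRepository ?: 'unknown',
        ]
        def out = output.get().asFile
        out.parentFile.mkdirs()
        // One key=value pair per line, matching what the buildArgs provider expects.
        out.text = args.collect { k, v -> "${k}=${v}" }.join('\n') + '\n'
    }
}
```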