Mirror of https://github.com/kestra-io/kestra.git (synced 2025-12-26 05:00:31 -05:00)

Compare commits: fix/sdk-ch...no-default (2 commits)

| Author | SHA1 | Date |
|---|---|---|
| | 199a6d66f1 | |
| | 5e88ef0eb5 | |
@@ -1,6 +1,5 @@
FROM ubuntu:24.04

ARG BUILDPLATFORM
ARG DEBIAN_FRONTEND=noninteractive

USER root

@@ -32,23 +31,9 @@ ENV SHELL=/bin/zsh

# --------------------------------------
# Java
# --------------------------------------
ARG OS_ARCHITECTURE

RUN mkdir -p /usr/java
RUN echo "Building on platform: $BUILDPLATFORM"
RUN case "$BUILDPLATFORM" in \
      "linux/amd64") OS_ARCHITECTURE="x64_linux" ;; \
      "linux/arm64") OS_ARCHITECTURE="aarch64_linux" ;; \
      "darwin/amd64") OS_ARCHITECTURE="x64_mac" ;; \
      "darwin/arm64") OS_ARCHITECTURE="aarch64_mac" ;; \
      *) echo "Unsupported BUILDPLATFORM: $BUILDPLATFORM" && exit 1 ;; \
    esac && \
    wget "https://github.com/adoptium/temurin21-binaries/releases/download/jdk-21.0.7%2B6/OpenJDK21U-jdk_${OS_ARCHITECTURE}_hotspot_21.0.7_6.tar.gz" && \
    mv OpenJDK21U-jdk_${OS_ARCHITECTURE}_hotspot_21.0.7_6.tar.gz openjdk-21.0.7.tar.gz
RUN tar -xzvf openjdk-21.0.7.tar.gz && \
    mv jdk-21.0.7+6 jdk-21 && \
    mv jdk-21 /usr/java/
ENV JAVA_HOME=/usr/java/jdk-21
RUN wget https://download.oracle.com/java/21/latest/jdk-21_linux-x64_bin.deb
RUN dpkg -i ./jdk-21_linux-x64_bin.deb
ENV JAVA_HOME=/usr/java/jdk-21-oracle-x64
ENV PATH="$PATH:$JAVA_HOME/bin"
# Will load a custom configuration file for Micronaut
ENV MICRONAUT_ENVIRONMENTS=local,override
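This hunk replaces the architecture-specific Temurin tarball download with the Oracle JDK 21 `.deb` package and re-points `JAVA_HOME`. A small sanity check, not part of the diff, that can be run inside the built devcontainer to confirm the resulting layout:

```bash
# Hedged check: JAVA_HOME and PATH are the values set via the ENV lines above.
echo "JAVA_HOME=$JAVA_HOME"
"$JAVA_HOME/bin/java" -version   # should report a Java 21 runtime
command -v java                  # should resolve through the PATH entry added above
```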
@@ -23,18 +23,13 @@ In the meantime, you can move onto the next step...

---

### Requirements

- Java 21 (LTS version).
  > ⚠️ Java 24 and above are not supported yet and will fail with `invalid source release: 21`.
- Gradle (comes with the wrapper `./gradlew`)
- Docker (optional, for running Kestra in containers)

### Development:

- (Optional) By default, your dev server will target `localhost:8080`. If your backend is running elsewhere, you can create `.env.development.local` under the `ui` folder with this content:
```
VITE_APP_API_URL={myApiUrl}
- Create a `.env.development.local` file in the `ui` folder and paste the following:

```bash
# This lets the frontend know what the backend URL is; you are free to change it to your actual server URL, e.g. a hosted version of Kestra.
VITE_APP_API_URL=http://localhost:8080
```

- Navigate into the `ui` folder and run `npm install` to install the dependencies for the frontend project.
@@ -79,6 +74,9 @@ kestra:
      path: /tmp/kestra-wd/tmp
  anonymous-usage-report:
    enabled: false
  server:
    basic-auth:
      enabled: false

datasources:
  postgres:
@@ -39,7 +39,7 @@
        "yoavbls.pretty-ts-errors",
        "github.vscode-github-actions",
        "vscjava.vscode-java-pack",
        "docker.docker"
        "ms-azuretools.vscode-docker"
      ]
    }
  }
.github/CONTRIBUTING.md (vendored, 9 changed lines)

@@ -37,10 +37,6 @@ The following dependencies are required to build Kestra locally:
- Docker & Docker Compose
- an IDE (IntelliJ IDEA, Eclipse or VS Code)

Thanks to the Kestra community, if you use VS Code you can also start development on either the frontend or the backend with a bootstrapped Docker container, without having to set up the environment manually.

Check out the [README](../.devcontainer/README.md) for setup instructions and the associated [Dockerfile](../.devcontainer/Dockerfile) in the repository to get started.

To start contributing:
- [Fork](https://docs.github.com/en/github/getting-started-with-github/fork-a-repo) the repository
- Clone the fork on your workstation:
@@ -50,7 +46,7 @@ git clone git@github.com:{YOUR_USERNAME}/kestra.git
cd kestra
```

#### Develop on the backend
#### Develop backend
The backend is made with [Micronaut](https://micronaut.io).

Open the cloned repository in your favorite IDE. In most decent IDEs, the Gradle build will be detected and all dependencies will be downloaded.
@@ -76,10 +72,11 @@ python3 -m pip install virtualenv
```


#### Develop on the frontend
#### Develop frontend
The frontend is made with [Vue.js](https://vuejs.org/) and located in the `/ui` folder.

- `npm install`
- Create a file `ui/.env.development.local` with the content `VITE_APP_API_URL=http://localhost:8080` (or your actual server URL).
- `npm run dev` will start the development server with hot reload.
- The server starts by default on port 5173 and is reachable at `http://localhost:5173`.
- You can run `npm run build` to build the front-end that will be served by the backend (without running `npm run dev` above).
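Taken together, the frontend bullets above amount to the following shell session; a minimal sketch run from the repository root, using the default backend URL from the docs:

```bash
cd ui
npm install
# Point the dev server at your backend (adjust the URL if Kestra runs elsewhere).
echo "VITE_APP_API_URL=http://localhost:8080" > .env.development.local
npm run dev      # hot-reload dev server on http://localhost:5173
# npm run build  # production build, served by the backend instead of the dev server
```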
.github/actions/plugins-list/action.yml (new file, 29 lines, vendored)

@@ -0,0 +1,29 @@
name: 'Load Kestra Plugin List'
description: 'Composite action to load list of plugins'
inputs:
  plugin-version:
    description: "Kestra version"
    default: 'LATEST'
    required: true
  plugin-file:
    description: "File of the plugins"
    default: './.plugins'
    required: true
outputs:
  plugins:
    description: "List of all Kestra plugins"
    value: ${{ steps.plugins.outputs.plugins }}
  repositories:
    description: "List of all Kestra repositories of plugins"
    value: ${{ steps.plugins.outputs.repositories }}
runs:
  using: composite
  steps:
    - name: Get Plugins List
      id: plugins
      shell: bash
      run: |
        PLUGINS=$([ -f ${{ inputs.plugin-file }} ] && cat ${{ inputs.plugin-file }} | grep "io\\.kestra\\." | sed -e '/#/s/^.//' | sed -e "s/LATEST/${{ inputs.plugin-version }}/g" | cut -d':' -f2- | xargs || echo '');
        REPOSITORIES=$([ -f ${{ inputs.plugin-file }} ] && cat ${{ inputs.plugin-file }} | grep "io\\.kestra\\." | sed -e '/#/s/^.//' | cut -d':' -f1 | uniq | sort | xargs || echo '')
        echo "plugins=$PLUGINS" >> $GITHUB_OUTPUT
        echo "repositories=$REPOSITORIES" >> $GITHUB_OUTPUT
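The `run` script of this action derives its two outputs from a `.plugins` file with a grep/sed/cut pipeline. A standalone sketch of that pipeline, assuming hypothetical entries of the form `<repository>:<maven-coordinate>:<version>` (the real file format is not shown in this diff):

```bash
#!/usr/bin/env bash
# Hypothetical .plugins file; the actual format is an assumption for illustration.
cat > .plugins <<'EOF'
#plugin-notifications:io.kestra.plugin:plugin-notifications:LATEST
plugin-script:io.kestra.plugin:plugin-script-python:LATEST
EOF
PLUGIN_VERSION="0.21.0"

# Same steps as the action: keep io.kestra lines, strip the leading '#' from
# commented lines, substitute LATEST, then split on ':' for each output.
PLUGINS=$(grep "io\.kestra\." .plugins | sed -e '/#/s/^.//' \
  | sed -e "s/LATEST/${PLUGIN_VERSION}/g" | cut -d':' -f2- | xargs)
REPOSITORIES=$(grep "io\.kestra\." .plugins | sed -e '/#/s/^.//' \
  | cut -d':' -f1 | uniq | sort | xargs)

echo "plugins:      $PLUGINS"
echo "repositories: $REPOSITORIES"
```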
.github/actions/setup-vars/action.yml (new file, 20 lines, vendored)

@@ -0,0 +1,20 @@
name: 'Setup vars'
description: 'Composite action to setup common vars'
outputs:
  tag:
    description: "Git tag"
    value: ${{ steps.vars.outputs.tag }}
  commit:
    description: "Git commit"
    value: ${{ steps.vars.outputs.commit }}
runs:
  using: composite
  steps:
    # Setup vars
    - name: Set variables
      id: vars
      shell: bash
      run: |
        TAG=${GITHUB_REF#refs/*/}
        echo "tag=${TAG}" >> $GITHUB_OUTPUT
        echo "commit=$(git rev-parse --short "$GITHUB_SHA")" >> $GITHUB_OUTPUT
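The only non-obvious part of this action is the `${GITHUB_REF#refs/*/}` parameter expansion; a quick illustration with sample ref values (not part of the workflow):

```bash
# ${VAR#pattern} removes the shortest leading match, i.e. the "refs/<kind>/" prefix.
for GITHUB_REF in refs/heads/develop refs/tags/v0.21.0 refs/pull/123/merge; do
  echo "$GITHUB_REF -> ${GITHUB_REF#refs/*/}"
done
# refs/heads/develop  -> develop
# refs/tags/v0.21.0   -> v0.21.0
# refs/pull/123/merge -> 123/merge
```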
.github/dependabot.yml (vendored, 26 changed lines)

@@ -1,31 +1,26 @@
# See GitHub's docs for more information on this file:
# https://docs.github.com/en/free-pro-team@latest/github/administering-a-repository/configuration-options-for-dependency-updates

version: 2
updates:
  # Maintain dependencies for GitHub Actions
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      # Check for updates to GitHub Actions every week
      interval: "weekly"
      day: "wednesday"
      time: "08:00"
      timezone: "Europe/Paris"
    open-pull-requests-limit: 50
    labels:
      - "dependency-upgrade"
    open-pull-requests-limit: 50

  # Maintain dependencies for Gradle modules
  - package-ecosystem: "gradle"
    directory: "/"
    schedule:
      # Check for updates to Gradle modules every week
      interval: "weekly"
      day: "wednesday"
      time: "08:00"
      timezone: "Europe/Paris"
    open-pull-requests-limit: 50
    labels:
      - "dependency-upgrade"
    open-pull-requests-limit: 50

  # Maintain dependencies for NPM modules
  - package-ecosystem: "npm"
@@ -36,15 +31,8 @@ updates:
      time: "08:00"
      timezone: "Europe/Paris"
    open-pull-requests-limit: 50
    labels:
      - "dependency-upgrade"
    labels: ["dependency-upgrade"]
    ignore:
      # Ignore updates of version 1.x, as we're using the beta of 2.x (still in beta)
      # Ignore updates of version 1.x, as we're using beta of 2.x
      - dependency-name: "vue-virtual-scroller"
        versions:
          - "1.x"

      # Ignore updates to monaco-yaml, version is pinned to 5.3.1 due to patch-package script additions
      - dependency-name: "monaco-yaml"
        versions:
          - ">=5.3.2"
        versions: ["1.x"]
.github/workflows/codeql-analysis.yml (new file, 85 lines, vendored)

@@ -0,0 +1,85 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
name: "CodeQL"

on:
  schedule:
    - cron: '0 5 * * 1'

  workflow_dispatch: {}

jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest

    strategy:
      fail-fast: false
      matrix:
        # Override automatic language detection by changing the below list
        # Supported options are ['csharp', 'cpp', 'go', 'java', 'javascript', 'python']
        language: ['java', 'javascript']
        # Learn more...
        # https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#overriding-automatic-language-detection

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          # We must fetch at least the immediate parents so that if this is
          # a pull request then we can checkout the head.
          fetch-depth: 2

      # If this run was triggered by a pull request event, then checkout
      # the head of the pull request instead of the merge commit.
      - run: git checkout HEAD^2
        if: ${{ github.event_name == 'pull_request' }}

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v3
        with:
          languages: ${{ matrix.language }}
          # If you wish to specify custom queries, you can do so here or in a config file.
          # By default, queries listed here will override any specified in a config file.
          # Prefix the list here with "+" to use these queries and those in the config file.
          # queries: ./path/to/local/query, your-org/your-repo/queries@main

      # Set up JDK
      - name: Set up JDK
        uses: actions/setup-java@v4
        if: ${{ matrix.language == 'java' }}
        with:
          distribution: 'temurin'
          java-version: 21

      - name: Setup gradle
        if: ${{ matrix.language == 'java' }}
        uses: gradle/actions/setup-gradle@v4

      - name: Build with Gradle
        if: ${{ matrix.language == 'java' }}
        run: ./gradlew testClasses -x :ui:installFrontend -x :ui:assembleFrontend

      # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
      # If this step fails, then you should remove it and run the build manually (see below)
      - name: Autobuild
        if: ${{ matrix.language != 'java' }}
        uses: github/codeql-action/autobuild@v3

      # ℹ️ Command-line programs to run using the OS shell.
      # 📚 https://git.io/JvXDl

      # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
      # and modify them (or add more) to build your code if your project
      # uses a compiled language

      #- run: |
      #    make bootstrap
      #    make release

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v3
.github/workflows/docker.yml (new file, 147 lines, vendored)

@@ -0,0 +1,147 @@
name: Create Docker images on Release

on:
  workflow_dispatch:
    inputs:
      retag-latest:
        description: 'Retag latest Docker images'
        required: true
        type: string
        default: "true"
        options:
          - "true"
          - "false"
      release-tag:
        description: 'Kestra Release Tag'
        required: false
        type: string
      plugin-version:
        description: 'Plugin version'
        required: false
        type: string
        default: "LATEST"
env:
  PLUGIN_VERSION: ${{ github.event.inputs.plugin-version != null && github.event.inputs.plugin-version || 'LATEST' }}
jobs:
  plugins:
    name: List Plugins
    runs-on: ubuntu-latest
    outputs:
      plugins: ${{ steps.plugins.outputs.plugins }}
    steps:
      # Checkout
      - uses: actions/checkout@v4

      # Get Plugins List
      - name: Get Plugins List
        uses: ./.github/actions/plugins-list
        id: plugins
        with:
          plugin-version: ${{ env.PLUGIN_VERSION }}
  docker:
    name: Publish Docker
    needs: [ plugins ]
    runs-on: ubuntu-latest
    strategy:
      matrix:
        image:
          - name: "-no-plugins"
            plugins: ""
            packages: jattach
            python-libs: ""
          - name: ""
            plugins: ${{needs.plugins.outputs.plugins}}
            packages: python3 python3-venv python-is-python3 python3-pip nodejs npm curl zip unzip jattach
            python-libs: kestra
    steps:
      - uses: actions/checkout@v4

      # Vars
      - name: Set image name
        id: vars
        run: |
          if [[ "${{ inputs.release-tag }}" == "" ]]; then
            TAG=${GITHUB_REF#refs/*/}
            echo "tag=${TAG}" >> $GITHUB_OUTPUT
          else
            TAG="${{ inputs.release-tag }}"
            echo "tag=${TAG}" >> $GITHUB_OUTPUT
          fi

          if [[ "${{ env.PLUGIN_VERSION }}" == *"-SNAPSHOT" ]]; then
            echo "plugins=--repositories=https://s01.oss.sonatype.org/content/repositories/snapshots ${{ matrix.image.plugins }}" >> $GITHUB_OUTPUT;
          else
            echo "plugins=${{ matrix.image.plugins }}" >> $GITHUB_OUTPUT
          fi
      # Download release
      - name: Download release
        uses: robinraju/release-downloader@v1.12
        with:
          tag: ${{steps.vars.outputs.tag}}
          fileName: 'kestra-*'
          out-file-path: build/executable

      - name: Copy exe to image
        run: |
          cp build/executable/* docker/app/kestra && chmod +x docker/app/kestra

      # Docker setup
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3

      - name: Docker - Fix Qemu
        shell: bash
        run: |
          docker run --rm --privileged multiarch/qemu-user-static --reset -p yes -c yes

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      # Docker Login
      - name: Login to DockerHub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_PASSWORD }}

      # Docker Build and push
      - name: Push to Docker Hub
        uses: docker/build-push-action@v6
        with:
          context: .
          push: true
          tags: ${{ format('kestra/kestra:{0}{1}', steps.vars.outputs.tag, matrix.image.name) }}
          platforms: linux/amd64,linux/arm64
          build-args: |
            KESTRA_PLUGINS=${{ steps.vars.outputs.plugins }}
            APT_PACKAGES=${{ matrix.image.packages }}
            PYTHON_LIBRARIES=${{ matrix.image.python-libs }}

      - name: Install regctl
        if: github.event.inputs.retag-latest == 'true'
        uses: regclient/actions/regctl-installer@main

      - name: Retag to latest
        if: github.event.inputs.retag-latest == 'true'
        run: |
          regctl image copy ${{ format('kestra/kestra:{0}{1}', steps.vars.outputs.tag, matrix.image.name) }} ${{ format('kestra/kestra:latest{0}', matrix.image.name) }}

  end:
    runs-on: ubuntu-latest
    needs:
      - docker
    if: always()
    env:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
    steps:
      # Slack
      - name: Slack notification
        uses: Gamesight/slack-workflow-status@master
        if: ${{ always() && env.SLACK_WEBHOOK_URL != 0 }}
        with:
          repo_token: ${{ secrets.GITHUB_TOKEN }}
          slack_webhook_url: ${{ secrets.SLACK_WEBHOOK_URL }}
          name: GitHub Actions
          icon_emoji: ':github-actions:'
          channel: 'C02DQ1A7JLR' # _int_git channel
.github/workflows/e2e.yml (new file, 158 lines, vendored)

@@ -0,0 +1,158 @@
name: 'Reusable Workflow for Running End-to-End Tests'
on:
  workflow_call:
    inputs:
      tags:
        description: "Tags used for filtering tests to include for QA."
        type: string
        required: true
      docker-artifact-name:
        description: "The GitHub artifact containing the Kestra docker image."
        type: string
        required: false
      docker-image-tag:
        description: "The Docker image Tag for Kestra"
        default: 'kestra/kestra:develop'
        type: string
        required: true
      backend:
        description: "The Kestra backend type to be used for E2E tests."
        type: string
        required: true
        default: "postgres"
    secrets:
      GITHUB_AUTH_TOKEN:
        description: "The GitHub Token."
        required: true
      GOOGLE_SERVICE_ACCOUNT:
        description: "The Google Service Account."
        required: false
jobs:
  check:
    timeout-minutes: 60
    runs-on: ubuntu-latest
    env:
      GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
      E2E_TEST_DOCKER_DIR: ./kestra/e2e-tests/docker
      KESTRA_BASE_URL: http://127.27.27.27:8080/ui/
    steps:
      # Checkout kestra
      - name: Checkout kestra
        uses: actions/checkout@v4
        with:
          path: kestra

      # Setup build
      - uses: kestra-io/actions/.github/actions/setup-build@main
        id: build
        with:
          java-enabled: true
          node-enabled: true
          python-enabled: true

      # Get Docker Image
      - name: Download Kestra Image
        if: inputs.docker-artifact-name != ''
        uses: actions/download-artifact@v4
        with:
          name: ${{ inputs.docker-artifact-name }}
          path: /tmp

      - name: Load Kestra Image
        if: inputs.docker-artifact-name != ''
        run: |
          docker load --input /tmp/${{ inputs.docker-artifact-name }}.tar

      # Docker Compose
      - name: Login to DockerHub
        uses: docker/login-action@v3
        if: inputs.docker-artifact-name == ''
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ github.token }}

      # Build configuration
      - name: Create additional application configuration
        run: |
          touch ${{ env.E2E_TEST_DOCKER_DIR }}/data/application-secrets.yml

      - name: Setup additional application configuration
        if: env.APPLICATION_SECRETS != null
        env:
          APPLICATION_SECRETS: ${{ secrets.APPLICATION_SECRETS }}
        run: |
          echo $APPLICATION_SECRETS | base64 -d > ${{ env.E2E_TEST_DOCKER_DIR }}/data/application-secrets.yml

      # Deploy Docker Compose Stack
      - name: Run Kestra (${{ inputs.backend }})
        env:
          KESTRA_DOCKER_IMAGE: ${{ inputs.docker-image-tag }}
        run: |
          cd ${{ env.E2E_TEST_DOCKER_DIR }}
          echo "KESTRA_DOCKER_IMAGE=$KESTRA_DOCKER_IMAGE" >> .env
          docker compose -f docker-compose-${{ inputs.backend }}.yml up -d

      - name: Install Playwright Deps
        run: |
          cd kestra
          ./gradlew playwright --args="install-deps"

      # Run E2E Tests
      - name: Wait For Kestra UI
        run: |
          # Start time
          START_TIME=$(date +%s)
          # Timeout duration in seconds (5 minutes)
          TIMEOUT_DURATION=$((5 * 60))
          while [ $(curl -s -L -o /dev/null -w %{http_code} $KESTRA_BASE_URL) != 200 ]; do
            echo -e $(date) "\tKestra server HTTP state: " $(curl -k -L -s -o /dev/null -w %{http_code} $KESTRA_BASE_URL) " (waiting for 200)";
            # Check the elapsed time
            CURRENT_TIME=$(date +%s)
            ELAPSED_TIME=$((CURRENT_TIME - START_TIME))
            # Break the loop if the elapsed time exceeds the timeout duration
            if [ $ELAPSED_TIME -ge $TIMEOUT_DURATION ]; then
              echo "Timeout reached: Exiting after 5 minutes."
              exit 1;
            fi
            sleep 2;
          done;
          echo "Kestra is running: $KESTRA_BASE_URL 🚀";
        continue-on-error: true

      - name: Run E2E Tests (${{ inputs.tags }})
        if: inputs.tags != ''
        run: |
          cd kestra
          ./gradlew e2eTestsCheck -P tags=${{ inputs.tags }}

      - name: Run E2E Tests
        if: inputs.tags == ''
        run: |
          cd kestra
          ./gradlew e2eTestsCheck

      # Allure check
      - name: Auth to Google Cloud
        id: auth
        if: ${{ !cancelled() && env.GOOGLE_SERVICE_ACCOUNT != 0 }}
        uses: 'google-github-actions/auth@v2'
        with:
          credentials_json: '${{ secrets.GOOGLE_SERVICE_ACCOUNT }}'

      - uses: rlespinasse/github-slug-action@v5

      - name: Publish allure report
        uses: andrcuns/allure-publish-action@v2.9.0
        if: ${{ !cancelled() && env.GOOGLE_SERVICE_ACCOUNT != 0 }}
        env:
          GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_AUTH_TOKEN }}
          JAVA_HOME: /usr/lib/jvm/default-jvm/
        with:
          storageType: gcs
          resultsGlob: build/allure-results
          bucket: internal-kestra-host
          baseUrl: "https://internal.dev.kestra.io"
          prefix: ${{ format('{0}/{1}/{2}', github.repository, env.GITHUB_HEAD_REF_SLUG != '' && env.GITHUB_HEAD_REF_SLUG || github.ref_name, 'allure/playwright') }}
          copyLatest: true
          ignoreMissingResults: true
.github/workflows/generate-translations.yml (new file, 67 lines, vendored)

@@ -0,0 +1,67 @@
name: Auto-Translate UI keys and create PR

on:
  schedule:
    - cron: "0 9-21 * * *" # Every hour from 9 AM to 9 PM
  workflow_dispatch:
    inputs:
      retranslate_modified_keys:
        description: "Whether to re-translate modified keys even if they already have translations."
        type: choice
        options:
          - "false"
          - "true"
        default: "false"
        required: false

jobs:
  translations:
    name: Translations
    runs-on: ubuntu-latest
    timeout-minutes: 10
    steps:
      - uses: actions/checkout@v4
        name: Checkout
        with:
          fetch-depth: 0

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.x"

      - name: Install Python dependencies
        run: pip install gitpython openai

      - name: Generate translations
        run: python ui/src/translations/generate_translations.py ${{ github.event.inputs.retranslate_modified_keys }}
        env:
          OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}

      - name: Set up Node
        uses: actions/setup-node@v4
        with:
          node-version: "20.x"

      - name: Check keys matching
        run: node ui/src/translations/check.js

      - name: Set up Git
        run: |
          git config --global user.name "GitHub Action"
          git config --global user.email "actions@github.com"

      - name: Commit and create PR
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          BRANCH_NAME="chore/update-translations-$(date +%s)"
          git checkout -b $BRANCH_NAME
          git add ui/src/translations/*.json
          if git diff --cached --quiet; then
            echo "No changes to commit. Exiting with success."
            exit 0
          fi
          git commit -m "chore(translations): localize to languages other than English"
          git push -u origin $BRANCH_NAME || (git push origin --delete $BRANCH_NAME && git push -u origin $BRANCH_NAME)
          gh pr create --title "Translations from en.json" --body "This PR was created automatically by a GitHub Action." --base develop --head $BRANCH_NAME --assignee anna-geller --reviewer anna-geller
.github/workflows/gradle-release-plugins.yml (new file, 82 lines, vendored)

@@ -0,0 +1,82 @@
name: Run Gradle Release for Kestra Plugins

on:
  workflow_dispatch:
    inputs:
      releaseVersion:
        description: 'The release version (e.g., 0.21.0-rc1)'
        required: true
        type: string
      nextVersion:
        description: 'The next version (e.g., 0.22.0-SNAPSHOT)'
        required: true
        type: string
      dryRun:
        description: 'Use DRY_RUN mode'
        required: false
        default: 'false'
jobs:
  release:
    name: Release plugins
    runs-on: ubuntu-latest
    steps:
      # Checkout
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      # Checkout GitHub Actions
      - uses: actions/checkout@v4
        with:
          repository: kestra-io/actions
          path: actions
          ref: main

      # Setup build
      - uses: ./actions/.github/actions/setup-build
        id: build
        with:
          java-enabled: true
          node-enabled: true
          python-enabled: true
          caches-enabled: true

      # Get Plugins List
      - name: Get Plugins List
        uses: ./.github/actions/plugins-list
        id: plugins-list
        with:
          plugin-version: 'LATEST'

      - name: 'Configure Git'
        run: |
          git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
          git config --global user.name "github-actions[bot]"

      # Execute
      - name: Run Gradle Release
        if: ${{ github.event.inputs.dryRun == 'false' }}
        env:
          GITHUB_PAT: ${{ secrets.GH_PERSONAL_TOKEN }}
        run: |
          chmod +x ./dev-tools/release-plugins.sh;

          ./dev-tools/release-plugins.sh \
            --release-version=${{github.event.inputs.releaseVersion}} \
            --next-version=${{github.event.inputs.nextVersion}} \
            --yes \
            ${{ steps.plugins-list.outputs.repositories }}

      - name: Run Gradle Release (DRY_RUN)
        if: ${{ github.event.inputs.dryRun == 'true' }}
        env:
          GITHUB_PAT: ${{ secrets.GH_PERSONAL_TOKEN }}
        run: |
          chmod +x ./dev-tools/release-plugins.sh;

          ./dev-tools/release-plugins.sh \
            --release-version=${{github.event.inputs.releaseVersion}} \
            --next-version=${{github.event.inputs.nextVersion}} \
            --dry-run \
            --yes \
            ${{ steps.plugins-list.outputs.repositories }}
.github/workflows/gradle-release.yml (new file, 89 lines, vendored)

@@ -0,0 +1,89 @@
name: Run Gradle Release
run-name: "Releasing Kestra ${{ github.event.inputs.releaseVersion }} 🚀"
on:
  workflow_dispatch:
    inputs:
      releaseVersion:
        description: 'The release version (e.g., 0.21.0-rc1)'
        required: true
        type: string
      nextVersion:
        description: 'The next version (e.g., 0.22.0-SNAPSHOT)'
        required: true
        type: string
env:
  RELEASE_VERSION: "${{ github.event.inputs.releaseVersion }}"
  NEXT_VERSION: "${{ github.event.inputs.nextVersion }}"
jobs:
  release:
    name: Release Kestra
    runs-on: ubuntu-latest
    if: github.ref == 'refs/heads/develop'
    steps:
      # Checks
      - name: Check Inputs
        run: |
          if ! [[ "$RELEASE_VERSION" =~ ^[0-9]+(\.[0-9]+)\.0-rc[01](-SNAPSHOT)?$ ]]; then
            echo "Invalid release version. Must match regex: ^[0-9]+(\.[0-9]+)\.0-rc[01](-SNAPSHOT)?$"
            exit 1
          fi

          if ! [[ "$NEXT_VERSION" =~ ^[0-9]+(\.[0-9]+)\.0-SNAPSHOT$ ]]; then
            echo "Invalid next version. Must match regex: ^[0-9]+(\.[0-9]+)\.0-SNAPSHOT$"
            exit 1;
          fi
      # Checkout
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      # Checkout GitHub Actions
      - uses: actions/checkout@v4
        with:
          repository: kestra-io/actions
          path: actions
          ref: main

      # Setup build
      - uses: ./actions/.github/actions/setup-build
        id: build
        with:
          java-enabled: true
          node-enabled: true
          python-enabled: true
          caches-enabled: true

      - name: Configure Git
        run: |
          git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
          git config --global user.name "github-actions[bot]"

      # Execute
      - name: Run Gradle Release
        env:
          GITHUB_PAT: ${{ secrets.GH_PERSONAL_TOKEN }}
        run: |
          # Extract the major and minor versions
          BASE_VERSION=$(echo "$RELEASE_VERSION" | sed -E 's/^([0-9]+\.[0-9]+)\..*/\1/')
          PUSH_RELEASE_BRANCH="releases/v${BASE_VERSION}.x"

          # Create and push release branch
          git checkout -b "$PUSH_RELEASE_BRANCH";
          git push -u origin "$PUSH_RELEASE_BRANCH";

          # Run gradle release
          git checkout develop;

          if [[ "$RELEASE_VERSION" == *"-SNAPSHOT" ]]; then
            # The -SNAPSHOT qualifier may be used to test release candidates
            ./gradlew release -Prelease.useAutomaticVersion=true \
              -Prelease.releaseVersion="${RELEASE_VERSION}" \
              -Prelease.newVersion="${NEXT_VERSION}" \
              -Prelease.pushReleaseVersionBranch="${PUSH_RELEASE_BRANCH}" \
              -Prelease.failOnSnapshotDependencies=false
          else
            ./gradlew release -Prelease.useAutomaticVersion=true \
              -Prelease.releaseVersion="${RELEASE_VERSION}" \
              -Prelease.newVersion="${NEXT_VERSION}" \
              -Prelease.pushReleaseVersionBranch="${PUSH_RELEASE_BRANCH}"
          fi
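The "Check Inputs" step only accepts `x.y.0-rc0`/`-rc1` release versions and `x.y.0-SNAPSHOT` next versions. The same patterns, tried against a few sample values purely for illustration:

```bash
RE_RELEASE='^[0-9]+(\.[0-9]+)\.0-rc[01](-SNAPSHOT)?$'
RE_NEXT='^[0-9]+(\.[0-9]+)\.0-SNAPSHOT$'
for v in 0.21.0-rc1 0.21.0-rc1-SNAPSHOT 0.21.1-rc1 0.22.0-SNAPSHOT; do
  [[ "$v" =~ $RE_RELEASE ]] && echo "$v is a valid release version"
  [[ "$v" =~ $RE_NEXT ]]    && echo "$v is a valid next version"
done
# 0.21.1-rc1 matches neither pattern: only x.y.0 release candidates are allowed.
```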
.github/workflows/main-build.yml (deleted, 86 lines, vendored)

@@ -1,86 +0,0 @@
name: Main Workflow

on:
  push:
    branches:
      - releases/*
      - develop

  workflow_dispatch:
    inputs:
      skip-test:
        description: 'Skip test'
        type: choice
        required: true
        default: 'false'
        options:
          - "true"
          - "false"

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-main
  cancel-in-progress: true

jobs:
  backend-tests:
    name: Backend tests
    if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}
    uses: kestra-io/actions/.github/workflows/kestra-oss-backend-tests.yml@main
    secrets:
      GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
      SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
      GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}

  frontend-tests:
    name: Frontend tests
    if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}
    uses: kestra-io/actions/.github/workflows/kestra-oss-frontend-tests.yml@main
    secrets:
      CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
      GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}

  publish-develop-docker:
    name: Publish Docker
    needs: [backend-tests, frontend-tests]
    if: "!failure() && !cancelled() && github.ref == 'refs/heads/develop'"
    uses: kestra-io/actions/.github/workflows/kestra-oss-publish-docker.yml@main
    with:
      plugin-version: 'LATEST-SNAPSHOT'
    secrets:
      DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
      DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }}
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}


  publish-develop-maven:
    name: Publish develop Maven
    needs: [ backend-tests, frontend-tests ]
    if: "!failure() && !cancelled() && github.ref == 'refs/heads/develop'"
    uses: kestra-io/actions/.github/workflows/kestra-oss-publish-maven.yml@main
    secrets:
      SONATYPE_USER: ${{ secrets.SONATYPE_USER }}
      SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }}
      SONATYPE_GPG_KEYID: ${{ secrets.SONATYPE_GPG_KEYID }}
      SONATYPE_GPG_PASSWORD: ${{ secrets.SONATYPE_GPG_PASSWORD }}
      SONATYPE_GPG_FILE: ${{ secrets.SONATYPE_GPG_FILE }}

  end:
    runs-on: ubuntu-latest
    needs: [publish-develop-docker, publish-develop-maven]
    if: always()
    steps:
      - name: Trigger EE Workflow
        uses: peter-evans/repository-dispatch@v3
        if: github.ref == 'refs/heads/develop' && needs.release.result == 'success'
        with:
          token: ${{ secrets.GH_PERSONAL_TOKEN }}
          repository: kestra-io/kestra-ee
          event-type: "oss-updated"

      # Slack
      - name: Slack - Notification
        if: ${{ failure() && env.SLACK_WEBHOOK_URL != 0 && (github.ref == 'refs/heads/master' || github.ref == 'refs/heads/main' || github.ref == 'refs/heads/develop') }}
        uses: kestra-io/actions/composite/slack-status@main
        with:
          webhook-url: ${{ secrets.SLACK_WEBHOOK_URL }}
.github/workflows/main.yml (new file, 73 lines, vendored)

@@ -0,0 +1,73 @@
name: Main Workflow

on:
  workflow_dispatch:
    inputs:
      plugin-version:
        description: "Kestra version"
        default: 'LATEST'
        required: true
        type: string
  push:
    branches:
      - master
      - main
      - releases/*
      - develop
    tags:
      - v*

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-main
  cancel-in-progress: true

jobs:
  tests:
    name: Execute tests
    uses: ./.github/workflows/workflow-test.yml
    with:
      report-status: false

  release:
    name: Release
    needs: [tests]
    uses: ./.github/workflows/workflow-release.yml
    with:
      plugin-version: ${{ github.event.inputs.plugin-version != null && github.event.inputs.plugin-version || 'LATEST' }}
    secrets:
      DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
      DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }}
      SONATYPE_USER: ${{ secrets.SONATYPE_USER }}
      SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }}
      SONATYPE_GPG_KEYID: ${{ secrets.SONATYPE_GPG_KEYID }}
      SONATYPE_GPG_PASSWORD: ${{ secrets.SONATYPE_GPG_PASSWORD }}
      SONATYPE_GPG_FILE: ${{ secrets.SONATYPE_GPG_FILE }}

  end:
    runs-on: ubuntu-latest
    needs:
      - release
    if: always()
    env:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
    steps:
      # Update
      - name: Github - Update internal
        uses: benc-uk/workflow-dispatch@v1
        if: github.ref == 'refs/heads/develop' && needs.docker.result == 'success'
        with:
          workflow: oss-build.yml
          repo: kestra-io/infra
          ref: master
          token: ${{ secrets.GH_PERSONAL_TOKEN }}

      # Slack
      - name: Slack - Notification
        uses: Gamesight/slack-workflow-status@master
        if: ${{ always() && env.SLACK_WEBHOOK_URL != 0 }}
        with:
          repo_token: ${{ secrets.GITHUB_TOKEN }}
          slack_webhook_url: ${{ secrets.SLACK_WEBHOOK_URL }}
          name: GitHub Actions
          icon_emoji: ":github-actions:"
          channel: "C02DQ1A7JLR" # _int_git channel
.github/workflows/pre-release.yml (deleted, 60 lines, vendored)

@@ -1,60 +0,0 @@
name: Pre Release

on:
  push:
    tags:
      - 'v*'
  workflow_dispatch:
    inputs:
      skip-test:
        description: 'Skip test'
        type: choice
        required: true
        default: 'false'
        options:
          - "true"
          - "false"

jobs:
  build-artifacts:
    name: Build Artifacts
    uses: kestra-io/actions/.github/workflows/kestra-oss-build-artifacts.yml@main

  backend-tests:
    name: Backend tests
    uses: kestra-io/actions/.github/workflows/kestra-oss-backend-tests.yml@main
    if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}
    secrets:
      GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
      SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
      GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}

  frontend-tests:
    name: Frontend tests
    uses: kestra-io/actions/.github/workflows/kestra-oss-frontend-tests.yml@main
    if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}
    secrets:
      GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

  publish-maven:
    name: Publish Maven
    needs: [ backend-tests, frontend-tests ]
    if: "!failure() && !cancelled()"
    uses: kestra-io/actions/.github/workflows/kestra-oss-publish-maven.yml@main
    secrets:
      SONATYPE_USER: ${{ secrets.SONATYPE_USER }}
      SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }}
      SONATYPE_GPG_KEYID: ${{ secrets.SONATYPE_GPG_KEYID }}
      SONATYPE_GPG_PASSWORD: ${{ secrets.SONATYPE_GPG_PASSWORD }}
      SONATYPE_GPG_FILE: ${{ secrets.SONATYPE_GPG_FILE }}

  publish-github:
    name: Github Release
    needs: [build-artifacts, backend-tests, frontend-tests]
    if: "!failure() && !cancelled()"
    uses: kestra-io/actions/.github/workflows/kestra-oss-publish-github.yml@main
    secrets:
      GH_PERSONAL_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}
      SLACK_RELEASES_WEBHOOK_URL: ${{ secrets.SLACK_RELEASES_WEBHOOK_URL }}
.github/workflows/pull-request-cleanup.yml (deleted, 16 lines, vendored)

@@ -1,16 +0,0 @@
name: Pull Request - Delete Docker

on:
  pull_request:
    types: [closed]
# TODO import a reusable one
jobs:
  publish:
    name: Pull Request - Delete Docker
    if: github.repository == 'kestra-io/kestra' # prevent running on forks
    runs-on: ubuntu-latest
    steps:
      - uses: dataaxiom/ghcr-cleanup-action@v1
        with:
          package: kestra-pr
          delete-tags: ${{ github.event.pull_request.number }}
.github/workflows/pull-request.yml (vendored, 30 changed lines)

@@ -2,6 +2,8 @@ name: Pull Request Workflow

on:
  pull_request:
    branches:
      - develop

concurrency:
  group: ${{ github.workflow }}-${{ github.ref_name }}-pr
@@ -9,7 +11,6 @@ concurrency:

jobs:
  file-changes:
    if: ${{ github.event.pull_request.draft == false }}
    name: File changes detection
    runs-on: ubuntu-latest
    timeout-minutes: 60
@@ -32,7 +33,7 @@ jobs:
    name: Frontend - Tests
    needs: [file-changes]
    if: "needs.file-changes.outputs.ui == 'true'"
    uses: kestra-io/actions/.github/workflows/kestra-oss-frontend-tests.yml@main
    uses: ./.github/workflows/workflow-frontend-test.yml
    secrets:
      GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
@@ -41,17 +42,26 @@ jobs:
    name: Backend - Tests
    needs: file-changes
    if: "needs.file-changes.outputs.backend == 'true'"
    uses: kestra-io/actions/.github/workflows/kestra-oss-backend-tests.yml@main
    uses: ./.github/workflows/workflow-backend-test.yml
    secrets:
      GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
      SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
      GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}

  e2e-tests:
    name: E2E - Tests
    uses: kestra-io/actions/.github/workflows/kestra-oss-e2e-tests.yml@main

  generate-pull-request-docker-image:
    name: Generate PR docker image
    uses: kestra-io/actions/.github/workflows/kestra-oss-pullrequest-publish-docker.yml@main
  end:
    name: End
    runs-on: ubuntu-latest
    if: always()
    needs: [frontend, backend]
    steps:
      # Slack
      - name: Slack notification
        uses: Gamesight/slack-workflow-status@master
        if: ${{ always() && env.SLACK_WEBHOOK_URL != 0 }}
        with:
          repo_token: ${{ secrets.GITHUB_TOKEN }}
          slack_webhook_url: ${{ secrets.SLACK_WEBHOOK_URL }}
          name: GitHub Actions
          icon_emoji: ":github-actions:"
          channel: "C02DQ1A7JLR"
.github/workflows/release-docker.yml (deleted, 34 lines, vendored)

@@ -1,34 +0,0 @@
name: Publish docker

on:
  workflow_dispatch:
    inputs:
      retag-latest:
        description: 'Retag latest Docker images'
        required: true
        type: boolean
        default: false
      retag-lts:
        description: 'Retag LTS Docker images'
        required: true
        type: boolean
        default: false
      dry-run:
        description: 'Dry run mode that will not write or release anything'
        required: true
        type: boolean
        default: false

jobs:
  publish-docker:
    name: Publish Docker
    if: startsWith(github.ref, 'refs/tags/v')
    uses: kestra-io/actions/.github/workflows/kestra-oss-publish-docker.yml@main
    with:
      retag-latest: ${{ inputs.retag-latest }}
      retag-lts: ${{ inputs.retag-lts }}
      dry-run: ${{ inputs.dry-run }}
    secrets:
      DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
      DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }}
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
.github/workflows/setversion-tag-plugins.yml (new file, 60 lines, vendored)

@@ -0,0 +1,60 @@
name: Set Version and Tag Plugins

on:
  workflow_dispatch:
    inputs:
      releaseVersion:
        description: 'The release version (e.g., 0.21.0)'
        required: true
        type: string
      dryRun:
        description: 'Use DRY_RUN mode'
        required: false
        default: 'false'
jobs:
  tag:
    name: Release plugins
    runs-on: ubuntu-latest
    steps:
      # Checkout
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      # Get Plugins List
      - name: Get Plugins List
        uses: ./.github/actions/plugins-list
        id: plugins-list
        with:
          plugin-version: 'LATEST'

      - name: 'Configure Git'
        run: |
          git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
          git config --global user.name "github-actions[bot]"

      # Execute
      - name: Set Version and Tag Plugins
        if: ${{ github.event.inputs.dryRun == 'false' }}
        env:
          GITHUB_PAT: ${{ secrets.GH_PERSONAL_TOKEN }}
        run: |
          chmod +x ./dev-tools/setversion-tag-plugins.sh;

          ./dev-tools/setversion-tag-plugins.sh \
            --release-version=${{github.event.inputs.releaseVersion}} \
            --yes \
            ${{ steps.plugins-list.outputs.repositories }}

      - name: Set Version and Tag Plugins (DRY_RUN)
        if: ${{ github.event.inputs.dryRun == 'true' }}
        env:
          GITHUB_PAT: ${{ secrets.GH_PERSONAL_TOKEN }}
        run: |
          chmod +x ./dev-tools/setversion-tag-plugins.sh;

          ./dev-tools/setversion-tag-plugins.sh \
            --release-version=${{github.event.inputs.releaseVersion}} \
            --dry-run \
            --yes \
            ${{ steps.plugins-list.outputs.repositories }}
.github/workflows/setversion-tag.yml (new file, 58 lines, vendored)

@@ -0,0 +1,58 @@
name: Set Version and Tag
run-name: "Set version and Tag Kestra to ${{ github.event.inputs.releaseVersion }} 🚀"
on:
  workflow_dispatch:
    inputs:
      releaseVersion:
        description: 'The release version (e.g., 0.21.1)'
        required: true
        type: string
env:
  RELEASE_VERSION: "${{ github.event.inputs.releaseVersion }}"
jobs:
  release:
    name: Release Kestra
    runs-on: ubuntu-latest
    if: startsWith(github.ref, 'refs/heads/releases/v')
    steps:
      # Checks
      - name: Check Inputs
        run: |
          if ! [[ "$RELEASE_VERSION" =~ ^[0-9]+(\.[0-9]+)(\.[0-9]+)(-rc[0-9])?(-SNAPSHOT)?$ ]]; then
            echo "Invalid release version. Must match regex: ^[0-9]+(\.[0-9]+)(\.[0-9]+)-(rc[0-9])?(-SNAPSHOT)?$"
            exit 1
          fi

          CURRENT_BRANCH="{{ github.ref }}"

          # Extract the major and minor versions
          BASE_VERSION=$(echo "$RELEASE_VERSION" | sed -E 's/^([0-9]+\.[0-9]+)\..*/\1/')
          RELEASE_BRANCH="refs/heads/releases/v${BASE_VERSION}.x"

          if ! [[ "$CURRENT_BRANCH" == "$RELEASE_BRANCH" ]]; then
            echo "Invalid release branch. Expected $RELEASE_BRANCH, was $CURRENT_BRANCH"
            exit 1
          fi

      # Checkout
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Configure Git
        run: |
          git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
          git config --global user.name "github-actions[bot]"

      # Execute
      - name: Run Gradle Release
        env:
          GITHUB_PAT: ${{ secrets.GH_PERSONAL_TOKEN }}
        run: |
          # Update version
          sed -i "s/^version=.*/version=$RELEASE_VERSION/" ./gradle.properties
          git add ./gradle.properties
          git commit -m"chore(version): update to version '$RELEASE_VERSION'"
          git push
          git tag -a "v$RELEASE_VERSION" -m"v$RELEASE_VERSION"
          git push origin "v$RELEASE_VERSION"
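The release step boils down to a `sed` rewrite of the `version=` line in `gradle.properties`, followed by a commit and a tag. A throwaway illustration of the `sed` part only (the second property is hypothetical):

```bash
printf 'version=0.21.1-SNAPSHOT\norg.gradle.jvmargs=-Xmx2g\n' > gradle.properties  # hypothetical contents
RELEASE_VERSION=0.21.1
sed -i "s/^version=.*/version=$RELEASE_VERSION/" ./gradle.properties
cat gradle.properties   # version=0.21.1 ; other properties are left untouched
```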
.github/workflows/vulnerabilities-check.yml (vendored, 81 changed lines)

@@ -8,25 +8,30 @@ on:
env:
  JAVA_VERSION: '21'

permissions:
  contents: read

jobs:
  dependency-check:
    name: Dependency Check
    runs-on: ubuntu-latest
    steps:
      # Checkout
      - uses: actions/checkout@v5
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      # Checkout GitHub Actions
      - uses: actions/checkout@v4
        with:
          repository: kestra-io/actions
          path: actions
          ref: main

      # Setup build
      - uses: kestra-io/actions/composite/setup-build@main
      - uses: ./actions/.github/actions/setup-build
        id: build
        with:
          java-enabled: true
          node-enabled: true
          caches-enabled: true

      # Npm
      - name: Npm - Install
@@ -48,3 +53,69 @@ jobs:
        with:
          name: dependency-check-report
          path: build/reports/dependency-check-report.html

  develop-image-check:
    name: Image Check (develop)
    runs-on: ubuntu-latest
    steps:
      # Checkout
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      # Checkout GitHub Actions
      - uses: actions/checkout@v4
        with:
          repository: kestra-io/actions
          path: actions
          ref: main

      # Setup build
      - uses: ./actions/.github/actions/setup-build
        id: build
        with:
          java-enabled: false
          node-enabled: false
          caches-enabled: true

      # Run Trivy image scan for Docker vulnerabilities, see https://github.com/aquasecurity/trivy-action
      - name: Docker Vulnerabilities Check
        uses: aquasecurity/trivy-action@0.30.0
        with:
          image-ref: kestra/kestra:develop
          format: table
          skip-dirs: /app/plugins
          scanners: vuln

  latest-image-check:
    name: Image Check (latest)
    runs-on: ubuntu-latest
    steps:
      # Checkout
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      # Checkout GitHub Actions
      - uses: actions/checkout@v4
        with:
          repository: kestra-io/actions
          path: actions
          ref: main

      # Setup build
      - uses: ./actions/.github/actions/setup-build
        id: build
        with:
          java-enabled: false
          node-enabled: false
          caches-enabled: true

      # Run Trivy image scan for Docker vulnerabilities, see https://github.com/aquasecurity/trivy-action
      - name: Docker Vulnerabilities Check
        uses: aquasecurity/trivy-action@0.30.0
        with:
          image-ref: kestra/kestra:latest
          format: table
          skip-dirs: /app/plugins
          scanners: vuln
.github/workflows/workflow-backend-test.yml (new file, 139 lines, vendored)

@@ -0,0 +1,139 @@
name: Backend - Tests

on:
  workflow_call:
    secrets:
      GITHUB_AUTH_TOKEN:
        description: "The GitHub Token."
        required: true
      CODECOV_TOKEN:
        description: 'Codecov Token'
        required: true
      SONAR_TOKEN:
        description: 'Sonar Token'
        required: true
      GOOGLE_SERVICE_ACCOUNT:
        description: 'Google Service Account'
        required: true

permissions:
  contents: write
  checks: write
  actions: read

jobs:
  test:
    name: Backend - Tests
    runs-on: ubuntu-latest
    env:
      GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
      SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
    steps:
      - uses: actions/checkout@v4
        name: Checkout - Current ref

      # Setup build
      - uses: kestra-io/actions/.github/actions/setup-build@main
        name: Setup - Build
        id: build
        with:
          java-enabled: true
          node-enabled: true
          python-enabled: true

      # Services
      - name: Setup - Start docker compose
        shell: bash
        run: docker compose -f docker-compose-ci.yml up -d

      # Gradle check
      - name: Gradle - Build
        if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}
        env:
          GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
        shell: bash
        run: |
          echo $GOOGLE_SERVICE_ACCOUNT | base64 -d > ~/.gcp-service-account.json
          export GOOGLE_APPLICATION_CREDENTIALS=$HOME/.gcp-service-account.json
          ./gradlew check javadoc --parallel

      # report test
      - name: Test - Publish Test Results
        uses: dorny/test-reporter@v2
        if: always()
        with:
          name: Java Tests Report
          reporter: java-junit
          path: '**/build/test-results/test/TEST-*.xml'
          list-suites: 'failed'
          list-tests: 'failed'
          fail-on-error: 'false'

      # Sonar
      - name: Test - Analyze with Sonar
        if: env.SONAR_TOKEN != ''
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_AUTH_TOKEN }}
          SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
        shell: bash
        run: ./gradlew sonar --info

      # GCP
      - name: GCP - Auth with unit test account
        id: auth
        if: always() && env.GOOGLE_SERVICE_ACCOUNT != ''
        continue-on-error: true
        uses: "google-github-actions/auth@v2"
        with:
          credentials_json: "${{ secrets.GOOGLE_SERVICE_ACCOUNT }}"

      - name: GCP - Setup Cloud SDK
        if: env.GOOGLE_SERVICE_ACCOUNT != ''
        uses: "google-github-actions/setup-gcloud@v2"

      # Allure check
      - uses: rlespinasse/github-slug-action@v5
        name: Allure - Generate slug variables

      - name: Allure - Publish report
        uses: andrcuns/allure-publish-action@v2.9.0
        if: always() && env.GOOGLE_SERVICE_ACCOUNT != ''
        continue-on-error: true
        env:
          GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_AUTH_TOKEN }}
          JAVA_HOME: /usr/lib/jvm/default-jvm/
        with:
          storageType: gcs
          resultsGlob: "**/build/allure-results"
          bucket: internal-kestra-host
          baseUrl: "https://internal.dev.kestra.io"
          prefix: ${{ format('{0}/{1}', github.repository, 'allure/java') }}
          copyLatest: true
          ignoreMissingResults: true

      # Jacoco
      - name: Jacoco - Copy reports
        if: env.GOOGLE_SERVICE_ACCOUNT != ''
        continue-on-error: true
        shell: bash
        run: |
          mv build/reports/jacoco/testCodeCoverageReport build/reports/jacoco/test/
          mv build/reports/jacoco/test/testCodeCoverageReport.xml build/reports/jacoco/test/jacocoTestReport.xml
          gsutil -m rsync -d -r build/reports/jacoco/test/ gs://internal-kestra-host/${{ format('{0}/{1}', github.repository, 'jacoco') }}

      # Codecov
      - name: Codecov - Upload coverage reports
        uses: codecov/codecov-action@v5
        if: ${{ !cancelled() }}
        continue-on-error: true
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          flags: backend

      - name: Codecov - Upload test results
        uses: codecov/test-results-action@v1
        if: ${{ !cancelled() }}
        continue-on-error: true
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          flags: backend
.github/workflows/workflow-build-artifacts.yml (new file, 152 lines, vendored)

@@ -0,0 +1,152 @@
name: Build Artifacts

on:
  workflow_call:
    inputs:
      plugin-version:
        description: "Kestra version"
        default: 'LATEST'
        required: true
        type: string
    outputs:
      docker-tag:
        value: ${{ jobs.build.outputs.docker-tag }}
        description: "The Docker image Tag for Kestra"
      docker-artifact-name:
        value: ${{ jobs.build.outputs.docker-artifact-name }}
        description: "The GitHub artifact containing the Kestra docker image name."
      plugins:
        value: ${{ jobs.build.outputs.plugins }}
        description: "The Kestra plugins list used for the build."

jobs:
  build:
    name: Build - Artifacts
    runs-on: ubuntu-latest
    outputs:
      docker-tag: ${{ steps.vars.outputs.tag }}
      docker-artifact-name: ${{ steps.vars.outputs.artifact }}
      plugins: ${{ steps.plugins.outputs.plugins }}
    env:
      PLUGIN_VERSION: ${{ github.event.inputs.plugin-version != null && github.event.inputs.plugin-version || 'LATEST' }}
    steps:
      - name: Checkout - Current ref
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      # Npm
      - name: Setup - Npm install
        shell: bash
        working-directory: ui
        run: npm ci

      # Setup build
      - uses: kestra-io/actions/.github/actions/setup-build@main
        name: Setup - Build
        id: build
        with:
          java-enabled: true
          node-enabled: true

      # Get Plugins List
      - name: Plugins - Get List
        uses: ./.github/actions/plugins-list
        if: "!startsWith(github.ref, 'refs/tags/v')"
        id: plugins-list
        with:
          plugin-version: ${{ env.PLUGIN_VERSION }}

      # Set Plugins List
      - name: Plugins - Set List
        id: plugins
        if: "!startsWith(github.ref, 'refs/tags/v')"
        shell: bash
        run: |
          PLUGINS="${{ steps.plugins-list.outputs.plugins }}"
          TAG=${GITHUB_REF#refs/*/}
          if [[ $TAG = "master" || $TAG == v* ]]; then
            echo "plugins=$PLUGINS" >> $GITHUB_OUTPUT
          else
            echo "plugins=--repositories=https://s01.oss.sonatype.org/content/repositories/snapshots $PLUGINS" >> $GITHUB_OUTPUT
          fi

      # Build
      - name: Gradle - Build
        shell: bash
        run: |
          ./gradlew executableJar

      - name: Artifacts - Copy exe to image
        shell: bash
        run: |
          cp build/executable/* docker/app/kestra && chmod +x docker/app/kestra

      # Docker Tag
      - name: Setup - Docker vars
        id: vars
        shell: bash
        run: |
          TAG=${GITHUB_REF#refs/*/}
          if [[ $TAG = "master" ]]
          then
            TAG="latest";
          elif [[ $TAG = "develop" ]]
          then
            TAG="develop";
          elif [[ $TAG = v* ]]
          then
            TAG="${TAG}";
          else
            TAG="build-${{ github.run_id }}";
          fi
          echo "tag=${TAG}" >> $GITHUB_OUTPUT
          echo "artifact=docker-kestra-${TAG}" >> $GITHUB_OUTPUT

      # Docker setup
      - name: Docker - Setup QEMU
        uses: docker/setup-qemu-action@v3

      - name: Docker - Fix Qemu
        shell: bash
        run: |
          docker run --rm --privileged multiarch/qemu-user-static --reset -p yes -c yes

      - name: Docker - Setup Buildx
        uses: docker/setup-buildx-action@v3

      # Docker Build
      - name: Docker - Build & export image
        uses: docker/build-push-action@v6
        if: "!startsWith(github.ref, 'refs/tags/v')"
        with:
          context: .
          push: false
          file: Dockerfile
          tags: |
            kestra/kestra:${{ steps.vars.outputs.tag }}
          build-args: |
            KESTRA_PLUGINS=${{ steps.plugins.outputs.plugins }}
            APT_PACKAGES=${{ env.DOCKER_APT_PACKAGES }}
            PYTHON_LIBRARIES=${{ env.DOCKER_PYTHON_LIBRARIES }}
          outputs: type=docker,dest=/tmp/${{ steps.vars.outputs.artifact }}.tar

      # Upload artifacts
      - name: Artifacts - Upload JAR
        uses: actions/upload-artifact@v4
        with:
          name: jar
          path: build/libs/

      - name: Artifacts - Upload Executable
        uses: actions/upload-artifact@v4
        with:
          name: exe
          path: build/executable/

      - name: Artifacts - Upload Docker
        uses: actions/upload-artifact@v4
        if: "!startsWith(github.ref, 'refs/tags/v')"
        with:
          name: ${{ steps.vars.outputs.artifact }}
          path: /tmp/${{ steps.vars.outputs.artifact }}.tar
84  .github/workflows/workflow-frontend-test.yml  (vendored, new file)
@@ -0,0 +1,84 @@
name: Frontend - Tests

on:
  workflow_call:
    secrets:
      GITHUB_AUTH_TOKEN:
        description: "The GitHub Token."
        required: true
      CODECOV_TOKEN:
        description: 'Codecov Token'
        required: true

env:
  # to save corepack from itself
  COREPACK_INTEGRITY_KEYS: 0

jobs:
  test:
    name: Frontend - Tests
    runs-on: ubuntu-latest
    steps:
      - id: checkout
        name: Checkout - Current ref
        uses: actions/checkout@v4
        with:
          ref: ${{ github.head_ref }}

      - name: Npm - install
        shell: bash
        working-directory: ui
        run: npm ci

      - name: Npm - lint
        uses: reviewdog/action-eslint@v1
        with:
          github_token: ${{ secrets.GITHUB_AUTH_TOKEN }}
          reporter: github-pr-review
          workdir: ui

      - name: Npm - Run build
        shell: bash
        working-directory: ui
        env:
          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
        run: npm run build

      - name: Run front-end unit tests
        shell: bash
        working-directory: ui
        run: npm run test:cicd

      - name: Storybook - Install Playwright
        shell: bash
        working-directory: ui
        run: npx playwright install --with-deps

      - name: Storybook - Build
        shell: bash
        working-directory: ui
        run: npm run build-storybook --quiet

      - name: Storybook - Run tests
        shell: bash
        working-directory: ui
        run: |
          npx concurrently -k -s first -n "SB,TEST" -c "magenta,blue" \
            "npx http-server storybook-static --port 6006 --silent" \
            "npx wait-on tcp:127.0.0.1:6006 && npm run test:storybook"

      - name: Codecov - Upload coverage reports
        uses: codecov/codecov-action@v5
        if: ${{ !cancelled() && github.event.pull_request.head.repo.full_name == github.repository }}
        continue-on-error: true
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          flags: frontend

      - name: Codecov - Upload test results
        uses: codecov/test-results-action@v1
        if: ${{ !cancelled() }}
        continue-on-error: true
        with:
          token: ${{ secrets.CODECOV_TOKEN && github.event.pull_request.head.repo.full_name == github.repository }}
          flags: frontend
48  .github/workflows/workflow-github-release.yml  (vendored, new file)
@@ -0,0 +1,48 @@
name: Github - Release

on:
  workflow_call:
    secrets:
      GH_PERSONAL_TOKEN:
        description: "The Github personal token."
        required: true

jobs:
  publish:
    name: Github - Release
    runs-on: ubuntu-latest
    steps:
      # Download Exec
      - name: Artifacts - Download executable
        uses: actions/download-artifact@v4
        if: startsWith(github.ref, 'refs/tags/v')
        with:
          name: exe
          path: build/executable

      # GitHub Release
      - name: GitHub - Create release
        id: create_github_release
        uses: "marvinpinto/action-automatic-releases@latest"
        if: startsWith(github.ref, 'refs/tags/v')
        continue-on-error: true
        with:
          repo_token: "${{ secrets.GITHUB_TOKEN }}"
          prerelease: false
          files: |
            build/executable/*

      # Trigger gha workflow to bump helm chart version
      - name: GitHub - Trigger the Helm chart version bump
        uses: peter-evans/repository-dispatch@v3
        if: steps.create_github_release.conclusion == 'success'
        with:
          token: ${{ secrets.GH_PERSONAL_TOKEN }}
          repository: kestra-io/helm-charts
          event-type: update-helm-chart-version
          client-payload: |-
            {
              "new_version": "${{ github.ref_name }}",
              "github_repository": "${{ github.repository }}",
              "github_actor": "${{ github.actor }}"
            }
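For reference, the repository_dispatch sent by the last step can be reproduced with a plain REST call; this is only an illustrative sketch with placeholder version and actor values, assuming a personal token is available in GH_PERSONAL_TOKEN:

```bash
# Manually fire the same repository_dispatch event towards kestra-io/helm-charts
curl -X POST \
  -H "Authorization: Bearer $GH_PERSONAL_TOKEN" \
  -H "Accept: application/vnd.github+json" \
  https://api.github.com/repos/kestra-io/helm-charts/dispatches \
  -d '{"event_type":"update-helm-chart-version","client_payload":{"new_version":"v0.0.0-example","github_repository":"kestra-io/kestra","github_actor":"example-user"}}'
```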
100  .github/workflows/workflow-publish-docker.yml  (vendored, new file)
@@ -0,0 +1,100 @@
|
||||
name: Publish - Docker
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
plugin-version:
|
||||
description: "Kestra version"
|
||||
default: 'LATEST'
|
||||
required: false
|
||||
type: string
|
||||
workflow_call:
|
||||
inputs:
|
||||
plugin-version:
|
||||
description: "Kestra version"
|
||||
default: 'LATEST'
|
||||
required: false
|
||||
type: string
|
||||
secrets:
|
||||
DOCKERHUB_USERNAME:
|
||||
description: "The Dockerhub username."
|
||||
required: true
|
||||
DOCKERHUB_PASSWORD:
|
||||
description: "The Dockerhub password."
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
|
||||
publish:
|
||||
name: Publish - Docker
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
image:
|
||||
- tag: ${{ needs.build-artifacts.outputs.docker-tag }}-no-plugins
|
||||
packages: jattach
|
||||
python-libraries: ""
|
||||
|
||||
- tag: ${{ needs.build-artifacts.outputs.docker-tag }}
|
||||
plugins: ${{ needs.build-artifacts.outputs.plugins }}
|
||||
packages: python3 python3-venv python-is-python3 python3-pip nodejs npm curl zip unzip jattach
|
||||
python-libraries: kestra
|
||||
steps:
|
||||
- name: Checkout - Current ref
|
||||
uses: actions/checkout@v4
|
||||
|
||||
# Docker setup
|
||||
- name: Docker - Setup QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
- name: Docker - Fix Qemu
|
||||
shell: bash
|
||||
run: |
|
||||
docker run --rm --privileged multiarch/qemu-user-static --reset -p yes -c yes
|
||||
|
||||
- name: Docker - Setup Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
# Docker Login
|
||||
- name: Docker - Login to DockerHub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_PASSWORD }}
|
||||
|
||||
# Vars
|
||||
- name: Docker - Set image name
|
||||
shell: bash
|
||||
id: vars
|
||||
run: |
|
||||
TAG=${GITHUB_REF#refs/*/}
|
||||
if [[ $TAG = "master" || $TAG == v* ]]; then
|
||||
echo "plugins=${{ matrix.image.plugins }}" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "plugins=--repositories=https://s01.oss.sonatype.org/content/repositories/snapshots ${{ matrix.image.plugins }}" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
# Build Docker Image
|
||||
- name: Artifacts - Download executable
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: exe
|
||||
path: build/executable
|
||||
|
||||
- name: Docker - Copy exe to image
|
||||
shell: bash
|
||||
run: |
|
||||
cp build/executable/* docker/app/kestra && chmod +x docker/app/kestra
|
||||
|
||||
# Docker Build and push
|
||||
- name: Docker - Build image
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
tags: kestra/kestra:${{ matrix.image.tag }}
|
||||
platforms: linux/amd64,linux/arm64
|
||||
build-args: |
|
||||
KESTRA_PLUGINS=${{ steps.vars.outputs.plugins }}
|
||||
APT_PACKAGES=${{ matrix.image.packages }}
|
||||
PYTHON_LIBRARIES=${{ matrix.image.python-libraries }}
|
||||
57  .github/workflows/workflow-publish-maven.yml  (vendored, new file)
@@ -0,0 +1,57 @@
name: Publish - Maven

on:
  workflow_call:
    secrets:
      SONATYPE_USER:
        description: "The Sonatype username."
        required: true
      SONATYPE_PASSWORD:
        description: "The Sonatype password."
        required: true
      SONATYPE_GPG_KEYID:
        description: "The Sonatype GPG key id."
        required: true
      SONATYPE_GPG_PASSWORD:
        description: "The Sonatype GPG password."
        required: true
      SONATYPE_GPG_FILE:
        description: "The Sonatype GPG file."
        required: true

jobs:
  publish:
    name: Publish - Maven
    runs-on: ubuntu-latest
    steps:
      - name: Checkout - Current ref
        uses: actions/checkout@v4

      # Setup build
      - name: Setup - Build
        uses: kestra-io/actions/.github/actions/setup-build@main
        id: build
        with:
          java-enabled: true
          node-enabled: true

      # Publish
      - name: Publish - Release package to Maven Central
        shell: bash
        env:
          ORG_GRADLE_PROJECT_sonatypeUsername: ${{ secrets.SONATYPE_USER }}
          ORG_GRADLE_PROJECT_sonatypePassword: ${{ secrets.SONATYPE_PASSWORD }}
          SONATYPE_GPG_KEYID: ${{ secrets.SONATYPE_GPG_KEYID }}
          SONATYPE_GPG_PASSWORD: ${{ secrets.SONATYPE_GPG_PASSWORD }}
          SONATYPE_GPG_FILE: ${{ secrets.SONATYPE_GPG_FILE }}
        run: |
          mkdir -p ~/.gradle/
          echo "signing.keyId=${SONATYPE_GPG_KEYID}" > ~/.gradle/gradle.properties
          echo "signing.password=${SONATYPE_GPG_PASSWORD}" >> ~/.gradle/gradle.properties
          echo "signing.secretKeyRingFile=${HOME}/.gradle/secring.gpg" >> ~/.gradle/gradle.properties
          echo ${SONATYPE_GPG_FILE} | base64 -d > ~/.gradle/secring.gpg
          ./gradlew publishToSonatype ${{ startsWith(github.ref, 'refs/tags/v') && 'closeAndReleaseSonatypeStagingRepository' || '' }}

      # Gradle dependency
      - name: Java - Gradle dependency graph
        uses: gradle/actions/dependency-submission@v4
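The SONATYPE_GPG_FILE secret is consumed above as a base64 string and decoded into `~/.gradle/secring.gpg`. A minimal sketch of how such a secret could be produced locally, assuming a hypothetical key id `0xDEADBEEF` (not anything defined by this repository):

```bash
# Export the signing key as a binary keyring, then base64-encode it on one line
# (0xDEADBEEF is a placeholder; use your real key id).
gpg --export-secret-keys 0xDEADBEEF > secring.gpg
base64 -w0 secring.gpg   # paste this output into the SONATYPE_GPG_FILE secret
```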
73  .github/workflows/workflow-release.yml  (vendored, new file)
@@ -0,0 +1,73 @@
name: Release

on:
  workflow_dispatch:
    inputs:
      plugin-version:
        description: "Kestra version"
        default: 'LATEST'
        required: false
        type: string
  workflow_call:
    inputs:
      plugin-version:
        description: "Kestra version"
        default: 'LATEST'
        required: false
        type: string
    secrets:
      DOCKERHUB_USERNAME:
        description: "The Dockerhub username."
        required: true
      DOCKERHUB_PASSWORD:
        description: "The Dockerhub password."
        required: true
      SONATYPE_USER:
        description: "The Sonatype username."
        required: true
      SONATYPE_PASSWORD:
        description: "The Sonatype password."
        required: true
      SONATYPE_GPG_KEYID:
        description: "The Sonatype GPG key id."
        required: true
      SONATYPE_GPG_PASSWORD:
        description: "The Sonatype GPG password."
        required: true
      SONATYPE_GPG_FILE:
        description: "The Sonatype GPG file."
        required: true

jobs:
  build-artifacts:
    name: Build - Artifacts
    uses: ./.github/workflows/workflow-build-artifacts.yml
    with:
      plugin-version: ${{ github.event.inputs.plugin-version != null && github.event.inputs.plugin-version || 'LATEST' }}

  Docker:
    name: Publish Docker
    needs: build-artifacts
    uses: ./.github/workflows/workflow-publish-docker.yml
    with:
      plugin-version: ${{ github.event.inputs.plugin-version != null && github.event.inputs.plugin-version || 'LATEST' }}
    secrets:
      DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
      DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }}

  Maven:
    name: Publish Maven
    uses: ./.github/workflows/workflow-publish-maven.yml
    secrets:
      SONATYPE_USER: ${{ secrets.SONATYPE_USER }}
      SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }}
      SONATYPE_GPG_KEYID: ${{ secrets.SONATYPE_GPG_KEYID }}
      SONATYPE_GPG_PASSWORD: ${{ secrets.SONATYPE_GPG_PASSWORD }}
      SONATYPE_GPG_FILE: ${{ secrets.SONATYPE_GPG_FILE }}

  Github:
    name: Github Release
    needs: build-artifacts
    if: startsWith(github.ref, 'refs/tags/v')
    uses: ./.github/workflows/workflow-github-release.yml
    secrets:
      GH_PERSONAL_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}
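Because the release workflow also declares a workflow_dispatch trigger with a plugin-version input, it can be started from a terminal; a minimal sketch using the GitHub CLI, where the input value is only an example:

```bash
# Kick off the release workflow on the current repository with an explicit plugin version
gh workflow run workflow-release.yml -f plugin-version=LATEST
```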
97  .github/workflows/workflow-test.yml  (vendored, new file)
@@ -0,0 +1,97 @@
|
||||
name: Tests
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '0 4 * * 1,2,3,4,5'
|
||||
workflow_call:
|
||||
inputs:
|
||||
report-status:
|
||||
description: "Report status of the jobs in outputs"
|
||||
type: string
|
||||
required: false
|
||||
default: false
|
||||
outputs:
|
||||
frontend_status:
|
||||
description: "Status of the frontend job"
|
||||
value: ${{ jobs.set-frontend-status.outputs.frontend_status }}
|
||||
backend_status:
|
||||
description: "Status of the backend job"
|
||||
value: ${{ jobs.set-backend-status.outputs.backend_status }}
|
||||
|
||||
jobs:
|
||||
file-changes:
|
||||
name: File changes detection
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 60
|
||||
outputs:
|
||||
ui: ${{ steps.changes.outputs.ui }}
|
||||
backend: ${{ steps.changes.outputs.backend }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
if: "!startsWith(github.ref, 'refs/tags/v')"
|
||||
- uses: dorny/paths-filter@v3
|
||||
if: "!startsWith(github.ref, 'refs/tags/v')"
|
||||
id: changes
|
||||
with:
|
||||
filters: |
|
||||
ui:
|
||||
- 'ui/**'
|
||||
backend:
|
||||
- '!{ui,.github}/**'
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
frontend:
|
||||
name: Frontend - Tests
|
||||
needs: file-changes
|
||||
if: "needs.file-changes.outputs.ui == 'true' || startsWith(github.ref, 'refs/tags/v')"
|
||||
uses: ./.github/workflows/workflow-frontend-test.yml
|
||||
secrets:
|
||||
GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
|
||||
|
||||
backend:
|
||||
name: Backend - Tests
|
||||
needs: file-changes
|
||||
if: "needs.file-changes.outputs.backend == 'true' || startsWith(github.ref, 'refs/tags/v')"
|
||||
uses: ./.github/workflows/workflow-backend-test.yml
|
||||
secrets:
|
||||
GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
|
||||
GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
|
||||
|
||||
# Output every job status
|
||||
# To be used in other workflows
|
||||
report-status:
|
||||
name: Report Status
|
||||
runs-on: ubuntu-latest
|
||||
needs: [ frontend, backend ]
|
||||
if: always() && (inputs.report-status == 'true')
|
||||
outputs:
|
||||
frontend_status: ${{ steps.set-frontend-status.outputs.frontend_status }}
|
||||
backend_status: ${{ steps.set-backend-status.outputs.backend_status }}
|
||||
steps:
|
||||
- id: set-frontend-status
|
||||
name: Set frontend job status
|
||||
run: echo "::set-output name=frontend_status::${{ needs.frontend.result }}"
|
||||
|
||||
- id: set-backend-status
|
||||
name: Set backend job status
|
||||
run: echo "::set-output name=backend_status::${{ needs.backend.result }}"
|
||||
|
||||
notify:
|
||||
name: Notify - Slack
|
||||
runs-on: ubuntu-latest
|
||||
needs: [ frontend, backend ]
|
||||
if: github.event_name == 'schedule'
|
||||
steps:
|
||||
- name: Notify failed CI
|
||||
id: send-ci-failed
|
||||
if: |
|
||||
always() && (needs.frontend.result != 'success' ||
|
||||
needs.backend.result != 'success')
|
||||
uses: kestra-io/actions/.github/actions/send-ci-failed@main
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
|
||||
1  .gitignore  (vendored)
@@ -59,4 +59,3 @@ core/src/main/resources/gradle.properties

*storybook.log
storybook-static
/jmh-benchmarks/src/main/resources/gradle.properties
24  .plugins
@@ -3,12 +3,10 @@
|
||||
# Format: <RepositoryName>:<GroupId>:<ArtifactId>:<Version>
|
||||
#
|
||||
# Uncomment the lines corresponding to the plugins to be installed:
|
||||
#plugin-ai:io.kestra.plugin:plugin-ai:LATEST
|
||||
#plugin-airbyte:io.kestra.plugin:plugin-airbyte:LATEST
|
||||
#plugin-airflow:io.kestra.plugin:plugin-airflow:LATEST
|
||||
#plugin-amqp:io.kestra.plugin:plugin-amqp:LATEST
|
||||
#plugin-ansible:io.kestra.plugin:plugin-ansible:LATEST
|
||||
#plugin-anthropic:io.kestra.plugin:plugin-anthropic:LATEST
|
||||
#plugin-aws:io.kestra.plugin:plugin-aws:LATEST
|
||||
#plugin-azure:io.kestra.plugin:plugin-azure:LATEST
|
||||
#plugin-cassandra:io.kestra.plugin:plugin-cassandra:LATEST
|
||||
@@ -19,7 +17,6 @@
|
||||
#plugin-databricks:io.kestra.plugin:plugin-databricks:LATEST
|
||||
#plugin-datahub:io.kestra.plugin:plugin-datahub:LATEST
|
||||
#plugin-dataform:io.kestra.plugin:plugin-dataform:LATEST
|
||||
#plugin-datagen:io.kestra.plugin:plugin-datagen:LATEST
|
||||
#plugin-dbt:io.kestra.plugin:plugin-dbt:LATEST
|
||||
#plugin-debezium:io.kestra.plugin:plugin-debezium-db2:LATEST
|
||||
#plugin-debezium:io.kestra.plugin:plugin-debezium-mongodb:LATEST
|
||||
@@ -27,23 +24,18 @@
|
||||
#plugin-debezium:io.kestra.plugin:plugin-debezium-oracle:LATEST
|
||||
#plugin-debezium:io.kestra.plugin:plugin-debezium-postgres:LATEST
|
||||
#plugin-debezium:io.kestra.plugin:plugin-debezium-sqlserver:LATEST
|
||||
#plugin-deepseek:io.kestra.plugin:plugin-deepseek:LATEST
|
||||
#plugin-docker:io.kestra.plugin:plugin-docker:LATEST
|
||||
#plugin-elasticsearch:io.kestra.plugin:plugin-elasticsearch:LATEST
|
||||
#plugin-fivetran:io.kestra.plugin:plugin-fivetran:LATEST
|
||||
#plugin-fs:io.kestra.plugin:plugin-fs:LATEST
|
||||
#plugin-gcp:io.kestra.plugin:plugin-gcp:LATEST
|
||||
#plugin-gemini:io.kestra.plugin:plugin-gemini:LATEST
|
||||
#plugin-git:io.kestra.plugin:plugin-git:LATEST
|
||||
#plugin-github:io.kestra.plugin:plugin-github:LATEST
|
||||
#plugin-gitlab:io.kestra.plugin:plugin-gitlab:LATEST
|
||||
#plugin-googleworkspace:io.kestra.plugin:plugin-googleworkspace:LATEST
|
||||
#plugin-graalvm:io.kestra.plugin:plugin-graalvm:LATEST
|
||||
#plugin-graphql:io.kestra.plugin:plugin-graphql:LATEST
|
||||
#plugin-hightouch:io.kestra.plugin:plugin-hightouch:LATEST
|
||||
#plugin-hubspot:io.kestra.plugin:plugin-hubspot:LATEST
|
||||
#plugin-huggingface:io.kestra.plugin:plugin-huggingface:LATEST
|
||||
#plugin-influxdb:io.kestra.plugin:plugin-influxdb:LATEST
|
||||
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-as400:LATEST
|
||||
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-clickhouse:LATEST
|
||||
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-db2:LATEST
|
||||
@@ -64,43 +56,31 @@
|
||||
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-arrow-flight:LATEST
|
||||
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-sqlite:LATEST
|
||||
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-sybase:LATEST
|
||||
#plugin-jenkins:io.kestra.plugin:plugin-jenkins:LATEST
|
||||
#plugin-jira:io.kestra.plugin:plugin-jira:LATEST
|
||||
#plugin-kafka:io.kestra.plugin:plugin-kafka:LATEST
|
||||
#plugin-kestra:io.kestra.plugin:plugin-kestra:LATEST
|
||||
#plugin-kubernetes:io.kestra.plugin:plugin-kubernetes:LATEST
|
||||
#plugin-ldap:io.kestra.plugin:plugin-ldap:LATEST
|
||||
#plugin-linear:io.kestra.plugin:plugin-linear:LATEST
|
||||
#plugin-malloy:io.kestra.plugin:plugin-malloy:LATEST
|
||||
#plugin-meilisearch:io.kestra.plugin:plugin-meilisearch:LATEST
|
||||
#plugin-minio:io.kestra.plugin:plugin-minio:LATEST
|
||||
#plugin-mistral:io.kestra.plugin:plugin-mistral:LATEST
|
||||
#plugin-modal:io.kestra.plugin:plugin-modal:LATEST
|
||||
#plugin-mongodb:io.kestra.plugin:plugin-mongodb:LATEST
|
||||
#plugin-mqtt:io.kestra.plugin:plugin-mqtt:LATEST
|
||||
#plugin-nats:io.kestra.plugin:plugin-nats:LATEST
|
||||
#plugin-neo4j:io.kestra.plugin:plugin-neo4j:LATEST
|
||||
#plugin-notifications:io.kestra.plugin:plugin-notifications:LATEST
|
||||
#plugin-notion:io.kestra.plugin:plugin-notion:LATEST
|
||||
#plugin-ollama:io.kestra.plugin:plugin-ollama:LATEST
|
||||
#plugin-openai:io.kestra.plugin:plugin-openai:LATEST
|
||||
#plugin-opensearch:io.kestra.plugin:plugin-opensearch:LATEST
|
||||
#plugin-perplexity:io.kestra.plugin:plugin-perplexity:LATEST
|
||||
#plugin-powerbi:io.kestra.plugin:plugin-powerbi:LATEST
|
||||
#plugin-pulsar:io.kestra.plugin:plugin-pulsar:LATEST
|
||||
#plugin-redis:io.kestra.plugin:plugin-redis:LATEST
|
||||
#plugin-scripts:io.kestra.plugin:plugin-script-bun:LATEST
|
||||
#plugin-scripts:io.kestra.plugin:plugin-script-deno:LATEST
|
||||
#plugin-scripts:io.kestra.plugin:plugin-script-go:LATEST
|
||||
#plugin-scripts:io.kestra.plugin:plugin-script-groovy:LATEST
|
||||
#plugin-scripts:io.kestra.plugin:plugin-script-jbang:LATEST
|
||||
#plugin-scripts:io.kestra.plugin:plugin-script-julia:LATEST
|
||||
#plugin-scripts:io.kestra.plugin:plugin-script-jython:LATEST
|
||||
#plugin-scripts:io.kestra.plugin:plugin-script-lua:LATEST
|
||||
#plugin-scripts:io.kestra.plugin:plugin-script-nashorn:LATEST
|
||||
#plugin-scripts:io.kestra.plugin:plugin-script-node:LATEST
|
||||
#plugin-scripts:io.kestra.plugin:plugin-script-perl:LATEST
|
||||
#plugin-scripts:io.kestra.plugin:plugin-script-php:LATEST
|
||||
#plugin-scripts:io.kestra.plugin:plugin-script-powershell:LATEST
|
||||
#plugin-scripts:io.kestra.plugin:plugin-script-python:LATEST
|
||||
#plugin-scripts:io.kestra.plugin:plugin-script-r:LATEST
|
||||
@@ -108,18 +88,16 @@
|
||||
#plugin-scripts:io.kestra.plugin:plugin-script-shell:LATEST
|
||||
#plugin-serdes:io.kestra.plugin:plugin-serdes:LATEST
|
||||
#plugin-servicenow:io.kestra.plugin:plugin-servicenow:LATEST
|
||||
#plugin-sifflet:io.kestra.plugin:plugin-sifflet:LATEST
|
||||
#plugin-singer:io.kestra.plugin:plugin-singer:LATEST
|
||||
#plugin-soda:io.kestra.plugin:plugin-soda:LATEST
|
||||
#plugin-solace:io.kestra.plugin:plugin-solace:LATEST
|
||||
#plugin-spark:io.kestra.plugin:plugin-spark:LATEST
|
||||
#plugin-sqlmesh:io.kestra.plugin:plugin-sqlmesh:LATEST
|
||||
#plugin-supabase:io.kestra.plugin:plugin-supabase:LATEST
|
||||
#plugin-surrealdb:io.kestra.plugin:plugin-surrealdb:LATEST
|
||||
#plugin-terraform:io.kestra.plugin:plugin-terraform:LATEST
|
||||
#plugin-transform:io.kestra.plugin:plugin-transform-grok:LATEST
|
||||
#plugin-transform:io.kestra.plugin:plugin-transform-json:LATEST
|
||||
#plugin-tika:io.kestra.plugin:plugin-tika:LATEST
|
||||
#plugin-trivy:io.kestra.plugin:plugin-trivy:LATEST
|
||||
#plugin-weaviate:io.kestra.plugin:plugin-weaviate:LATEST
|
||||
#plugin-zendesk:io.kestra.plugin:plugin-zendesk:LATEST
|
||||
#plugin-typesense:io.kestra.plugin:plugin-typesense:LATEST
|
||||
|
||||
305  AGENTS.md
@@ -1,305 +0,0 @@
|
||||
# Kestra AGENTS.md
|
||||
|
||||
This file provides guidance for AI coding agents working on the Kestra project. Kestra is an open-source data orchestration and scheduling platform built with Java (Micronaut) and Vue.js.
|
||||
|
||||
## Repository Layout
|
||||
|
||||
- **`core/`**: Core Kestra framework and task definitions
|
||||
- **`cli/`**: Command-line interface and server implementation
|
||||
- **`webserver/`**: REST API server implementation
|
||||
- **`ui/`**: Vue.js frontend application
|
||||
- **`jdbc-*`**: Database connector modules (H2, MySQL, PostgreSQL)
|
||||
- **`script/`**: Script execution engine
|
||||
- **`storage-local/`**: Local file storage implementation
|
||||
- **`repository-memory/`**: In-memory repository implementation
|
||||
- **`runner-memory/`**: In-memory execution runner
|
||||
- **`processor/`**: Task processing engine
|
||||
- **`model/`**: Data models and Data Transfer Objects
|
||||
- **`platform/`**: Platform-specific implementations
|
||||
- **`tests/`**: Integration test framework
|
||||
- **`e2e-tests/`**: End-to-end testing suite
|
||||
|
||||
## Development Environment
|
||||
|
||||
### Prerequisites
|
||||
|
||||
- Java 21+
|
||||
- Node.js 22+ and npm
|
||||
- Python 3, pip, and python venv
|
||||
- Docker & Docker Compose
|
||||
- Gradle (wrapper included)
|
||||
|
||||
### Quick Setup with Devcontainer
|
||||
|
||||
The easiest way to get started is using the provided devcontainer:
|
||||
|
||||
1. Install VSCode Remote Development extension
|
||||
2. Run `Dev Containers: Open Folder in Container...` from command palette
|
||||
3. Select the Kestra root folder
|
||||
4. Wait for Gradle build to complete
|
||||
|
||||
### Manual Setup
|
||||
|
||||
1. Clone the repository
|
||||
2. Run `./gradlew build` to build the backend
|
||||
3. Navigate to `ui/` and run `npm install`
|
||||
4. Create configuration files as described below
|
||||
|
||||
## Configuration Files
|
||||
|
||||
### Backend Configuration
|
||||
|
||||
Create `cli/src/main/resources/application-override.yml`:
|
||||
|
||||
**Local Mode (H2 database):**
|
||||
|
||||
```yaml
|
||||
micronaut:
|
||||
server:
|
||||
cors:
|
||||
enabled: true
|
||||
configurations:
|
||||
all:
|
||||
allowedOrigins:
|
||||
- http://localhost:5173
|
||||
```
|
||||
|
||||
**Standalone Mode (PostgreSQL):**
|
||||
|
||||
```yaml
|
||||
kestra:
|
||||
repository:
|
||||
type: postgres
|
||||
storage:
|
||||
type: local
|
||||
local:
|
||||
base-path: "/app/storage"
|
||||
queue:
|
||||
type: postgres
|
||||
tasks:
|
||||
tmp-dir:
|
||||
path: /tmp/kestra-wd/tmp
|
||||
anonymous-usage-report:
|
||||
enabled: false
|
||||
|
||||
datasources:
|
||||
postgres:
|
||||
url: jdbc:postgresql://host.docker.internal:5432/kestra
|
||||
driverClassName: org.postgresql.Driver
|
||||
username: kestra
|
||||
password: k3str4
|
||||
|
||||
flyway:
|
||||
datasources:
|
||||
postgres:
|
||||
enabled: true
|
||||
locations:
|
||||
- classpath:migrations/postgres
|
||||
ignore-migration-patterns: "*:missing,*:future"
|
||||
out-of-order: true
|
||||
|
||||
micronaut:
|
||||
server:
|
||||
cors:
|
||||
enabled: true
|
||||
configurations:
|
||||
all:
|
||||
allowedOrigins:
|
||||
- http://localhost:5173
|
||||
```
|
||||
|
||||
### Frontend Configuration
|
||||
|
||||
Create `ui/.env.development.local` for environment variables.
|
||||
|
||||
## Running the Application
|
||||
|
||||
### Backend
|
||||
|
||||
- **Local mode**: `./gradlew runLocal` (uses H2 database)
|
||||
- **Standalone mode**: Use VSCode Run and Debug with main class `io.kestra.cli.App` and args `server standalone`
|
||||
|
||||
### Frontend
|
||||
|
||||
- Navigate to `ui/` directory
|
||||
- Run `npm run dev` for development server (port 5173)
|
||||
- Run `npm run build` for production build
|
||||
|
||||
## Building and Testing
|
||||
|
||||
### Backend
|
||||
|
||||
```bash
|
||||
# Build the project
|
||||
./gradlew build
|
||||
|
||||
# Run tests
|
||||
./gradlew test
|
||||
|
||||
# Run specific module tests
|
||||
./gradlew :core:test
|
||||
|
||||
# Clean build
|
||||
./gradlew clean build
|
||||
```
|
||||
|
||||
### Frontend
|
||||
|
||||
```bash
|
||||
cd ui
|
||||
npm install
|
||||
npm run test
|
||||
npm run lint
|
||||
npm run build
|
||||
```
|
||||
|
||||
### End-to-End Tests
|
||||
|
||||
```bash
|
||||
# Build and start E2E tests
|
||||
./build-and-start-e2e-tests.sh
|
||||
|
||||
# Or use the Makefile
|
||||
make install
|
||||
make install-plugins
|
||||
make start-standalone-postgres
|
||||
```
|
||||
|
||||
## Development Guidelines
|
||||
|
||||
### Java Backend
|
||||
|
||||
- Use Java 21 features
|
||||
- Follow Micronaut framework patterns
|
||||
- Add Swagger annotations for API documentation
|
||||
- Use annotation processors (enable in IDE)
|
||||
- Set `MICRONAUT_ENVIRONMENTS=local,override` for custom config
|
||||
- Set `KESTRA_PLUGINS_PATH` for custom plugin loading
|
||||
|
||||
### Vue.js Frontend
|
||||
|
||||
- Vue 3 with Composition API
|
||||
- TypeScript for type safety
|
||||
- Vite for build tooling
|
||||
- ESLint and Prettier for code quality
|
||||
- Component-based architecture in `src/components/`
|
||||
|
||||
### Code Style
|
||||
|
||||
- Follow `.editorconfig` settings
|
||||
- Use 4 spaces for Java, 2 spaces for YAML/JSON/CSS
|
||||
- Enable format on save in VSCode
|
||||
- Use Prettier for frontend code formatting
|
||||
|
||||
## Testing Strategy
|
||||
|
||||
### Backend Testing
|
||||
|
||||
- Unit tests in `src/test/java/`
|
||||
- Integration tests in `tests/` module
|
||||
- Use Micronaut test framework
|
||||
- Test both local and standalone modes
|
||||
|
||||
### Frontend Testing
|
||||
- Unit tests with Jest
|
||||
- E2E tests with Playwright
|
||||
- Component testing with Storybook
|
||||
- Run `npm run test:unit` and `npm run test:e2e`
|
||||
|
||||
## Plugin Development
|
||||
|
||||
### Creating Plugins
|
||||
|
||||
- Follow the [Plugin Developer Guide](https://kestra.io/docs/plugin-developer-guide/)
|
||||
- Place JAR files in `KESTRA_PLUGINS_PATH`
|
||||
- Use the plugin template structure
|
||||
- Test with both local and standalone modes
|
||||
|
||||
### Plugin Loading
|
||||
|
||||
- Set `KESTRA_PLUGINS_PATH` environment variable
|
||||
- Use devcontainer mounts for local development
|
||||
- Plugins are loaded at startup
|
||||
|
||||
## Common Issues and Solutions
|
||||
|
||||
### JavaScript Heap Out of Memory
|
||||
|
||||
Set `NODE_OPTIONS=--max-old-space-size=4096` environment variable.
|
||||
|
||||
### CORS Issues
|
||||
|
||||
Ensure backend CORS is configured for `http://localhost:5173` when using frontend dev server.
|
||||
|
||||
### Database Connection Issues
|
||||
|
||||
- Use `host.docker.internal` instead of `localhost` when connecting from devcontainer
|
||||
- Verify PostgreSQL is running and accessible
|
||||
- Check database credentials and permissions
|
||||
|
||||
### Gradle Build Issues
|
||||
|
||||
- Clear Gradle cache: `./gradlew clean`
|
||||
- Check Java version compatibility
|
||||
- Verify all dependencies are available
|
||||
|
||||
## Pull Request Guidelines
|
||||
|
||||
### Before Submitting
|
||||
|
||||
1. Run all tests: `./gradlew test` and `npm test`
|
||||
2. Check code formatting: `./gradlew spotlessCheck`
|
||||
3. Verify CORS configuration if changing API
|
||||
4. Test both local and standalone modes
|
||||
5. Update documentation for user-facing changes
|
||||
|
||||
### Commit Messages
|
||||
|
||||
- Follow conventional commit format
|
||||
- Use present tense ("Add feature" not "Added feature")
|
||||
- Reference issue numbers when applicable
|
||||
- Keep commits focused and atomic
|
||||
|
||||
### Review Checklist
|
||||
|
||||
- [ ] All tests pass
|
||||
- [ ] Code follows project style guidelines
|
||||
- [ ] Documentation is updated
|
||||
- [ ] No breaking changes without migration guide
|
||||
- [ ] CORS properly configured if API changes
|
||||
- [ ] Both local and standalone modes tested
|
||||
|
||||
## Useful Commands
|
||||
|
||||
```bash
|
||||
# Quick development commands
|
||||
./gradlew runLocal # Start local backend
|
||||
./gradlew :ui:build # Build frontend
|
||||
./gradlew clean build # Clean rebuild
|
||||
npm run dev # Start frontend dev server
|
||||
make install # Install Kestra locally
|
||||
make start-standalone-postgres # Start with PostgreSQL
|
||||
|
||||
# Testing commands
|
||||
./gradlew test # Run all backend tests
|
||||
./gradlew :core:test # Run specific module tests
|
||||
npm run test # Run frontend tests
|
||||
npm run lint # Lint frontend code
|
||||
```
|
||||
|
||||
## Getting Help
|
||||
|
||||
- Open a [GitHub issue](https://github.com/kestra-io/kestra/issues)
|
||||
- Join the [Kestra Slack community](https://kestra.io/slack)
|
||||
- Check the [main documentation](https://kestra.io/docs)
|
||||
|
||||
## Environment Variables
|
||||
|
||||
| Variable | Description | Default |
|
||||
|----------|-------------|---------|
|
||||
| `MICRONAUT_ENVIRONMENTS` | Custom config environments | `local,override` |
|
||||
| `KESTRA_PLUGINS_PATH` | Path to custom plugins | `/workspaces/kestra/local/plugins` |
|
||||
| `NODE_OPTIONS` | Node.js options | `--max-old-space-size=4096` |
|
||||
| `JAVA_HOME` | Java installation path | `/usr/java/jdk-21` |
|
||||
|
||||
Remember: Always test your changes in both local and standalone modes, and ensure CORS is properly configured for frontend development.
|
||||
@@ -16,9 +16,8 @@ RUN apt-get update -y && \
    if [ -n "${APT_PACKAGES}" ]; then apt-get install -y --no-install-recommends ${APT_PACKAGES}; fi && \
    apt-get clean && \
    rm -rf /var/lib/apt/lists/* /var/tmp/* /tmp/* && \
    curl -LsSf https://astral.sh/uv/0.6.17/install.sh | sh && mv /root/.local/bin/uv /bin && mv /root/.local/bin/uvx /bin && \
    if [ -n "${KESTRA_PLUGINS}" ]; then /app/kestra plugins install ${KESTRA_PLUGINS} && rm -rf /tmp/*; fi && \
    if [ -n "${PYTHON_LIBRARIES}" ]; then uv pip install --system ${PYTHON_LIBRARIES}; fi && \
    if [ -n "${PYTHON_LIBRARIES}" ]; then pip install ${PYTHON_LIBRARIES}; fi && \
    chown -R kestra:kestra /app

USER kestra

@@ -1,8 +0,0 @@
ARG KESTRA_DOCKER_BASE_VERSION=develop
FROM kestra/kestra:$KESTRA_DOCKER_BASE_VERSION

USER root

COPY --chown=kestra:kestra docker /

USER kestra
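The build arguments referenced in this Dockerfile (KESTRA_PLUGINS, APT_PACKAGES, PYTHON_LIBRARIES) are supplied at image build time; a hedged local sketch, with an example plugin coordinate picked from the .plugins list below rather than anything mandated by the repository, and an illustrative tag name:

```bash
# Build a local image with one plugin preinstalled; tag and plugin choice are illustrative.
docker build \
  --build-arg KESTRA_PLUGINS="io.kestra.plugin:plugin-script-python:LATEST" \
  --build-arg APT_PACKAGES="python3 python3-venv python-is-python3 python3-pip curl jattach" \
  --build-arg PYTHON_LIBRARIES="kestra" \
  -f Dockerfile -t kestra/kestra:local-build .
```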
27  Makefile
@@ -77,7 +77,7 @@ install-plugins:
|
||||
else \
|
||||
${KESTRA_BASEDIR}/bin/kestra plugins install $$CURRENT_PLUGIN \
|
||||
--plugins ${KESTRA_BASEDIR}/plugins \
|
||||
--repositories=https://central.sonatype.com/repository/maven-snapshots || exit 1; \
|
||||
--repositories=https://s01.oss.sonatype.org/content/repositories/snapshots || exit 1; \
|
||||
fi \
|
||||
done < $$PLUGIN_LIST
|
||||
|
||||
@@ -89,7 +89,7 @@ build-docker: build-exec
|
||||
--compress \
|
||||
--rm \
|
||||
-f ./Dockerfile \
|
||||
--build-arg="APT_PACKAGES=python3 python-is-python3 python3-pip curl jattach" \
|
||||
--build-arg="APT_PACKAGES=python3 python3-venv python-is-python3 python3-pip nodejs npm curl zip unzip jattach" \
|
||||
--build-arg="PYTHON_LIBRARIES=kestra" \
|
||||
-t ${DOCKER_IMAGE}:${VERSION} ${DOCKER_PATH} || exit 1 ;
|
||||
|
||||
@@ -130,6 +130,9 @@ datasources:
|
||||
username: kestra
|
||||
password: k3str4
|
||||
kestra:
|
||||
server:
|
||||
basic-auth:
|
||||
enabled: false
|
||||
encryption:
|
||||
secret-key: 3ywuDa/Ec61VHkOX3RlI9gYq7CaD0mv0Pf3DHtAXA6U=
|
||||
repository:
|
||||
@@ -178,8 +181,8 @@ clone-plugins:
|
||||
@echo "Using PLUGIN_GIT_DIR: $(PLUGIN_GIT_DIR)"
|
||||
@mkdir -p "$(PLUGIN_GIT_DIR)"
|
||||
@echo "Fetching repository list from GitHub..."
|
||||
@REPOS=$$(gh repo list kestra-io -L 1000 --json name | jq -r .[].name | sort | grep "^plugin-"); \
|
||||
for repo in $$REPOS; do \
|
||||
@REPOS=$(gh repo list kestra-io -L 1000 --json name | jq -r .[].name | sort | grep "^plugin-") \
|
||||
for repo in $$REPOS; do \
|
||||
if [[ $$repo == plugin-* ]]; then \
|
||||
if [ -d "$(PLUGIN_GIT_DIR)/$$repo" ]; then \
|
||||
echo "Skipping: $$repo (Already cloned)"; \
|
||||
@@ -191,22 +194,6 @@ clone-plugins:
|
||||
done
|
||||
@echo "Done!"
|
||||
|
||||
# Pull every plugins in main or master branch
|
||||
pull-plugins:
|
||||
@echo "🔍 Pulling repositories in '$(PLUGIN_GIT_DIR)'..."
|
||||
@for repo in "$(PLUGIN_GIT_DIR)"/*; do \
|
||||
if [ -d "$$repo/.git" ]; then \
|
||||
branch=$$(git -C "$$repo" rev-parse --abbrev-ref HEAD); \
|
||||
if [[ "$$branch" == "master" || "$$branch" == "main" ]]; then \
|
||||
echo "🔄 Pulling: $$(basename "$$repo") (branch: $$branch)"; \
|
||||
git -C "$$repo" pull; \
|
||||
else \
|
||||
echo "❌ Skipping: $$(basename "$$repo") (Not on master or main branch, currently on $$branch)"; \
|
||||
fi; \
|
||||
fi; \
|
||||
done
|
||||
@echo "✅ Done pulling!"
|
||||
|
||||
# Update all plugins jar
|
||||
build-plugins:
|
||||
@echo "🔍 Scanning repositories in '$(PLUGIN_GIT_DIR)'..."
|
||||
|
||||
@@ -65,6 +65,10 @@ Kestra is an open-source, event-driven orchestration platform that makes both **

## 🚀 Quick Start

### Try the Live Demo

Try Kestra with our [**Live Demo**](https://demo.kestra.io/ui/login?auto). No installation required!

### Get Started Locally in 5 Minutes

#### Launch Kestra in Docker
@@ -1,47 +0,0 @@
#!/bin/bash
set -e

# E2E main script that can be run on a dev computer or in the CI
# it will build the backend of the current git repo and the frontend
# create a docker image out of it
# run tests on this image

LOCAL_IMAGE_VERSION="local-e2e-$(date +%s)"

echo "Running E2E"
echo "Start time: $(date '+%Y-%m-%d %H:%M:%S')"
start_time=$(date +%s)

echo ""
echo "Building the image for this current repository"
make clean
make build-docker VERSION=$LOCAL_IMAGE_VERSION

end_time=$(date +%s)
elapsed=$(( end_time - start_time ))

echo ""
echo "building elapsed time: ${elapsed} seconds"
echo ""
echo "Start time: $(date '+%Y-%m-%d %H:%M:%S')"
start_time2=$(date +%s)

echo "cd ./ui"
cd ./ui
echo "npm i"
npm i

echo 'sh ./run-e2e-tests.sh --kestra-docker-image-to-test "kestra/kestra:$LOCAL_IMAGE_VERSION"'
./run-e2e-tests.sh --kestra-docker-image-to-test "kestra/kestra:$LOCAL_IMAGE_VERSION"

end_time2=$(date +%s)
elapsed2=$(( end_time2 - start_time2 ))
echo ""
echo "Tests elapsed time: ${elapsed2} seconds"
echo ""
total_elapsed=$(( elapsed + elapsed2 ))
echo "Total elapsed time: ${total_elapsed} seconds"
echo ""

exit 0
383  build.gradle
@@ -16,12 +16,12 @@ plugins {
|
||||
id "java"
|
||||
id 'java-library'
|
||||
id "idea"
|
||||
id "com.gradleup.shadow" version "8.3.9"
|
||||
id "com.gradleup.shadow" version "8.3.6"
|
||||
id "application"
|
||||
|
||||
// test
|
||||
id "com.adarshr.test-logger" version "4.0.0"
|
||||
id "org.sonarqube" version "6.3.1.5724"
|
||||
id "org.sonarqube" version "6.0.1.5171"
|
||||
id 'jacoco-report-aggregation'
|
||||
|
||||
// helper
|
||||
@@ -31,13 +31,15 @@ plugins {
|
||||
id 'com.github.node-gradle.node' version '7.1.0'
|
||||
|
||||
// release
|
||||
id "io.github.gradle-nexus.publish-plugin" version "2.0.0"
|
||||
id 'net.researchgate.release' version '3.1.0'
|
||||
id "com.gorylenko.gradle-git-properties" version "2.5.2"
|
||||
id "com.gorylenko.gradle-git-properties" version "2.5.0"
|
||||
id 'signing'
|
||||
id "com.vanniktech.maven.publish" version "0.34.0"
|
||||
id 'ru.vyarus.pom' version '3.0.0' apply false
|
||||
id 'ru.vyarus.github-info' version '2.0.0' apply false
|
||||
|
||||
// OWASP dependency check
|
||||
id "org.owasp.dependencycheck" version "12.1.3" apply false
|
||||
id "org.owasp.dependencycheck" version "12.1.0" apply false
|
||||
}
|
||||
|
||||
idea {
|
||||
@@ -71,11 +73,6 @@ dependencies {
|
||||
* Dependencies
|
||||
**********************************************************************************************************************/
|
||||
allprojects {
|
||||
|
||||
tasks.withType(GenerateModuleMetadata).configureEach {
|
||||
suppressedValidationErrors.add('enforced-platform')
|
||||
}
|
||||
|
||||
if (it.name != 'platform') {
|
||||
group = "io.kestra"
|
||||
|
||||
@@ -148,7 +145,6 @@ allprojects {
|
||||
implementation group: 'com.fasterxml.jackson.module', name: 'jackson-module-parameter-names'
|
||||
implementation group: 'com.fasterxml.jackson.datatype', name: 'jackson-datatype-guava'
|
||||
implementation group: 'com.fasterxml.jackson.datatype', name: 'jackson-datatype-jsr310'
|
||||
implementation group: 'com.fasterxml.uuid', name: 'java-uuid-generator'
|
||||
|
||||
// kestra
|
||||
implementation group: 'com.devskiller.friendly-id', name: 'friendly-id'
|
||||
@@ -169,7 +165,7 @@ allprojects {
|
||||
* Test
|
||||
**********************************************************************************************************************/
|
||||
subprojects {
|
||||
if (it.name != 'platform' && it.name != 'jmh-benchmarks') {
|
||||
if (it.name != 'platform') {
|
||||
apply plugin: "com.adarshr.test-logger"
|
||||
|
||||
java {
|
||||
@@ -200,75 +196,36 @@ subprojects {
|
||||
testImplementation 'org.hamcrest:hamcrest'
|
||||
testImplementation 'org.hamcrest:hamcrest-library'
|
||||
testImplementation 'org.exparity:hamcrest-date'
|
||||
|
||||
//assertj
|
||||
testImplementation 'org.assertj:assertj-core'
|
||||
}
|
||||
|
||||
def commonTestConfig = { Test t ->
|
||||
// set Xmx for test workers
|
||||
t.maxHeapSize = '4g'
|
||||
|
||||
// configure en_US default locale for tests
|
||||
t.systemProperty 'user.language', 'en'
|
||||
t.systemProperty 'user.country', 'US'
|
||||
|
||||
t.environment 'SECRET_MY_SECRET', "{\"secretKey\":\"secretValue\"}".bytes.encodeBase64().toString()
|
||||
t.environment 'SECRET_NEW_LINE', "cGFzc3dvcmR2ZXJ5dmVyeXZleXJsb25ncGFzc3dvcmR2ZXJ5dmVyeXZleXJsb25ncGFzc3dvcmR2\nZXJ5dmVyeXZleXJsb25ncGFzc3dvcmR2ZXJ5dmVyeXZleXJsb25ncGFzc3dvcmR2ZXJ5dmVyeXZl\neXJsb25n"
|
||||
t.environment 'SECRET_WEBHOOK_KEY', "secretKey".bytes.encodeBase64().toString()
|
||||
t.environment 'SECRET_NON_B64_SECRET', "some secret value"
|
||||
t.environment 'SECRET_PASSWORD', "cGFzc3dvcmQ="
|
||||
t.environment 'ENV_TEST1', "true"
|
||||
t.environment 'ENV_TEST2', "Pass by env"
|
||||
|
||||
}
|
||||
|
||||
tasks.register('flakyTest', Test) { Test t ->
|
||||
group = 'verification'
|
||||
description = 'Runs tests tagged @Flaky but does not fail the build.'
|
||||
|
||||
useJUnitPlatform {
|
||||
includeTags 'flaky'
|
||||
}
|
||||
ignoreFailures = true
|
||||
|
||||
reports {
|
||||
junitXml.required = true
|
||||
junitXml.outputPerTestCase = true
|
||||
junitXml.mergeReruns = true
|
||||
junitXml.includeSystemErrLog = true
|
||||
junitXml.outputLocation = layout.buildDirectory.dir("test-results/flakyTest")
|
||||
}
|
||||
commonTestConfig(t)
|
||||
|
||||
}
|
||||
|
||||
test {
|
||||
useJUnitPlatform {
|
||||
excludeTags 'flaky'
|
||||
}
|
||||
reports {
|
||||
junitXml.required = true
|
||||
junitXml.outputPerTestCase = true
|
||||
junitXml.mergeReruns = true
|
||||
junitXml.includeSystemErrLog = true
|
||||
junitXml.outputLocation = layout.buildDirectory.dir("test-results/test")
|
||||
}
|
||||
commonTestConfig(it)
|
||||
useJUnitPlatform()
|
||||
|
||||
// set Xmx for test workers
|
||||
maxHeapSize = '4g'
|
||||
|
||||
finalizedBy(tasks.named('flakyTest'))
|
||||
// configure en_US default locale for tests
|
||||
systemProperty 'user.language', 'en'
|
||||
systemProperty 'user.country', 'US'
|
||||
|
||||
environment 'SECRET_MY_SECRET', "{\"secretKey\":\"secretValue\"}".bytes.encodeBase64().toString()
|
||||
environment 'SECRET_NEW_LINE', "cGFzc3dvcmR2ZXJ5dmVyeXZleXJsb25ncGFzc3dvcmR2ZXJ5dmVyeXZleXJsb25ncGFzc3dvcmR2\nZXJ5dmVyeXZleXJsb25ncGFzc3dvcmR2ZXJ5dmVyeXZleXJsb25ncGFzc3dvcmR2ZXJ5dmVyeXZl\neXJsb25n"
|
||||
environment 'SECRET_WEBHOOK_KEY', "secretKey".bytes.encodeBase64().toString()
|
||||
environment 'SECRET_NON_B64_SECRET', "some secret value"
|
||||
environment 'SECRET_PASSWORD', "cGFzc3dvcmQ="
|
||||
environment 'KESTRA_TEST1', "true"
|
||||
environment 'KESTRA_TEST2', "Pass by env"
|
||||
}
|
||||
|
||||
testlogger {
|
||||
theme = 'mocha-parallel'
|
||||
showExceptions = true
|
||||
showFullStackTraces = true
|
||||
showCauses = true
|
||||
slowThreshold = 2000
|
||||
showStandardStreams = true
|
||||
showPassedStandardStreams = false
|
||||
showSkippedStandardStreams = true
|
||||
theme 'mocha-parallel'
|
||||
showExceptions true
|
||||
showFullStackTraces true
|
||||
showCauses true
|
||||
slowThreshold 2000
|
||||
showStandardStreams true
|
||||
showPassedStandardStreams false
|
||||
showSkippedStandardStreams true
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -308,7 +265,7 @@ subprojects {
|
||||
* Allure Reports
|
||||
**********************************************************************************************************************/
|
||||
subprojects {
|
||||
if (it.name != 'platform' && it.name != 'jmh-benchmarks') {
|
||||
if (it.name != 'platform') {
|
||||
dependencies {
|
||||
testImplementation platform("io.qameta.allure:allure-bom")
|
||||
testImplementation "io.qameta.allure:allure-junit5"
|
||||
@@ -322,7 +279,7 @@ subprojects {
|
||||
}
|
||||
|
||||
dependencies {
|
||||
agent "org.aspectj:aspectjweaver:1.9.24"
|
||||
agent "org.aspectj:aspectjweaver:1.9.23"
|
||||
}
|
||||
|
||||
test {
|
||||
@@ -335,7 +292,7 @@ subprojects {
|
||||
* Jacoco
|
||||
**********************************************************************************************************************/
|
||||
subprojects {
|
||||
if (it.name != 'platform' && it.name != 'jmh-benchmarks') {
|
||||
if (it.name != 'platform') {
|
||||
apply plugin: 'jacoco'
|
||||
|
||||
test {
|
||||
@@ -446,7 +403,7 @@ jar {
|
||||
shadowJar {
|
||||
archiveClassifier.set(null)
|
||||
mergeServiceFiles()
|
||||
zip64 = true
|
||||
zip64 true
|
||||
}
|
||||
|
||||
distZip.dependsOn shadowJar
|
||||
@@ -454,7 +411,6 @@ distTar.dependsOn shadowJar
|
||||
startScripts.dependsOn shadowJar
|
||||
startShadowScripts.dependsOn jar
|
||||
shadowJar.dependsOn 'ui:assembleFrontend'
|
||||
shadowJar.dependsOn jar
|
||||
|
||||
/**********************************************************************************************************************\
|
||||
* Executable Jar
|
||||
@@ -463,8 +419,8 @@ def executableDir = layout.buildDirectory.dir("executable")
|
||||
def executable = layout.buildDirectory.file("executable/${project.name}-${project.version}").get().asFile
|
||||
|
||||
tasks.register('writeExecutableJar') {
|
||||
group = "build"
|
||||
description = "Write an executable jar from shadow jar"
|
||||
group "build"
|
||||
description "Write an executable jar from shadow jar"
|
||||
dependsOn = [shadowJar]
|
||||
|
||||
final shadowJarFile = tasks.shadowJar.outputs.files.singleFile
|
||||
@@ -490,8 +446,8 @@ tasks.register('writeExecutableJar') {
|
||||
}
|
||||
|
||||
tasks.register('executableJar', Zip) {
|
||||
group = "build"
|
||||
description = "Zip the executable jar"
|
||||
group "build"
|
||||
description "Zip the executable jar"
|
||||
dependsOn = [writeExecutableJar]
|
||||
|
||||
archiveFileName = "${project.name}-${project.version}.zip"
|
||||
@@ -513,174 +469,115 @@ tasks.register('runLocal', JavaExec) {
|
||||
args 'server', 'local', '--plugins', 'local/plugins'
|
||||
}
|
||||
|
||||
tasks.register('runStandalone', JavaExec) {
|
||||
group = "application"
|
||||
description = "Run Kestra as server local"
|
||||
classpath = project(":cli").sourceSets.main.runtimeClasspath
|
||||
mainClass = mainClassName
|
||||
environment 'MICRONAUT_ENVIRONMENTS', 'override'
|
||||
args 'server', 'standalone', '--plugins', 'local/plugins'
|
||||
}
|
||||
|
||||
/**********************************************************************************************************************\
|
||||
* Publish
|
||||
**********************************************************************************************************************/
|
||||
subprojects {subProject ->
|
||||
|
||||
if (subProject.name != 'jmh-benchmarks' && subProject.name != rootProject.name) {
|
||||
apply plugin: 'signing'
|
||||
apply plugin: "com.vanniktech.maven.publish"
|
||||
|
||||
javadoc {
|
||||
options {
|
||||
locale = 'en_US'
|
||||
encoding = 'UTF-8'
|
||||
addStringOption("Xdoclint:none", "-quiet")
|
||||
}
|
||||
}
|
||||
|
||||
tasks.register('sourcesJar', Jar) {
|
||||
dependsOn = [':core:copyGradleProperties']
|
||||
dependsOn = [':ui:assembleFrontend']
|
||||
archiveClassifier.set('sources')
|
||||
from sourceSets.main.allSource
|
||||
}
|
||||
sourcesJar.dependsOn ':core:copyGradleProperties'
|
||||
sourcesJar.dependsOn ':ui:assembleFrontend'
|
||||
|
||||
tasks.register('javadocJar', Jar) {
|
||||
archiveClassifier.set('javadoc')
|
||||
from javadoc
|
||||
}
|
||||
|
||||
tasks.register('testsJar', Jar) {
|
||||
group = 'build'
|
||||
description = 'Build the tests jar'
|
||||
|
||||
archiveClassifier.set('tests')
|
||||
if (sourceSets.matching { it.name == 'test'}) {
|
||||
from sourceSets.named('test').get().output
|
||||
}
|
||||
}
|
||||
|
||||
//These modules should not be published
|
||||
def unpublishedModules = ["jdbc-mysql", "jdbc-postgres", "webserver"]
|
||||
if (subProject.name in unpublishedModules){
|
||||
return
|
||||
}
|
||||
|
||||
mavenPublishing {
|
||||
publishToMavenCentral(true)
|
||||
signAllPublications()
|
||||
|
||||
coordinates(
|
||||
"${rootProject.group}",
|
||||
subProject.name == "cli" ? rootProject.name : subProject.name,
|
||||
"${rootProject.version}"
|
||||
)
|
||||
|
||||
pom {
|
||||
name = project.name
|
||||
description = "${project.group}:${project.name}:${rootProject.version}"
|
||||
url = "https://github.com/kestra-io/${rootProject.name}"
|
||||
|
||||
licenses {
|
||||
license {
|
||||
name = "The Apache License, Version 2.0"
|
||||
url = "http://www.apache.org/licenses/LICENSE-2.0.txt"
|
||||
}
|
||||
}
|
||||
developers {
|
||||
developer {
|
||||
id = "tchiotludo"
|
||||
name = "Ludovic Dehon"
|
||||
email = "ldehon@kestra.io"
|
||||
}
|
||||
}
|
||||
scm {
|
||||
connection = 'scm:git:'
|
||||
url = "https://github.com/kestra-io/${rootProject.name}"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
afterEvaluate {
|
||||
publishing {
|
||||
publications {
|
||||
withType(MavenPublication).configureEach { publication ->
|
||||
|
||||
if (subProject.name == "platform") {
|
||||
// Clear all artifacts except the BOM
|
||||
publication.artifacts.clear()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (subProject.name == 'cli') {
|
||||
|
||||
/* Make sure the special publication is wired *after* every plugin */
|
||||
subProject.afterEvaluate {
|
||||
/* 1. Remove the default java component so Gradle stops expecting
|
||||
the standard cli-*.jar, sources, javadoc, etc. */
|
||||
components.removeAll { it.name == "java" }
|
||||
|
||||
/* 2. Replace the publication’s artifacts with shadow + exec */
|
||||
publishing.publications.withType(MavenPublication).configureEach { pub ->
|
||||
pub.artifacts.clear()
|
||||
|
||||
// main shadow JAR built at root
|
||||
pub.artifact(rootProject.tasks.named("shadowJar").get()) {
|
||||
extension = "jar"
|
||||
}
|
||||
|
||||
// executable ZIP built at root
|
||||
pub.artifact(rootProject.tasks.named("executableJar").get().archiveFile) {
|
||||
classifier = "exec"
|
||||
extension = "zip"
|
||||
}
|
||||
pub.artifact(tasks.named("sourcesJar").get())
|
||||
pub.artifact(tasks.named("javadocJar").get())
|
||||
|
||||
}
|
||||
|
||||
/* 3. Disable Gradle-module metadata for this publication to
|
||||
avoid the “artifact removed from java component” error. */
|
||||
tasks.withType(GenerateModuleMetadata).configureEach { it.enabled = false }
|
||||
|
||||
/* 4. Make every publish task in :cli wait for the two artifacts */
|
||||
tasks.matching { it.name.startsWith("publish") }.configureEach {
|
||||
dependsOn rootProject.tasks.named("shadowJar")
|
||||
dependsOn rootProject.tasks.named("executableJar")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (subProject.name != 'platform' && subProject.name != 'cli') {
|
||||
// only if a test source set actually exists (avoids empty artifacts)
|
||||
def hasTests = subProject.extensions.findByName('sourceSets')?.findByName('test') != null

if (hasTests) {
// wire the artifact onto every Maven publication of this subproject
publishing {
publications {
withType(MavenPublication).configureEach { pub ->
// keep the normal java component + sources/javadoc already configured
pub.artifact(subProject.tasks.named('testsJar').get())
}
}
}

// make sure publish tasks build the tests jar first
tasks.matching { it.name.startsWith('publish') }.configureEach {
dependsOn subProject.tasks.named('testsJar')
}
}
nexusPublishing {
repositoryDescription = "${project.group}:${rootProject.name}:${project.version}"
useStaging = !project.version.endsWith("-SNAPSHOT")
repositories {
sonatype {
nexusUrl.set(uri("https://s01.oss.sonatype.org/service/local/"))
snapshotRepositoryUrl.set(uri("https://s01.oss.sonatype.org/content/repositories/snapshots/"))
}
}
}

subprojects {
apply plugin: "maven-publish"
apply plugin: 'signing'
apply plugin: 'ru.vyarus.pom'
apply plugin: 'ru.vyarus.github-info'

javadoc {
options {
locale = 'en_US'
encoding = 'UTF-8'
addStringOption("Xdoclint:none", "-quiet")
}
}

tasks.register('sourcesJar', Jar) {
dependsOn = [':core:copyGradleProperties']
dependsOn = [':ui:assembleFrontend']
archiveClassifier.set('sources')
from sourceSets.main.allSource
}
sourcesJar.dependsOn ':core:copyGradleProperties'
sourcesJar.dependsOn ':ui:assembleFrontend'

tasks.register('javadocJar', Jar) {
archiveClassifier.set('javadoc')
from javadoc
}

tasks.register('testsJar', Jar) {
group = 'build'
description = 'Build the tests jar'

archiveClassifier.set('tests')
if (sourceSets.matching { it.name == 'test'}) {
from sourceSets.named('test').get().output
}
}

github {
user 'kestra-io'
license 'Apache'
repository 'kestra'
site 'https://kestra.io'
}

maven.pom {
description = 'The modern, scalable orchestrator & scheduler open source platform'

developers {
developer {
id = "tchiotludo"
name = "Ludovic Dehon"
}
}
}

publishing {
publications {
sonatypePublication(MavenPublication) {
version project.version

if (project.name.contains('cli')) {
groupId "io.kestra"
artifactId "kestra"

artifact shadowJar
artifact executableJar
} else if (project.name.contains('platform')){
groupId project.group
artifactId project.name
} else {
from components.java

groupId project.group
artifactId project.name

artifact sourcesJar
artifact javadocJar
artifact testsJar
}
}
}
}

signing {
// only sign JARs that we publish to Sonatype
required { gradle.taskGraph.hasTask("publishSonatypePublicationPublicationToSonatypeRepository") }
sign publishing.publications.sonatypePublication
}

tasks.withType(GenerateModuleMetadata).configureEach {
// Suppression this validation error as we want to enforce the Kestra platform
suppressedValidationErrors.add('enforced-platform')
}
}


/**********************************************************************************************************************\

@@ -33,12 +33,8 @@ dependencies {

implementation project(":storage-local")

// Kestra server components
implementation project(":executor")
implementation project(":scheduler")
implementation project(":webserver")
implementation project(":worker")

//test
testImplementation "org.wiremock:wiremock-jetty12"
}
testImplementation "org.wiremock:wiremock"
}
@@ -14,15 +14,16 @@ import io.micronaut.http.netty.body.NettyJsonHandler;
import io.micronaut.json.JsonMapper;
import jakarta.inject.Inject;
import jakarta.inject.Named;
import lombok.Builder;
import lombok.Value;
import lombok.extern.jackson.Jacksonized;
import picocli.CommandLine;

import java.net.URISyntaxException;
import java.net.URL;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import lombok.Builder;
import lombok.Value;
import lombok.extern.jackson.Jacksonized;
import picocli.CommandLine;

public abstract class AbstractApiCommand extends AbstractCommand {
@CommandLine.Option(names = {"--server"}, description = "Kestra server url", defaultValue = "http://localhost:8080")
@@ -34,7 +35,7 @@ public abstract class AbstractApiCommand extends AbstractCommand {
@CommandLine.Option(names = {"--user"}, paramLabel = "<user:password>", description = "Server user and password")
protected String user;

@CommandLine.Option(names = {"--tenant"}, description = "Tenant identifier (EE only)")
@CommandLine.Option(names = {"--tenant"}, description = "Tenant identifier (EE only, when multi-tenancy is enabled)")
protected String tenantId;

@CommandLine.Option(names = {"--api-token"}, description = "API Token (EE only).")
@@ -45,18 +46,8 @@ public abstract class AbstractApiCommand extends AbstractCommand {
@Nullable
private HttpClientConfiguration httpClientConfiguration;

/**
* {@inheritDoc}
*/
protected boolean loadExternalPlugins() {
return false;
}

protected DefaultHttpClient client() throws URISyntaxException {
DefaultHttpClient defaultHttpClient = DefaultHttpClient.builder()
.uri(server.toURI())
.configuration(httpClientConfiguration != null ? httpClientConfiguration : new DefaultHttpClientConfiguration())
.build();
DefaultHttpClient defaultHttpClient = new DefaultHttpClient(server.toURI(), httpClientConfiguration != null ? httpClientConfiguration : new DefaultHttpClientConfiguration());
MessageBodyHandlerRegistry defaultHandlerRegistry = defaultHttpClient.getHandlerRegistry();
if (defaultHandlerRegistry instanceof ContextlessMessageBodyHandlerRegistry modifiableRegistry) {
modifiableRegistry.add(MediaType.TEXT_JSON_TYPE, new NettyJsonHandler<>(JsonMapper.createDefault()));
@@ -84,12 +75,12 @@ public abstract class AbstractApiCommand extends AbstractCommand {
return request;
}

protected String apiUri(String path, String tenantId) {
protected String apiUri(String path) {
if (path == null || !path.startsWith("/")) {
throw new IllegalArgumentException("'path' must be non-null and start with '/'");
}

return "/api/v1/" + tenantId + path;
return tenantId == null ? "/api/v1" + path : "/api/v1/" + tenantId + path;
}

@Builder

@@ -40,7 +40,7 @@ import picocli.CommandLine.Option;
)
@Slf4j
@Introspected
public abstract class AbstractCommand implements Callable<Integer> {
abstract public class AbstractCommand implements Callable<Integer> {
@Inject
private ApplicationContext applicationContext;

@@ -93,7 +93,7 @@ public abstract class AbstractCommand implements Callable<Integer> {
this.startupHook.start(this);
}

if (pluginRegistryProvider != null && this.pluginsPath != null && loadExternalPlugins()) {
if (this.pluginsPath != null && loadExternalPlugins()) {
pluginRegistry = pluginRegistryProvider.get();
pluginRegistry.registerIfAbsent(pluginsPath);
@@ -1,6 +1,5 @@
package io.kestra.cli;

import io.kestra.cli.services.TenantIdSelectorService;
import io.kestra.core.models.validations.ModelValidator;
import io.kestra.core.models.validations.ValidateConstraintViolation;
import io.kestra.core.serializers.YamlParser;
@@ -10,7 +9,6 @@ import io.micronaut.http.MediaType;
import io.micronaut.http.MutableHttpRequest;
import io.micronaut.http.client.exceptions.HttpClientResponseException;
import io.micronaut.http.client.netty.DefaultHttpClient;
import jakarta.inject.Inject;
import picocli.CommandLine;

import java.io.IOException;
@@ -33,15 +31,6 @@ public abstract class AbstractValidateCommand extends AbstractApiCommand {
@CommandLine.Parameters(index = "0", description = "the directory containing files to check")
protected Path directory;

@Inject
private TenantIdSelectorService tenantService;

/** {@inheritDoc} **/
@Override
protected boolean loadExternalPlugins() {
return local;
}

public static void handleException(ConstraintViolationException e, String resource) {
stdErr("\t@|fg(red) Unable to parse {0} due to the following error(s):|@", resource);
e.getConstraintViolations()
@@ -79,9 +68,10 @@ public abstract class AbstractValidateCommand extends AbstractApiCommand {
}
}

// bug in micronaut, we can't inject ModelValidator, so we inject from implementation
// bug in micronaut, we can't inject YamlFlowParser & ModelValidator, so we inject from implementation
public Integer call(
Class<?> cls,
YamlParser yamlParser,
ModelValidator modelValidator,
Function<Object, String> identity,
Function<Object, List<String>> warningsFunction,
@@ -98,7 +88,7 @@ public abstract class AbstractValidateCommand extends AbstractApiCommand {
.filter(YamlParser::isValidExtension)
.forEach(path -> {
try {
Object parse = YamlParser.parse(path.toFile(), cls);
Object parse = yamlParser.parse(path.toFile(), cls);
modelValidator.validate(parse);
stdOut("@|green \u2713|@ - " + identity.apply(parse));
List<String> warnings = warningsFunction.apply(parse);
@@ -117,7 +107,7 @@ public abstract class AbstractValidateCommand extends AbstractApiCommand {

try(DefaultHttpClient client = client()) {
MutableHttpRequest<String> request = HttpRequest
.POST(apiUri("/flows/validate", tenantService.getTenantIdAndAllowEETenants(tenantId)), body).contentType(MediaType.APPLICATION_YAML);
.POST(apiUri("/flows/validate"), body).contentType(MediaType.APPLICATION_YAML);

List<ValidateConstraintViolation> validations = client.toBlocking().retrieve(
this.requestOptions(request),

@@ -2,7 +2,6 @@ package io.kestra.cli;

import io.kestra.cli.commands.configs.sys.ConfigCommand;
import io.kestra.cli.commands.flows.FlowCommand;
import io.kestra.cli.commands.migrations.MigrationCommand;
import io.kestra.cli.commands.namespaces.NamespaceCommand;
import io.kestra.cli.commands.plugins.PluginCommand;
import io.kestra.cli.commands.servers.ServerCommand;
@@ -43,7 +42,6 @@ import java.util.concurrent.Callable;
SysCommand.class,
ConfigCommand.class,
NamespaceCommand.class,
MigrationCommand.class,
}
)
@Introspected
@@ -66,14 +64,8 @@ public class App implements Callable<Integer> {
ApplicationContext applicationContext = App.applicationContext(cls, args);

// Call Picocli command
int exitCode = 0;
try {
exitCode = new CommandLine(cls, new MicronautFactory(applicationContext)).execute(args);
} catch (CommandLine.InitializationException e){
System.err.println("Could not initialize picoli ComandLine, err: " + e.getMessage());
e.printStackTrace();
exitCode = 1;
}
int exitCode = new CommandLine(cls, new MicronautFactory(applicationContext)).execute(args);

applicationContext.close();

// exit code
@@ -1,4 +1,4 @@
package io.kestra.core.validations;
package io.kestra.cli;

import io.micronaut.context.annotation.Context;
import io.micronaut.context.annotation.Requires;
@@ -2,13 +2,11 @@ package io.kestra.cli.commands.flows;

import io.kestra.cli.AbstractApiCommand;
import io.kestra.cli.AbstractValidateCommand;
import io.kestra.cli.services.TenantIdSelectorService;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.MediaType;
import io.micronaut.http.MutableHttpRequest;
import io.micronaut.http.client.exceptions.HttpClientResponseException;
import io.micronaut.http.client.netty.DefaultHttpClient;
import jakarta.inject.Inject;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;

@@ -25,9 +23,6 @@ public class FlowCreateCommand extends AbstractApiCommand {
@CommandLine.Parameters(index = "0", description = "The file containing the flow")
public Path flowFile;

@Inject
private TenantIdSelectorService tenantService;

@SuppressWarnings("deprecation")
@Override
public Integer call() throws Exception {
@@ -39,7 +34,7 @@ public class FlowCreateCommand extends AbstractApiCommand {

try(DefaultHttpClient client = client()) {
MutableHttpRequest<String> request = HttpRequest
.POST(apiUri("/flows", tenantService.getTenantId(tenantId)), body).contentType(MediaType.APPLICATION_YAML);
.POST(apiUri("/flows"), body).contentType(MediaType.APPLICATION_YAML);

client.toBlocking().retrieve(
this.requestOptions(request),
@@ -2,12 +2,10 @@ package io.kestra.cli.commands.flows;

import io.kestra.cli.AbstractApiCommand;
import io.kestra.cli.AbstractValidateCommand;
import io.kestra.cli.services.TenantIdSelectorService;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.MutableHttpRequest;
import io.micronaut.http.client.exceptions.HttpClientResponseException;
import io.micronaut.http.client.netty.DefaultHttpClient;
import jakarta.inject.Inject;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;

@@ -25,9 +23,6 @@ public class FlowDeleteCommand extends AbstractApiCommand {
@CommandLine.Parameters(index = "1", description = "The ID of the flow")
public String id;

@Inject
private TenantIdSelectorService tenantService;

@SuppressWarnings("deprecation")
@Override
public Integer call() throws Exception {
@@ -35,7 +30,7 @@ public class FlowDeleteCommand extends AbstractApiCommand {

try(DefaultHttpClient client = client()) {
MutableHttpRequest<String> request = HttpRequest
.DELETE(apiUri("/flows/" + namespace + "/" + id, tenantService.getTenantId(tenantId)));
.DELETE(apiUri("/flows/" + namespace + "/" + id ));

client.toBlocking().exchange(
this.requestOptions(request)

@@ -29,7 +29,8 @@ public class FlowDotCommand extends AbstractCommand {
public Integer call() throws Exception {
super.call();

Flow flow = YamlParser.parse(file.toFile(), Flow.class);
YamlParser parser = applicationContext.getBean(YamlParser.class);
Flow flow = parser.parse(file.toFile(), Flow.class);

GraphCluster graph = GraphUtils.of(flow, null);


@@ -20,6 +20,9 @@ public class FlowExpandCommand extends AbstractCommand {
@CommandLine.Parameters(index = "0", description = "The flow file to expand")
private Path file;

@Inject
private YamlParser yamlParser;

@Inject
private ModelValidator modelValidator;

@@ -28,7 +31,7 @@ public class FlowExpandCommand extends AbstractCommand {
super.call();
stdErr("Warning, this functionality is deprecated and will be removed at some point.");
String content = IncludeHelperExpander.expand(Files.readString(file), file.getParent());
Flow flow = YamlParser.parse(content, Flow.class);
Flow flow = yamlParser.parse(content, Flow.class);
modelValidator.validate(flow);
stdOut(content);
return 0;
@@ -2,7 +2,7 @@ package io.kestra.cli.commands.flows;

import io.kestra.cli.AbstractApiCommand;
import io.kestra.cli.AbstractValidateCommand;
import io.kestra.cli.services.TenantIdSelectorService;
import io.micronaut.context.ApplicationContext;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.HttpResponse;
import io.micronaut.http.MediaType;
@@ -25,8 +25,9 @@ import java.nio.file.Path;
public class FlowExportCommand extends AbstractApiCommand {
private static final String DEFAULT_FILE_NAME = "flows.zip";

// @FIXME: Keep it for bug in micronaut that need to have inject on top level command to inject on abstract classe
@Inject
private TenantIdSelectorService tenantService;
private ApplicationContext applicationContext;

@CommandLine.Option(names = {"--namespace"}, description = "The namespace of flows to export")
public String namespace;
@@ -40,7 +41,7 @@ public class FlowExportCommand extends AbstractApiCommand {

try(DefaultHttpClient client = client()) {
MutableHttpRequest<Object> request = HttpRequest
.GET(apiUri("/flows/export/by-query", tenantService.getTenantId(tenantId)) + (namespace != null ? "?namespace=" + namespace : ""))
.GET(apiUri("/flows/export/by-query") + (namespace != null ? "?namespace=" + namespace : ""))
.accept(MediaType.APPLICATION_OCTET_STREAM);

HttpResponse<byte[]> response = client.toBlocking().exchange(this.requestOptions(request), byte[].class);

@@ -1,14 +1,13 @@
package io.kestra.cli.commands.flows;

import com.google.common.collect.ImmutableMap;
import io.kestra.cli.AbstractApiCommand;
import io.kestra.cli.services.TenantIdSelectorService;
import io.kestra.cli.AbstractCommand;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.repositories.LocalFlowRepositoryLoader;
import io.kestra.core.runners.FlowInputOutput;
import io.kestra.core.runners.RunnerUtils;
import io.kestra.cli.StandAloneRunner;
import io.kestra.core.runners.StandAloneRunner;
import io.micronaut.context.ApplicationContext;
import jakarta.inject.Inject;
import jakarta.validation.ConstraintViolationException;
@@ -31,7 +30,7 @@ import java.util.concurrent.TimeoutException;
description = "Test a flow"
)
@Slf4j
public class FlowTestCommand extends AbstractApiCommand {
public class FlowTestCommand extends AbstractCommand {
@Inject
private ApplicationContext applicationContext;

@@ -72,11 +71,11 @@ public class FlowTestCommand extends AbstractApiCommand {
public Integer call() throws Exception {
super.call();

StandAloneRunner runner = applicationContext.getBean(StandAloneRunner.class);
LocalFlowRepositoryLoader repositoryLoader = applicationContext.getBean(LocalFlowRepositoryLoader.class);
FlowRepositoryInterface flowRepository = applicationContext.getBean(FlowRepositoryInterface.class);
FlowInputOutput flowInputOutput = applicationContext.getBean(FlowInputOutput.class);
RunnerUtils runnerUtils = applicationContext.getBean(RunnerUtils.class);
TenantIdSelectorService tenantService = applicationContext.getBean(TenantIdSelectorService.class);

Map<String, Object> inputs = new HashMap<>();

@@ -88,9 +87,9 @@ public class FlowTestCommand extends AbstractApiCommand {
inputs.put(this.inputs.get(i), this.inputs.get(i+1));
}

try (StandAloneRunner runner = applicationContext.createBean(StandAloneRunner.class);){
try {
runner.run();
repositoryLoader.load(tenantService.getTenantId(tenantId), file.toFile());
repositoryLoader.load(file.toFile());

List<Flow> all = flowRepository.findAllForAllTenants();
if (all.size() != 1) {
@@ -102,6 +101,8 @@ public class FlowTestCommand extends AbstractApiCommand {
(flow, execution) -> flowInputOutput.readExecutionInputs(flow, execution, inputs),
Duration.ofHours(1)
);

runner.close();
} catch (ConstraintViolationException e) {
throw new CommandLine.ParameterException(this.spec.commandLine(), e.getMessage());
} catch (IOException | TimeoutException e) {
@@ -2,13 +2,11 @@ package io.kestra.cli.commands.flows;

import io.kestra.cli.AbstractApiCommand;
import io.kestra.cli.AbstractValidateCommand;
import io.kestra.cli.services.TenantIdSelectorService;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.MediaType;
import io.micronaut.http.MutableHttpRequest;
import io.micronaut.http.client.exceptions.HttpClientResponseException;
import io.micronaut.http.client.netty.DefaultHttpClient;
import jakarta.inject.Inject;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;

@@ -31,9 +29,6 @@ public class FlowUpdateCommand extends AbstractApiCommand {
@CommandLine.Parameters(index = "2", description = "The ID of the flow")
public String id;

@Inject
private TenantIdSelectorService tenantService;

@SuppressWarnings("deprecation")
@Override
public Integer call() throws Exception {
@@ -45,7 +40,7 @@ public class FlowUpdateCommand extends AbstractApiCommand {

try(DefaultHttpClient client = client()) {
MutableHttpRequest<String> request = HttpRequest
.PUT(apiUri("/flows/" + namespace + "/" + id, tenantService.getTenantId(tenantId)), body).contentType(MediaType.APPLICATION_YAML);
.PUT(apiUri("/flows/" + namespace + "/" + id ), body).contentType(MediaType.APPLICATION_YAML);

client.toBlocking().retrieve(
this.requestOptions(request),

@@ -2,7 +2,6 @@ package io.kestra.cli.commands.flows;

import io.kestra.cli.AbstractApiCommand;
import io.kestra.cli.AbstractValidateCommand;
import io.kestra.cli.services.TenantIdSelectorService;
import io.kestra.core.serializers.YamlParser;
import io.micronaut.core.type.Argument;
import io.micronaut.http.HttpRequest;
@@ -10,7 +9,6 @@ import io.micronaut.http.MediaType;
import io.micronaut.http.MutableHttpRequest;
import io.micronaut.http.client.exceptions.HttpClientResponseException;
import io.micronaut.http.client.netty.DefaultHttpClient;
import jakarta.inject.Inject;
import jakarta.validation.ConstraintViolationException;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
@@ -38,9 +36,6 @@ public class FlowUpdatesCommand extends AbstractApiCommand {
@CommandLine.Option(names = {"--namespace"}, description = "The parent namespace of the flows, if not set, every namespace are allowed.")
public String namespace;

@Inject
private TenantIdSelectorService tenantIdSelectorService;

@SuppressWarnings("deprecation")
@Override
public Integer call() throws Exception {
@@ -71,7 +66,7 @@ public class FlowUpdatesCommand extends AbstractApiCommand {
namespaceQuery = "&namespace=" + namespace;
}
MutableHttpRequest<String> request = HttpRequest
.POST(apiUri("/flows/bulk", tenantIdSelectorService.getTenantId(tenantId)) + "?allowNamespaceChild=true&delete=" + delete + namespaceQuery, body).contentType(MediaType.APPLICATION_YAML);
.POST(apiUri("/flows/bulk") + "?allowNamespaceChild=true&delete=" + delete + namespaceQuery, body).contentType(MediaType.APPLICATION_YAML);

List<UpdateResult> updated = client.toBlocking().retrieve(
this.requestOptions(request),
@@ -92,9 +87,4 @@ public class FlowUpdatesCommand extends AbstractApiCommand {

return 0;
}

@Override
protected boolean loadExternalPlugins() {
return false;
}
}
@@ -1,9 +1,9 @@
package io.kestra.cli.commands.flows;

import io.kestra.cli.AbstractValidateCommand;
import io.kestra.cli.services.TenantIdSelectorService;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.validations.ModelValidator;
import io.kestra.core.serializers.YamlParser;
import io.kestra.core.services.FlowService;
import jakarta.inject.Inject;
import picocli.CommandLine;
@@ -16,6 +16,8 @@ import java.util.List;
description = "Validate a flow"
)
public class FlowValidateCommand extends AbstractValidateCommand {
@Inject
private YamlParser yamlParser;

@Inject
private ModelValidator modelValidator;
@@ -23,29 +25,26 @@ public class FlowValidateCommand extends AbstractValidateCommand {
@Inject
private FlowService flowService;

@Inject
private TenantIdSelectorService tenantIdSelectorService;


@Override
public Integer call() throws Exception {
return this.call(
FlowWithSource.class,
Flow.class,
yamlParser,
modelValidator,
(Object object) -> {
FlowWithSource flow = (FlowWithSource) object;
Flow flow = (Flow) object;
return flow.getNamespace() + " / " + flow.getId();
},
(Object object) -> {
FlowWithSource flow = (FlowWithSource) object;
Flow flow = (Flow) object;
List<String> warnings = new ArrayList<>();
warnings.addAll(flowService.deprecationPaths(flow).stream().map(deprecation -> deprecation + " is deprecated").toList());
warnings.addAll(flowService.warnings(flow, tenantIdSelectorService.getTenantIdAndAllowEETenants(tenantId)));
warnings.addAll(flowService.warnings(flow, this.tenantId));
return warnings;
},
(Object object) -> {
FlowWithSource flow = (FlowWithSource) object;
return flowService.relocations(flow.sourceOrGenerateIfNull()).stream().map(relocation -> relocation.from() + " is replaced by " + relocation.to()).toList();
Flow flow = (Flow) object;
return flowService.relocations(flow.generateSource()).stream().map(relocation -> relocation.from() + " is replaced by " + relocation.to()).toList();
}
);
}
@@ -3,7 +3,6 @@ package io.kestra.cli.commands.flows.namespaces;
|
||||
import io.kestra.cli.AbstractValidateCommand;
|
||||
import io.kestra.cli.commands.AbstractServiceNamespaceUpdateCommand;
|
||||
import io.kestra.cli.commands.flows.IncludeHelperExpander;
|
||||
import io.kestra.cli.services.TenantIdSelectorService;
|
||||
import io.kestra.core.serializers.YamlParser;
|
||||
import io.micronaut.core.type.Argument;
|
||||
import io.micronaut.http.HttpRequest;
|
||||
@@ -28,13 +27,12 @@ import java.util.List;
|
||||
)
|
||||
@Slf4j
|
||||
public class FlowNamespaceUpdateCommand extends AbstractServiceNamespaceUpdateCommand {
|
||||
@Inject
|
||||
public YamlParser yamlParser;
|
||||
|
||||
@CommandLine.Option(names = {"--override-namespaces"}, negatable = true, description = "Replace namespace of all flows by the one provided")
|
||||
public boolean override = false;
|
||||
|
||||
@Inject
|
||||
private TenantIdSelectorService tenantService;
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
@@ -64,7 +62,7 @@ public class FlowNamespaceUpdateCommand extends AbstractServiceNamespaceUpdateCo
|
||||
}
|
||||
try(DefaultHttpClient client = client()) {
|
||||
MutableHttpRequest<String> request = HttpRequest
|
||||
.POST(apiUri("/flows/", tenantService.getTenantIdAndAllowEETenants(tenantId)) + namespace + "?delete=" + delete, body).contentType(MediaType.APPLICATION_YAML);
|
||||
.POST(apiUri("/flows/") + namespace + "?delete=" + delete, body).contentType(MediaType.APPLICATION_YAML);
|
||||
|
||||
List<UpdateResult> updated = client.toBlocking().retrieve(
|
||||
this.requestOptions(request),
|
||||
|
||||
@@ -1,29 +0,0 @@
|
||||
package io.kestra.cli.commands.migrations;
|
||||
|
||||
import io.kestra.cli.AbstractCommand;
|
||||
import io.kestra.cli.App;
|
||||
import io.micronaut.configuration.picocli.PicocliRunner;
|
||||
import lombok.SneakyThrows;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import picocli.CommandLine;
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "migrate",
|
||||
description = "handle migrations",
|
||||
mixinStandardHelpOptions = true,
|
||||
subcommands = {
|
||||
TenantMigrationCommand.class,
|
||||
}
|
||||
)
|
||||
@Slf4j
|
||||
public class MigrationCommand extends AbstractCommand {
|
||||
@SneakyThrows
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
|
||||
PicocliRunner.call(App.class, "migrate", "--help");
|
||||
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
@@ -1,49 +0,0 @@
|
||||
package io.kestra.cli.commands.migrations;
|
||||
|
||||
import io.kestra.cli.AbstractCommand;
|
||||
import io.kestra.core.repositories.TenantMigrationInterface;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import jakarta.inject.Inject;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import picocli.CommandLine;
|
||||
import picocli.CommandLine.Option;
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "default-tenant",
|
||||
description = "migrate every elements from no tenant to the main tenant"
|
||||
)
|
||||
@Slf4j
|
||||
public class TenantMigrationCommand extends AbstractCommand {
|
||||
@Inject
|
||||
private ApplicationContext applicationContext;
|
||||
|
||||
@Option(names = "--tenant-id", description = "tenant identifier")
|
||||
String tenantId;
|
||||
|
||||
@Option(names = "--tenant-name", description = "tenant name")
|
||||
String tenantName;
|
||||
|
||||
@Option(names = "--dry-run", description = "Preview only, do not update")
|
||||
boolean dryRun;
|
||||
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
|
||||
if (dryRun) {
|
||||
System.out.println("🧪 Dry-run mode enabled. No changes will be applied.");
|
||||
}
|
||||
|
||||
TenantMigrationService migrationService = this.applicationContext.getBean(TenantMigrationService.class);
|
||||
try {
|
||||
migrationService.migrateTenant(tenantId, tenantName, dryRun);
|
||||
System.out.println("✅ Tenant migration complete.");
|
||||
} catch (Exception e) {
|
||||
System.err.println("❌ Tenant migration failed: " + e.getMessage());
|
||||
e.printStackTrace();
|
||||
return 1;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,56 +0,0 @@
|
||||
package io.kestra.cli.commands.migrations;
|
||||
|
||||
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
|
||||
|
||||
import com.github.javaparser.utils.Log;
|
||||
import io.kestra.core.exceptions.KestraRuntimeException;
|
||||
import io.kestra.core.models.flows.FlowInterface;
|
||||
import io.kestra.core.queues.QueueException;
|
||||
import io.kestra.core.queues.QueueFactoryInterface;
|
||||
import io.kestra.core.queues.QueueInterface;
|
||||
import io.kestra.core.repositories.FlowRepositoryInterface;
|
||||
import io.kestra.core.repositories.TenantMigrationInterface;
|
||||
import jakarta.inject.Inject;
|
||||
import jakarta.inject.Named;
|
||||
import jakarta.inject.Singleton;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
|
||||
@Singleton
|
||||
@Slf4j
|
||||
public class TenantMigrationService {
|
||||
|
||||
@Inject
|
||||
private TenantMigrationInterface tenantMigrationInterface;
|
||||
|
||||
@Inject
|
||||
private FlowRepositoryInterface flowRepository;
|
||||
|
||||
@Inject
|
||||
@Named(QueueFactoryInterface.FLOW_NAMED)
|
||||
private QueueInterface<FlowInterface> flowQueue;
|
||||
|
||||
public void migrateTenant(String tenantId, String tenantName, boolean dryRun) {
|
||||
if (StringUtils.isNotBlank(tenantId) && !MAIN_TENANT.equals(tenantId)){
|
||||
throw new KestraRuntimeException("Tenant configuration is an enterprise feature. It can only be main in OSS");
|
||||
}
|
||||
|
||||
Log.info("🔁 Starting tenant migration...");
|
||||
tenantMigrationInterface.migrateTenant(MAIN_TENANT, dryRun);
|
||||
migrateQueue(dryRun);
|
||||
}
|
||||
|
||||
protected void migrateQueue(boolean dryRun) {
|
||||
if (!dryRun){
|
||||
log.info("🔁 Starting restoring queue...");
|
||||
flowRepository.findAllWithSourceForAllTenants().forEach(flow -> {
|
||||
try {
|
||||
flowQueue.emit(flow);
|
||||
} catch (QueueException e) {
|
||||
log.warn("Unable to send the flow {} to the queue", flow.uid(), e);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@@ -2,14 +2,12 @@ package io.kestra.cli.commands.namespaces.files;
|
||||
|
||||
import io.kestra.cli.AbstractApiCommand;
|
||||
import io.kestra.cli.AbstractValidateCommand;
|
||||
import io.kestra.cli.services.TenantIdSelectorService;
|
||||
import io.kestra.core.utils.KestraIgnore;
|
||||
import io.micronaut.http.HttpRequest;
|
||||
import io.micronaut.http.MediaType;
|
||||
import io.micronaut.http.client.exceptions.HttpClientResponseException;
|
||||
import io.micronaut.http.client.multipart.MultipartBody;
|
||||
import io.micronaut.http.client.netty.DefaultHttpClient;
|
||||
import jakarta.inject.Inject;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import picocli.CommandLine;
|
||||
|
||||
@@ -36,9 +34,6 @@ public class NamespaceFilesUpdateCommand extends AbstractApiCommand {
|
||||
@CommandLine.Option(names = {"--delete"}, negatable = true, description = "Whether missing should be deleted")
|
||||
public boolean delete = false;
|
||||
|
||||
@Inject
|
||||
private TenantIdSelectorService tenantService;
|
||||
|
||||
private static final String KESTRA_IGNORE_FILE = ".kestraignore";
|
||||
|
||||
@Override
|
||||
@@ -49,7 +44,7 @@ public class NamespaceFilesUpdateCommand extends AbstractApiCommand {
|
||||
|
||||
try (var files = Files.walk(from); DefaultHttpClient client = client()) {
|
||||
if (delete) {
|
||||
client.toBlocking().exchange(this.requestOptions(HttpRequest.DELETE(apiUri("/namespaces/", tenantService.getTenantIdAndAllowEETenants(tenantId)) + namespace + "/files?path=" + to, null)));
|
||||
client.toBlocking().exchange(this.requestOptions(HttpRequest.DELETE(apiUri("/namespaces/") + namespace + "/files?path=" + to, null)));
|
||||
}
|
||||
|
||||
KestraIgnore kestraIgnore = new KestraIgnore(from);
|
||||
@@ -67,7 +62,7 @@ public class NamespaceFilesUpdateCommand extends AbstractApiCommand {
|
||||
client.toBlocking().exchange(
|
||||
this.requestOptions(
|
||||
HttpRequest.POST(
|
||||
apiUri("/namespaces/", tenantService.getTenantIdAndAllowEETenants(tenantId)) + namespace + "/files?path=" + destination,
|
||||
apiUri("/namespaces/") + namespace + "/files?path=" + destination,
|
||||
body
|
||||
).contentType(MediaType.MULTIPART_FORM_DATA)
|
||||
)
|
||||
|
||||
@@ -3,13 +3,11 @@ package io.kestra.cli.commands.namespaces.kv;
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import io.kestra.cli.AbstractApiCommand;
|
||||
import io.kestra.cli.services.TenantIdSelectorService;
|
||||
import io.kestra.core.serializers.JacksonMapper;
|
||||
import io.micronaut.http.HttpRequest;
|
||||
import io.micronaut.http.MediaType;
|
||||
import io.micronaut.http.MutableHttpRequest;
|
||||
import io.micronaut.http.client.netty.DefaultHttpClient;
|
||||
import jakarta.inject.Inject;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import picocli.CommandLine;
|
||||
import picocli.CommandLine.Option;
|
||||
@@ -44,9 +42,6 @@ public class KvUpdateCommand extends AbstractApiCommand {
|
||||
@Option(names = {"-f", "--file-value"}, description = "The file from which to read the value to set. If this is provided, it will take precedence over any specified value.")
|
||||
public Path fileValue;
|
||||
|
||||
@Inject
|
||||
private TenantIdSelectorService tenantService;
|
||||
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
@@ -61,8 +56,8 @@ public class KvUpdateCommand extends AbstractApiCommand {
|
||||
|
||||
Duration ttl = expiration == null ? null : Duration.parse(expiration);
|
||||
MutableHttpRequest<String> request = HttpRequest
|
||||
.PUT(apiUri("/namespaces/", tenantService.getTenantId(tenantId)) + namespace + "/kv/" + key, value)
|
||||
.contentType(MediaType.TEXT_PLAIN);
|
||||
.PUT(apiUri("/namespaces/") + namespace + "/kv/" + key, value)
|
||||
.contentType(MediaType.APPLICATION_JSON_TYPE);
|
||||
|
||||
if (ttl != null) {
|
||||
request.header("ttl", ttl.toString());
|
||||
|
||||
@@ -12,7 +12,6 @@ import picocli.CommandLine.Command;
|
||||
mixinStandardHelpOptions = true,
|
||||
subcommands = {
|
||||
PluginInstallCommand.class,
|
||||
PluginUninstallCommand.class,
|
||||
PluginListCommand.class,
|
||||
PluginDocCommand.class,
|
||||
PluginSearchCommand.class
|
||||
|
||||
@@ -18,8 +18,6 @@ import java.nio.file.Paths;
|
||||
import java.util.Base64;
|
||||
import java.util.List;
|
||||
|
||||
import static io.kestra.core.models.Plugin.isDeprecated;
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "doc",
|
||||
description = "Generate documentation for all plugins currently installed"
|
||||
@@ -40,9 +38,6 @@ public class PluginDocCommand extends AbstractCommand {
|
||||
@CommandLine.Option(names = {"--schema"}, description = "Also write JSON Schema for each task")
|
||||
private boolean schema = false;
|
||||
|
||||
@CommandLine.Option(names = {"--skip-deprecated"},description = "Skip deprecated plugins when generating documentations")
|
||||
private boolean skipDeprecated = false;
|
||||
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
@@ -50,11 +45,6 @@ public class PluginDocCommand extends AbstractCommand {
|
||||
|
||||
PluginRegistry registry = pluginRegistryProvider.get();
|
||||
List<RegisteredPlugin> plugins = core ? registry.plugins() : registry.externalPlugins();
|
||||
if (skipDeprecated) {
|
||||
plugins = plugins.stream()
|
||||
.filter(plugin -> !isDeprecated(plugin.getClass()))
|
||||
.toList();
|
||||
}
|
||||
boolean hasFailures = false;
|
||||
|
||||
for (RegisteredPlugin registeredPlugin : plugins) {
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
package io.kestra.cli.commands.plugins;
|
||||
|
||||
import io.kestra.core.contexts.MavenPluginRepositoryConfig;
|
||||
import io.kestra.core.exceptions.KestraRuntimeException;
|
||||
import io.kestra.core.plugins.LocalPluginManager;
|
||||
import io.kestra.core.plugins.MavenPluginDownloader;
|
||||
import io.kestra.core.plugins.PluginArtifact;
|
||||
@@ -52,7 +51,7 @@ public class PluginInstallCommand extends AbstractCommand {
|
||||
Provider<MavenPluginDownloader> mavenPluginRepositoryProvider;
|
||||
|
||||
@Inject
|
||||
Provider<PluginCatalogService> pluginCatalogService;
|
||||
@Client("api") HttpClient httpClient;
|
||||
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
@@ -86,7 +85,7 @@ public class PluginInstallCommand extends AbstractCommand {
|
||||
}
|
||||
|
||||
if (all) {
|
||||
PluginCatalogService service = pluginCatalogService.get();
|
||||
PluginCatalogService service = new PluginCatalogService(httpClient, false, true);
|
||||
dependencies = service.get().stream().map(Objects::toString).toList();
|
||||
}
|
||||
|
||||
@@ -104,21 +103,12 @@ public class PluginInstallCommand extends AbstractCommand {
|
||||
}
|
||||
|
||||
try (final PluginManager pluginManager = getPluginManager()) {
|
||||
|
||||
List<PluginArtifact> installed;
|
||||
if (all) {
|
||||
installed = new ArrayList<>(pluginArtifacts.size());
|
||||
for (PluginArtifact pluginArtifact : pluginArtifacts) {
|
||||
try {
|
||||
installed.add(pluginManager.install(pluginArtifact, repositoryConfigs, false, pluginsPath));
|
||||
} catch (KestraRuntimeException e) {
|
||||
String cause = e.getCause() != null ? e.getCause().getMessage() : e.getMessage();
|
||||
stdErr("Failed to install plugin {0}. Cause: {1}", pluginArtifact, cause);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
installed = pluginManager.install(pluginArtifacts, repositoryConfigs, false, pluginsPath);
|
||||
}
|
||||
List<PluginArtifact> installed = pluginManager.install(
|
||||
pluginArtifacts,
|
||||
repositoryConfigs,
|
||||
false,
|
||||
pluginsPath
|
||||
);
|
||||
|
||||
List<URI> uris = installed.stream().map(PluginArtifact::uri).toList();
|
||||
stdOut("Successfully installed plugins {0} into {1}", dependencies, uris);
|
||||
|
||||
@@ -17,10 +17,10 @@ import java.util.List;
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "uninstall",
|
||||
description = "Uninstall plugins"
|
||||
description = "uninstall a plugin"
|
||||
)
|
||||
public class PluginUninstallCommand extends AbstractCommand {
|
||||
@Parameters(index = "0..*", description = "The plugins to uninstall. Represented as Maven artifact coordinates (i.e., <groupId>:<artifactId>:(<version>|LATEST)")
|
||||
@Parameters(index = "0..*", description = "the plugins to uninstall")
|
||||
List<String> dependencies = new ArrayList<>();
|
||||
|
||||
@Spec
|
||||
|
||||
@@ -16,6 +16,6 @@ abstract public class AbstractServerCommand extends AbstractCommand implements S
|
||||
}
|
||||
|
||||
protected static int defaultWorkerThread() {
|
||||
return Runtime.getRuntime().availableProcessors() * 8;
|
||||
return Runtime.getRuntime().availableProcessors() * 4;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,15 +2,12 @@ package io.kestra.cli.commands.servers;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import io.kestra.core.models.ServerType;
|
||||
import io.kestra.core.runners.Indexer;
|
||||
import io.kestra.core.runners.IndexerInterface;
|
||||
import io.kestra.core.utils.Await;
|
||||
import io.kestra.core.services.SkipExecutionService;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import jakarta.inject.Inject;
|
||||
import picocli.CommandLine;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
@CommandLine.Command(
|
||||
@@ -20,11 +17,6 @@ import java.util.Map;
|
||||
public class IndexerCommand extends AbstractServerCommand {
|
||||
@Inject
|
||||
private ApplicationContext applicationContext;
|
||||
@Inject
|
||||
private SkipExecutionService skipExecutionService;
|
||||
|
||||
@CommandLine.Option(names = {"--skip-indexer-records"}, split=",", description = "a list of indexer record keys, separated by a coma; for troubleshooting purpose only")
|
||||
private List<String> skipIndexerRecords = Collections.emptyList();
|
||||
|
||||
@SuppressWarnings("unused")
|
||||
public static Map<String, Object> propertiesOverrides() {
|
||||
@@ -35,11 +27,9 @@ public class IndexerCommand extends AbstractServerCommand {
|
||||
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
this.skipExecutionService.setSkipIndexerRecords(skipIndexerRecords);
|
||||
|
||||
super.call();
|
||||
|
||||
Indexer indexer = applicationContext.getBean(Indexer.class);
|
||||
IndexerInterface indexer = applicationContext.getBean(IndexerInterface.class);
|
||||
indexer.run();
|
||||
|
||||
Await.until(() -> !this.applicationContext.isRunning());
|
||||
|
||||
@@ -2,7 +2,7 @@ package io.kestra.cli.commands.servers;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import io.kestra.core.models.ServerType;
|
||||
import io.kestra.scheduler.AbstractScheduler;
|
||||
import io.kestra.core.schedulers.AbstractScheduler;
|
||||
import io.kestra.core.utils.Await;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import jakarta.inject.Inject;
|
||||
|
||||
@@ -2,11 +2,9 @@ package io.kestra.cli.commands.servers;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import io.kestra.cli.services.FileChangedEventListener;
|
||||
import io.kestra.cli.services.TenantIdSelectorService;
|
||||
import io.kestra.core.contexts.KestraContext;
|
||||
import io.kestra.core.models.ServerType;
|
||||
import io.kestra.core.repositories.LocalFlowRepositoryLoader;
|
||||
import io.kestra.cli.StandAloneRunner;
|
||||
import io.kestra.core.runners.StandAloneRunner;
|
||||
import io.kestra.core.services.SkipExecutionService;
|
||||
import io.kestra.core.services.StartExecutorService;
|
||||
import io.kestra.core.utils.Await;
|
||||
@@ -45,10 +43,7 @@ public class StandAloneCommand extends AbstractServerCommand {
|
||||
@CommandLine.Option(names = {"-f", "--flow-path"}, description = "the flow path containing flow to inject at startup (when running with a memory flow repository)")
|
||||
private File flowPath;
|
||||
|
||||
@CommandLine.Option(names = "--tenant", description = "Tenant identifier, Required to load flows from path with the enterprise edition")
|
||||
private String tenantId;
|
||||
|
||||
@CommandLine.Option(names = {"--worker-thread"}, description = "the number of worker threads, defaults to eight times the number of available processors. Set it to 0 to avoid starting a worker.")
|
||||
@CommandLine.Option(names = {"--worker-thread"}, description = "the number of worker threads, defaults to four times the number of available processors. Set it to 0 to avoid starting a worker.")
|
||||
private int workerThread = defaultWorkerThread();
|
||||
|
||||
@CommandLine.Option(names = {"--skip-executions"}, split=",", description = "a list of execution identifiers to skip, separated by a coma; for troubleshooting purpose only")
|
||||
@@ -63,9 +58,6 @@ public class StandAloneCommand extends AbstractServerCommand {
|
||||
@CommandLine.Option(names = {"--skip-tenants"}, split=",", description = "a list of tenants to skip, separated by a coma; for troubleshooting purpose only")
|
||||
private List<String> skipTenants = Collections.emptyList();
|
||||
|
||||
@CommandLine.Option(names = {"--skip-indexer-records"}, split=",", description = "a list of indexer record keys, separated by a coma; for troubleshooting purpose only")
|
||||
private List<String> skipIndexerRecords = Collections.emptyList();
|
||||
|
||||
@CommandLine.Option(names = {"--no-tutorials"}, description = "Flag to disable auto-loading of tutorial flows.")
|
||||
boolean tutorialsDisabled = false;
|
||||
|
||||
@@ -96,44 +88,40 @@ public class StandAloneCommand extends AbstractServerCommand {
|
||||
this.skipExecutionService.setSkipFlows(skipFlows);
|
||||
this.skipExecutionService.setSkipNamespaces(skipNamespaces);
|
||||
this.skipExecutionService.setSkipTenants(skipTenants);
|
||||
this.skipExecutionService.setSkipIndexerRecords(skipIndexerRecords);
|
||||
this.startExecutorService.applyOptions(startExecutors, notStartExecutors);
|
||||
|
||||
KestraContext.getContext().injectWorkerConfigs(workerThread, null);
|
||||
this.startExecutorService.applyOptions(startExecutors, notStartExecutors);
|
||||
|
||||
super.call();
|
||||
|
||||
if (flowPath != null) {
|
||||
try {
|
||||
LocalFlowRepositoryLoader localFlowRepositoryLoader = applicationContext.getBean(LocalFlowRepositoryLoader.class);
|
||||
TenantIdSelectorService tenantIdSelectorService = applicationContext.getBean(TenantIdSelectorService.class);
|
||||
localFlowRepositoryLoader.load(tenantIdSelectorService.getTenantId(this.tenantId), this.flowPath);
|
||||
localFlowRepositoryLoader.load(this.flowPath);
|
||||
} catch (IOException e) {
|
||||
throw new CommandLine.ParameterException(this.spec.commandLine(), "Invalid flow path", e);
|
||||
}
|
||||
}
|
||||
|
||||
try (StandAloneRunner standAloneRunner = applicationContext.getBean(StandAloneRunner.class)) {
|
||||
StandAloneRunner standAloneRunner = applicationContext.getBean(StandAloneRunner.class);
|
||||
|
||||
if (this.workerThread == 0) {
|
||||
standAloneRunner.setWorkerEnabled(false);
|
||||
} else {
|
||||
standAloneRunner.setWorkerThread(this.workerThread);
|
||||
}
|
||||
|
||||
if (this.indexerDisabled) {
|
||||
standAloneRunner.setIndexerEnabled(false);
|
||||
}
|
||||
|
||||
standAloneRunner.run();
|
||||
|
||||
if (fileWatcher != null) {
|
||||
fileWatcher.startListeningFromConfig();
|
||||
}
|
||||
|
||||
Await.until(() -> !this.applicationContext.isRunning());
|
||||
if (this.workerThread == 0) {
|
||||
standAloneRunner.setWorkerEnabled(false);
|
||||
} else {
|
||||
standAloneRunner.setWorkerThread(this.workerThread);
|
||||
}
|
||||
|
||||
if (this.indexerDisabled) {
|
||||
standAloneRunner.setIndexerEnabled(false);
|
||||
}
|
||||
|
||||
standAloneRunner.run();
|
||||
|
||||
if (fileWatcher != null) {
|
||||
fileWatcher.startListeningFromConfig();
|
||||
}
|
||||
|
||||
Await.until(() -> !this.applicationContext.isRunning());
|
||||
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,18 +2,15 @@ package io.kestra.cli.commands.servers;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import io.kestra.core.models.ServerType;
|
||||
import io.kestra.core.runners.Indexer;
|
||||
import io.kestra.core.runners.IndexerInterface;
|
||||
import io.kestra.core.utils.Await;
|
||||
import io.kestra.core.utils.ExecutorsUtils;
|
||||
import io.kestra.core.services.SkipExecutionService;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import jakarta.inject.Inject;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import picocli.CommandLine;
|
||||
import picocli.CommandLine.Option;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.ExecutorService;
|
||||
|
||||
@@ -31,17 +28,11 @@ public class WebServerCommand extends AbstractServerCommand {
|
||||
@Inject
|
||||
private ExecutorsUtils executorsUtils;
|
||||
|
||||
@Inject
|
||||
private SkipExecutionService skipExecutionService;
|
||||
|
||||
@Option(names = {"--no-tutorials"}, description = "Flag to disable auto-loading of tutorial flows.")
|
||||
private boolean tutorialsDisabled = false;
|
||||
boolean tutorialsDisabled = false;
|
||||
|
||||
@Option(names = {"--no-indexer"}, description = "Flag to disable starting an embedded indexer.")
|
||||
private boolean indexerDisabled = false;
|
||||
|
||||
@CommandLine.Option(names = {"--skip-indexer-records"}, split=",", description = "a list of indexer record keys, separated by a coma; for troubleshooting purpose only")
|
||||
private List<String> skipIndexerRecords = Collections.emptyList();
|
||||
boolean indexerDisabled = false;
|
||||
|
||||
@Override
|
||||
public boolean isFlowAutoLoadEnabled() {
|
||||
@@ -57,15 +48,13 @@ public class WebServerCommand extends AbstractServerCommand {
|
||||
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
this.skipExecutionService.setSkipIndexerRecords(skipIndexerRecords);
|
||||
|
||||
super.call();
|
||||
|
||||
// start the indexer
|
||||
if (!indexerDisabled) {
|
||||
log.info("Starting an embedded indexer, this can be disabled by using `--no-indexer`.");
|
||||
poolExecutor = executorsUtils.cachedThreadPool("webserver-indexer");
|
||||
poolExecutor.execute(applicationContext.getBean(Indexer.class));
|
||||
poolExecutor.execute(applicationContext.getBean(IndexerInterface.class));
|
||||
shutdownHook(false, () -> poolExecutor.shutdown());
|
||||
}
|
||||
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
package io.kestra.cli.commands.servers;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import io.kestra.core.contexts.KestraContext;
|
||||
import io.kestra.core.models.ServerType;
|
||||
import io.kestra.core.runners.Worker;
|
||||
import io.kestra.core.utils.Await;
|
||||
@@ -22,7 +21,7 @@ public class WorkerCommand extends AbstractServerCommand {
|
||||
@Inject
|
||||
private ApplicationContext applicationContext;
|
||||
|
||||
@Option(names = {"-t", "--thread"}, description = "The max number of worker threads, defaults to eight times the number of available processors")
|
||||
@Option(names = {"-t", "--thread"}, description = "The max number of worker threads, defaults to four times the number of available processors")
|
||||
private int thread = defaultWorkerThread();
|
||||
|
||||
@Option(names = {"-g", "--worker-group"}, description = "The worker group key, must match the regex [a-zA-Z0-9_-]+ (EE only)")
|
||||
@@ -37,11 +36,7 @@ public class WorkerCommand extends AbstractServerCommand {
|
||||
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
|
||||
KestraContext.getContext().injectWorkerConfigs(thread, workerGroupKey);
|
||||
|
||||
super.call();
|
||||
|
||||
if (this.workerGroupKey != null && !this.workerGroupKey.matches("[a-zA-Z0-9_-]+")) {
|
||||
throw new IllegalArgumentException("The --worker-group option must match the [a-zA-Z0-9_-]+ pattern");
|
||||
}
|
||||
|
||||
@@ -2,7 +2,6 @@ package io.kestra.cli.commands.sys;
|
||||
|
||||
import io.kestra.cli.AbstractCommand;
|
||||
import io.kestra.core.models.flows.Flow;
|
||||
import io.kestra.core.models.flows.GenericFlow;
|
||||
import io.kestra.core.repositories.FlowRepositoryInterface;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import jakarta.inject.Inject;
|
||||
@@ -10,7 +9,6 @@ import lombok.extern.slf4j.Slf4j;
|
||||
import picocli.CommandLine;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "reindex",
|
||||
@@ -35,8 +33,8 @@ public class ReindexCommand extends AbstractCommand {
|
||||
List<Flow> allFlow = flowRepository.findAllForAllTenants();
|
||||
allFlow.stream()
|
||||
.map(flow -> flowRepository.findByIdWithSource(flow.getTenantId(), flow.getNamespace(), flow.getId()).orElse(null))
|
||||
.filter(Objects::nonNull)
|
||||
.forEach(flow -> flowRepository.update(GenericFlow.of(flow), flow));
|
||||
.filter(flow -> flow != null)
|
||||
.forEach(flow -> flowRepository.update(flow.toFlow(), flow.toFlow(), flow.getSource(), flow.toFlow()));
|
||||
|
||||
stdOut("Successfully reindex " + allFlow.size() + " flow(s).");
|
||||
}
|
||||
|
||||
@@ -6,7 +6,6 @@ import io.kestra.core.models.flows.State;
|
||||
import io.kestra.core.queues.QueueFactoryInterface;
|
||||
import io.kestra.core.queues.QueueInterface;
|
||||
import io.kestra.core.runners.ExecutionQueued;
|
||||
import io.kestra.core.services.ConcurrencyLimitService;
|
||||
import io.kestra.jdbc.runner.AbstractJdbcExecutionQueuedStorage;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import jakarta.inject.Inject;
|
||||
@@ -16,6 +15,8 @@ import picocli.CommandLine;
|
||||
|
||||
import java.util.Optional;
|
||||
|
||||
import static io.kestra.core.utils.Rethrow.throwConsumer;
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "submit-queued-execution",
|
||||
description = {"Submit all queued execution to the executor",
|
||||
@@ -48,11 +49,9 @@ public class SubmitQueuedCommand extends AbstractCommand {
|
||||
}
|
||||
else if (queueType.get().equals("postgres") || queueType.get().equals("mysql") || queueType.get().equals("h2")) {
|
||||
var executionQueuedStorage = applicationContext.getBean(AbstractJdbcExecutionQueuedStorage.class);
|
||||
var concurrencyLimitService = applicationContext.getBean(ConcurrencyLimitService.class);
|
||||
|
||||
for (ExecutionQueued queued : executionQueuedStorage.getAllForAllTenants()) {
|
||||
Execution restart = concurrencyLimitService.unqueue(queued.getExecution(), State.Type.RUNNING);
|
||||
executionQueue.emit(restart);
|
||||
executionQueuedStorage.pop(queued.getTenantId(), queued.getNamespace(), queued.getFlowId(), throwConsumer(execution -> executionQueue.emit(execution.withState(State.Type.CREATED))));
|
||||
cpt++;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,8 +2,8 @@ package io.kestra.cli.commands.templates;
|
||||
|
||||
import io.kestra.cli.AbstractApiCommand;
|
||||
import io.kestra.cli.AbstractValidateCommand;
|
||||
import io.kestra.cli.services.TenantIdSelectorService;
|
||||
import io.kestra.core.models.templates.TemplateEnabled;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import io.micronaut.http.HttpRequest;
|
||||
import io.micronaut.http.HttpResponse;
|
||||
import io.micronaut.http.MediaType;
|
||||
@@ -27,8 +27,9 @@ import java.nio.file.Path;
|
||||
public class TemplateExportCommand extends AbstractApiCommand {
|
||||
private static final String DEFAULT_FILE_NAME = "templates.zip";
|
||||
|
||||
// @FIXME: Keep it for bug in micronaut that need to have inject on top level command to inject on abstract classe
|
||||
@Inject
|
||||
private TenantIdSelectorService tenantService;
|
||||
private ApplicationContext applicationContext;
|
||||
|
||||
@CommandLine.Option(names = {"--namespace"}, description = "The namespace of templates to export")
|
||||
public String namespace;
|
||||
@@ -42,7 +43,7 @@ public class TemplateExportCommand extends AbstractApiCommand {
|
||||
|
||||
try(DefaultHttpClient client = client()) {
|
||||
MutableHttpRequest<Object> request = HttpRequest
|
||||
.GET(apiUri("/templates/export/by-query", tenantService.getTenantId(tenantId)) + (namespace != null ? "?namespace=" + namespace : ""))
|
||||
.GET(apiUri("/templates/export/by-query") + (namespace != null ? "?namespace=" + namespace : ""))
|
||||
.accept(MediaType.APPLICATION_OCTET_STREAM);
|
||||
|
||||
HttpResponse<byte[]> response = client.toBlocking().exchange(this.requestOptions(request), byte[].class);
|
||||
|
||||
@@ -4,6 +4,7 @@ import io.kestra.cli.AbstractValidateCommand;
|
||||
import io.kestra.core.models.templates.Template;
|
||||
import io.kestra.core.models.templates.TemplateEnabled;
|
||||
import io.kestra.core.models.validations.ModelValidator;
|
||||
import io.kestra.core.serializers.YamlParser;
|
||||
import jakarta.inject.Inject;
|
||||
import picocli.CommandLine;
|
||||
|
||||
@@ -15,6 +16,8 @@ import java.util.Collections;
|
||||
)
|
||||
@TemplateEnabled
|
||||
public class TemplateValidateCommand extends AbstractValidateCommand {
|
||||
@Inject
|
||||
private YamlParser yamlParser;
|
||||
|
||||
@Inject
|
||||
private ModelValidator modelValidator;
|
||||
@@ -23,6 +26,7 @@ public class TemplateValidateCommand extends AbstractValidateCommand {
|
||||
public Integer call() throws Exception {
|
||||
return this.call(
|
||||
Template.class,
|
||||
yamlParser,
|
||||
modelValidator,
|
||||
(Object object) -> {
|
||||
Template template = (Template) object;
|
||||
|
||||
@@ -2,7 +2,6 @@ package io.kestra.cli.commands.templates.namespaces;

import io.kestra.cli.AbstractValidateCommand;
import io.kestra.cli.commands.AbstractServiceNamespaceUpdateCommand;
import io.kestra.cli.services.TenantIdSelectorService;
import io.kestra.core.models.templates.Template;
import io.kestra.core.models.templates.TemplateEnabled;
import io.kestra.core.serializers.YamlParser;
@@ -28,9 +27,8 @@ import jakarta.validation.ConstraintViolationException;
@Slf4j
@TemplateEnabled
public class TemplateNamespaceUpdateCommand extends AbstractServiceNamespaceUpdateCommand {

    @Inject
    private TenantIdSelectorService tenantService;
    public YamlParser yamlParser;

    @Override
    public Integer call() throws Exception {
@@ -40,7 +38,7 @@ public class TemplateNamespaceUpdateCommand extends AbstractServiceNamespaceUpda
        List<Template> templates = files
            .filter(Files::isRegularFile)
            .filter(YamlParser::isValidExtension)
            .map(path -> YamlParser.parse(path.toFile(), Template.class))
            .map(path -> yamlParser.parse(path.toFile(), Template.class))
            .toList();

        if (templates.isEmpty()) {
@@ -49,7 +47,7 @@ public class TemplateNamespaceUpdateCommand extends AbstractServiceNamespaceUpda

        try (DefaultHttpClient client = client()) {
            MutableHttpRequest<List<Template>> request = HttpRequest
                .POST(apiUri("/templates/", tenantService.getTenantIdAndAllowEETenants(tenantId)) + namespace + "?delete=" + delete, templates);
                .POST(apiUri("/templates/") + namespace + "?delete=" + delete, templates);

            List<UpdateResult> updated = client.toBlocking().retrieve(
                this.requestOptions(request),
@@ -1,27 +1,27 @@
package io.kestra.cli.services;

import io.kestra.core.exceptions.FlowProcessingException;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowWithPath;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.models.validations.ModelValidator;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.serializers.YamlParser;
import io.kestra.core.services.FlowListenersInterface;
import io.kestra.core.services.PluginDefaultService;
import io.micronaut.context.annotation.Requires;
import io.micronaut.context.annotation.Value;
import io.micronaut.scheduling.io.watch.FileWatchConfiguration;
import jakarta.annotation.Nullable;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import jakarta.validation.ConstraintViolationException;
import java.util.concurrent.CopyOnWriteArrayList;
import lombok.extern.slf4j.Slf4j;

import javax.annotation.Nullable;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.file.*;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
@@ -40,18 +40,26 @@ public class FileChangedEventListener {
    @Inject
    private PluginDefaultService pluginDefaultService;

    @Inject
    private YamlParser yamlParser;

    @Inject
    private ModelValidator modelValidator;

    @Inject
    protected FlowListenersInterface flowListeners;

    @Nullable
    @Value("${micronaut.io.watch.tenantId}")
    private String tenantId;

    FlowFilesManager flowFilesManager;

    private List<FlowWithPath> flows = new CopyOnWriteArrayList<>();
    private List<FlowWithPath> flows = new ArrayList<>();

    private boolean isStarted = false;

    @Inject
    public FileChangedEventListener(@Nullable FileWatchConfiguration fileWatchConfiguration, @Nullable WatchService watchService) {
        this.fileWatchConfiguration = fileWatchConfiguration;
@@ -60,7 +68,7 @@ public class FileChangedEventListener {

    public void startListeningFromConfig() throws IOException, InterruptedException {
        if (fileWatchConfiguration != null && fileWatchConfiguration.isEnabled()) {
            this.flowFilesManager = new LocalFlowFileWatcher(flowRepositoryInterface);
            this.flowFilesManager = new LocalFlowFileWatcher(flowRepositoryInterface, pluginDefaultService);
            List<Path> paths = fileWatchConfiguration.getPaths();
            this.setup(paths);
@@ -68,7 +76,7 @@ public class FileChangedEventListener {
        // Init existing flows not already in files
        flowListeners.listen(flows -> {
            if (!isStarted) {
                for (FlowInterface flow : flows) {
                for (FlowWithSource flow : flows) {
                    if (this.flows.stream().noneMatch(flowWithPath -> flowWithPath.uidWithoutRevision().equals(flow.uidWithoutRevision()))) {
                        flowToFile(flow, this.buildPath(flow));
                        this.flows.add(FlowWithPath.of(flow, this.buildPath(flow).toString()));
@@ -129,7 +137,7 @@ public class FileChangedEventListener {
            try {
                String content = Files.readString(filePath, Charset.defaultCharset());

                Optional<FlowWithSource> flow = parseFlow(content, entry);
                Optional<Flow> flow = parseFlow(content, entry);
                if (flow.isPresent()) {
                    if (kind == StandardWatchEventKinds.ENTRY_MODIFY) {
                        // Check if we already have a file with the given path
@@ -148,20 +156,12 @@ public class FileChangedEventListener {
                        flows.add(FlowWithPath.of(flow.get(), filePath.toString()));
                    }

                    flowFilesManager.createOrUpdateFlow(GenericFlow.fromYaml(getTenantIdFromPath(filePath), content));
                    flowFilesManager.createOrUpdateFlow(flow.get(), content);
                    log.info("Flow {} from file {} has been created or modified", flow.get().getId(), entry);
                }

            } catch (NoSuchFileException e) {
                log.warn("File not found: {}, deleting it", entry, e);
                // the file might have been deleted while reading, so if it is not found we try to delete the flow
                flows.stream()
                    .filter(flow -> flow.getPath().equals(filePath.toString()))
                    .findFirst()
                    .ifPresent(flowWithPath -> {
                        flowFilesManager.deleteFlow(flowWithPath.getTenantId(), flowWithPath.getNamespace(), flowWithPath.getId());
                        this.flows.removeIf(fwp -> fwp.uidWithoutRevision().equals(flowWithPath.uidWithoutRevision()));
                    });
                log.error("File not found: {}", entry, e);
            } catch (IOException e) {
                log.error("Error reading file: {}", entry, e);
            }
@@ -207,11 +207,11 @@ public class FileChangedEventListener {
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                if (file.toString().endsWith(".yml") || file.toString().endsWith(".yaml")) {
                    String content = Files.readString(file, Charset.defaultCharset());
                    Optional<FlowWithSource> flow = parseFlow(content, file);
                    Optional<Flow> flow = parseFlow(content, file);

                    if (flow.isPresent() && flows.stream().noneMatch(flowWithPath -> flowWithPath.uidWithoutRevision().equals(flow.get().uidWithoutRevision()))) {
                        flows.add(FlowWithPath.of(flow.get(), file.toString()));
                        flowFilesManager.createOrUpdateFlow(GenericFlow.fromYaml(getTenantIdFromPath(file), content));
                        flowFilesManager.createOrUpdateFlow(flow.get(), content);
                    }
                }
                return FileVisitResult.CONTINUE;
@@ -223,25 +223,27 @@
        }
    }

    private void flowToFile(FlowInterface flow, Path path) {
    private void flowToFile(FlowWithSource flow, Path path) {
        Path defaultPath = path != null ? path : this.buildPath(flow);

        try {
            Files.writeString(defaultPath, flow.source());
            Files.writeString(defaultPath, flow.getSource());
            log.info("Flow {} has been written to file {}", flow.getId(), defaultPath);
        } catch (IOException e) {
            log.error("Error writing file: {}", defaultPath, e);
        }
    }

    private Optional<FlowWithSource> parseFlow(String content, Path entry) {
    private Optional<Flow> parseFlow(String content, Path entry) {
        try {
            FlowWithSource flow = pluginDefaultService.parseFlowWithAllDefaults(getTenantIdFromPath(entry), content, false);
            modelValidator.validate(flow);
            Flow flow = yamlParser.parse(content, Flow.class);
            FlowWithSource withPluginDefault = pluginDefaultService.injectDefaults(FlowWithSource.of(flow, content));
            modelValidator.validate(withPluginDefault);
            return Optional.of(flow);
        } catch (ConstraintViolationException | FlowProcessingException e) {
        } catch (ConstraintViolationException e) {
            log.warn("Error while parsing flow: {}", entry, e);
        }

        return Optional.empty();
    }

@@ -257,11 +259,7 @@
        }
    }

    private Path buildPath(FlowInterface flow) {
    private Path buildPath(Flow flow) {
        return fileWatchConfiguration.getPaths().getFirst().resolve(flow.uidWithoutRevision() + ".yml");
    }

    private String getTenantIdFromPath(Path path) {
        return path.getFileName().toString().split("_")[0];
    }
}
@@ -1,11 +1,11 @@
package io.kestra.cli.services;

import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.GenericFlow;

public interface FlowFilesManager {

    FlowWithSource createOrUpdateFlow(GenericFlow flow);
    FlowWithSource createOrUpdateFlow(Flow flow, String content);

    void deleteFlow(FlowWithSource toDelete);
@@ -1,23 +1,27 @@
package io.kestra.cli.services;

import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.services.PluginDefaultService;
import lombok.extern.slf4j.Slf4j;

@Slf4j
public class LocalFlowFileWatcher implements FlowFilesManager {
    private final FlowRepositoryInterface flowRepository;
    private final PluginDefaultService pluginDefaultService;

    public LocalFlowFileWatcher(FlowRepositoryInterface flowRepository) {
    public LocalFlowFileWatcher(FlowRepositoryInterface flowRepository, PluginDefaultService pluginDefaultService) {
        this.flowRepository = flowRepository;
        this.pluginDefaultService = pluginDefaultService;
    }

    @Override
    public FlowWithSource createOrUpdateFlow(final GenericFlow flow) {
        return flowRepository.findById(flow.getTenantId(), flow.getNamespace(), flow.getId())
            .map(previous -> flowRepository.update(flow, previous))
            .orElseGet(() -> flowRepository.create(flow));
    public FlowWithSource createOrUpdateFlow(Flow flow, String content) {
        FlowWithSource withDefault = pluginDefaultService.injectDefaults(FlowWithSource.of(flow, content));
        return flowRepository.findById(null, flow.getNamespace(), flow.getId())
            .map(previous -> flowRepository.update(flow, previous, content, withDefault))
            .orElseGet(() -> flowRepository.create(flow, content, withDefault));
    }

    @Override
@@ -1,26 +0,0 @@
package io.kestra.cli.services;

import static io.kestra.core.tenant.TenantService.MAIN_TENANT;

import io.kestra.core.exceptions.KestraRuntimeException;
import jakarta.inject.Singleton;
import org.apache.commons.lang3.StringUtils;

@Singleton
public class TenantIdSelectorService {

    // For override purposes in Kestra EE
    public String getTenantId(String tenantId) {
        if (StringUtils.isNotBlank(tenantId) && !MAIN_TENANT.equals(tenantId)) {
            throw new KestraRuntimeException("Tenant id can only be 'main'");
        }
        return MAIN_TENANT;
    }

    public String getTenantIdAndAllowEETenants(String tenantId) {
        if (StringUtils.isNotBlank(tenantId)) {
            return tenantId;
        }
        return MAIN_TENANT;
    }
}
@@ -15,13 +15,6 @@ micronaut:
|
||||
static:
|
||||
paths: classpath:static
|
||||
mapping: /static/**
|
||||
root:
|
||||
paths: classpath:root
|
||||
mapping: /**
|
||||
codec:
|
||||
json:
|
||||
additional-types:
|
||||
- application/scim+json
|
||||
server:
|
||||
max-request-size: 10GB
|
||||
multipart:
|
||||
@@ -31,14 +24,13 @@ micronaut:
|
||||
write-idle-timeout: 60m
|
||||
idle-timeout: 60m
|
||||
netty:
|
||||
max-zstd-encode-size: 67108864 # increased to 64MB from the default of 32MB
|
||||
max-chunk-size: 10MB
|
||||
max-header-size: 32768 # increased from the default of 8k
|
||||
responses:
|
||||
file:
|
||||
cache-seconds: 86400
|
||||
cache-control:
|
||||
public: true
|
||||
responses:
|
||||
file:
|
||||
cache-seconds: 86400
|
||||
cache-control:
|
||||
public: true
# Access log configuration, see https://docs.micronaut.io/latest/guide/index.html#accessLogger
|
||||
access-logger:
|
||||
@@ -82,19 +74,8 @@ micronaut:
|
||||
type: scheduled
|
||||
core-pool-size: 1

# Disable OpenTelemetry metrics by default; users that need them must enable them and configure the collector URL.
|
||||
metrics:
|
||||
binders:
|
||||
retry:
|
||||
enabled: true
|
||||
netty:
|
||||
queues:
|
||||
enabled: true
|
||||
bytebuf-allocators:
|
||||
enabled: true
|
||||
channels:
|
||||
enabled: true

# Disable OpenTelemetry metrics by default; users that need them must enable them and configure the collector URL.
|
||||
export:
|
||||
otlp:
|
||||
enabled: false
|
||||
@@ -107,8 +88,6 @@ jackson:
|
||||
serialization-inclusion: non_null
|
||||
deserialization:
|
||||
FAIL_ON_UNKNOWN_PROPERTIES: false
|
||||
mapper:
|
||||
ACCEPT_CASE_INSENSITIVE_ENUMS: true
|
||||
endpoints:
|
||||
all:
|
||||
@@ -117,10 +96,6 @@ endpoints:
|
||||
sensitive: false
|
||||
health:
|
||||
details-visible: ANONYMOUS
|
||||
disk-space:
|
||||
enabled: false
|
||||
discovery-client:
|
||||
enabled: false
|
||||
loggers:
|
||||
write-sensitive: false
|
||||
env:
|
||||
@@ -154,59 +129,24 @@ kestra:
|
||||
tutorial-flows:
|
||||
# Automatically loads all tutorial flows at startup.
|
||||
enabled: true
|
||||
|
||||
retries:
|
||||
attempts: 5
|
||||
multiplier: 2.0
|
||||
delay: 1s
|
||||
maxDelay: ""
|
||||
|
||||
server:
|
||||
basic-auth:
# These URLs will not be authenticated; by default we open some of the Micronaut default endpoints, but not all of them, for security reasons
open-urls:
|
||||
- "/ping"
|
||||
- "/api/v1/executions/webhook/"
|
||||
- "/api/v1/main/executions/webhook/"
|
||||
- "/api/v1/*/executions/webhook/"
|
||||
- "/api/v1/basicAuthValidationErrors"
|
||||
|
||||
preview:
|
||||
initial-rows: 100
|
||||
max-rows: 5000
|
||||
|
||||
# The expected time for this server to complete all its tasks before initiating a graceful shutdown.
|
||||
terminationGracePeriod: 5m
|
||||
workerTaskRestartStrategy: AFTER_TERMINATION_GRACE_PERIOD
|
||||
# Configuration for Liveness and Heartbeat mechanism between servers.
|
||||
liveness:
|
||||
enabled: true
|
||||
# The expected time between liveness probe.
|
||||
interval: 10s
|
||||
# The timeout used to detect service failures.
|
||||
timeout: 1m
|
||||
# The time to wait before executing a liveness probe.
|
||||
initialDelay: 1m
|
||||
# The expected time between service heartbeats.
|
||||
heartbeatInterval: 3s
|
||||
service:
|
||||
purge:
|
||||
initial-delay: 1h
|
||||
fixed-delay: 1d
|
||||
retention: 30d
|
||||
|
||||
jdbc:
|
||||
queues:
|
||||
min-poll-interval: 25ms
|
||||
max-poll-interval: 500ms
|
||||
poll-switch-interval: 60s
|
||||
max-poll-interval: 1000ms
|
||||
poll-switch-interval: 5s
|
||||
|
||||
cleaner:
|
||||
initial-delay: 1h
|
||||
fixed-delay: 1h
|
||||
retention: 7d
|
||||
types:
|
||||
- type: io.kestra.core.models.executions.LogEntry
|
||||
- type : io.kestra.core.models.executions.LogEntry
|
||||
retention: 1h
|
||||
- type: io.kestra.core.models.executions.MetricEntry
|
||||
retention: 1h
|
||||
@@ -228,7 +168,7 @@ kestra:
|
||||
values:
|
||||
recoverMissedSchedules: ALL
|
||||
variables:
|
||||
env-vars-prefix: ENV_
|
||||
env-vars-prefix: KESTRA_
|
||||
cache-enabled: true
|
||||
cache-size: 1000
|
||||
|
||||
@@ -238,12 +178,33 @@ kestra:
|
||||
traces:
|
||||
root: DISABLED
|
||||
|
||||
ui-anonymous-usage-report:
|
||||
enabled: true
|
||||
|
||||
server:
|
||||
basic-auth:
|
||||
enabled: false
# These URLs will not be authenticated; by default we open some of the Micronaut default endpoints, but not all of them, for security reasons
open-urls:
|
||||
- "/ping"
|
||||
- "/api/v1/executions/webhook/"
|
||||
preview:
|
||||
initial-rows: 100
|
||||
max-rows: 5000
|
||||
# The expected time for this server to complete all its tasks before initiating a graceful shutdown.
|
||||
terminationGracePeriod: 5m
|
||||
workerTaskRestartStrategy: AFTER_TERMINATION_GRACE_PERIOD
|
||||
# Configuration for Liveness and Heartbeat mechanism between servers.
|
||||
liveness:
|
||||
enabled: true
|
||||
# The expected time between liveness probe.
|
||||
interval: 5s
|
||||
# The timeout used to detect service failures.
|
||||
timeout: 45s
|
||||
# The time to wait before executing a liveness probe.
|
||||
initialDelay: 45s
|
||||
# The expected time between service heartbeats.
|
||||
heartbeatInterval: 3s
|
||||
anonymous-usage-report:
|
||||
enabled: true
|
||||
uri: https://api.kestra.io/v1/reports/server-events
|
||||
uri: https://api.kestra.io/v1/reports/usages
|
||||
initial-delay: 5m
|
||||
fixed-delay: 1h
|
||||
|
||||
@@ -259,4 +220,4 @@ otel:
|
||||
- /health
|
||||
- /env
|
||||
- /prometheus
|
||||
propagators: tracecontext, baggage
|
||||
propagators: tracecontext, baggage
|
||||
@@ -13,7 +13,8 @@ import picocli.CommandLine;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.*;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
|
||||
class AppTest {
|
||||
@@ -25,7 +26,7 @@ class AppTest {
|
||||
try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, Environment.TEST)) {
|
||||
PicocliRunner.call(App.class, ctx, "--help");
|
||||
|
||||
assertThat(out.toString()).contains("kestra");
|
||||
assertThat(out.toString(), containsString("kestra"));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -41,7 +42,7 @@ class AppTest {
|
||||
new CommandLine(App.class, new MicronautFactory(ctx)).execute(args);
|
||||
|
||||
assertTrue(ctx.getProperty("kestra.server-type", ServerType.class).isEmpty());
|
||||
assertThat(out.toString()).startsWith("Usage: kestra server " + serverType);
|
||||
assertThat(out.toString(), startsWith("Usage: kestra server " + serverType));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -55,9 +56,9 @@ class AppTest {
|
||||
try (ApplicationContext ctx = App.applicationContext(App.class, argsWithMissingParams)) {
|
||||
new CommandLine(App.class, new MicronautFactory(ctx)).execute(argsWithMissingParams);
|
||||
|
||||
assertThat(out.toString()).startsWith("Missing required parameters: ");
|
||||
assertThat(out.toString()).contains("Usage: kestra flow namespace update ");
|
||||
assertThat(out.toString()).doesNotContain("MissingParameterException: ");
|
||||
assertThat(out.toString(), startsWith("Missing required parameters: "));
|
||||
assertThat(out.toString(), containsString("Usage: kestra flow namespace update "));
|
||||
assertThat(out.toString(), not(containsString("MissingParameterException: ")));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
package io.kestra.core.validations;
|
||||
package io.kestra.cli;
|
||||
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import io.micronaut.context.exceptions.BeanInstantiationException;
|
||||
@@ -8,7 +8,8 @@ import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
|
||||
class ServerCommandValidatorTest {
|
||||
|
||||
@@ -39,8 +40,8 @@ class ServerCommandValidatorTest {
|
||||
.start()
|
||||
);
|
||||
final Throwable rootException = getRootException(exception);
|
||||
assertThat(rootException.getClass()).isEqualTo(ServerCommandValidator.ServerCommandException.class);
|
||||
assertThat(rootException.getMessage()).isEqualTo("Incomplete server configuration - missing required properties");
|
||||
assertThat(rootException.getClass(), is(ServerCommandValidator.ServerCommandException.class));
|
||||
assertThat(rootException.getMessage(), is("Incomplete server configuration - missing required properties"));
|
||||
}
|
||||
|
||||
private Throwable getRootException(Throwable exception) {
|
||||
@@ -4,14 +4,12 @@ import io.micronaut.configuration.picocli.PicocliRunner;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import io.micronaut.context.env.Environment;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.yaml.snakeyaml.Yaml;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.assertj.core.api.AssertionsForClassTypes.catchThrowable;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.core.StringContains.containsString;
|
||||
|
||||
class ConfigPropertiesCommandTest {
|
||||
@Test
|
||||
@@ -22,52 +20,8 @@ class ConfigPropertiesCommandTest {
|
||||
try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, Environment.TEST)) {
|
||||
PicocliRunner.call(ConfigPropertiesCommand.class, ctx);
|
||||
|
||||
assertThat(out.toString()).contains("activeEnvironments:");
|
||||
assertThat(out.toString()).contains("- test");
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void shouldOutputCustomEnvironment() {
|
||||
ByteArrayOutputStream out = new ByteArrayOutputStream();
|
||||
System.setOut(new PrintStream(out));
|
||||
|
||||
try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, "custom-env")) {
|
||||
PicocliRunner.call(ConfigPropertiesCommand.class, ctx);
|
||||
|
||||
assertThat(out.toString()).contains("activeEnvironments:");
|
||||
assertThat(out.toString()).contains("- custom-env");
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void shouldReturnZeroOnSuccess() throws Exception {
|
||||
ByteArrayOutputStream out = new ByteArrayOutputStream();
|
||||
System.setOut(new PrintStream(out));
|
||||
|
||||
try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, Environment.TEST)) {
|
||||
ConfigPropertiesCommand cmd = ctx.createBean(ConfigPropertiesCommand.class);
|
||||
int result = cmd.call();
|
||||
|
||||
assertThat(result).isZero();
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void shouldOutputValidYaml() {
|
||||
ByteArrayOutputStream out = new ByteArrayOutputStream();
|
||||
System.setOut(new PrintStream(out));
|
||||
|
||||
try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, Environment.TEST)) {
|
||||
PicocliRunner.call(ConfigPropertiesCommand.class, ctx);
|
||||
|
||||
String output = out.toString();
|
||||
Yaml yaml = new Yaml();
|
||||
Throwable thrown = catchThrowable(() -> {
|
||||
Map<?, ?> parsed = yaml.load(output);
|
||||
assertThat(parsed).isInstanceOf(Map.class);
|
||||
});
|
||||
assertThat(thrown).isNull();
|
||||
assertThat(out.toString(), containsString("activeEnvironments:"));
|
||||
assertThat(out.toString(), containsString("- test"));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -11,7 +11,9 @@ import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
import java.net.URL;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.core.Is.is;
|
||||
import static org.hamcrest.core.StringContains.containsString;
|
||||
|
||||
class FlowCreateOrUpdateCommandTest {
|
||||
@RetryingTest(5) // flaky on CI but cannot be reproduced even with 100 repetitions
|
||||
@@ -36,7 +38,7 @@ class FlowCreateOrUpdateCommandTest {
|
||||
};
|
||||
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
|
||||
|
||||
assertThat(out.toString()).contains("4 flow(s)");
|
||||
assertThat(out.toString(), containsString("4 flow(s)"));
|
||||
out.reset();
|
||||
|
||||
args = new String[]{
|
||||
@@ -51,7 +53,7 @@ class FlowCreateOrUpdateCommandTest {
|
||||
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
|
||||
|
||||
// 2 delete + 1 update
|
||||
assertThat(out.toString()).contains("4 flow(s)");
|
||||
assertThat(out.toString(), containsString("4 flow(s)"));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -78,7 +80,7 @@ class FlowCreateOrUpdateCommandTest {
|
||||
};
|
||||
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
|
||||
|
||||
assertThat(out.toString()).contains("4 flow(s)");
|
||||
assertThat(out.toString(), containsString("4 flow(s)"));
|
||||
out.reset();
|
||||
|
||||
// no "delete" arg should behave as no-delete
|
||||
@@ -91,7 +93,7 @@ class FlowCreateOrUpdateCommandTest {
|
||||
};
|
||||
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
|
||||
|
||||
assertThat(out.toString()).contains("1 flow(s)");
|
||||
assertThat(out.toString(), containsString("1 flow(s)"));
|
||||
out.reset();
|
||||
|
||||
args = new String[]{
|
||||
@@ -104,35 +106,7 @@ class FlowCreateOrUpdateCommandTest {
|
||||
};
|
||||
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
|
||||
|
||||
assertThat(out.toString()).contains("1 flow(s)");
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void should_fail_with_incorrect_tenant() {
|
||||
URL directory = FlowCreateOrUpdateCommandTest.class.getClassLoader().getResource("flows");
|
||||
|
||||
ByteArrayOutputStream err = new ByteArrayOutputStream();
|
||||
System.setErr(new PrintStream(err));
|
||||
|
||||
try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, Environment.TEST)) {
|
||||
|
||||
EmbeddedServer embeddedServer = ctx.getBean(EmbeddedServer.class);
|
||||
embeddedServer.start();
|
||||
|
||||
String[] args = {
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
"myuser:pass:word",
|
||||
"--tenant", "incorrect",
|
||||
directory.getPath(),
|
||||
|
||||
};
|
||||
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
|
||||
|
||||
assertThat(err.toString()).contains("Tenant id can only be 'main'");
|
||||
err.reset();
|
||||
assertThat(out.toString(), containsString("1 flow(s)"));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -157,8 +131,8 @@ class FlowCreateOrUpdateCommandTest {
|
||||
};
|
||||
Integer call = PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
|
||||
|
||||
assertThat(call).isZero();
|
||||
assertThat(out.toString()).contains("1 flow(s)");
|
||||
assertThat(call, is(0));
|
||||
assertThat(out.toString(), containsString("1 flow(s)"));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -9,7 +9,9 @@ import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
import java.net.URL;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.core.Is.is;
|
||||
import static org.hamcrest.core.StringContains.containsString;
|
||||
|
||||
class FlowDotCommandTest {
|
||||
@Test
|
||||
@@ -24,8 +26,8 @@ class FlowDotCommandTest {
|
||||
};
|
||||
Integer call = PicocliRunner.call(FlowDotCommand.class, ctx, args);
|
||||
|
||||
assertThat(call).isZero();
|
||||
assertThat(out.toString()).contains("\"root.date\"[shape=box];");
|
||||
assertThat(call, is(0));
|
||||
assertThat(out.toString(), containsString("\"root.date\"[shape=box];"));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -7,7 +7,8 @@ import org.junit.jupiter.api.Test;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.core.Is.is;
|
||||
|
||||
class FlowExpandCommandTest {
|
||||
@SuppressWarnings("deprecation")
|
||||
@@ -22,20 +23,22 @@ class FlowExpandCommandTest {
|
||||
};
|
||||
Integer call = PicocliRunner.call(FlowExpandCommand.class, ctx, args);
|
||||
|
||||
assertThat(call).isZero();
|
||||
assertThat(out.toString()).isEqualTo("id: include\n" +
|
||||
"namespace: io.kestra.cli\n" +
|
||||
"\n" +
|
||||
"# The list of tasks\n" +
|
||||
"tasks:\n" +
|
||||
"- id: t1\n" +
|
||||
" type: io.kestra.plugin.core.debug.Return\n" +
|
||||
" format: \"Lorem ipsum dolor sit amet\"\n" +
|
||||
"- id: t2\n" +
|
||||
" type: io.kestra.plugin.core.debug.Return\n" +
|
||||
" format: |\n" +
|
||||
" Lorem ipsum dolor sit amet\n" +
|
||||
" Lorem ipsum dolor sit amet\n");
|
||||
assertThat(call, is(0));
|
||||
assertThat(out.toString(), is(
|
||||
"id: include\n" +
|
||||
"namespace: io.kestra.cli\n" +
|
||||
"\n" +
|
||||
"# The list of tasks\n" +
|
||||
"tasks:\n" +
|
||||
"- id: t1\n" +
|
||||
" type: io.kestra.plugin.core.debug.Return\n" +
|
||||
" format: \"Lorem ipsum dolor sit amet\"\n" +
|
||||
"- id: t2\n" +
|
||||
" type: io.kestra.plugin.core.debug.Return\n" +
|
||||
" format: |\n" +
|
||||
" Lorem ipsum dolor sit amet\n" +
|
||||
" Lorem ipsum dolor sit amet\n"
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -14,7 +14,10 @@ import java.io.PrintStream;
|
||||
import java.net.URL;
|
||||
import java.util.zip.ZipFile;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
|
||||
import static org.hamcrest.core.Is.is;
|
||||
import static org.hamcrest.core.StringContains.containsString;
|
||||
|
||||
class FlowExportCommandTest {
|
||||
@Test
|
||||
@@ -29,8 +32,6 @@ class FlowExportCommandTest {
|
||||
|
||||
// we use the update command to add flows to extract
|
||||
String[] updateArgs = {
|
||||
"--plugins",
|
||||
"/tmp", // pass this arg because it can cause failure
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
@@ -39,12 +40,10 @@ class FlowExportCommandTest {
|
||||
directory.getPath(),
|
||||
};
|
||||
PicocliRunner.call(FlowNamespaceUpdateCommand.class, ctx, updateArgs);
|
||||
assertThat(out.toString()).contains("3 flow(s)");
|
||||
assertThat(out.toString(), containsString("3 flow(s)"));
|
||||
|
||||
// then we export them
|
||||
String[] exportArgs = {
|
||||
"--plugins",
|
||||
"/tmp", // pass this arg because it can cause failure
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
@@ -55,11 +54,11 @@ class FlowExportCommandTest {
|
||||
};
|
||||
PicocliRunner.call(FlowExportCommand.class, ctx, exportArgs);
|
||||
File file = new File("/tmp/flows.zip");
|
||||
assertThat(file.exists()).isTrue();
|
||||
assertThat(file.exists(), is(true));
|
||||
ZipFile zipFile = new ZipFile(file);

// When launching the tests in a suite there are 4 flows, but when launching them individually there are only 3
assertThat(zipFile.stream().count()).isGreaterThanOrEqualTo(3L);
|
||||
assertThat(zipFile.stream().count(), greaterThanOrEqualTo(3L));
|
||||
|
||||
file.delete();
|
||||
}
|
||||
|
||||
@@ -10,7 +10,9 @@ import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
import java.net.URL;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.core.Is.is;
|
||||
import static org.hamcrest.core.StringContains.containsString;
|
||||
|
||||
class FlowUpdatesCommandTest {
|
||||
@Test
|
||||
@@ -26,8 +28,6 @@ class FlowUpdatesCommandTest {
|
||||
embeddedServer.start();
|
||||
|
||||
String[] args = {
|
||||
"--plugins",
|
||||
"/tmp", // pass this arg because it can cause failure
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
@@ -37,12 +37,10 @@ class FlowUpdatesCommandTest {
|
||||
};
|
||||
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
|
||||
|
||||
assertThat(out.toString()).contains("successfully updated !");
|
||||
assertThat(out.toString(), containsString("successfully updated !"));
|
||||
out.reset();
|
||||
|
||||
args = new String[]{
|
||||
"--plugins",
|
||||
"/tmp", // pass this arg because it can cause failure
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
@@ -54,7 +52,7 @@ class FlowUpdatesCommandTest {
|
||||
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
|
||||
|
||||
// 2 delete + 1 update
|
||||
assertThat(out.toString()).contains("successfully updated !");
|
||||
assertThat(out.toString(), containsString("successfully updated !"));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -72,8 +70,6 @@ class FlowUpdatesCommandTest {
|
||||
embeddedServer.start();
|
||||
|
||||
String[] args = {
|
||||
"--plugins",
|
||||
"/tmp", // pass this arg because it can cause failure
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
@@ -83,13 +79,11 @@ class FlowUpdatesCommandTest {
|
||||
};
|
||||
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
|
||||
|
||||
assertThat(out.toString()).contains("4 flow(s)");
|
||||
assertThat(out.toString(), containsString("4 flow(s)"));
|
||||
out.reset();
|
||||
|
||||
// no "delete" arg should behave as no-delete
|
||||
args = new String[]{
|
||||
"--plugins",
|
||||
"/tmp", // pass this arg because it can cause failure
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
@@ -98,12 +92,10 @@ class FlowUpdatesCommandTest {
|
||||
};
|
||||
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
|
||||
|
||||
assertThat(out.toString()).contains("1 flow(s)");
|
||||
assertThat(out.toString(), containsString("1 flow(s)"));
|
||||
out.reset();
|
||||
|
||||
args = new String[]{
|
||||
"--plugins",
|
||||
"/tmp", // pass this arg because it can cause failure
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
@@ -113,7 +105,7 @@ class FlowUpdatesCommandTest {
|
||||
};
|
||||
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
|
||||
|
||||
assertThat(out.toString()).contains("1 flow(s)");
|
||||
assertThat(out.toString(), containsString("1 flow(s)"));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -129,8 +121,6 @@ class FlowUpdatesCommandTest {
|
||||
embeddedServer.start();
|
||||
|
||||
String[] args = {
|
||||
"--plugins",
|
||||
"/tmp", // pass this arg because it can cause failure
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
@@ -142,7 +132,7 @@ class FlowUpdatesCommandTest {
|
||||
};
|
||||
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
|
||||
|
||||
assertThat(out.toString()).contains("Invalid entity: flow.namespace: main_io.kestra.outsider_quattro_-1 - flow namespace is invalid");
|
||||
assertThat(out.toString(), containsString("Invalid entity: flow.namespace: io.kestra.outsider_quattro_-1 - flow namespace is invalid"));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -158,8 +148,6 @@ class FlowUpdatesCommandTest {
|
||||
embeddedServer.start();
|
||||
|
||||
String[] args = {
|
||||
"--plugins",
|
||||
"/tmp", // pass this arg because it can cause failure
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
@@ -169,8 +157,8 @@ class FlowUpdatesCommandTest {
|
||||
};
|
||||
Integer call = PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
|
||||
|
||||
assertThat(call).isZero();
|
||||
assertThat(out.toString()).contains("1 flow(s)");
|
||||
assertThat(call, is(0));
|
||||
assertThat(out.toString(), containsString("1 flow(s)"));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,7 +7,9 @@ import org.junit.jupiter.api.Test;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.core.Is.is;
|
||||
import static org.hamcrest.core.StringContains.containsString;
|
||||
|
||||
class FlowValidateCommandTest {
|
||||
@Test
|
||||
@@ -22,28 +24,8 @@ class FlowValidateCommandTest {
|
||||
};
|
||||
Integer call = PicocliRunner.call(FlowValidateCommand.class, ctx, args);
|
||||
|
||||
assertThat(call).isZero();
|
||||
assertThat(out.toString()).contains("✓ - io.kestra.cli / include");
|
||||
}
|
||||
}
|
||||
|
||||
@Test
// the GitHub Action kestra-io/validate-action requires being able to validate Flows from the OSS CLI against a remote EE instance
void runForEEInstance() {
|
||||
ByteArrayOutputStream out = new ByteArrayOutputStream();
|
||||
System.setOut(new PrintStream(out));
|
||||
|
||||
try (ApplicationContext ctx = ApplicationContext.builder().deduceEnvironment(false).start()) {
|
||||
String[] args = {
|
||||
"--tenant",
|
||||
"some-ee-tenant",
|
||||
"--local",
|
||||
"src/test/resources/helper/include.yaml"
|
||||
};
|
||||
Integer call = PicocliRunner.call(FlowValidateCommand.class, ctx, args);
|
||||
|
||||
assertThat(call).isZero();
|
||||
assertThat(out.toString()).contains("✓ - io.kestra.cli / include");
|
||||
assertThat(call, is(0));
|
||||
assertThat(out.toString(), containsString("✓ - io.kestra.cli / include"));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -59,10 +41,10 @@ class FlowValidateCommandTest {
|
||||
};
|
||||
Integer call = PicocliRunner.call(FlowValidateCommand.class, ctx, args);
|
||||
|
||||
assertThat(call).isZero();
|
||||
assertThat(out.toString()).contains("✓ - system / warning");
|
||||
assertThat(out.toString()).contains("⚠ - tasks[0] is deprecated");
|
||||
assertThat(out.toString()).contains("ℹ - io.kestra.core.tasks.log.Log is replaced by io.kestra.plugin.core.log.Log");
|
||||
assertThat(call, is(0));
|
||||
assertThat(out.toString(), containsString("✓ - system / warning"));
|
||||
assertThat(out.toString(), containsString("⚠ - tasks[0] is deprecated"));
|
||||
assertThat(out.toString(), containsString("ℹ - io.kestra.core.tasks.log.Log is replaced by io.kestra.plugin.core.log.Log"));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -10,13 +10,15 @@ import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
import java.net.URL;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.core.StringContains.containsString;
|
||||
|
||||
public class SingleFlowCommandsTest {
|
||||
|
||||
class SingleFlowCommandsTest {
|
||||
|
||||
@Test
|
||||
void all() {
|
||||
URL flow = SingleFlowCommandsTest.class.getClassLoader().getResource("crudFlow/date.yml");
|
||||
URL flow = SingleFlowCommandsTest.class.getClassLoader().getResource("flows/quattro.yml");
|
||||
ByteArrayOutputStream out = new ByteArrayOutputStream();
|
||||
System.setOut(new PrintStream(out));
|
||||
|
||||
@@ -25,6 +27,19 @@ class SingleFlowCommandsTest {
|
||||
EmbeddedServer embeddedServer = ctx.getBean(EmbeddedServer.class);
|
||||
embeddedServer.start();
|
||||
|
||||
String[] deleteArgs = {
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
"myuser:pass:word",
|
||||
"io.kestra.outsider",
|
||||
"quattro"
|
||||
};
|
||||
PicocliRunner.call(FlowDeleteCommand.class, ctx, deleteArgs);
|
||||
|
||||
assertThat(out.toString(), containsString("Flow successfully deleted !"));
|
||||
out.reset();
|
||||
|
||||
String[] createArgs = {
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
@@ -34,36 +49,23 @@ class SingleFlowCommandsTest {
|
||||
};
|
||||
PicocliRunner.call(FlowCreateCommand.class, ctx, createArgs);
|
||||
|
||||
assertThat(out.toString()).contains("Flow successfully created !");
|
||||
assertThat(out.toString(), containsString("Flow successfully created !"));
|
||||
|
||||
out.reset();
|
||||
|
||||
String[] updateArgs = {
|
||||
out.reset();
String[] updateArgs = {
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
"myuser:pass:word",
|
||||
flow.getPath(),
|
||||
"io.kestra.cli",
|
||||
"date"
|
||||
"io.kestra.outsider",
|
||||
"quattro"
|
||||
};
|
||||
PicocliRunner.call(FlowUpdateCommand.class, ctx, updateArgs);
|
||||
|
||||
assertThat(out.toString()).contains("Flow successfully updated !");
|
||||
|
||||
assertThat(out.toString(), containsString("Flow successfully updated !"));
|
||||
out.reset();
|
||||
|
||||
String[] deleteArgs = {
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
"myuser:pass:word",
|
||||
"io.kestra.cli",
|
||||
"date"
|
||||
};
|
||||
PicocliRunner.call(FlowDeleteCommand.class, ctx, deleteArgs);
|
||||
|
||||
assertThat(out.toString()).contains("Flow successfully deleted !");
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -10,7 +10,9 @@ import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
import java.net.URL;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.core.Is.is;
|
||||
import static org.hamcrest.core.StringContains.containsString;
|
||||
|
||||
class TemplateValidateCommandTest {
|
||||
@Test
|
||||
@@ -26,9 +28,9 @@ class TemplateValidateCommandTest {
|
||||
};
|
||||
Integer call = PicocliRunner.call(FlowValidateCommand.class, ctx, args);
|
||||
|
||||
assertThat(call).isEqualTo(1);
|
||||
assertThat(out.toString()).contains("Unable to parse flow");
|
||||
assertThat(out.toString()).contains("must not be empty");
|
||||
assertThat(call, is(1));
|
||||
assertThat(out.toString(), containsString("Unable to parse flow"));
|
||||
assertThat(out.toString(), containsString("must not be empty"));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -44,8 +46,6 @@ class TemplateValidateCommandTest {
|
||||
embeddedServer.start();
|
||||
|
||||
String[] args = {
|
||||
"--plugins",
|
||||
"/tmp", // pass this arg because it can cause failure
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
@@ -54,9 +54,9 @@ class TemplateValidateCommandTest {
|
||||
};
|
||||
Integer call = PicocliRunner.call(FlowValidateCommand.class, ctx, args);
|
||||
|
||||
assertThat(call).isEqualTo(1);
|
||||
assertThat(out.toString()).contains("Unable to parse flow");
|
||||
assertThat(out.toString()).contains("must not be empty");
|
||||
assertThat(call, is(1));
|
||||
assertThat(out.toString(), containsString("Unable to parse flow"));
|
||||
assertThat(out.toString(), containsString("must not be empty"));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -7,7 +7,9 @@ import org.junit.jupiter.api.Test;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.core.Is.is;
|
||||
|
||||
class FlowNamespaceCommandTest {
|
||||
@Test
|
||||
@@ -19,8 +21,8 @@ class FlowNamespaceCommandTest {
|
||||
String[] args = {};
|
||||
Integer call = PicocliRunner.call(FlowNamespaceCommand.class, ctx, args);
|
||||
|
||||
assertThat(call).isZero();
|
||||
assertThat(out.toString()).contains("Usage: kestra flow namespace");
|
||||
assertThat(call, is(0));
|
||||
assertThat(out.toString(), containsString("Usage: kestra flow namespace"));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -10,7 +10,10 @@ import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
import java.net.URL;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.not;
|
||||
import static org.hamcrest.core.StringContains.containsString;
|
||||
import static org.hamcrest.core.Is.is;
|
||||
|
||||
class FlowNamespaceUpdateCommandTest {
|
||||
@Test
|
||||
@@ -36,7 +39,7 @@ class FlowNamespaceUpdateCommandTest {
|
||||
};
|
||||
PicocliRunner.call(FlowNamespaceUpdateCommand.class, ctx, args);
|
||||
|
||||
assertThat(out.toString()).contains("namespace 'io.kestra.cli' successfully updated");
|
||||
assertThat(out.toString(), containsString("namespace 'io.kestra.cli' successfully updated"));
|
||||
out.reset();
|
||||
|
||||
args = new String[]{
|
||||
@@ -52,7 +55,7 @@ class FlowNamespaceUpdateCommandTest {
|
||||
PicocliRunner.call(FlowNamespaceUpdateCommand.class, ctx, args);
|
||||
|
||||
// 2 delete + 1 update
|
||||
assertThat(out.toString()).contains("namespace 'io.kestra.cli' successfully updated");
|
||||
assertThat(out.toString(), containsString("namespace 'io.kestra.cli' successfully updated"));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -78,9 +81,9 @@ class FlowNamespaceUpdateCommandTest {
|
||||
};
|
||||
Integer call = PicocliRunner.call(FlowNamespaceUpdateCommand.class, ctx, args);
|
||||
|
||||
assertThat(call).isEqualTo(1);
|
||||
assertThat(out.toString()).contains("Unable to parse flows");
|
||||
assertThat(out.toString()).contains("must not be empty");
|
||||
assertThat(call, is(1));
|
||||
assertThat(out.toString(), containsString("Unable to parse flows"));
|
||||
assertThat(out.toString(), containsString("must not be empty"));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -108,7 +111,7 @@ class FlowNamespaceUpdateCommandTest {
|
||||
};
|
||||
PicocliRunner.call(FlowNamespaceUpdateCommand.class, ctx, args);
|
||||
|
||||
assertThat(out.toString()).contains("3 flow(s)");
|
||||
assertThat(out.toString(), containsString("3 flow(s)"));
|
||||
out.reset();
|
||||
|
||||
// no "delete" arg should behave as no-delete
|
||||
@@ -122,7 +125,7 @@ class FlowNamespaceUpdateCommandTest {
|
||||
};
|
||||
PicocliRunner.call(FlowNamespaceUpdateCommand.class, ctx, args);
|
||||
|
||||
assertThat(out.toString()).contains("1 flow(s)");
|
||||
assertThat(out.toString(), containsString("1 flow(s)"));
|
||||
out.reset();
|
||||
|
||||
args = new String[]{
|
||||
@@ -136,7 +139,7 @@ class FlowNamespaceUpdateCommandTest {
|
||||
};
|
||||
PicocliRunner.call(FlowNamespaceUpdateCommand.class, ctx, args);
|
||||
|
||||
assertThat(out.toString()).contains("1 flow(s)");
|
||||
assertThat(out.toString(), containsString("1 flow(s)"));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -162,8 +165,8 @@ class FlowNamespaceUpdateCommandTest {
|
||||
};
|
||||
Integer call = PicocliRunner.call(FlowNamespaceUpdateCommand.class, ctx, args);
|
||||
|
||||
assertThat(call).isZero();
|
||||
assertThat(out.toString()).contains("1 flow(s)");
|
||||
assertThat(call, is(0));
|
||||
assertThat(out.toString(), containsString("1 flow(s)"));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -192,8 +195,8 @@ class FlowNamespaceUpdateCommandTest {
|
||||
};
|
||||
PicocliRunner.call(FlowNamespaceUpdateCommand.class, ctx, args);
|
||||
|
||||
assertThat(out.toString()).contains("io.kestra.override");
|
||||
assertThat(out.toString()).doesNotContain("io.kestra.cli");
|
||||
assertThat(out.toString(), containsString("io.kestra.override"));
|
||||
assertThat(out.toString(), not(containsString("io.kestra.cli")));
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,7 +7,9 @@ import org.junit.jupiter.api.Test;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.core.Is.is;
|
||||
|
||||
class NamespaceCommandTest {
|
||||
@Test
|
||||
@@ -19,8 +21,8 @@ class NamespaceCommandTest {
|
||||
String[] args = {};
|
||||
Integer call = PicocliRunner.call(NamespaceCommand.class, ctx, args);
|
||||
|
||||
assertThat(call).isZero();
|
||||
assertThat(out.toString()).contains("Usage: kestra namespace");
|
||||
assertThat(call, is(0));
|
||||
assertThat(out.toString(), containsString("Usage: kestra namespace"));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,7 +7,9 @@ import org.junit.jupiter.api.Test;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.core.Is.is;
|
||||
|
||||
class NamespaceFilesCommandTest {
|
||||
@Test
|
||||
@@ -19,8 +21,8 @@ class NamespaceFilesCommandTest {
|
||||
String[] args = {};
|
||||
Integer call = PicocliRunner.call(NamespaceFilesCommand.class, ctx, args);
|
||||
|
||||
assertThat(call).isZero();
|
||||
assertThat(out.toString()).contains("Usage: kestra namespace files");
|
||||
assertThat(call, is(0));
|
||||
assertThat(out.toString(), containsString("Usage: kestra namespace files"));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -14,8 +14,8 @@ import java.net.URISyntaxException;
|
||||
import java.net.URL;
|
||||
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.not;
|
||||
import static org.hamcrest.core.StringContains.containsString;
|
||||
|
||||
class NamespaceFilesUpdateCommandTest {
|
||||
@Test
|
||||
@@ -31,8 +31,6 @@ class NamespaceFilesUpdateCommandTest {
|
||||
|
||||
String to = "/some/directory";
|
||||
String[] args = {
|
||||
"--plugins",
|
||||
"/tmp", // pass this arg because it can cause failure
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
@@ -63,8 +61,6 @@ class NamespaceFilesUpdateCommandTest {
|
||||
embeddedServer.start();
|
||||
|
||||
String[] args = {
|
||||
"--plugins",
|
||||
"/tmp", // pass this arg because it can cause failure
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
@@ -94,8 +90,6 @@ class NamespaceFilesUpdateCommandTest {
|
||||
embeddedServer.start();
|
||||
|
||||
String[] args = {
|
||||
"--plugins",
|
||||
"/tmp", // pass this arg because it can cause failure
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
|
||||
@@ -7,7 +7,9 @@ import org.junit.jupiter.api.Test;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.core.Is.is;
|
||||
|
||||
class KvCommandTest {
|
||||
@Test
|
||||
@@ -19,8 +21,8 @@ class KvCommandTest {
|
||||
String[] args = {};
|
||||
Integer call = PicocliRunner.call(KvCommand.class, ctx, args);
|
||||
|
||||
assertThat(call).isZero();
|
||||
assertThat(out.toString()).contains("Usage: kestra namespace kv");
|
||||
assertThat(call, is(0));
|
||||
assertThat(out.toString(), containsString("Usage: kestra namespace kv"));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -16,8 +16,8 @@ import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.util.Map;
|
||||
|
||||
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
|
||||
class KvUpdateCommandTest {
|
||||
@Test
|
||||
@@ -28,8 +28,6 @@ class KvUpdateCommandTest {
|
||||
embeddedServer.start();
|
||||
|
||||
String[] args = {
|
||||
"--plugins",
|
||||
"/tmp", // pass this arg because it can cause failure
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
@@ -41,10 +39,10 @@ class KvUpdateCommandTest {
|
||||
PicocliRunner.call(KvUpdateCommand.class, ctx, args);
|
||||
|
||||
KVStoreService kvStoreService = ctx.getBean(KVStoreService.class);
|
||||
KVStore kvStore = kvStoreService.get(MAIN_TENANT, "io.kestra.cli", null);
|
||||
KVStore kvStore = kvStoreService.get(null, "io.kestra.cli", null);
|
||||
|
||||
assertThat(kvStore.getValue("string").get()).isEqualTo(new KVValue("stringValue"));
|
||||
assertThat(((InternalKVStore) kvStore).getRawValue("string").get()).isEqualTo("\"stringValue\"");
|
||||
assertThat(kvStore.getValue("string").get(), is(new KVValue("stringValue")));
|
||||
assertThat(((InternalKVStore)kvStore).getRawValue("string").get(), is("\"stringValue\""));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -56,8 +54,6 @@ class KvUpdateCommandTest {
|
||||
embeddedServer.start();
|
||||
|
||||
String[] args = {
|
||||
"--plugins",
|
||||
"/tmp", // pass this arg because it can cause failure
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
@@ -69,10 +65,10 @@ class KvUpdateCommandTest {
|
||||
PicocliRunner.call(KvUpdateCommand.class, ctx, args);
|
||||
|
||||
KVStoreService kvStoreService = ctx.getBean(KVStoreService.class);
|
||||
KVStore kvStore = kvStoreService.get(MAIN_TENANT, "io.kestra.cli", null);
|
||||
KVStore kvStore = kvStoreService.get(null, "io.kestra.cli", null);
|
||||
|
||||
assertThat(kvStore.getValue("int").get()).isEqualTo(new KVValue(1));
|
||||
assertThat(((InternalKVStore) kvStore).getRawValue("int").get()).isEqualTo("1");
|
||||
assertThat(kvStore.getValue("int").get(), is(new KVValue(1)));
|
||||
assertThat(((InternalKVStore)kvStore).getRawValue("int").get(), is("1"));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -84,8 +80,6 @@ class KvUpdateCommandTest {
|
||||
embeddedServer.start();

String[] args = {
"--plugins",
"/tmp", // pass this arg because it can cause failure
"--server",
embeddedServer.getURL().toString(),
"--user",
@@ -99,10 +93,10 @@ class KvUpdateCommandTest {
PicocliRunner.call(KvUpdateCommand.class, ctx, args);

KVStoreService kvStoreService = ctx.getBean(KVStoreService.class);
KVStore kvStore = kvStoreService.get(MAIN_TENANT, "io.kestra.cli", null);
KVStore kvStore = kvStoreService.get(null, "io.kestra.cli", null);

assertThat(kvStore.getValue("intStr").get()).isEqualTo(new KVValue("1"));
assertThat(((InternalKVStore) kvStore).getRawValue("intStr").get()).isEqualTo("\"1\"");
assertThat(kvStore.getValue("intStr").get(), is(new KVValue("1")));
assertThat(((InternalKVStore)kvStore).getRawValue("intStr").get(), is("\"1\""));
}
}

@@ -114,8 +108,6 @@ class KvUpdateCommandTest {
embeddedServer.start();

String[] args = {
"--plugins",
"/tmp", // pass this arg because it can cause failure
"--server",
embeddedServer.getURL().toString(),
"--user",
@@ -127,10 +119,10 @@ class KvUpdateCommandTest {
PicocliRunner.call(KvUpdateCommand.class, ctx, args);

KVStoreService kvStoreService = ctx.getBean(KVStoreService.class);
KVStore kvStore = kvStoreService.get(MAIN_TENANT, "io.kestra.cli", null);
KVStore kvStore = kvStoreService.get(null, "io.kestra.cli", null);

assertThat(kvStore.getValue("object").get()).isEqualTo(new KVValue(Map.of("some", "json")));
assertThat(((InternalKVStore) kvStore).getRawValue("object").get()).isEqualTo("{some:\"json\"}");
assertThat(kvStore.getValue("object").get(), is(new KVValue(Map.of("some", "json"))));
assertThat(((InternalKVStore)kvStore).getRawValue("object").get(), is("{some:\"json\"}"));
}
}

@@ -142,8 +134,6 @@ class KvUpdateCommandTest {
embeddedServer.start();

String[] args = {
"--plugins",
"/tmp", // pass this arg because it can cause failure
"--server",
embeddedServer.getURL().toString(),
"--user",
@@ -157,10 +147,10 @@ class KvUpdateCommandTest {
PicocliRunner.call(KvUpdateCommand.class, ctx, args);

KVStoreService kvStoreService = ctx.getBean(KVStoreService.class);
KVStore kvStore = kvStoreService.get(MAIN_TENANT, "io.kestra.cli", null);
KVStore kvStore = kvStoreService.get(null, "io.kestra.cli", null);

assertThat(kvStore.getValue("objectStr").get()).isEqualTo(new KVValue("{\"some\":\"json\"}"));
assertThat(((InternalKVStore) kvStore).getRawValue("objectStr").get()).isEqualTo("\"{\\\"some\\\":\\\"json\\\"}\"");
assertThat(kvStore.getValue("objectStr").get(), is(new KVValue("{\"some\":\"json\"}")));
assertThat(((InternalKVStore)kvStore).getRawValue("objectStr").get(), is("\"{\\\"some\\\":\\\"json\\\"}\""));
}
}

@@ -177,8 +167,6 @@ class KvUpdateCommandTest {
Files.write(file.toPath(), "{\"some\":\"json\",\"from\":\"file\"}".getBytes());

String[] args = {
"--plugins",
"/tmp", // pass this arg because it can cause failure
"--server",
embeddedServer.getURL().toString(),
"--user",
@@ -191,10 +179,10 @@ class KvUpdateCommandTest {
PicocliRunner.call(KvUpdateCommand.class, ctx, args);

KVStoreService kvStoreService = ctx.getBean(KVStoreService.class);
KVStore kvStore = kvStoreService.get(MAIN_TENANT, "io.kestra.cli", null);
KVStore kvStore = kvStoreService.get(null, "io.kestra.cli", null);

assertThat(kvStore.getValue("objectFromFile").get()).isEqualTo(new KVValue(Map.of("some", "json", "from", "file")));
assertThat(((InternalKVStore) kvStore).getRawValue("objectFromFile").get()).isEqualTo("{some:\"json\",from:\"file\"}");
assertThat(kvStore.getValue("objectFromFile").get(), is(new KVValue(Map.of("some", "json", "from", "file"))));
assertThat(((InternalKVStore)kvStore).getRawValue("objectFromFile").get(), is("{some:\"json\",from:\"file\"}"));
}
}
}
@@ -8,7 +8,8 @@ import org.junit.jupiter.api.Test;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;

import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.StringContains.containsString;

class PluginCommandTest {

@@ -20,35 +21,7 @@ class PluginCommandTest {
try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, Environment.TEST)) {
PicocliRunner.call(PluginCommand.class, ctx);

assertThat(out.toString()).contains("Usage: kestra plugins");
assertThat(out.toString(), containsString("Usage: kestra plugins"));
}
}


// Additional Coverage:
@Test
void shouldListSubcommandsInHelp() throws Exception {
ByteArrayOutputStream out = new ByteArrayOutputStream();
PrintStream originalOut = System.out;
System.setOut(new PrintStream(out));
try {
PluginCommand cmd = new PluginCommand();
cmd.call();
String output = out.toString();
assertThat(output).contains("install");
assertThat(output).contains("uninstall");
assertThat(output).contains("list");
assertThat(output).contains("doc");
assertThat(output).contains("search");
} finally {
System.setOut(originalOut);
}
}

// Passes
@Test
void shouldNotLoadExternalPlugins() {
PluginCommand cmd = new PluginCommand();
assertThat(cmd.loadExternalPlugins()).isFalse();
}
}
}
@@ -17,11 +17,12 @@ import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;

import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.*;

class PluginDocCommandTest {

public static final String PLUGIN_TEMPLATE_TEST = "plugin-template-test-0.24.0-SNAPSHOT.jar";
public static final String PLUGIN_TEMPLATE_TEST = "plugin-template-test-0.18.0-SNAPSHOT.jar";

@Test
void run() throws IOException, URISyntaxException {
@@ -43,16 +44,16 @@ class PluginDocCommandTest {

List<Path> files = Files.list(docPath).toList();

assertThat(files.size()).isEqualTo(1);
assertThat(files.getFirst().getFileName().toString()).isEqualTo("plugin-template-test");
assertThat(files.size(), is(1));
assertThat(files.getFirst().getFileName().toString(), is("plugin-template-test"));
var directory = files.getFirst().toFile();
assertThat(directory.isDirectory()).isTrue();
assertThat(directory.listFiles().length).isEqualTo(3);
assertThat(directory.isDirectory(), is(true));
assertThat(directory.listFiles().length, is(3));

var readme = directory.toPath().resolve("index.md");
var readmeContent = new String(Files.readAllBytes(readme));

assertThat(readmeContent).contains("""
assertThat(readmeContent, containsString("""
---
title: Template test
description: "Plugin template for Kestra"
@@ -60,17 +61,18 @@ class PluginDocCommandTest {

---
# Template test
""");
"""));

assertThat(readmeContent).contains("""
assertThat(readmeContent, containsString("""
Plugin template for Kestra

This is a more complex description of the plugin.

This is in markdown and will be inline inside the plugin page.
""");
"""));

assertThat(readmeContent).contains("""
assertThat(readmeContent, containsString(
"""
/> Subgroup title

Subgroup description
@@ -87,20 +89,20 @@ class PluginDocCommandTest {
\s
* [Reporting](./guides/reporting.md)
\s
""");
"""));

// check @PluginProperty from an interface
var task = directory.toPath().resolve("tasks/io.kestra.plugin.templates.ExampleTask.md");
String taskDoc = new String(Files.readAllBytes(task));
assertThat(taskDoc).contains("""
assertThat(taskDoc, containsString("""
### `example`
* **Type:** ==string==
* **Dynamic:** ✔️
* **Required:** ❌

**Example interface**
""");
assertThat(taskDoc).contains("""
"""));
assertThat(taskDoc, containsString("""
### `from`
* **Type:**
* ==string==
@@ -108,12 +110,12 @@ class PluginDocCommandTest {
* [==Example==](#io.kestra.core.models.annotations.example)
* **Dynamic:** ✔️
* **Required:** ✔️
""");
"""));

var authenticationGuide = directory.toPath().resolve("guides/authentication.md");
assertThat(new String(Files.readAllBytes(authenticationGuide))).contains("This is how to authenticate for this plugin:");
assertThat(new String(Files.readAllBytes(authenticationGuide)), containsString("This is how to authenticate for this plugin:"));
var reportingGuide = directory.toPath().resolve("guides/reporting.md");
assertThat(new String(Files.readAllBytes(reportingGuide))).contains("This is the reporting of the plugin:");
assertThat(new String(Files.readAllBytes(reportingGuide)), containsString("This is the reporting of the plugin:"));
}
}
}