Compare commits


17 Commits

Author SHA1 Message Date
YannC
c757827b9d chore: upgrade to version 0.16.2 2024-04-22 18:49:39 +02:00
YannC
2a5c82b2a3 fix(scheduler): better handling of locked triggers (#3603) 2024-04-22 18:47:17 +02:00
Loïc Mathieu
15bb0ee65b feat(core): mandate that both key and value are present for labels 2024-04-22 18:47:05 +02:00
Ludovic DEHON
d06e8dad6e fix(core): handle secret in trigger 2024-04-22 18:46:57 +02:00
brian.mulier
e9f5752278 fix(cli): API commands work against a pre-micronaut-upgrade server 2024-04-22 18:46:50 +02:00
brian.mulier
c16c5ddaf5 chore(deps): update ui-libs to 0.0.43 2024-04-22 18:46:44 +02:00
Ludovic DEHON
b706ca1911 fix(ui): flow full revision is truncated
close #3478
2024-04-22 18:46:36 +02:00
brian.mulier
366246e0a8 fix(ui): Gantt clicks are working again 2024-04-22 18:46:28 +02:00
brian.mulier
dcea4551cc fix(ui): prevent editor shrink on loading task runner doc 2024-04-22 18:46:22 +02:00
brian.mulier
c0ff6fcc52 fix(tests): add real launch to outputDirDisabled test for task runners 2024-04-22 18:46:16 +02:00
Florian Hussonnois
0525e7eaca fix(core): VariableRenderer should expose alternativeRender 2024-04-22 18:46:06 +02:00
YannC
d1fb098f5b chore(version): update to version 'v0.16.1' 2024-04-15 17:04:32 +02:00
YannC
9703cc48cb feat(ui): click anywhere on the row to open logs of a task in Gantt vue 2024-04-15 17:02:18 +02:00
YannC
31c3e5a4f6 feat(ui): set plugins menu back in the UI (#3558) 2024-04-15 17:02:13 +02:00
brian.mulier
bda52eb49d fix(ui): use new Monaco API for decorations to prevent editor from disappearing
closes #3536
2024-04-15 17:02:00 +02:00
brian.mulier
8a54b8ec7f fix(validate): restore ability to run validate command without any configuration 2024-04-15 17:01:34 +02:00
Loïc Mathieu
c34c82c1f9 fix: downgrade Micronaut
Go back to the previously working version 4.3.4, as 4.3.7 has a bug where the routeMatch is randomly null on the security filter.
See https://github.com/kestra-io/kestra-ee/issues/1085
2024-04-15 17:01:24 +02:00
2993 changed files with 61305 additions and 214731 deletions


@@ -1,82 +0,0 @@
FROM ubuntu:24.04
ARG BUILDPLATFORM
ARG DEBIAN_FRONTEND=noninteractive
USER root
WORKDIR /root
RUN apt update && apt install -y \
apt-transport-https ca-certificates gnupg curl wget git zip unzip less zsh net-tools iputils-ping jq lsof
ENV HOME="/root"
# --------------------------------------
# Git
# --------------------------------------
# The devcontainer workspace folder must be added as a safe directory so that Git
# version control works inside the container's file system.
RUN git config --global --add safe.directory "/workspaces/kestra"
# --------------------------------------
# --------------------------------------
# Oh my zsh
# --------------------------------------
RUN sh -c "$(curl -fsSL https://raw.githubusercontent.com/ohmyzsh/ohmyzsh/master/tools/install.sh)" -- \
-t robbyrussell \
-p git -p node -p npm
ENV SHELL=/bin/zsh
# --------------------------------------
# --------------------------------------
# Java
# --------------------------------------
ARG OS_ARCHITECTURE
RUN mkdir -p /usr/java
RUN echo "Building on platform: $BUILDPLATFORM"
RUN case "$BUILDPLATFORM" in \
"linux/amd64") OS_ARCHITECTURE="x64_linux" ;; \
"linux/arm64") OS_ARCHITECTURE="aarch64_linux" ;; \
"darwin/amd64") OS_ARCHITECTURE="x64_mac" ;; \
"darwin/arm64") OS_ARCHITECTURE="aarch64_mac" ;; \
*) echo "Unsupported BUILDPLATFORM: $BUILDPLATFORM" && exit 1 ;; \
esac && \
wget "https://github.com/adoptium/temurin21-binaries/releases/download/jdk-21.0.7%2B6/OpenJDK21U-jdk_${OS_ARCHITECTURE}_hotspot_21.0.7_6.tar.gz" && \
mv OpenJDK21U-jdk_${OS_ARCHITECTURE}_hotspot_21.0.7_6.tar.gz openjdk-21.0.7.tar.gz
RUN tar -xzvf openjdk-21.0.7.tar.gz && \
mv jdk-21.0.7+6 jdk-21 && \
mv jdk-21 /usr/java/
ENV JAVA_HOME=/usr/java/jdk-21
ENV PATH="$PATH:$JAVA_HOME/bin"
# Will load a custom configuration file for Micronaut
ENV MICRONAUT_ENVIRONMENTS=local,override
# Path where plugin JARs are saved; they are loaded during the startup process
ENV KESTRA_PLUGINS_PATH="/workspaces/kestra/local/plugins"
# --------------------------------------
# --------------------------------------
# Node.js
# --------------------------------------
RUN curl -fsSL https://deb.nodesource.com/setup_22.x -o nodesource_setup.sh \
&& bash nodesource_setup.sh && apt install -y nodejs
# Increases the JavaScript heap to 4GB to prevent out-of-memory errors during startup
ENV NODE_OPTIONS=--max-old-space-size=4096
# --------------------------------------
# --------------------------------------
# Python
# --------------------------------------
RUN apt install -y python3 pip python3-venv
# --------------------------------------
# --------------------------------------
# SSH
# --------------------------------------
RUN mkdir -p ~/.ssh
RUN touch ~/.ssh/config
RUN echo "Host github.com" >> ~/.ssh/config \
&& echo " IdentityFile ~/.ssh/id_ed25519" >> ~/.ssh/config
RUN touch ~/.ssh/id_ed25519
# --------------------------------------


@@ -1,151 +0,0 @@
# Kestra Devcontainer
This devcontainer provides a quick and easy setup for anyone using VSCode to get up and running with this project and start development on either the frontend or backend. It bootstraps a Docker container for you to develop inside of, without the need to manually set up the environment.
---
## INSTRUCTIONS
### Setup:
Take a look at this guide to get an idea of what the setup is like, as this devcontainer follows the same approach: https://kestra.io/docs/getting-started/contributing
Once you have this repo cloned to your local system, you will need to install the VSCode extension [Remote Development](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.vscode-remote-extensionpack).
Then run the following command from the command palette:
`Dev Containers: Open Folder in Container...` and select your Kestra root folder.
This will then put you inside a docker container ready for development.
NOTE: you'll need to wait for the Gradle build to finish compiling the Java files, but this process should happen automatically within VSCode.
In the meantime, you can move onto the next step...
---
### Requirements
- Java 21 (LTS versions).
> ⚠️ Java 24 and above are not supported yet and will fail with `invalid source release: 21`.
- Gradle (comes with wrapper `./gradlew`)
- Docker (optional, for running Kestra in containers)
### Development:
- (Optional) By default, your dev server will target `localhost:8080`. If your backend is running elsewhere, you can create `.env.development.local` under the `ui` folder with this content:
```
VITE_APP_API_URL={myApiUrl}
```
- Navigate into the `ui` folder and run `npm install` to install the dependencies for the frontend project.
- Next, go to the `cli/src/main/resources` folder and create an `application-override.yml` file.
Now you have two choices:
`Local mode`:
Runs the Kestra server in local mode, which uses an H2 database, so this is the only config you'd need:
```yaml
micronaut:
server:
cors:
enabled: true
configurations:
all:
allowedOrigins:
- http://localhost:5173
```
You can then open a new terminal and run the following command to start the backend server: `./gradlew runLocal`
`Standalone mode`:
Runs the Kestra server in standalone mode, which uses Postgres. Make sure a local Postgres instance is already running on localhost:
```yaml
kestra:
repository:
type: postgres
storage:
type: local
local:
base-path: "/app/storage"
queue:
type: postgres
tasks:
tmp-dir:
path: /tmp/kestra-wd/tmp
anonymous-usage-report:
enabled: false
datasources:
postgres:
# Note: you must use the "host.docker.internal" host when connecting to a Docker container outside of your devcontainer, as localhost only points back to this devcontainer.
url: jdbc:postgresql://host.docker.internal:5432/kestra
driverClassName: org.postgresql.Driver
username: kestra
password: k3str4
flyway:
datasources:
postgres:
enabled: true
locations:
- classpath:migrations/postgres
# We must ignore missing migrations, as migration scripts may be deleted once they are no longer used.
ignore-migration-patterns: "*:missing,*:future"
out-of-order: true
micronaut:
server:
cors:
enabled: true
configurations:
all:
allowedOrigins:
- http://localhost:5173
```
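If you don't already have a matching Postgres instance, here is a minimal sketch for starting one with Docker on your host machine; the image tag is an assumption, while the database name and credentials mirror the datasource configuration above:
```bash
# Run this on the host (not inside the devcontainer) so it is reachable via host.docker.internal.
# Credentials and database name match the datasource configuration above; the image tag is only an example.
docker run -d --name kestra-postgres \
  -p 5432:5432 \
  -e POSTGRES_DB=kestra \
  -e POSTGRES_USER=kestra \
  -e POSTGRES_PASSWORD=k3str4 \
  postgres:16
```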
Then add the following settings to the `.vscode/launch.json` file:
```json
{
"version": "0.2.0",
"configurations": [
{
"type": "java",
"name": "Kestra Standalone",
"request": "launch",
"mainClass": "io.kestra.cli.App",
"projectName": "cli",
"args": "server standalone"
}
]
}
```
You can then use the VSCode `Run and Debug` view to start the Kestra server.
Additionally, if you're doing frontend development, you can run `npm run dev` from the `ui` folder once the above is running (which provides a backend) and access your application at `localhost:5173`. This has the benefit of watching your changes and hot-reloading whenever you make frontend changes.
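For reference, a minimal sketch of that frontend loop, assuming the backend from the previous step is already listening on `localhost:8080`:
```bash
cd ui
npm install
# Optional: point the dev server at a non-default backend URL.
echo "VITE_APP_API_URL=http://localhost:8080" > .env.development.local
npm run dev   # serves the UI with hot reload on http://localhost:5173
```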
#### Plugins
If you want your plugins to be loaded inside your devcontainer, point the `source` field of the following `devcontainer.json` snippet to a folder containing the JARs of the plugins you want to embed:
```
"mounts": [
{
"source": "/absolute/path/to/your/local/jar/plugins/folder",
"target": "/workspaces/kestra/local/plugins",
"type": "bind"
}
],
```
---
### GIT
If you want to commit to GitHub, navigate to the `~/.ssh` folder and either create a new SSH key or overwrite the existing `id_ed25519` file by pasting an existing SSH key from your local machine into it. You will then need to change the permissions of the file by running `chmod 600 id_ed25519`. This will allow you to push to GitHub.
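As a minimal sketch (the key name matches the `~/.ssh/config` entry created by the Dockerfile; where you copy the key from is up to you):
```bash
# Run inside the devcontainer.
cat > ~/.ssh/id_ed25519      # paste your existing private key, then press Ctrl-D
chmod 600 ~/.ssh/id_ed25519  # SSH refuses keys with looser permissions
ssh -T git@github.com        # optional: check that GitHub accepts the key
```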
---


@@ -1,46 +0,0 @@
{
"name": "kestra",
"build": {
"context": ".",
"dockerfile": "Dockerfile"
},
"workspaceFolder": "/workspaces/kestra",
"forwardPorts": [5173, 8080],
"customizations": {
"vscode": {
"settings": {
"terminal.integrated.profiles.linux": {
"zsh": {
"path": "/bin/zsh"
}
},
"workbench.iconTheme": "vscode-icons",
"editor.tabSize": 4,
"editor.formatOnSave": true,
"files.insertFinalNewline": true,
"editor.defaultFormatter": "esbenp.prettier-vscode",
"telemetry.telemetryLevel": "off",
"editor.bracketPairColorization.enabled": true,
"editor.guides.bracketPairs": "active"
},
"extensions": [
"redhat.vscode-yaml",
"dbaeumer.vscode-eslint",
"vscode-icons-team.vscode-icons",
"eamodio.gitlens",
"esbenp.prettier-vscode",
"aaron-bond.better-comments",
"codeandstuff.package-json-upgrade",
"andys8.jest-snippets",
"oderwat.indent-rainbow",
"evondev.indent-rainbow-palettes",
"formulahendry.auto-rename-tag",
"IronGeek.vscode-env",
"yoavbls.pretty-ts-errors",
"github.vscode-github-actions",
"vscjava.vscode-java-pack",
"docker.docker"
]
}
}
}


@@ -1,7 +1,7 @@
## Code of Conduct
This project and everyone participating in it is governed by the
[Kestra Code of Conduct](https://github.com/kestra-io/kestra/blob/develop/.github/CODE_OF_CONDUCT.md).
[Kestra Code of Conduct](https://github.com/kestra-io/kestrablob/master/CODE_OF_CONDUCT.md).
By participating, you are expected to uphold this code. Please report unacceptable behavior
to <hello@kestra.io>.
@@ -31,16 +31,12 @@ Watch out for duplicates! If you are creating a new issue, please check existing
#### Requirements
The following dependencies are required to build Kestra locally:
- Java 21+
- Node 18+ and npm
- Java 17+, Kestra runs on Java 11 but we hit a Java compiler bug fixed in Java 17
- Node 14+ and npm
- Python 3, pip and python venv
- Docker & Docker Compose
- an IDE (Intellij IDEA, Eclipse or VS Code)
Thanks to the Kestra community, if using VSCode, you can also start development on either the frontend or backend with a bootstrapped docker container without the need to manually set up the environment.
Check out the [README](../.devcontainer/README.md) for set-up instructions and the associated [Dockerfile](../.devcontainer/Dockerfile) in the repository to get started.
To start contributing:
- [Fork](https://docs.github.com/en/github/getting-started-with-github/fork-a-repo) the repository
- Clone the fork on your workstation:
@@ -50,23 +46,20 @@ git clone git@github.com:{YOUR_USERNAME}/kestra.git
cd kestra
```
#### Develop on the backend
#### Develop backend
The backend is made with [Micronaut](https://micronaut.io).
Open the cloned repository in your favorite IDE. In most decent IDEs, the Gradle build will be detected and all dependencies will be downloaded.
You can also build it from a terminal using `./gradlew build`; the Gradle wrapper will download the right Gradle version to use.
- You may need to enable java annotation processors since we are using them.
- On IntelliJ IDEA, click on **Run -> Edit Configurations -> + Add new Configuration** to create a run configuration to start Kestra.
- The main class is `io.kestra.cli.App` from module `kestra.cli.main`.
- Pass as program arguments the server you want to work with; for example, `server local` will start the [standalone local](https://kestra.io/docs/administrator-guide/server-cli#kestra-local-development-server-with-no-dependencies). You can also use `server standalone` together with the provided `docker-compose-ci.yml` Docker Compose file to start a standalone server with a real database backend, which needs to be configured properly.
- Configure the following environment variables:
- `MICRONAUT_ENVIRONMENTS`: can be set to any string and will load a custom configuration file in `cli/src/main/resources/application-{env}.yml`.
- `KESTRA_PLUGINS_PATH`: is the path where you will save plugins as JARs; they will be loaded on startup.
- See the screenshot below for an example: ![Intellij IDEA Configuration ](run-app.png)
- If you encounter a **JavaScript heap out of memory** error during startup, configure the `NODE_OPTIONS` environment variable with some large value.
- Example `NODE_OPTIONS: --max-old-space-size=4096` or `NODE_OPTIONS: --max-old-space-size=8192` ![Intellij IDEA Configuration ](node_option_env_var.png)
- The server starts by default on port 8080 and is reachable on `http://localhost:8080`
- You may need to enable java annotation processors since we are using it a lot.
- The main class is `io.kestra.cli.App` from module `kestra.cli.main`
- Pass as program arguments the server you want to develop, for example `server local` will start the [standalone local](https://kestra.io/docs/administrator-guide/server-cli#kestra-local-development-server-with-no-dependencies)
- ![Intellij Idea Configuration ](https://user-images.githubusercontent.com/2064609/161399626-1b681add-cfa8-4e0e-a843-2631cc59758d.png) Intellij Idea configuration can be found in screenshot below.
- `MICRONAUT_ENVIRONMENTS`: can be set any string and will load a custom configuration file in `cli/src/main/resources/application-{env}.yml`
- `KESTRA_PLUGINS_PATH`: is the path where you will save plugins as Jar and will be load on the startup.
- You can also use the gradle task `./gradlew runLocal` that will run a standalone server with `MICRONAUT_ENVIRONMENTS=override` and plugins path `local/plugins`
- The server start by default on port 8080 and is reachable on `http://localhost:8080`
If you want to launch all tests, you need Python and some packages installed on your machine; on Ubuntu you can install them with:
@@ -76,20 +69,21 @@ python3 -m pip install virtualenv
```
#### Develop on the frontend
#### Develop frontend
The frontend is made with [Vue.js](https://vuejs.org/) and located on the `/ui` folder.
- `npm install`
- `npm install --force` (force is need because of some conflicting package)
- create a file `ui/.env.development.local` with content `VITE_APP_API_URL=http://localhost:8080` (or your actual server URL)
- `npm run dev` will start the development server with hot reload.
- The server start by default on port 5173 and is reachable on `http://localhost:5173`
- The server start by default on port 8090 and is reachable on `http://localhost:5173`
- You can run `npm run build` to build the front-end that will be served by the backend (without running the `npm run dev` above).
Now you need to start a backend server; you could:
- start a [local server](https://kestra.io/docs/administrator-guide/server-cli#kestra-local-development-server-with-no-dependencies) without a database using this docker-compose file already configured with CORS enabled:
- start a [local server](https://kestra.io/docs/administrator-guide/server-cli#kestra-local-development-server-with-no-dependencies) without database using this docker-compose file already configured with CORS enabled:
```yaml
services:
kestra:
image: kestra/kestra:latest
image: kestra/kestra:latest-full
user: "root"
command: server local
environment:
@@ -105,7 +99,7 @@ services:
ports:
- "8080:8080"
```
- start the [Develop backend](#develop-backend) from your IDE, you need to configure CORS restrictions when using the local development npm server, changing the backend configuration allowing the http://localhost:5173 origin in `cli/src/main/resources/application-override.yml`
- start the [Develop backend](#develop-backend) from your IDE and you need to configure CORS restrictions when using the local development npm server, changing the backend configuration allowing the http://localhost:5173 origin in `cli/src/main/resources/application-override.yml`
```yaml
micronaut:
@@ -139,4 +133,4 @@ A complete documentation for developing plugin can be found [here](https://kestr
### Improving The Documentation
The main documentation is located in a separate [repository](https://github.com/kestra-io/kestra.io).
For tasks documentation, they are located directly in the Java source, using [Swagger annotations](https://github.com/swagger-api/swagger-core/wiki/Swagger-2.X---Annotations) (Example: [for Bash tasks](https://github.com/kestra-io/kestra/blob/develop/core/src/main/java/io/kestra/core/tasks/scripts/AbstractBash.java))
For tasks documentation, they are located directly on Java source using [Swagger annotations](https://github.com/swagger-api/swagger-core/wiki/Swagger-2.X---Annotations) (Example: [for Bash tasks](https://github.com/kestra-io/kestra/blob/develop/core/src/main/java/io/kestra/core/tasks/scripts/AbstractBash.java))

.github/ISSUE_TEMPLATE/blueprint.yml vendored Normal file (+54 lines)

@@ -0,0 +1,54 @@
name: Blueprint
description: Add a new blueprint
body:
- type: markdown
attributes:
value: |
Please fill out all the fields listed below. This will help us review and add your blueprint faster.
- type: textarea
attributes:
label: Blueprint title
description: A title briefly describing what the blueprint does, ideally in a verb phrase + noun format.
placeholder: E.g. "Upload a file to service X, then run Y and Z"
validations:
required: true
- type: textarea
attributes:
label: Source code
description: Flow code that will appear on the Blueprint page.
placeholder: |
```yaml
id: yourFlowId
namespace: blueprint
tasks:
- id: taskName
type: task_type
```
validations:
required: true
- type: textarea
attributes:
label: About this blueprint
description: "A concise markdown documentation about the blueprint's configuration and usage."
placeholder: |
E.g. "This flow downloads a file and uploads it to an S3 bucket. This flow assumes AWS credentials stored as environment variables `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY`."
validations:
required: false
- type: textarea
attributes:
label: Tags (optional)
description: Blueprint categories such as Ingest, Transform, Analyze, Python, Docker, AWS, GCP, Azure, etc.
placeholder: |
- Ingest
- Transform
- AWS
validations:
required: false
labels:
- blueprint


@@ -4,7 +4,9 @@ body:
- type: markdown
attributes:
value: |
Thanks for reporting an issue! Please provide a [Minimal Reproducible Example](https://stackoverflow.com/help/minimal-reproducible-example) and share any additional information that may help reproduce, troubleshoot, and hopefully fix the issue, including screenshots, error traceback, and your Kestra server logs. For quick questions, you can contact us directly on [Slack](https://kestra.io/slack).
Thanks for reporting an issue! Please provide a [Minima Reproducible Example](https://stackoverflow.com/help/minimal-reproducible-example)
and share any additional information that may help reproduce, troubleshoot, and hopefully fix the issue, including screenshots, error traceback, and your Kestra server logs.
NOTE: If your issue is more of a question, please ping us directly on [Slack](https://kestra.io/slack).
- type: textarea
attributes:
label: Describe the issue
@@ -17,10 +19,10 @@ body:
label: Environment
description: Environment information where the problem occurs.
value: |
- Kestra Version: develop
- Kestra Version:
- Operating System (OS/Docker/Kubernetes):
- Java Version (if you don't run kestra in Docker):
validations:
required: false
labels:
- bug
- area/backend
- area/frontend
- bug


@@ -1,4 +1,7 @@
contact_links:
- name: GitHub Discussions
url: https://github.com/kestra-io/kestra/discussions
about: Ask questions about Kestra on Github
- name: Chat
url: https://kestra.io/slack
about: Chat with us on Slack.


@@ -1,13 +1,15 @@
name: Feature request
description: Create a new feature request
body:
- type: markdown
attributes:
value: |
Please describe the feature you would like Kestra to implement; before that, check whether an existing issue already requests it.
- type: textarea
attributes:
label: Feature description
placeholder: Tell us more about your feature request
placeholder: Tell us what feature you would like for Kestra to have and what problem is it going to solve
validations:
required: true
labels:
- enhancement
- area/backend
- area/frontend

.github/ISSUE_TEMPLATE/other.yml vendored Normal file (+8 lines)

@@ -0,0 +1,8 @@
name: Other
description: Something different
body:
- type: textarea
attributes:
label: Issue description
validations:
required: true


@@ -1,29 +0,0 @@
name: 'Load Kestra Plugin List'
description: 'Composite action to load list of plugins'
inputs:
plugin-version:
description: "Kestra version"
default: 'LATEST'
required: true
plugin-file:
description: "File of the plugins"
default: './.plugins'
required: true
outputs:
plugins:
description: "List of all Kestra plugins"
value: ${{ steps.plugins.outputs.plugins }}
repositories:
description: "List of all Kestra repositories of plugins"
value: ${{ steps.plugins.outputs.repositories }}
runs:
using: composite
steps:
- name: Get Plugins List
id: plugins
shell: bash
run: |
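# Each relevant line of the plugin file has the form "<repository>:<plugin coordinates>":
# grep keeps the io.kestra.* lines, the first sed drops a leading comment character,
# the second sed pins LATEST to the requested plugin-version, and cut separates the
# repository (field 1) from the plugin coordinates (fields 2+).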
PLUGINS=$([ -f ${{ inputs.plugin-file }} ] && cat ${{ inputs.plugin-file }} | grep "io\\.kestra\\." | sed -e '/#/s/^.//' | sed -e "s/LATEST/${{ inputs.plugin-version }}/g" | cut -d':' -f2- | xargs || echo '');
REPOSITORIES=$([ -f ${{ inputs.plugin-file }} ] && cat ${{ inputs.plugin-file }} | grep "io\\.kestra\\." | sed -e '/#/s/^.//' | cut -d':' -f1 | uniq | sort | xargs || echo '')
echo "plugins=$PLUGINS" >> $GITHUB_OUTPUT
echo "repositories=$REPOSITORIES" >> $GITHUB_OUTPUT


@@ -1,20 +0,0 @@
name: 'Setup vars'
description: 'Composite action to setup common vars'
outputs:
tag:
description: "Git tag"
value: ${{ steps.vars.outputs.tag }}
commit:
description: "Git commit"
value: ${{ steps.vars.outputs.commit }}
runs:
using: composite
steps:
# Setup vars
- name: Set variables
id: vars
shell: bash
run: |
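# ${GITHUB_REF#refs/*/} strips the leading "refs/heads/" or "refs/tags/" prefix,
# leaving just the branch or tag name.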
TAG=${GITHUB_REF#refs/*/}
echo "tag=${TAG}" >> $GITHUB_OUTPUT
echo "commit=$(git rev-parse --short "$GITHUB_SHA")" >> $GITHUB_OUTPUT


@@ -1,50 +1,33 @@
# See GitHub's docs for more information on this file:
# https://docs.github.com/en/free-pro-team@latest/github/administering-a-repository/configuration-options-for-dependency-updates
version: 2
updates:
# Maintain dependencies for GitHub Actions
- package-ecosystem: "github-actions"
directory: "/"
schedule:
# Check for updates to GitHub Actions every week
interval: "weekly"
day: "wednesday"
time: "08:00"
timezone: "Europe/Paris"
open-pull-requests-limit: 50
labels:
- "dependency-upgrade"
open-pull-requests-limit: 50
# Maintain dependencies for Gradle modules
- package-ecosystem: "gradle"
directory: "/"
schedule:
# Check for updates to Gradle modules every week
interval: "weekly"
day: "wednesday"
time: "08:00"
timezone: "Europe/Paris"
open-pull-requests-limit: 50
labels:
- "dependency-upgrade"
open-pull-requests-limit: 50
# Maintain dependencies for NPM modules
# Maintain dependencies for Npm modules
- package-ecosystem: "npm"
directory: "/ui"
schedule:
# Check for updates to Npm modules every week
interval: "weekly"
day: "wednesday"
time: "08:00"
timezone: "Europe/Paris"
open-pull-requests-limit: 50
labels:
- "dependency-upgrade"
ignore:
# Ignore updates of version 1.x, as we're using the beta of 2.x (still in beta)
- dependency-name: "vue-virtual-scroller"
versions:
- "1.x"
# Ignore updates to monaco-yaml, version is pinned to 5.3.1 due to patch-package script additions
- dependency-name: "monaco-yaml"
versions:
- ">=5.3.2"
open-pull-requests-limit: 50

Binary file not shown (before: 130 KiB).


@@ -1,13 +1,12 @@
<!-- Thanks for submitting a Pull Request to Kestra. To help us review your contribution, please follow the guidelines below:
<!-- Thanks for submitting a Pull Request to kestra. To help us review your contribution, please follow the guidelines below:
- Make sure that your commits follow the [conventional commits](https://www.conventionalcommits.org/en/v1.0.0/) specification e.g. `feat(ui): add a new navigation menu item` or `fix(core): fix a bug in the core model` or `docs: update the README.md`. This will help us automatically generate the changelog.
- The title should briefly summarize the proposed changes.
- Provide a short overview of the change and the value it adds.
- Share a flow example to help the reviewer understand and QA the change.
- Use "closes" to automatically close an issue. For example, `closes #1234` will close issue #1234. -->
- Use "close" to automatically close an issue. For example, `close #1234` will close issue #1234. -->
### What changes are being made and why?
<!-- Please include a brief summary of the changes included in this PR e.g. closes #1234. -->
---

.github/run-app.png vendored (BIN)
Binary file not shown (before: 210 KiB).


@@ -1,67 +0,0 @@
name: Auto-Translate UI keys and create PR
on:
schedule:
- cron: "0 9-21/3 * * *" # Every 3 hours from 9 AM to 9 PM
workflow_dispatch:
inputs:
retranslate_modified_keys:
description: "Whether to re-translate modified keys even if they already have translations."
type: choice
options:
- "false"
- "true"
default: "false"
required: false
jobs:
translations:
name: Translations
runs-on: ubuntu-latest
timeout-minutes: 10
steps:
- uses: actions/checkout@v5
name: Checkout
with:
fetch-depth: 0
- name: Set up Python
uses: actions/setup-python@v6
with:
python-version: "3.x"
- name: Install Python dependencies
run: pip install gitpython openai
- name: Generate translations
run: python ui/src/translations/generate_translations.py ${{ github.event.inputs.retranslate_modified_keys }}
env:
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
- name: Set up Node
uses: actions/setup-node@v5
with:
node-version: "20.x"
- name: Set up Git
run: |
git config --global user.name "GitHub Action"
git config --global user.email "actions@github.com"
- name: Commit and create PR
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
BRANCH_NAME="chore/update-translations-$(date +%s)"
git checkout -b $BRANCH_NAME
git add ui/src/translations/*.json
if git diff --cached --quiet; then
echo "No changes to commit. Exiting with success."
exit 0
fi
git commit -m "chore(core): localize to languages other than english" -m "Extended localization support by adding translations for multiple languages using English as the base. This enhances accessibility and usability for non-English-speaking users while keeping English as the source reference."
git push -u origin $BRANCH_NAME || (git push origin --delete $BRANCH_NAME && git push -u origin $BRANCH_NAME)
gh pr create --title "Translations from en.json" --body $'This PR was created automatically by a GitHub Action.\n\nSomeone from the @kestra-io/frontend team needs to review and merge.' --base ${{ github.ref_name }} --head $BRANCH_NAME
- name: Check keys matching
run: node ui/src/translations/check.js


@@ -6,11 +6,11 @@
name: "CodeQL"
on:
push:
branches: [develop]
schedule:
- cron: '0 5 * * 1'
workflow_dispatch: {}
jobs:
analyze:
name: Analyze
@@ -27,7 +27,7 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@v5
uses: actions/checkout@v4
with:
# We must fetch at least the immediate parents so that if this is
# a pull request then we can checkout the head.
@@ -50,24 +50,14 @@ jobs:
# Set up JDK
- name: Set up JDK
uses: actions/setup-java@v5
if: ${{ matrix.language == 'java' }}
uses: actions/setup-java@v4
with:
distribution: 'temurin'
java-version: 21
- name: Setup gradle
if: ${{ matrix.language == 'java' }}
uses: gradle/actions/setup-gradle@v4
- name: Build with Gradle
if: ${{ matrix.language == 'java' }}
run: ./gradlew testClasses -x :ui:assembleFrontend
java-version: 17
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
if: ${{ matrix.language != 'java' }}
uses: github/codeql-action/autobuild@v3
# Command-line programs to run using the OS shell.


@@ -1,86 +0,0 @@
name: 'E2E tests revival'
description: 'New E2E tests implementation started by Roman. Based on Playwright in the npm UI project; it tests the Kestra OSS develop Docker image. These tests are written from scratch; the goal is to keep them unflaky from the start!'
on:
schedule:
- cron: "0 * * * *" # Every hour
workflow_call:
inputs:
noInputYet:
description: 'not input yet.'
required: false
type: string
default: "no input"
workflow_dispatch:
inputs:
noInputYet:
description: 'not input yet.'
required: false
type: string
default: "no input"
jobs:
check:
timeout-minutes: 15
runs-on: ubuntu-latest
env:
GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
steps:
- name: Login to DockerHub
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ github.token }}
- name: Checkout kestra
uses: actions/checkout@v5
with:
path: kestra
# Setup build
- uses: kestra-io/actions/composite/setup-build@main
name: Setup - Build
id: build
with:
java-enabled: true
node-enabled: true
python-enabled: true
- name: Install Npm dependencies
run: |
cd kestra/ui
npm i
npx playwright install --with-deps chromium
- name: Run E2E Tests
run: |
cd kestra
sh build-and-start-e2e-tests.sh
- name: Upload Playwright Report as Github artifact
# With this report, you can analyze the test results locally; see https://playwright.dev/docs/ci-intro#html-report
uses: actions/upload-artifact@v4
if: ${{ !cancelled() }}
with:
name: playwright-report
path: kestra/ui/playwright-report/
retention-days: 7
# Allure check
# TODO I don't know what it should do
# - uses: rlespinasse/github-slug-action@v5
# name: Allure - Generate slug variables
#
# - name: Allure - Publish report
# uses: andrcuns/allure-publish-action@v2.9.0
# if: always() && env.GOOGLE_SERVICE_ACCOUNT != ''
# continue-on-error: true
# env:
# GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_AUTH_TOKEN }}
# JAVA_HOME: /usr/lib/jvm/default-jvm/
# with:
# storageType: gcs
# resultsGlob: "**/build/allure-results"
# bucket: internal-kestra-host
# baseUrl: "https://internal.dev.kestra.io"
# prefix: ${{ format('{0}/{1}', github.repository, 'allure/java') }}
# copyLatest: true
# ignoreMissingResults: true


@@ -1,74 +0,0 @@
name: Run Gradle Release for Kestra Plugins
on:
workflow_dispatch:
inputs:
releaseVersion:
description: 'The release version (e.g., 0.21.0)'
required: true
type: string
nextVersion:
description: 'The next version (e.g., 0.22.0-SNAPSHOT)'
required: true
type: string
dryRun:
description: 'Use DRY_RUN mode'
required: false
default: 'false'
jobs:
release:
name: Release plugins
runs-on: ubuntu-latest
steps:
# Checkout
- uses: actions/checkout@v5
with:
fetch-depth: 0
# Setup build
- uses: kestra-io/actions/composite/setup-build@main
id: build
with:
java-enabled: true
node-enabled: true
python-enabled: true
# Get Plugins List
- name: Get Plugins List
uses: ./.github/actions/plugins-list
id: plugins-list
with:
plugin-version: 'LATEST'
- name: 'Configure Git'
run: |
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
git config --global user.name "github-actions[bot]"
# Execute
- name: Run Gradle Release
if: ${{ github.event.inputs.dryRun == 'false' }}
env:
GITHUB_PAT: ${{ secrets.GH_PERSONAL_TOKEN }}
run: |
chmod +x ./dev-tools/release-plugins.sh;
./dev-tools/release-plugins.sh \
--release-version=${{github.event.inputs.releaseVersion}} \
--next-version=${{github.event.inputs.nextVersion}} \
--yes \
${{ steps.plugins-list.outputs.repositories }}
- name: Run Gradle Release (DRY_RUN)
if: ${{ github.event.inputs.dryRun == 'true' }}
env:
GITHUB_PAT: ${{ secrets.GH_PERSONAL_TOKEN }}
run: |
chmod +x ./dev-tools/release-plugins.sh;
./dev-tools/release-plugins.sh \
--release-version=${{github.event.inputs.releaseVersion}} \
--next-version=${{github.event.inputs.nextVersion}} \
--dry-run \
--yes \
${{ steps.plugins-list.outputs.repositories }}


@@ -1,84 +0,0 @@
name: Run Gradle Release
run-name: "Releasing Kestra ${{ github.event.inputs.releaseVersion }} 🚀"
on:
workflow_dispatch:
inputs:
releaseVersion:
description: 'The release version (e.g., 0.21.0)'
required: true
type: string
nextVersion:
description: 'The next version (e.g., 0.22.0-SNAPSHOT)'
required: true
type: string
env:
RELEASE_VERSION: "${{ github.event.inputs.releaseVersion }}"
NEXT_VERSION: "${{ github.event.inputs.nextVersion }}"
jobs:
release:
name: Release Kestra
runs-on: ubuntu-latest
if: github.ref == 'refs/heads/develop'
steps:
# Checks
- name: Check Inputs
run: |
if ! [[ "$RELEASE_VERSION" =~ ^[0-9]+(\.[0-9]+)\.0$ ]]; then
echo "Invalid release version. Must match regex: ^[0-9]+(\.[0-9]+)\.0$"
exit 1
fi
if ! [[ "$NEXT_VERSION" =~ ^[0-9]+(\.[0-9]+)\.0-SNAPSHOT$ ]]; then
echo "Invalid next version. Must match regex: ^[0-9]+(\.[0-9]+)\.0-SNAPSHOT$"
exit 1;
fi
# Checkout
- uses: actions/checkout@v5
with:
fetch-depth: 0
path: kestra
# Setup build
- uses: kestra-io/actions/composite/setup-build@main
id: build
with:
java-enabled: true
node-enabled: true
python-enabled: true
caches-enabled: true
- name: Configure Git
run: |
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
git config --global user.name "github-actions[bot]"
# Execute
- name: Run Gradle Release
env:
GITHUB_PAT: ${{ secrets.GH_PERSONAL_TOKEN }}
run: |
# Extract the major and minor versions
BASE_VERSION=$(echo "$RELEASE_VERSION" | sed -E 's/^([0-9]+\.[0-9]+)\..*/\1/')
PUSH_RELEASE_BRANCH="releases/v${BASE_VERSION}.x"
cd kestra
# Create and push release branch
git checkout -b "$PUSH_RELEASE_BRANCH";
git push -u origin "$PUSH_RELEASE_BRANCH";
# Run gradle release
git checkout develop;
if [[ "$RELEASE_VERSION" == *"-SNAPSHOT" ]]; then
./gradlew release -Prelease.useAutomaticVersion=true \
-Prelease.releaseVersion="${RELEASE_VERSION}" \
-Prelease.newVersion="${NEXT_VERSION}" \
-Prelease.pushReleaseVersionBranch="${PUSH_RELEASE_BRANCH}" \
-Prelease.failOnSnapshotDependencies=false
else
./gradlew release -Prelease.useAutomaticVersion=true \
-Prelease.releaseVersion="${RELEASE_VERSION}" \
-Prelease.newVersion="${NEXT_VERSION}" \
-Prelease.pushReleaseVersionBranch="${PUSH_RELEASE_BRANCH}"
fi


@@ -1,76 +1,432 @@
name: Main Workflow
name: Main
on:
workflow_dispatch:
inputs:
skip-test:
description: 'Skip test'
type: choice
required: true
default: 'false'
options:
- "true"
- "false"
plugin-version:
description: "plugins version"
required: false
type: string
push:
branches:
- master
- main
- releases/*
- release
- develop
tags:
- v*
pull_request:
branches:
- master
- release
- develop
repository_dispatch:
types: [ rebuild ]
workflow_dispatch:
inputs:
skip-test:
description: 'Skip test'
required: false
type: string
default: "false"
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}-main
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
tests:
name: Execute tests
uses: ./.github/workflows/workflow-test.yml
if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}
with:
report-status: false
check:
env:
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
name: Check & Publish
runs-on: ubuntu-latest
timeout-minutes: 60
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
release:
name: Release
needs: [tests]
if: "!failure() && !cancelled() && !startsWith(github.ref, 'refs/heads/releases')"
uses: ./.github/workflows/workflow-release.yml
with:
plugin-version: ${{ inputs.plugin-version != '' && inputs.plugin-version || (github.ref == 'refs/heads/develop' && 'LATEST-SNAPSHOT' || 'LATEST') }}
secrets:
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }}
SONATYPE_USER: ${{ secrets.SONATYPE_USER }}
SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }}
SONATYPE_GPG_KEYID: ${{ secrets.SONATYPE_GPG_KEYID }}
SONATYPE_GPG_PASSWORD: ${{ secrets.SONATYPE_GPG_PASSWORD }}
SONATYPE_GPG_FILE: ${{ secrets.SONATYPE_GPG_FILE }}
GH_PERSONAL_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}
SLACK_RELEASES_WEBHOOK_URL: ${{ secrets.SLACK_RELEASES_WEBHOOK_URL }}
- uses: actions/setup-python@v5
with:
python-version: '3.x'
architecture: 'x64'
- uses: actions/setup-node@v4
with:
node-version: '18'
check-latest: true
# Services
- name: Build the docker-compose stack
run: docker compose -f docker-compose-ci.yml up -d
if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}
# Caches
- name: Gradle cache
uses: actions/cache@v4
with:
path: |
~/.gradle/caches
~/.gradle/wrapper
key: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle*', '**/gradle*.properties') }}
restore-keys: |
${{ runner.os }}-gradle-
- name: Npm cache
uses: actions/cache@v4
with:
path: ~/.npm
key: ${{ runner.os }}-npm-${{ hashFiles('**/package-lock.json') }}
restore-keys: |
${{ runner.os }}-npm-
- name: Node cache
uses: actions/cache@v4
with:
path: node
key: ${{ runner.os }}-node-${{ hashFiles('ui/*.gradle') }}
restore-keys: |
${{ runner.os }}-node-
- name: SonarCloud cache
uses: actions/cache@v4
with:
path: ~/.sonar/cache
key: ${{ runner.os }}-sonar
restore-keys: ${{ runner.os }}-sonar
# JDK
- name: Set up JDK
uses: actions/setup-java@v4
with:
distribution: 'temurin'
java-version: 17
- name: Validate Gradle wrapper
uses: gradle/wrapper-validation-action@v2
# Gradle check
- name: Build with Gradle
if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}
env:
GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
run: |
python3 -m pip install virtualenv
echo $GOOGLE_SERVICE_ACCOUNT | base64 -d > ~/.gcp-service-account.json
export GOOGLE_APPLICATION_CREDENTIALS=$HOME/.gcp-service-account.json
./gradlew check jacoco javadoc --no-daemon --priority=normal
# report test
- name: Test Report
uses: mikepenz/action-junit-report@v4
if: success() || failure()
with:
report_paths: '**/build/test-results/**/TEST-*.xml'
- name: Analyze with Sonar
if: ${{ env.SONAR_TOKEN != 0 }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
run: ./gradlew sonar --info
# Codecov
- uses: codecov/codecov-action@v4
if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}
with:
token: ${{ secrets.CODECOV_TOKEN }}
# Shadow Jar
- name: Build jars
run: ./gradlew executableJar --no-daemon --priority=normal
# Upload artifacts
- name: Upload jar
uses: actions/upload-artifact@v4
with:
name: jar
path: build/libs/
- name: Upload Executable
uses: actions/upload-artifact@v4
with:
name: exe
path: build/executable/
# GitHub Release
- name: Create GitHub release
uses: "marvinpinto/action-automatic-releases@latest"
if: startsWith(github.ref, 'refs/tags/v')
continue-on-error: true
with:
repo_token: "${{ secrets.GITHUB_TOKEN }}"
prerelease: false
files: |
build/executable/*
- name: Flow to add BC
if: startsWith(github.ref, 'refs/tags/v')
continue-on-error: true
run: |
curl --location "http://18.153.185.126:8080/api/v1/executions/webhook/product/release_notes/${{secrets.KESTRA_WEBHOOK_KEY}}" \
--header 'Content-Type: application/json'
docker:
name: Publish docker
runs-on: ubuntu-latest
needs: check
if: github.ref == 'refs/heads/master' || github.ref == 'refs/heads/develop' || github.ref == 'refs/heads/release' || startsWith(github.ref, 'refs/tags/v')
strategy:
matrix:
image:
- name: ""
plugins: ""
packages: ""
python-libs: ""
- name: "-full"
plugins: >-
io.kestra.plugin:plugin-airbyte:LATEST
io.kestra.plugin:plugin-amqp:LATEST
io.kestra.plugin:plugin-ansible:LATEST
io.kestra.plugin:plugin-aws:LATEST
io.kestra.plugin:plugin-azure:LATEST
io.kestra.plugin:plugin-cassandra:LATEST
io.kestra.plugin:plugin-cloudquery:LATEST
io.kestra.plugin:plugin-compress:LATEST
io.kestra.plugin:plugin-couchbase:LATEST
io.kestra.plugin:plugin-crypto:LATEST
io.kestra.plugin:plugin-databricks:LATEST
io.kestra.plugin:plugin-dataform:LATEST
io.kestra.plugin:plugin-dbt:LATEST
io.kestra.plugin:plugin-debezium-mysql:LATEST
io.kestra.plugin:plugin-debezium-postgres:LATEST
io.kestra.plugin:plugin-debezium-sqlserver:LATEST
io.kestra.plugin:plugin-docker:LATEST
io.kestra.plugin:plugin-elasticsearch:LATEST
io.kestra.plugin:plugin-fivetran:LATEST
io.kestra.plugin:plugin-fs:LATEST
io.kestra.plugin:plugin-gcp:LATEST
io.kestra.plugin:plugin-git:LATEST
io.kestra.plugin:plugin-googleworkspace:LATEST
io.kestra.plugin:plugin-hightouch:LATEST
io.kestra.plugin:plugin-jdbc-as400:LATEST
io.kestra.plugin:plugin-jdbc-clickhouse:LATEST
io.kestra.plugin:plugin-jdbc-db2:LATEST
io.kestra.plugin:plugin-jdbc-duckdb:LATEST
io.kestra.plugin:plugin-jdbc-druid:LATEST
io.kestra.plugin:plugin-jdbc-mysql:LATEST
io.kestra.plugin:plugin-jdbc-oracle:LATEST
io.kestra.plugin:plugin-jdbc-pinot:LATEST
io.kestra.plugin:plugin-jdbc-postgres:LATEST
io.kestra.plugin:plugin-jdbc-redshift:LATEST
io.kestra.plugin:plugin-jdbc-rockset:LATEST
io.kestra.plugin:plugin-jdbc-snowflake:LATEST
io.kestra.plugin:plugin-jdbc-sqlserver:LATEST
io.kestra.plugin:plugin-jdbc-trino:LATEST
io.kestra.plugin:plugin-jdbc-vectorwise:LATEST
io.kestra.plugin:plugin-jdbc-vertica:LATEST
io.kestra.plugin:plugin-jdbc-dremio:LATEST
io.kestra.plugin:plugin-jdbc-arrow-flight:LATEST
io.kestra.plugin:plugin-jdbc-sqlite:LATEST
io.kestra.plugin:plugin-kafka:LATEST
io.kestra.plugin:plugin-kubernetes:LATEST
io.kestra.plugin:plugin-malloy:LATEST
io.kestra.plugin:plugin-modal:LATEST
io.kestra.plugin:plugin-mongodb:LATEST
io.kestra.plugin:plugin-mqtt:LATEST
io.kestra.plugin:plugin-nats:LATEST
io.kestra.plugin:plugin-neo4j:LATEST
io.kestra.plugin:plugin-notifications:LATEST
io.kestra.plugin:plugin-openai:LATEST
io.kestra.plugin:plugin-powerbi:LATEST
io.kestra.plugin:plugin-pulsar:LATEST
io.kestra.plugin:plugin-redis:LATEST
io.kestra.plugin:plugin-script-groovy:LATEST
io.kestra.plugin:plugin-script-julia:LATEST
io.kestra.plugin:plugin-script-jython:LATEST
io.kestra.plugin:plugin-script-nashorn:LATEST
io.kestra.plugin:plugin-script-node:LATEST
io.kestra.plugin:plugin-script-powershell:LATEST
io.kestra.plugin:plugin-script-python:LATEST
io.kestra.plugin:plugin-script-r:LATEST
io.kestra.plugin:plugin-script-ruby:LATEST
io.kestra.plugin:plugin-script-shell:LATEST
io.kestra.plugin:plugin-serdes:LATEST
io.kestra.plugin:plugin-servicenow:LATEST
io.kestra.plugin:plugin-singer:LATEST
io.kestra.plugin:plugin-soda:LATEST
io.kestra.plugin:plugin-solace:LATEST
io.kestra.plugin:plugin-spark:LATEST
io.kestra.plugin:plugin-sqlmesh:LATEST
io.kestra.plugin:plugin-surrealdb:LATEST
io.kestra.plugin:plugin-terraform:LATEST
io.kestra.plugin:plugin-tika:LATEST
io.kestra.plugin:plugin-weaviate:LATEST
io.kestra.storage:storage-azure:LATEST
io.kestra.storage:storage-gcs:LATEST
io.kestra.storage:storage-minio:LATEST
io.kestra.storage:storage-s3:LATEST
packages: python3 python3-venv python-is-python3 python3-pip nodejs npm curl zip unzip
python-libs: kestra
steps:
- uses: actions/checkout@v4
# Artifact
- name: Download executable
uses: actions/download-artifact@v4
with:
name: exe
path: build/executable
- name: Copy exe to image
run: |
cp build/executable/* docker/app/kestra && chmod +x docker/app/kestra
# Vars
- name: Set image name
id: vars
run: |
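# master builds are tagged "latest"; tag builds (v*) keep their tag; any other ref
# keeps its name and also resolves plugins from the Sonatype snapshots repository.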
TAG=${GITHUB_REF#refs/*/}
if [[ $TAG = "master" ]]
then
echo "tag=latest" >> $GITHUB_OUTPUT
echo "plugins=${{ matrix.image.plugins }}" >> $GITHUB_OUTPUT
elif [[ $TAG == v* ]]
then
echo "tag=${TAG}" >> $GITHUB_OUTPUT
echo "plugins=${{ matrix.image.plugins }}" >> $GITHUB_OUTPUT
else
echo "tag=${TAG}" >> $GITHUB_OUTPUT
echo "plugins=--repositories=https://s01.oss.sonatype.org/content/repositories/snapshots ${{ matrix.image.plugins }}" >> $GITHUB_OUTPUT
fi
# Docker setup
- name: Set up QEMU
if: github.ref == 'refs/heads/master' || github.ref == 'refs/heads/develop' || github.ref == 'refs/heads/release' || startsWith(github.ref, 'refs/tags/v')
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
if: github.ref == 'refs/heads/master' || github.ref == 'refs/heads/develop' || github.ref == 'refs/heads/release' || startsWith(github.ref, 'refs/tags/v')
uses: docker/setup-buildx-action@v3
# Docker Login
- name: Login to DockerHub
if: github.ref == 'refs/heads/master' || github.ref == 'refs/heads/develop' || github.ref == 'refs/heads/release' || startsWith(github.ref, 'refs/tags/v')
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_PASSWORD }}
# Docker Build and push
- name: Push to Docker Hub
if: github.ref == 'refs/heads/master' || github.ref == 'refs/heads/develop' || github.ref == 'refs/heads/release' || startsWith(github.ref, 'refs/tags/v')
uses: docker/build-push-action@v5
with:
context: .
push: true
tags: ${{ format('kestra/kestra:{0}{1}', steps.vars.outputs.tag, matrix.image.name) }}
platforms: linux/amd64,linux/arm64
build-args: |
KESTRA_PLUGINS=${{ steps.vars.outputs.plugins }}
APT_PACKAGES=${{ matrix.image.packages }}
PYTHON_LIBRARIES=${{ matrix.image.python-libs }}
maven:
name: Publish to Maven
runs-on: ubuntu-latest
needs: check
if: github.ref == 'refs/heads/develop' || startsWith(github.ref, 'refs/tags/v')
steps:
- uses: actions/checkout@v4
# Caches
- name: Gradle cache
uses: actions/cache@v4
with:
path: |
~/.gradle/caches
~/.gradle/wrapper
key: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle*', '**/gradle*.properties') }}
restore-keys: |
${{ runner.os }}-gradle-
- name: Npm cache
uses: actions/cache@v4
with:
path: ~/.npm
key: ${{ runner.os }}-npm-${{ hashFiles('**/package-lock.json') }}
restore-keys: |
${{ runner.os }}-npm-
- name: Node cache
uses: actions/cache@v4
with:
path: node
key: ${{ runner.os }}-node-${{ hashFiles('ui/*.gradle') }}
restore-keys: |
${{ runner.os }}-node-
# JDK
- name: Set up JDK
uses: actions/setup-java@v4
with:
distribution: 'temurin'
java-version: 17
# Publish
- name: Publish package to Sonatype
if: github.ref == 'refs/heads/develop'
env:
ORG_GRADLE_PROJECT_sonatypeUsername: ${{ secrets.SONATYPE_USER }}
ORG_GRADLE_PROJECT_sonatypePassword: ${{ secrets.SONATYPE_PASSWORD }}
SONATYPE_GPG_KEYID: ${{ secrets.SONATYPE_GPG_KEYID }}
SONATYPE_GPG_PASSWORD: ${{ secrets.SONATYPE_GPG_PASSWORD }}
SONATYPE_GPG_FILE: ${{ secrets.SONATYPE_GPG_FILE }}
run: |
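# Write the signing properties Gradle expects and decode the base64-encoded GPG
# keyring before publishing the snapshot to Sonatype.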
mkdir -p ~/.gradle/
echo "signing.keyId=${SONATYPE_GPG_KEYID}" > ~/.gradle/gradle.properties
echo "signing.password=${SONATYPE_GPG_PASSWORD}" >> ~/.gradle/gradle.properties
echo "signing.secretKeyRingFile=${HOME}/.gradle/secring.gpg" >> ~/.gradle/gradle.properties
echo ${SONATYPE_GPG_FILE} | base64 -d > ~/.gradle/secring.gpg
./gradlew publishToSonatype --no-daemon --priority=normal
# Release
- name: Release package to Maven Central
if: startsWith(github.ref, 'refs/tags/v')
env:
ORG_GRADLE_PROJECT_sonatypeUsername: ${{ secrets.SONATYPE_USER }}
ORG_GRADLE_PROJECT_sonatypePassword: ${{ secrets.SONATYPE_PASSWORD }}
SONATYPE_GPG_KEYID: ${{ secrets.SONATYPE_GPG_KEYID }}
SONATYPE_GPG_PASSWORD: ${{ secrets.SONATYPE_GPG_PASSWORD }}
SONATYPE_GPG_FILE: ${{ secrets.SONATYPE_GPG_FILE }}
run: |
echo "signing.keyId=${SONATYPE_GPG_KEYID}" > ~/.gradle/gradle.properties
echo "signing.password=${SONATYPE_GPG_PASSWORD}" >> ~/.gradle/gradle.properties
echo "signing.secretKeyRingFile=${HOME}/.gradle/secring.gpg" >> ~/.gradle/gradle.properties
echo ${SONATYPE_GPG_FILE} | base64 -d > ~/.gradle/secring.gpg
./gradlew publishToSonatype closeAndReleaseSonatypeStagingRepository --no-daemon --priority=normal
end:
runs-on: ubuntu-latest
needs:
- release
- check
- maven
- docker
if: always()
env:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
steps:
- name: Trigger EE Workflow
uses: peter-evans/repository-dispatch@v3
if: github.ref == 'refs/heads/develop' && needs.release.result == 'success'
# Update
- name: Update internal
uses: benc-uk/workflow-dispatch@v1
if: github.ref == 'refs/heads/develop'
with:
workflow: oss-build.yml
repo: kestra-io/infra
ref: master
token: ${{ secrets.GH_PERSONAL_TOKEN }}
repository: kestra-io/kestra-ee
event-type: "oss-updated"
# Slack
- name: Slack - Notification
if: ${{ failure() && env.SLACK_WEBHOOK_URL != 0 && (github.ref == 'refs/heads/master' || github.ref == 'refs/heads/main' || github.ref == 'refs/heads/develop') }}
uses: kestra-io/actions/composite/slack-status@main
- name: Slack notification
uses: Gamesight/slack-workflow-status@master
if: ${{ always() && env.SLACK_WEBHOOK_URL != 0 }}
with:
webhook-url: ${{ secrets.SLACK_WEBHOOK_URL }}
repo_token: ${{ secrets.GITHUB_TOKEN }}
slack_webhook_url: ${{ secrets.SLACK_WEBHOOK_URL }}
name: GitHub Actions
icon_emoji: ':github-actions:'
channel: 'C02DQ1A7JLR' # _int_git channel


@@ -1,63 +0,0 @@
name: Pull Request Workflow
on:
pull_request:
branches:
- develop
- releases/*
concurrency:
group: ${{ github.workflow }}-${{ github.ref_name }}-pr
cancel-in-progress: true
jobs:
# ********************************************************************************************************************
# File changes detection
# ********************************************************************************************************************
file-changes:
if: ${{ github.event.pull_request.draft == false }}
name: File changes detection
runs-on: ubuntu-latest
timeout-minutes: 60
outputs:
ui: ${{ steps.changes.outputs.ui }}
translations: ${{ steps.changes.outputs.translations }}
backend: ${{ steps.changes.outputs.backend }}
steps:
- uses: dorny/paths-filter@v3
id: changes
with:
filters: |
ui:
- 'ui/**'
backend:
- '!{ui,.github}/**'
token: ${{ secrets.GITHUB_TOKEN }}
# ********************************************************************************************************************
# Tests
# ********************************************************************************************************************
frontend:
name: Frontend - Tests
needs: [file-changes]
if: "needs.file-changes.outputs.ui == 'true'"
uses: ./.github/workflows/workflow-frontend-test.yml
secrets:
GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
backend:
name: Backend - Tests
needs: file-changes
if: "needs.file-changes.outputs.backend == 'true'"
uses: ./.github/workflows/workflow-backend-test.yml
secrets:
GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
e2e-tests:
name: E2E - Tests
uses: ./.github/workflows/e2e.yml


@@ -1,60 +0,0 @@
name: Set Version and Tag Plugins
on:
workflow_dispatch:
inputs:
releaseVersion:
description: 'The release version (e.g., 0.21.0)'
required: true
type: string
dryRun:
description: 'Use DRY_RUN mode'
required: false
default: 'false'
jobs:
tag:
name: Release plugins
runs-on: ubuntu-latest
steps:
# Checkout
- uses: actions/checkout@v5
with:
fetch-depth: 0
# Get Plugins List
- name: Get Plugins List
uses: ./.github/actions/plugins-list
id: plugins-list
with:
plugin-version: 'LATEST'
- name: 'Configure Git'
run: |
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
git config --global user.name "github-actions[bot]"
# Execute
- name: Set Version and Tag Plugins
if: ${{ github.event.inputs.dryRun == 'false' }}
env:
GITHUB_PAT: ${{ secrets.GH_PERSONAL_TOKEN }}
run: |
chmod +x ./dev-tools/setversion-tag-plugins.sh;
./dev-tools/setversion-tag-plugins.sh \
--release-version=${{github.event.inputs.releaseVersion}} \
--yes \
${{ steps.plugins-list.outputs.repositories }}
- name: Set Version and Tag Plugins (DRY_RUN)
if: ${{ github.event.inputs.dryRun == 'true' }}
env:
GITHUB_PAT: ${{ secrets.GH_PERSONAL_TOKEN }}
run: |
chmod +x ./dev-tools/setversion-tag-plugins.sh;
./dev-tools/setversion-tag-plugins.sh \
--release-version=${{github.event.inputs.releaseVersion}} \
--dry-run \
--yes \
${{ steps.plugins-list.outputs.repositories }}


@@ -1,60 +0,0 @@
name: Set Version and Tag
run-name: "Set version and Tag Kestra to ${{ github.event.inputs.releaseVersion }} 🚀"
on:
workflow_dispatch:
inputs:
releaseVersion:
description: 'The release version (e.g., 0.21.1)'
required: true
type: string
env:
RELEASE_VERSION: "${{ github.event.inputs.releaseVersion }}"
jobs:
release:
name: Release Kestra
runs-on: ubuntu-latest
if: startsWith(github.ref, 'refs/heads/releases/v')
steps:
# Checks
- name: Check Inputs
run: |
if ! [[ "$RELEASE_VERSION" =~ ^[0-9]+(\.[0-9]+)(\.[0-9]+)(-rc[0-9])?(-SNAPSHOT)?$ ]]; then
echo "Invalid release version. Must match regex: ^[0-9]+(\.[0-9]+)(\.[0-9]+)-(rc[0-9])?(-SNAPSHOT)?$"
exit 1
fi
# Extract the major and minor versions
BASE_VERSION=$(echo "$RELEASE_VERSION" | sed -E 's/^([0-9]+\.[0-9]+)\..*/\1/')
RELEASE_BRANCH="refs/heads/releases/v${BASE_VERSION}.x"
CURRENT_BRANCH="$GITHUB_REF"
if ! [[ "$CURRENT_BRANCH" == "$RELEASE_BRANCH" ]]; then
echo "Invalid release branch. Expected $RELEASE_BRANCH, was $CURRENT_BRANCH"
exit 1
fi
# Checkout
- name: Checkout
uses: actions/checkout@v5
with:
fetch-depth: 0
token: ${{ secrets.GH_PERSONAL_TOKEN }}
# Configure
- name: Git - Configure
run: |
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
git config --global user.name "github-actions[bot]"
# Execute
- name: Run Gradle Release
env:
GITHUB_PAT: ${{ secrets.GH_PERSONAL_TOKEN }}
run: |
# Update version
sed -i "s/^version=.*/version=$RELEASE_VERSION/" ./gradle.properties
git add ./gradle.properties
git commit -m"chore(version): update to version '$RELEASE_VERSION'"
git push
git tag -a "v$RELEASE_VERSION" -m"v$RELEASE_VERSION"
git push --tags


@@ -1,125 +0,0 @@
name: Vulnerabilities Checks
on:
schedule:
- cron: "0 0 * * *" # Every day
workflow_dispatch: {}
env:
JAVA_VERSION: '21'
permissions:
contents: read
jobs:
dependency-check:
name: Dependency Check
runs-on: ubuntu-latest
steps:
# Checkout
- uses: actions/checkout@v5
with:
fetch-depth: 0
# Setup build
- uses: ./actions/.github/actions/setup-build
id: build
with:
java-enabled: true
node-enabled: true
caches-enabled: true
# Npm
- name: Npm - Install
shell: bash
working-directory: ui
run: npm ci
# Run OWASP dependency check plugin
- name: Gradle Dependency Check
env:
NVD_API_KEY: ${{ secrets.NIST_APIKEY }}
run: |
./gradlew dependencyCheckAggregate
# Upload dependency check report
- name: Upload dependency check report
uses: actions/upload-artifact@v4
if: ${{ always() }}
with:
name: dependency-check-report
path: build/reports/dependency-check-report.html
develop-image-check:
name: Image Check (develop)
runs-on: ubuntu-latest
permissions:
contents: read
security-events: write
actions: read
steps:
# Checkout
- uses: actions/checkout@v5
with:
fetch-depth: 0
# Setup build
- uses: kestra-io/actions/composite/setup-build@main
id: build
with:
java-enabled: false
node-enabled: false
caches-enabled: true
# Run Trivy image scan for Docker vulnerabilities, see https://github.com/aquasecurity/trivy-action
- name: Docker Vulnerabilities Check
uses: aquasecurity/trivy-action@0.33.1
with:
image-ref: kestra/kestra:develop
format: 'template'
template: '@/contrib/sarif.tpl'
severity: 'CRITICAL,HIGH'
output: 'trivy-results.sarif'
skip-dirs: /app/plugins
- name: Upload Trivy scan results to GitHub Security tab
uses: github/codeql-action/upload-sarif@v3
with:
sarif_file: 'trivy-results.sarif'
category: docker-
latest-image-check:
name: Image Check (latest)
runs-on: ubuntu-latest
permissions:
contents: read
security-events: write
actions: read
steps:
# Checkout
- uses: actions/checkout@v5
with:
fetch-depth: 0
# Setup build
- uses: kestra-io/actions/composite/setup-build@main
id: build
with:
java-enabled: false
node-enabled: false
# Run Trivy image scan for Docker vulnerabilities, see https://github.com/aquasecurity/trivy-action
- name: Docker Vulnerabilities Check
uses: aquasecurity/trivy-action@0.33.1
with:
image-ref: kestra/kestra:latest
format: table
skip-dirs: /app/plugins
scanners: vuln
severity: 'CRITICAL,HIGH'
output: 'trivy-results.sarif'
- name: Upload Trivy scan results to GitHub Security tab
uses: github/codeql-action/upload-sarif@v3
with:
sarif_file: 'trivy-results.sarif'

View File

@@ -1,74 +0,0 @@
name: Backend - Tests
on:
workflow_call:
secrets:
GITHUB_AUTH_TOKEN:
description: "The GitHub Token."
required: true
CODECOV_TOKEN:
description: 'Codecov Token'
required: true
SONAR_TOKEN:
description: 'Sonar Token'
required: true
GOOGLE_SERVICE_ACCOUNT:
description: 'Google Service Account'
required: true
permissions:
contents: write
checks: write
actions: read
pull-requests: write
jobs:
test:
name: Backend - Tests
runs-on: ubuntu-latest
env:
GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
steps:
- uses: actions/checkout@v5
name: Checkout - Current ref
with:
fetch-depth: 0
# Setup build
- uses: kestra-io/actions/composite/setup-build@main
name: Setup - Build
id: build
with:
java-enabled: true
node-enabled: true
python-enabled: true
# Services
- name: Setup - Start docker compose
shell: bash
run: docker compose -f docker-compose-ci.yml up -d
# Gradle check
- name: Gradle - Build
if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}
env:
GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
shell: bash
run: |
echo $GOOGLE_SERVICE_ACCOUNT | base64 -d > ~/.gcp-service-account.json
export GOOGLE_APPLICATION_CREDENTIALS=$HOME/.gcp-service-account.json
./gradlew check javadoc --parallel
- name: comment PR with test report
if: ${{ !cancelled() && github.event_name == 'pull_request' }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_AUTH_TOKEN }}
run: npx --yes @kestra-io/kestra-devtools generateTestReportSummary --only-errors --ci $(pwd)
# Report Java
- name: Report - Java
uses: kestra-io/actions/composite/report-java@main
if: ${{ !cancelled() }}
with:
secrets: ${{ toJSON(secrets) }}

View File

@@ -1,80 +0,0 @@
name: Build Artifacts
on:
workflow_call: {}
jobs:
build:
name: Build - Artifacts
runs-on: ubuntu-latest
outputs:
docker-tag: ${{ steps.vars.outputs.tag }}
docker-artifact-name: ${{ steps.vars.outputs.artifact }}
plugins: ${{ steps.plugins.outputs.plugins }}
env:
PLUGIN_VERSION: ${{ github.event.inputs.plugin-version != null && github.event.inputs.plugin-version || 'LATEST' }}
steps:
- name: Checkout - Current ref
uses: actions/checkout@v5
with:
fetch-depth: 0
# Npm
- name: Setup - Npm install
shell: bash
working-directory: ui
run: npm ci
# Setup build
- uses: kestra-io/actions/composite/setup-build@main
name: Setup - Build
id: build
with:
java-enabled: true
node-enabled: true
# Get Plugins List
- name: Plugins - Get List
uses: ./.github/actions/plugins-list
if: "!startsWith(github.ref, 'refs/tags/v')"
id: plugins-list
with:
plugin-version: ${{ env.PLUGIN_VERSION }}
# Set Plugins List
- name: Plugins - Set List
id: plugins
if: "!startsWith(github.ref, 'refs/tags/v')"
shell: bash
run: |
PLUGINS="${{ steps.plugins-list.outputs.plugins }}"
TAG=${GITHUB_REF#refs/*/}
if [[ $TAG = "master" || $TAG == v* ]]; then
echo "plugins=$PLUGINS" >> $GITHUB_OUTPUT
else
echo "plugins=--repositories=https://central.sonatype.com/repository/maven-snapshots/ $PLUGINS" >> $GITHUB_OUTPUT
fi
# Build
- name: Gradle - Build
shell: bash
run: |
./gradlew executableJar
- name: Artifacts - Copy exe to image
shell: bash
run: |
cp build/executable/* docker/app/kestra && chmod +x docker/app/kestra
# Upload artifacts
- name: Artifacts - Upload JAR
uses: actions/upload-artifact@v4
with:
name: jar
path: build/libs/
- name: Artifacts - Upload Executable
uses: actions/upload-artifact@v4
with:
name: exe
path: build/executable/

View File

@@ -1,70 +0,0 @@
name: Frontend - Tests
on:
workflow_call:
secrets:
GITHUB_AUTH_TOKEN:
description: "The GitHub Token."
required: true
CODECOV_TOKEN:
description: 'Codecov Token'
required: true
env:
# to save corepack from itself
COREPACK_INTEGRITY_KEYS: 0
jobs:
test:
name: Frontend - Tests
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v5
- name: Cache Node Modules
id: cache-node-modules
uses: actions/cache@v4
with:
path: |
ui/node_modules
key: modules-${{ hashFiles('ui/package-lock.json') }}
- name: Cache Playwright Binaries
id: cache-playwright
uses: actions/cache@v4
with:
path: |
~/.cache/ms-playwright
key: playwright-${{ hashFiles('ui/package-lock.json') }}
- name: Npm - install
if: steps.cache-node-modules.outputs.cache-hit != 'true'
working-directory: ui
run: npm ci
- name: Npm - lint
uses: reviewdog/action-eslint@v1
with:
github_token: ${{ secrets.GITHUB_AUTH_TOKEN }}
reporter: github-pr-review
workdir: ui
- name: Npm - Run build
working-directory: ui
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
run: npm run build
- name: Run front-end unit tests
working-directory: ui
run: npm run test:unit -- --coverage
- name: Storybook - Install Playwright
working-directory: ui
if: steps.cache-playwright.outputs.cache-hit != 'true'
run: npx playwright install --with-deps
- name: Run storybook component tests
working-directory: ui
run: npm run test:storybook -- --coverage

View File

@@ -1,79 +0,0 @@
name: Github - Release
on:
workflow_dispatch:
workflow_call:
secrets:
GH_PERSONAL_TOKEN:
description: "The Github personal token."
required: true
SLACK_RELEASES_WEBHOOK_URL:
description: "The Slack webhook URL."
required: true
jobs:
publish:
name: Github - Release
runs-on: ubuntu-latest
steps:
# Check out
- name: Checkout - Repository
uses: actions/checkout@v5
with:
fetch-depth: 0
submodules: true
# Download Exec
# Must be done after checkout actions
- name: Artifacts - Download executable
uses: actions/download-artifact@v5
if: startsWith(github.ref, 'refs/tags/v')
with:
name: exe
path: build/executable
- name: Check if current tag is latest
id: is_latest
run: |
latest_tag=$(git tag | grep -E '^v[0-9]+\.[0-9]+\.[0-9]+$' | sed 's/^v//' | sort -V | tail -n1)
current_tag="${GITHUB_REF_NAME#v}"
if [ "$current_tag" = "$latest_tag" ]; then
echo "latest=true" >> $GITHUB_OUTPUT
else
echo "latest=false" >> $GITHUB_OUTPUT
fi
env:
GITHUB_REF_NAME: ${{ github.ref_name }}
# GitHub Release
- name: Create GitHub release
uses: kestra-io/actions/composite/github-release@main
if: ${{ startsWith(github.ref, 'refs/tags/v') }}
env:
MAKE_LATEST: ${{ steps.is_latest.outputs.latest }}
GITHUB_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}
SLACK_RELEASES_WEBHOOK_URL: ${{ secrets.SLACK_RELEASES_WEBHOOK_URL }}
# Trigger gha workflow to bump helm chart version
- name: GitHub - Trigger the Helm chart version bump
uses: peter-evans/repository-dispatch@v3
with:
token: ${{ secrets.GH_PERSONAL_TOKEN }}
repository: kestra-io/helm-charts
event-type: update-helm-chart-version
client-payload: |-
{
"new_version": "${{ github.ref_name }}",
"github_repository": "${{ github.repository }}",
"github_actor": "${{ github.actor }}"
}
- name: Merge Release Notes
if: ${{ startsWith(github.ref, 'refs/tags/v') }}
uses: kestra-io/actions/composite/github-release-note-merge@main
env:
GITHUB_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}
RELEASE_TAG: ${{ github.ref_name }}

View File

@@ -1,208 +0,0 @@
name: Create Docker images on Release
on:
workflow_dispatch:
inputs:
retag-latest:
description: 'Retag latest Docker images'
required: true
type: choice
default: "false"
options:
- "true"
- "false"
retag-lts:
description: 'Retag LTS Docker images'
required: true
type: choice
default: "false"
options:
- "true"
- "false"
release-tag:
description: 'Kestra Release Tag (by default, deduced from the ref)'
required: false
type: string
plugin-version:
description: 'Plugin version'
required: false
type: string
default: "LATEST"
force-download-artifact:
description: 'Force download artifact'
required: false
type: choice
default: "true"
options:
- "true"
- "false"
workflow_call:
inputs:
plugin-version:
description: "Plugin version"
default: 'LATEST'
required: false
type: string
force-download-artifact:
description: 'Force download artifact'
required: false
type: string
default: "true"
secrets:
DOCKERHUB_USERNAME:
description: "The Dockerhub username."
required: true
DOCKERHUB_PASSWORD:
description: "The Dockerhub password."
required: true
env:
PLUGIN_VERSION: ${{ inputs.plugin-version != null && inputs.plugin-version || 'LATEST' }}
jobs:
plugins:
name: List Plugins
runs-on: ubuntu-latest
outputs:
plugins: ${{ steps.plugins.outputs.plugins }}
steps:
# Checkout
- uses: actions/checkout@v5
# Get Plugins List
- name: Get Plugins List
uses: ./.github/actions/plugins-list
id: plugins
with: # remap LATEST-SNAPSHOT to LATEST
plugin-version: ${{ env.PLUGIN_VERSION == 'LATEST-SNAPSHOT' && 'LATEST' || env.PLUGIN_VERSION }}
# ********************************************************************************************************************
# Build
# ********************************************************************************************************************
build-artifacts:
name: Build Artifacts
if: ${{ inputs.force-download-artifact == 'true' }}
uses: ./.github/workflows/workflow-build-artifacts.yml
docker:
name: Publish Docker
needs: [ plugins, build-artifacts ]
if: always()
runs-on: ubuntu-latest
strategy:
matrix:
image:
- name: "-no-plugins"
plugins: ""
packages: jattach
python-libs: ""
- name: ""
plugins: ${{needs.plugins.outputs.plugins}}
packages: python3 python-is-python3 python3-pip curl jattach
python-libs: kestra
steps:
- uses: actions/checkout@v5
# Vars
- name: Set image name
id: vars
run: |
if [[ "${{ inputs.release-tag }}" == "" ]]; then
TAG=${GITHUB_REF#refs/*/}
echo "tag=${TAG}" >> $GITHUB_OUTPUT
else
TAG="${{ inputs.release-tag }}"
echo "tag=${TAG}" >> $GITHUB_OUTPUT
fi
if [[ $GITHUB_REF == refs/tags/* ]]; then
if [[ $TAG =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
# this will remove the patch version number
MINOR_SEMVER=${TAG%.*}
echo "minor_semver=${MINOR_SEMVER}" >> $GITHUB_OUTPUT
else
echo "Tag '$TAG' is not a valid semver (vMAJOR.MINOR.PATCH), skipping minor_semver"
fi
fi
if [[ "${{ env.PLUGIN_VERSION }}" == *"-SNAPSHOT" ]]; then
echo "plugins=--repositories=https://central.sonatype.com/repository/maven-snapshots/ ${{ matrix.image.plugins }}" >> $GITHUB_OUTPUT;
else
echo "plugins=${{ matrix.image.plugins }}" >> $GITHUB_OUTPUT
fi
# Download executable from artifact
- name: Artifacts - Download executable
uses: actions/download-artifact@v5
with:
name: exe
path: build/executable
- name: Copy exe to image
run: |
cp build/executable/* docker/app/kestra && chmod +x docker/app/kestra
# Docker setup
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Docker - Fix Qemu
shell: bash
run: |
docker run --rm --privileged multiarch/qemu-user-static --reset -p yes -c yes
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
# Docker Login
- name: Login to DockerHub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_PASSWORD }}
# Docker Build and push
- name: Push to Docker Hub
uses: docker/build-push-action@v6
with:
context: .
push: true
tags: ${{ format('kestra/kestra:{0}{1}', steps.vars.outputs.tag, matrix.image.name) }}
platforms: linux/amd64,linux/arm64
build-args: |
KESTRA_PLUGINS=${{ steps.vars.outputs.plugins }}
APT_PACKAGES=${{ matrix.image.packages }}
PYTHON_LIBRARIES=${{ matrix.image.python-libs }}
- name: Install regctl
if: startsWith(github.ref, 'refs/tags/v')
uses: regclient/actions/regctl-installer@main
- name: Retag to minor semver version
if: startsWith(github.ref, 'refs/tags/v') && steps.vars.outputs.minor_semver != ''
run: |
regctl image copy ${{ format('kestra/kestra:{0}{1}', steps.vars.outputs.tag, matrix.image.name) }} ${{ format('kestra/kestra:{0}{1}', steps.vars.outputs.minor_semver, matrix.image.name) }}
- name: Retag to latest
if: startsWith(github.ref, 'refs/tags/v') && inputs.retag-latest == 'true'
run: |
regctl image copy ${{ format('kestra/kestra:{0}{1}', steps.vars.outputs.tag, matrix.image.name) }} ${{ format('kestra/kestra:latest{0}', matrix.image.name) }}
- name: Retag to LTS
if: startsWith(github.ref, 'refs/tags/v') && inputs.retag-lts == 'true'
run: |
regctl image copy ${{ format('kestra/kestra:{0}{1}', steps.vars.outputs.tag, matrix.image.name) }} ${{ format('kestra/kestra:latest-lts{0}', matrix.image.name) }}
end:
runs-on: ubuntu-latest
needs:
- docker
if: always()
env:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
steps:
- name: Slack notification
if: ${{ failure() && env.SLACK_WEBHOOK_URL != 0 }}
uses: kestra-io/actions/composite/slack-status@main
with:
webhook-url: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@@ -1,57 +0,0 @@
name: Publish - Maven
on:
workflow_call:
secrets:
SONATYPE_USER:
description: "The Sonatype username."
required: true
SONATYPE_PASSWORD:
description: "The Sonatype password."
required: true
SONATYPE_GPG_KEYID:
description: "The Sonatype GPG key id."
required: true
SONATYPE_GPG_PASSWORD:
description: "The Sonatype GPG password."
required: true
SONATYPE_GPG_FILE:
description: "The Sonatype GPG file."
required: true
jobs:
publish:
name: Publish - Maven
runs-on: ubuntu-latest
steps:
- name: Checkout - Current ref
uses: actions/checkout@v5
# Setup build
- name: Setup - Build
uses: kestra-io/actions/composite/setup-build@main
id: build
with:
java-enabled: true
node-enabled: true
# Publish
- name: Publish - Release package to Maven Central
shell: bash
env:
ORG_GRADLE_PROJECT_mavenCentralUsername: ${{ secrets.SONATYPE_USER }}
ORG_GRADLE_PROJECT_mavenCentralPassword: ${{ secrets.SONATYPE_PASSWORD }}
SONATYPE_GPG_KEYID: ${{ secrets.SONATYPE_GPG_KEYID }}
SONATYPE_GPG_PASSWORD: ${{ secrets.SONATYPE_GPG_PASSWORD }}
SONATYPE_GPG_FILE: ${{ secrets.SONATYPE_GPG_FILE}}
run: |
mkdir -p ~/.gradle/
echo "signing.keyId=${SONATYPE_GPG_KEYID}" > ~/.gradle/gradle.properties
echo "signing.password=${SONATYPE_GPG_PASSWORD}" >> ~/.gradle/gradle.properties
echo "signing.secretKeyRingFile=${HOME}/.gradle/secring.gpg" >> ~/.gradle/gradle.properties
echo ${SONATYPE_GPG_FILE} | base64 -d > ~/.gradle/secring.gpg
./gradlew publishToMavenCentral
# Gradle dependency
- name: Java - Gradle dependency graph
uses: gradle/actions/dependency-submission@v4

View File

@@ -1,16 +0,0 @@
name: Pull Request - Delete Docker
on:
pull_request:
types: [closed]
jobs:
publish:
name: Pull Request - Delete Docker
if: github.repository == 'kestra-io/kestra' # prevent running on forks
runs-on: ubuntu-latest
steps:
- uses: dataaxiom/ghcr-cleanup-action@v1
with:
package: kestra-pr
delete-tags: ${{ github.event.pull_request.number }}

View File

@@ -1,78 +0,0 @@
name: Pull Request - Publish Docker
on:
pull_request:
branches:
- develop
jobs:
build-artifacts:
name: Build Artifacts
if: github.repository == 'kestra-io/kestra' # prevent running on forks
uses: ./.github/workflows/workflow-build-artifacts.yml
publish:
name: Publish Docker
if: github.repository == 'kestra-io/kestra' # prevent running on forks
runs-on: ubuntu-latest
needs: build-artifacts
env:
GITHUB_IMAGE_PATH: "ghcr.io/kestra-io/kestra-pr"
steps:
- name: Checkout - Current ref
uses: actions/checkout@v5
with:
fetch-depth: 0
# Docker setup
- name: Docker - Setup QEMU
uses: docker/setup-qemu-action@v3
- name: Docker - Setup Docker Buildx
uses: docker/setup-buildx-action@v3
# Docker Login
- name: Login to GHCR
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
# Build Docker Image
- name: Artifacts - Download executable
uses: actions/download-artifact@v5
with:
name: exe
path: build/executable
- name: Docker - Copy exe to image
shell: bash
run: |
cp build/executable/* docker/app/kestra && chmod +x docker/app/kestra
- name: Docker - Build image
uses: docker/build-push-action@v6
with:
context: .
file: ./Dockerfile.pr
push: true
tags: ${{ env.GITHUB_IMAGE_PATH }}:${{ github.event.pull_request.number }}
platforms: linux/amd64,linux/arm64
# Add comment on pull request
- name: Add comment to PR
uses: actions/github-script@v8
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
await github.rest.issues.createComment({
issue_number: context.issue.number,
owner: context.repo.owner,
repo: context.repo.repo,
body: `**🐋 Docker image**: \`${{ env.GITHUB_IMAGE_PATH }}:${{ github.event.pull_request.number }}\`\n` +
`\n` +
`\`\`\`bash\n` +
`docker run --pull=always --rm -it -p 8080:8080 --user=root -v /var/run/docker.sock:/var/run/docker.sock -v /tmp:/tmp ${{ env.GITHUB_IMAGE_PATH }}:${{ github.event.pull_request.number }} server local\n` +
`\`\`\``
})

View File

@@ -1,85 +0,0 @@
name: Release
on:
workflow_dispatch:
inputs:
plugin-version:
description: "plugins version"
default: 'LATEST'
required: false
type: string
publish-docker:
description: "Publish Docker image"
default: 'false'
required: false
type: string
workflow_call:
inputs:
plugin-version:
description: "plugins version"
default: 'LATEST'
required: false
type: string
secrets:
DOCKERHUB_USERNAME:
description: "The Dockerhub username."
required: true
DOCKERHUB_PASSWORD:
description: "The Dockerhub password."
required: true
SONATYPE_USER:
description: "The Sonatype username."
required: true
SONATYPE_PASSWORD:
description: "The Sonatype password."
required: true
SONATYPE_GPG_KEYID:
description: "The Sonatype GPG key id."
required: true
SONATYPE_GPG_PASSWORD:
description: "The Sonatype GPG password."
required: true
SONATYPE_GPG_FILE:
description: "The Sonatype GPG file."
required: true
GH_PERSONAL_TOKEN:
description: "GH personnal Token."
required: true
SLACK_RELEASES_WEBHOOK_URL:
description: "Slack webhook for releases channel."
required: true
jobs:
build-artifacts:
name: Build - Artifacts
uses: ./.github/workflows/workflow-build-artifacts.yml
Docker:
name: Publish Docker
needs: build-artifacts
uses: ./.github/workflows/workflow-publish-docker.yml
if: github.ref == 'refs/heads/develop' || inputs.publish-docker == 'true'
with:
force-download-artifact: 'false'
plugin-version: ${{ inputs.plugin-version != null && inputs.plugin-version || 'LATEST' }}
secrets:
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }}
Maven:
name: Publish Maven
uses: ./.github/workflows/workflow-publish-maven.yml
secrets:
SONATYPE_USER: ${{ secrets.SONATYPE_USER }}
SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }}
SONATYPE_GPG_KEYID: ${{ secrets.SONATYPE_GPG_KEYID }}
SONATYPE_GPG_PASSWORD: ${{ secrets.SONATYPE_GPG_PASSWORD }}
SONATYPE_GPG_FILE: ${{ secrets.SONATYPE_GPG_FILE }}
Github:
name: Github Release
needs: build-artifacts
if: startsWith(github.ref, 'refs/tags/v')
uses: ./.github/workflows/workflow-github-release.yml
secrets:
GH_PERSONAL_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}
SLACK_RELEASES_WEBHOOK_URL: ${{ secrets.SLACK_RELEASES_WEBHOOK_URL }}

View File

@@ -1,95 +0,0 @@
name: Tests
on:
schedule:
- cron: '0 4 * * 1,2,3,4,5'
workflow_call:
inputs:
report-status:
description: "Report status of the jobs in outputs"
type: string
required: false
default: false
outputs:
frontend_status:
description: "Status of the frontend job"
value: ${{ jobs.set-frontend-status.outputs.frontend_status }}
backend_status:
description: "Status of the backend job"
value: ${{ jobs.set-backend-status.outputs.backend_status }}
jobs:
file-changes:
name: File changes detection
runs-on: ubuntu-latest
timeout-minutes: 60
outputs:
ui: ${{ steps.changes.outputs.ui }}
backend: ${{ steps.changes.outputs.backend }}
steps:
- uses: actions/checkout@v5
if: "!startsWith(github.ref, 'refs/tags/v')"
- uses: dorny/paths-filter@v3
if: "!startsWith(github.ref, 'refs/tags/v')"
id: changes
with:
filters: |
ui:
- 'ui/**'
backend:
- '!{ui,.github}/**'
token: ${{ secrets.GITHUB_TOKEN }}
frontend:
name: Frontend - Tests
needs: file-changes
if: "needs.file-changes.outputs.ui == 'true' || startsWith(github.ref, 'refs/tags/v')"
uses: ./.github/workflows/workflow-frontend-test.yml
secrets:
GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
backend:
name: Backend - Tests
needs: file-changes
if: "needs.file-changes.outputs.backend == 'true' || startsWith(github.ref, 'refs/tags/v')"
uses: ./.github/workflows/workflow-backend-test.yml
secrets:
GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
# Output every job status
# To be used in other workflows
report-status:
name: Report Status
runs-on: ubuntu-latest
needs: [ frontend, backend ]
if: always() && (inputs.report-status == 'true')
outputs:
frontend_status: ${{ steps.set-frontend-status.outputs.frontend_status }}
backend_status: ${{ steps.set-backend-status.outputs.backend_status }}
steps:
- id: set-frontend-status
name: Set frontend job status
run: echo "::set-output name=frontend_status::${{ needs.frontend.result }}"
- id: set-backend-status
name: Set backend job status
run: echo "::set-output name=backend_status::${{ needs.backend.result }}"
notify:
name: Notify - Slack
runs-on: ubuntu-latest
needs: [ frontend, backend ]
steps:
- name: Notify failed CI
if: |
always() &&
(needs.frontend.result != 'success' || needs.backend.result != 'success') &&
(github.ref == 'refs/heads/master' || github.ref == 'refs/heads/main' || github.ref == 'refs/heads/develop')
uses: kestra-io/actions/composite/slack-status@main
with:
webhook-url: ${{ secrets.SLACK_WEBHOOK_URL }}

14
.gitignore vendored
View File

@@ -7,7 +7,6 @@ target/
out/
.idea
.vscode
prettierrc.js
*.iml
*.ipr
*.iws
@@ -29,6 +28,8 @@ node_modules
yarn-error.log
yarn.lock
ui/node_modules
ui/public/vscode
ui/public/vscode-web
ui/.env.local
ui/.env.*.local
webserver/src/main/resources/ui
@@ -36,8 +37,6 @@ yarn.lock
ui/coverage
ui/stats.html
ui/.frontend-gradle-plugin
ui/utils/CHANGELOG.md
ui/test-report.junit.xml
### Docker
/.env
@@ -52,11 +51,4 @@ core/src/main/resources/gradle.properties
.plugins.override
# H2 Database
/data
# Allure Reports
**/allure-results/*
*storybook.log
storybook-static
/jmh-benchmarks/src/main/resources/gradle.properties
data

205
.plugins
View File

@@ -1,129 +1,84 @@
#
# List of plugins to install locally with: $ make install-plugins
# Format: <RepositoryName>:<GroupId>:<ArtifactId>:<Version>
#
# Uncomment the lines corresponding to the plugins to be installed:
#plugin-ai:io.kestra.plugin:plugin-ai:LATEST
#plugin-airbyte:io.kestra.plugin:plugin-airbyte:LATEST
#plugin-airflow:io.kestra.plugin:plugin-airflow:LATEST
#plugin-amqp:io.kestra.plugin:plugin-amqp:LATEST
#plugin-ansible:io.kestra.plugin:plugin-ansible:LATEST
#plugin-anthropic:io.kestra.plugin:plugin-anthropic:LATEST
#plugin-aws:io.kestra.plugin:plugin-aws:LATEST
#plugin-azure:io.kestra.plugin:plugin-azure:LATEST
#plugin-cassandra:io.kestra.plugin:plugin-cassandra:LATEST
#plugin-cloudquery:io.kestra.plugin:plugin-cloudquery:LATEST
#plugin-compress:io.kestra.plugin:plugin-compress:LATEST
#plugin-couchbase:io.kestra.plugin:plugin-couchbase:LATEST
#plugin-crypto:io.kestra.plugin:plugin-crypto:LATEST
#plugin-databricks:io.kestra.plugin:plugin-databricks:LATEST
#plugin-datahub:io.kestra.plugin:plugin-datahub:LATEST
#plugin-dataform:io.kestra.plugin:plugin-dataform:LATEST
#plugin-datagen:io.kestra.plugin:plugin-datagen:LATEST
#plugin-dbt:io.kestra.plugin:plugin-dbt:LATEST
#plugin-debezium:io.kestra.plugin:plugin-debezium-db2:LATEST
#plugin-debezium:io.kestra.plugin:plugin-debezium-mongodb:LATEST
#plugin-debezium:io.kestra.plugin:plugin-debezium-mysql:LATEST
#plugin-debezium:io.kestra.plugin:plugin-debezium-oracle:LATEST
#plugin-debezium:io.kestra.plugin:plugin-debezium-postgres:LATEST
#plugin-debezium:io.kestra.plugin:plugin-debezium-sqlserver:LATEST
#plugin-deepseek:io.kestra.plugin:plugin-deepseek:LATEST
#plugin-docker:io.kestra.plugin:plugin-docker:LATEST
#plugin-elasticsearch:io.kestra.plugin:plugin-elasticsearch:LATEST
#plugin-fivetran:io.kestra.plugin:plugin-fivetran:LATEST
#plugin-fs:io.kestra.plugin:plugin-fs:LATEST
#plugin-gcp:io.kestra.plugin:plugin-gcp:LATEST
#plugin-gemini:io.kestra.plugin:plugin-gemini:LATEST
#plugin-git:io.kestra.plugin:plugin-git:LATEST
#plugin-github:io.kestra.plugin:plugin-github:LATEST
#plugin-gitlab:io.kestra.plugin:plugin-gitlab:LATEST
#plugin-googleworkspace:io.kestra.plugin:plugin-googleworkspace:LATEST
#plugin-graalvm:io.kestra.plugin:plugin-graalvm:LATEST
#plugin-graphql:io.kestra.plugin:plugin-graphql:LATEST
#plugin-hightouch:io.kestra.plugin:plugin-hightouch:LATEST
#plugin-hubspot:io.kestra.plugin:plugin-hubspot:LATEST
#plugin-huggingface:io.kestra.plugin:plugin-huggingface:LATEST
#plugin-influxdb:io.kestra.plugin:plugin-influxdb:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-as400:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-clickhouse:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-db2:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-duckdb:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-druid:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-mariadb:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-mysql:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-oracle:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-pinot:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-postgres:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-redshift:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-snowflake:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-sqlserver:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-trino:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-vectorwise:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-vertica:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-dremio:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-arrow-flight:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-sqlite:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-sybase:LATEST
#plugin-jenkins:io.kestra.plugin:plugin-jenkins:LATEST
#plugin-jira:io.kestra.plugin:plugin-jira:LATEST
#plugin-kafka:io.kestra.plugin:plugin-kafka:LATEST
#plugin-kestra:io.kestra.plugin:plugin-kestra:LATEST
#plugin-kubernetes:io.kestra.plugin:plugin-kubernetes:LATEST
#plugin-ldap:io.kestra.plugin:plugin-ldap:LATEST
#plugin-linear:io.kestra.plugin:plugin-linear:LATEST
#plugin-malloy:io.kestra.plugin:plugin-malloy:LATEST
#plugin-meilisearch:io.kestra.plugin:plugin-meilisearch:LATEST
#plugin-minio:io.kestra.plugin:plugin-minio:LATEST
#plugin-mistral:io.kestra.plugin:plugin-mistral:LATEST
#plugin-modal:io.kestra.plugin:plugin-modal:LATEST
#plugin-mongodb:io.kestra.plugin:plugin-mongodb:LATEST
#plugin-mqtt:io.kestra.plugin:plugin-mqtt:LATEST
#plugin-nats:io.kestra.plugin:plugin-nats:LATEST
#plugin-neo4j:io.kestra.plugin:plugin-neo4j:LATEST
#plugin-notifications:io.kestra.plugin:plugin-notifications:LATEST
#plugin-notion:io.kestra.plugin:plugin-notion:LATEST
#plugin-ollama:io.kestra.plugin:plugin-ollama:LATEST
#plugin-openai:io.kestra.plugin:plugin-openai:LATEST
#plugin-opensearch:io.kestra.plugin:plugin-opensearch:LATEST
#plugin-perplexity:io.kestra.plugin:plugin-perplexity:LATEST
#plugin-powerbi:io.kestra.plugin:plugin-powerbi:LATEST
#plugin-pulsar:io.kestra.plugin:plugin-pulsar:LATEST
#plugin-redis:io.kestra.plugin:plugin-redis:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-bun:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-deno:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-go:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-groovy:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-jbang:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-julia:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-jython:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-lua:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-nashorn:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-node:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-perl:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-php:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-powershell:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-python:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-r:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-ruby:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-shell:LATEST
#plugin-serdes:io.kestra.plugin:plugin-serdes:LATEST
#plugin-servicenow:io.kestra.plugin:plugin-servicenow:LATEST
#plugin-sifflet:io.kestra.plugin:plugin-sifflet:LATEST
#plugin-soda:io.kestra.plugin:plugin-soda:LATEST
#plugin-solace:io.kestra.plugin:plugin-solace:LATEST
#plugin-spark:io.kestra.plugin:plugin-spark:LATEST
#plugin-sqlmesh:io.kestra.plugin:plugin-sqlmesh:LATEST
#plugin-supabase:io.kestra.plugin:plugin-supabase:LATEST
#plugin-surrealdb:io.kestra.plugin:plugin-surrealdb:LATEST
#plugin-terraform:io.kestra.plugin:plugin-terraform:LATEST
#plugin-transform:io.kestra.plugin:plugin-transform-grok:LATEST
#plugin-transform:io.kestra.plugin:plugin-transform-json:LATEST
#plugin-tika:io.kestra.plugin:plugin-tika:LATEST
#plugin-trivy:io.kestra.plugin:plugin-trivy:LATEST
#plugin-weaviate:io.kestra.plugin:plugin-weaviate:LATEST
#plugin-zendesk:io.kestra.plugin:plugin-zendesk:LATEST
#plugin-typesense:io.kestra.plugin:plugin-typesense:LATEST
#storage-azure:io.kestra.storage:storage-azure:LATEST
#storage-gcs:io.kestra.storage:storage-gcs:LATEST
#storage-minio:io.kestra.storage:storage-minio:LATEST
#storage-s3:io.kestra.storage:storage-s3:LATEST
#
#io.kestra.plugin:plugin-airbyte:LATEST
#io.kestra.plugin:plugin-amqp:LATEST
#io.kestra.plugin:plugin-ansible:LATEST
#io.kestra.plugin:plugin-aws:LATEST
#io.kestra.plugin:plugin-azure:LATEST
#io.kestra.plugin:plugin-cassandra:LATEST
#io.kestra.plugin:plugin-cloudquery:LATEST
#io.kestra.plugin:plugin-compress:LATEST
#io.kestra.plugin:plugin-couchbase:LATEST
#io.kestra.plugin:plugin-crypto:LATEST
#io.kestra.plugin:plugin-databricks:LATEST
#io.kestra.plugin:plugin-dataform:LATEST
#io.kestra.plugin:plugin-dbt:LATEST
#io.kestra.plugin:plugin-debezium-mysql:LATEST
#io.kestra.plugin:plugin-debezium-postgres:LATEST
#io.kestra.plugin:plugin-debezium-sqlserver:LATEST
#io.kestra.plugin:plugin-docker:LATEST
#io.kestra.plugin:plugin-elasticsearch:LATEST
#io.kestra.plugin:plugin-fivetran:LATEST
#io.kestra.plugin:plugin-fs:LATEST
#io.kestra.plugin:plugin-gcp:LATEST
#io.kestra.plugin:plugin-git:LATEST
#io.kestra.plugin:plugin-googleworkspace:LATEST
#io.kestra.plugin:plugin-hightouch:LATEST
#io.kestra.plugin:plugin-jdbc-clickhouse:LATEST
#io.kestra.plugin:plugin-jdbc-duckdb:LATEST
#io.kestra.plugin:plugin-jdbc-druid:LATEST
#io.kestra.plugin:plugin-jdbc-mysql:LATEST
#io.kestra.plugin:plugin-jdbc-oracle:LATEST
#io.kestra.plugin:plugin-jdbc-pinot:LATEST
#io.kestra.plugin:plugin-jdbc-postgres:LATEST
#io.kestra.plugin:plugin-jdbc-redshift:LATEST
#io.kestra.plugin:plugin-jdbc-rockset:LATEST
#io.kestra.plugin:plugin-jdbc-snowflake:LATEST
#io.kestra.plugin:plugin-jdbc-sqlserver:LATEST
#io.kestra.plugin:plugin-jdbc-trino:LATEST
#io.kestra.plugin:plugin-jdbc-vectorwise:LATEST
#io.kestra.plugin:plugin-jdbc-vertica:LATEST
#io.kestra.plugin:plugin-jdbc-dremio:LATEST
#io.kestra.plugin:plugin-jdbc-arrow-flight:LATEST
#io.kestra.plugin:plugin-jdbc-sqlite:LATEST
#io.kestra.plugin:plugin-kafka:LATEST
#io.kestra.plugin:plugin-kubernetes:LATEST
#io.kestra.plugin:plugin-malloy:LATEST
#io.kestra.plugin:plugin-modal:LATEST
#io.kestra.plugin:plugin-mongodb:LATEST
#io.kestra.plugin:plugin-mqtt:LATEST
#io.kestra.plugin:plugin-nats:LATEST
#io.kestra.plugin:plugin-neo4j:LATEST
#io.kestra.plugin:plugin-notifications:LATEST
#io.kestra.plugin:plugin-openai:LATEST
#io.kestra.plugin:plugin-powerbi:LATEST
#io.kestra.plugin:plugin-pulsar:LATEST
#io.kestra.plugin:plugin-redis:LATEST
#io.kestra.plugin:plugin-script-groovy:LATEST
#io.kestra.plugin:plugin-script-julia:LATEST
#io.kestra.plugin:plugin-script-jython:LATEST
#io.kestra.plugin:plugin-script-nashorn:LATEST
#io.kestra.plugin:plugin-script-node:LATEST
#io.kestra.plugin:plugin-script-powershell:LATEST
#io.kestra.plugin:plugin-script-python:LATEST
#io.kestra.plugin:plugin-script-r:LATEST
#io.kestra.plugin:plugin-script-ruby:LATEST
#io.kestra.plugin:plugin-script-shell:LATEST
#io.kestra.plugin:plugin-serdes:LATEST
#io.kestra.plugin:plugin-servicenow:LATEST
#io.kestra.plugin:plugin-singer:LATEST
#io.kestra.plugin:plugin-soda:LATEST
#io.kestra.plugin:plugin-solace:LATEST
#io.kestra.plugin:plugin-spark:LATEST
#io.kestra.plugin:plugin-sqlmesh:LATEST
#io.kestra.plugin:plugin-surrealdb:LATEST
#io.kestra.plugin:plugin-terraform:LATEST
#io.kestra.plugin:plugin-tika:LATEST
#io.kestra.plugin:plugin-weaviate:LATEST
#io.kestra.storage:storage-azure:LATEST
#io.kestra.storage:storage-gcs:LATEST
#io.kestra.storage:storage-minio:LATEST
#io.kestra.storage:storage-s3:LATEST

View File

@@ -1 +0,0 @@
**/*.*

305
AGENTS.md
View File

@@ -1,305 +0,0 @@
# Kestra AGENTS.md
This file provides guidance for AI coding agents working on the Kestra project. Kestra is an open-source data orchestration and scheduling platform built with Java (Micronaut) and Vue.js.
## Repository Layout
- **`core/`**: Core Kestra framework and task definitions
- **`cli/`**: Command-line interface and server implementation
- **`webserver/`**: REST API server implementation
- **`ui/`**: Vue.js frontend application
- **`jdbc-*`**: Database connector modules (H2, MySQL, PostgreSQL)
- **`script/`**: Script execution engine
- **`storage-local/`**: Local file storage implementation
- **`repository-memory/`**: In-memory repository implementation
- **`runner-memory/`**: In-memory execution runner
- **`processor/`**: Task processing engine
- **`model/`**: Data models and Data Transfer Objects
- **`platform/`**: Platform-specific implementations
- **`tests/`**: Integration test framework
- **`e2e-tests/`**: End-to-end testing suite
## Development Environment
### Prerequisites
- Java 21+
- Node.js 22+ and npm
- Python 3, pip, and python venv
- Docker & Docker Compose
- Gradle (wrapper included)
### Quick Setup with Devcontainer
The easiest way to get started is using the provided devcontainer:
1. Install VSCode Remote Development extension
2. Run `Dev Containers: Open Folder in Container...` from command palette
3. Select the Kestra root folder
4. Wait for Gradle build to complete
### Manual Setup
1. Clone the repository
2. Run `./gradlew build` to build the backend
3. Navigate to `ui/` and run `npm install`
4. Create configuration files as described below
## Configuration Files
### Backend Configuration
Create `cli/src/main/resources/application-override.yml`:
**Local Mode (H2 database):**
```yaml
micronaut:
server:
cors:
enabled: true
configurations:
all:
allowedOrigins:
- http://localhost:5173
```
**Standalone Mode (PostgreSQL):**
```yaml
kestra:
repository:
type: postgres
storage:
type: local
local:
base-path: "/app/storage"
queue:
type: postgres
tasks:
tmp-dir:
path: /tmp/kestra-wd/tmp
anonymous-usage-report:
enabled: false
datasources:
postgres:
url: jdbc:postgresql://host.docker.internal:5432/kestra
driverClassName: org.postgresql.Driver
username: kestra
password: k3str4
flyway:
datasources:
postgres:
enabled: true
locations:
- classpath:migrations/postgres
ignore-migration-patterns: "*:missing,*:future"
out-of-order: true
micronaut:
server:
cors:
enabled: true
configurations:
all:
allowedOrigins:
- http://localhost:5173
```
### Frontend Configuration
Create `ui/.env.development.local` for environment variables.
## Running the Application
### Backend
- **Local mode**: `./gradlew runLocal` (uses H2 database)
- **Standalone mode**: Use VSCode Run and Debug with main class `io.kestra.cli.App` and args `server standalone`
### Frontend
- Navigate to `ui/` directory
- Run `npm run dev` for development server (port 5173)
- Run `npm run build` for production build
## Building and Testing
### Backend
```bash
# Build the project
./gradlew build
# Run tests
./gradlew test
# Run specific module tests
./gradlew :core:test
# Clean build
./gradlew clean build
```
### Frontend
```bash
cd ui
npm install
npm run test
npm run lint
npm run build
```
### End-to-End Tests
```bash
# Build and start E2E tests
./build-and-start-e2e-tests.sh
# Or use the Makefile
make install
make install-plugins
make start-standalone-postgres
```
## Development Guidelines
### Java Backend
- Use Java 21 features
- Follow Micronaut framework patterns
- Add Swagger annotations for API documentation
- Use annotation processors (enable in IDE)
- Set `MICRONAUT_ENVIRONMENTS=local,override` for custom config
- Set `KESTRA_PLUGINS_PATH` for custom plugin loading
### Vue.js Frontend
- Vue 3 with Composition API
- TypeScript for type safety
- Vite for build tooling
- ESLint and Prettier for code quality
- Component-based architecture in `src/components/`
### Code Style
- Follow `.editorconfig` settings
- Use 4 spaces for Java, 2 spaces for YAML/JSON/CSS
- Enable format on save in VSCode
- Use Prettier for frontend code formatting
## Testing Strategy
### Backend Testing
- Unit tests in `src/test/java/`
- Integration tests in `tests/` module
- Use Micronaut test framework
- Test both local and standalone modes
### Frontend Testing
- Unit tests with Jest
- E2E tests with Playwright
- Component testing with Storybook
- Run `npm run test:unit` and `npm run test:e2e`
## Plugin Development
### Creating Plugins
- Follow the [Plugin Developer Guide](https://kestra.io/docs/plugin-developer-guide/)
- Place JAR files in `KESTRA_PLUGINS_PATH`
- Use the plugin template structure
- Test with both local and standalone modes
### Plugin Loading
- Set `KESTRA_PLUGINS_PATH` environment variable
- Use devcontainer mounts for local development
- Plugins are loaded at startup
## Common Issues and Solutions
### JavaScript Heap Out of Memory
Set `NODE_OPTIONS=--max-old-space-size=4096` environment variable.
### CORS Issues
Ensure backend CORS is configured for `http://localhost:5173` when using frontend dev server.
### Database Connection Issues
- Use `host.docker.internal` instead of `localhost` when connecting from devcontainer
- Verify PostgreSQL is running and accessible
- Check database credentials and permissions
### Gradle Build Issues
- Clear Gradle cache: `./gradlew clean`
- Check Java version compatibility
- Verify all dependencies are available
## Pull Request Guidelines
### Before Submitting
1. Run all tests: `./gradlew test` and `npm test`
2. Check code formatting: `./gradlew spotlessCheck`
3. Verify CORS configuration if changing API
4. Test both local and standalone modes
5. Update documentation for user-facing changes
### Commit Messages
- Follow conventional commit format
- Use present tense ("Add feature" not "Added feature")
- Reference issue numbers when applicable
- Keep commits focused and atomic
### Review Checklist
- [ ] All tests pass
- [ ] Code follows project style guidelines
- [ ] Documentation is updated
- [ ] No breaking changes without migration guide
- [ ] CORS properly configured if API changes
- [ ] Both local and standalone modes tested
## Useful Commands
```bash
# Quick development commands
./gradlew runLocal # Start local backend
./gradlew :ui:build # Build frontend
./gradlew clean build # Clean rebuild
npm run dev # Start frontend dev server
make install # Install Kestra locally
make start-standalone-postgres # Start with PostgreSQL
# Testing commands
./gradlew test # Run all backend tests
./gradlew :core:test # Run specific module tests
npm run test # Run frontend tests
npm run lint # Lint frontend code
```
## Getting Help
- Open a [GitHub issue](https://github.com/kestra-io/kestra/issues)
- Join the [Kestra Slack community](https://kestra.io/slack)
- Check the [main documentation](https://kestra.io/docs)
## Environment Variables
| Variable | Description | Default |
|----------|-------------|---------|
| `MICRONAUT_ENVIRONMENTS` | Custom config environments | `local,override` |
| `KESTRA_PLUGINS_PATH` | Path to custom plugins | `/workspaces/kestra/local/plugins` |
| `NODE_OPTIONS` | Node.js options | `--max-old-space-size=4096` |
| `JAVA_HOME` | Java installation path | `/usr/java/jdk-21` |
Remember: Always test your changes in both local and standalone modes, and ensure CORS is properly configured for frontend development.

View File

@@ -1,4 +1,4 @@
FROM eclipse-temurin:21-jre-jammy
FROM eclipse-temurin:17-jre
ARG KESTRA_PLUGINS=""
ARG APT_PACKAGES=""
@@ -16,9 +16,8 @@ RUN apt-get update -y && \
if [ -n "${APT_PACKAGES}" ]; then apt-get install -y --no-install-recommends ${APT_PACKAGES}; fi && \
apt-get clean && \
rm -rf /var/lib/apt/lists/* /var/tmp/* /tmp/* && \
curl -LsSf https://astral.sh/uv/0.6.17/install.sh | sh && mv /root/.local/bin/uv /bin && mv /root/.local/bin/uvx /bin && \
if [ -n "${KESTRA_PLUGINS}" ]; then /app/kestra plugins install ${KESTRA_PLUGINS} && rm -rf /tmp/*; fi && \
if [ -n "${PYTHON_LIBRARIES}" ]; then uv pip install --system ${PYTHON_LIBRARIES}; fi && \
if [ -n "${PYTHON_LIBRARIES}" ]; then pip install ${PYTHON_LIBRARIES}; fi && \
chown -R kestra:kestra /app
USER kestra

View File

@@ -1,7 +0,0 @@
FROM kestra/kestra:develop
USER root
COPY --chown=kestra:kestra docker /
USER kestra

100
Makefile
View File

@@ -17,8 +17,6 @@ VERSION := $(shell ./gradlew properties -q | awk '/^version:/ {print $$2}')
GIT_COMMIT := $(shell git rev-parse --short HEAD)
GIT_BRANCH := $(shell git rev-parse --abbrev-ref HEAD)
DATE := $(shell date --rfc-3339=seconds)
PLUGIN_GIT_DIR ?= $(pwd)/..
PLUGIN_JARS_DIR ?= $(pwd)/locals/plugins
DOCKER_IMAGE = kestra/kestra
DOCKER_PATH = ./
@@ -69,7 +67,6 @@ install-plugins:
[[ $$plugin =~ ^#.* ]] && continue; \
PLUGINS_PATH="${KESTRA_INSTALL_DIR}/plugins"; \
CURRENT_PLUGIN=$${plugin/LATEST/"${VERSION}"}; \
CURRENT_PLUGIN=$$(echo $$CURRENT_PLUGIN | cut -d':' -f2-); \
PLUGIN_FILE="$$PLUGINS_PATH/$$(echo $$CURRENT_PLUGIN | awk -F':' '{print $$2"-"$$3}').jar"; \
echo "Installing Kestra plugin $$CURRENT_PLUGIN > ${KESTRA_INSTALL_DIR}/plugins"; \
if [ -f "$$PLUGIN_FILE" ]; then \
@@ -77,7 +74,7 @@ install-plugins:
else \
${KESTRA_BASEDIR}/bin/kestra plugins install $$CURRENT_PLUGIN \
--plugins ${KESTRA_BASEDIR}/plugins \
--repositories=https://central.sonatype.com/repository/maven-snapshots || exit 1; \
--repositories=https://s01.oss.sonatype.org/content/repositories/snapshots || exit 1; \
fi \
done < $$PLUGIN_LIST
@@ -89,8 +86,6 @@ build-docker: build-exec
--compress \
--rm \
-f ./Dockerfile \
--build-arg="APT_PACKAGES=python3 python-is-python3 python3-pip curl jattach" \
--build-arg="PYTHON_LIBRARIES=kestra" \
-t ${DOCKER_IMAGE}:${VERSION} ${DOCKER_PATH} || exit 1 ;
# Verify whether Kestra is running
@@ -130,6 +125,9 @@ datasources:
username: kestra
password: k3str4
kestra:
server:
basic-auth:
enabled: false
encryption:
secret-key: 3ywuDa/Ec61VHkOX3RlI9gYq7CaD0mv0Pf3DHtAXA6U=
repository:
@@ -146,10 +144,7 @@ export KESTRA_POSTGRES_CONFIGURATION
# Build and deploy Kestra in standalone mode (using Postgres backend)
--private-start-standalone-postgres:
docker compose -f ./docker-compose-ci.yml up postgres -d;
echo "Waiting for postgres to be running"
until [ "`docker inspect -f {{.State.Running}} kestra-postgres-1`"=="true" ]; do \
sleep 1; \
done; \
rm -rf ${KESTRA_BASEDIR}/bin/confs/ && \
mkdir -p ${KESTRA_BASEDIR}/bin/confs/ ${KESTRA_BASEDIR}/logs/ && \
touch ${KESTRA_BASEDIR}/bin/confs/application.yml
@@ -173,88 +168,3 @@ start-standalone-postgres: kill --private-start-standalone-postgres health
start-standalone-local: kill --private-start-standalone-local health
#checkout all plugins
clone-plugins:
@echo "Using PLUGIN_GIT_DIR: $(PLUGIN_GIT_DIR)"
@mkdir -p "$(PLUGIN_GIT_DIR)"
@echo "Fetching repository list from GitHub..."
@REPOS=$$(gh repo list kestra-io -L 1000 --json name | jq -r .[].name | sort | grep "^plugin-"); \
for repo in $$REPOS; do \
if [[ $$repo == plugin-* ]]; then \
if [ -d "$(PLUGIN_GIT_DIR)/$$repo" ]; then \
echo "Skipping: $$repo (Already cloned)"; \
else \
echo "Cloning: $$repo using SSH..."; \
git clone "git@github.com:kestra-io/$$repo.git" "$(PLUGIN_GIT_DIR)/$$repo"; \
fi; \
fi; \
done
@echo "Done!"
# Pull every plugins in main or master branch
pull-plugins:
@echo "🔍 Pulling repositories in '$(PLUGIN_GIT_DIR)'..."
@for repo in "$(PLUGIN_GIT_DIR)"/*; do \
if [ -d "$$repo/.git" ]; then \
branch=$$(git -C "$$repo" rev-parse --abbrev-ref HEAD); \
if [[ "$$branch" == "master" || "$$branch" == "main" ]]; then \
echo "🔄 Pulling: $$(basename "$$repo") (branch: $$branch)"; \
git -C "$$repo" pull; \
else \
echo "❌ Skipping: $$(basename "$$repo") (Not on master or main branch, currently on $$branch)"; \
fi; \
fi; \
done
@echo "✅ Done pulling!"
# Update all plugins jar
build-plugins:
@echo "🔍 Scanning repositories in '$(PLUGIN_GIT_DIR)'..."
@MASTER_REPOS=(); \
for repo in "$(PLUGIN_GIT_DIR)"/*; do \
if [ -d "$$repo/.git" ]; then \
branch=$$(git -C "$$repo" rev-parse --abbrev-ref HEAD); \
if [[ "$$branch" == "master" || "$$branch" == "main" ]]; then \
MASTER_REPOS+=("$$repo"); \
else \
echo "❌ Skipping: $$(basename "$$repo") (Not on master or main branch)"; \
fi; \
fi; \
done; \
\
# === STEP 2: Update Repos on Master or Main Branch === \
echo "⬇️ Updating repositories on master or main branch..."; \
for repo in "$${MASTER_REPOS[@]}"; do \
echo "🔄 Updating: $$(basename "$$repo")"; \
git -C "$$repo" pull --rebase; \
done; \
\
# === STEP 3: Build with Gradle === \
echo "⚙️ Building repositories with Gradle..."; \
for repo in "$${MASTER_REPOS[@]}"; do \
echo "🔨 Building: $$(basename "$$repo")"; \
gradle clean build -x test shadowJar -p "$$repo"; \
done; \
\
# === STEP 4: Copy Latest JARs (Ignoring javadoc & sources) === \
echo "📦 Organizing built JARs..."; \
mkdir -p "$(PLUGIN_JARS_DIR)"; \
for repo in "$${MASTER_REPOS[@]}"; do \
REPO_NAME=$$(basename "$$repo"); \
\
JARS=($$(find "$$repo" -type f -name "plugin-*.jar" ! -name "*-javadoc.jar" ! -name "*-sources.jar")); \
if [ $${#JARS[@]} -eq 0 ]; then \
echo "⚠️ Warning: No valid plugin JARs found for $$REPO_NAME"; \
continue; \
fi; \
\
for jar in "$${JARS[@]}"; do \
JAR_NAME=$$(basename "$$jar"); \
BASE_NAME=$$(echo "$$JAR_NAME" | sed -E 's/(-[0-9]+.*)?\.jar$$//'); \
rm -f "$(PLUGIN_JARS_DIR)/$$BASE_NAME"-[0-9]*.jar; \
cp "$$jar" "$(PLUGIN_JARS_DIR)/"; \
echo "✅ Copied JAR: $$JAR_NAME"; \
done; \
done; \
\
echo "🎉 Done! All master and main branch repos updated, built, and organized."

486
README.md
View File

@@ -5,7 +5,7 @@
</p>
<h1 align="center" style="border-bottom: none">
Event-Driven Declarative Orchestration Platform
Event-Driven Declarative Orchestrator
</h1>
<div align="center">
@@ -19,208 +19,374 @@
<br />
<p align="center">
<a href="https://x.com/kestra_io"><img height="25" src="https://kestra.io/twitter.svg" alt="X(formerly Twitter)" /></a> &nbsp;
<a href="https://twitter.com/kestra_io"><img height="25" src="https://kestra.io/twitter.svg" alt="twitter" /></a> &nbsp;
<a href="https://www.linkedin.com/company/kestra/"><img height="25" src="https://kestra.io/linkedin.svg" alt="linkedin" /></a> &nbsp;
<a href="https://www.youtube.com/@kestra-io"><img height="25" src="https://kestra.io/youtube.svg" alt="youtube" /></a> &nbsp;
</p>
<br />
<p align="center">
<a href="https://trendshift.io/repositories/2714" target="_blank">
<img src="https://trendshift.io/api/badge/repositories/2714" alt="kestra-io%2Fkestra | Trendshift" width="250" height="55"/>
</a>
<a href="https://www.producthunt.com/posts/kestra?embed=true&utm_source=badge-top-post-badge&utm_medium=badge&utm_souce=badge-kestra" target="_blank"><img src="https://api.producthunt.com/widgets/embed-image/v1/top-post-badge.svg?post_id=612077&theme=light&period=daily&t=1740737506162" alt="Kestra - All&#0045;in&#0045;one&#0032;automation&#0032;&#0038;&#0032;orchestration&#0032;platform | Product Hunt" style="width: 250px; height: 54px;" width="250" height="54" /></a>
</p>
<p align="center">
<a href="https://go.kestra.io/video/product-overview" target="_blank">
<img src="https://kestra.io/startvideo.png" alt="Get started in 3 minutes with Kestra" width="640px" />
<a href="https://www.youtube.com/watch?v=h-P0eK2xN58&ab_channel=Kestra" target="_blank">
<img src="https://kestra.io/startvideo.png" alt="Get started in 4 minutes with Kestra" width="640px" />
</a>
</p>
<p align="center" style="color:grey;"><i>Click on the image to learn how to get started with Kestra in 3 minutes.</i></p>
<p align="center" style="color:grey;"><i>"Click on the image to get started in 4 minutes with Kestra."</i></p>
## Live Demo
Try Kestra using our [live demo](https://demo.kestra.io/ui/login?auto).
## What is Kestra
Kestra is a universal open-source orchestrator that makes both **scheduled** and **event-driven** workflows easy. By bringing **Infrastructure as Code** best practices to data, process, and microservice orchestration, you can build reliable workflows and manage them with confidence.
In just a few lines of code, you can [create a flow](https://kestra.io/docs/getting-started) directly from the UI. Thanks to the declarative YAML interface for defining orchestration logic, business stakeholders can participate in the workflow creation process.
Kestra offers a versatile set of **language-agnostic** developer tools while simultaneously providing an intuitive user interface tailored for business professionals. The YAML definition gets automatically adjusted any time you make changes to a workflow from the UI or via an API call. Therefore, the orchestration logic is always managed **declaratively in code**, even if some workflow components are modified in other ways (UI, CI/CD, Terraform, API calls).
## 🌟 What is Kestra?
Kestra is an open-source, event-driven orchestration platform that makes both **scheduled** and **event-driven** workflows easy. By bringing **Infrastructure as Code** best practices to data, process, and microservice orchestration, you can build reliable [workflows](https://kestra.io/docs/getting-started) directly from the UI in just a few lines of YAML.
**Key Features:**
- **Everything as Code and from the UI:** keep **workflows as code** with a **Git Version Control** integration, even when building them from the UI.
- **Event-Driven & Scheduled Workflows:** automate both **scheduled** and **real-time** event-driven workflows via a simple `trigger` definition.
- **Declarative YAML Interface:** define workflows using a simple configuration in the **built-in code editor**.
- **Rich Plugin Ecosystem:** hundreds of plugins built in to extract data from any database, cloud storage, or API, and **run scripts in any language**.
- **Intuitive UI & Code Editor:** build and visualize workflows directly from the UI with syntax highlighting, auto-completion and real-time syntax validation.
- **Scalable:** designed to handle millions of workflows, with high availability and fault tolerance.
- **Version Control Friendly:** write your workflows from the built-in code Editor and push them to your preferred Git branch directly from Kestra, enabling best practices with CI/CD pipelines and version control systems.
- **Structure & Resilience**: tame chaos and bring resilience to your workflows with **namespaces**, **labels**, **subflows**, **retries**, **timeout**, **error handling**, **inputs**, **outputs** that generate artifacts in the UI, **variables**, **conditional branching**, **advanced scheduling**, **event triggers**, **backfills**, **dynamic tasks**, **sequential and parallel tasks**, and skip tasks or triggers when needed by setting the flag `disabled` to `true`.
![Adding new tasks in the UI](https://kestra.io/adding-tasks.gif)
🧑‍💻 The YAML definition gets automatically adjusted any time you make changes to a workflow from the UI or via an API call. Therefore, the orchestration logic is **always managed declaratively in code**, even if you modify your workflows in other ways (UI, CI/CD, Terraform, API calls).
## Key concepts
1. `Flow` is the main component in Kestra. It's a container for your tasks and orchestration logic.
2. `Namespace` is used to provide logical isolation, e.g., to separate development and production environments. Namespaces are like folders on your file system — they organize flows into logical categories and can be nested to provide a hierarchical structure.
3. `Tasks` are atomic actions in a flow. By default, all tasks in the list are executed sequentially, with additional customization options, e.g., to run tasks in parallel or to allow specific tasks to fail when needed.
4. `Triggers` define when a flow should run. In Kestra, flows are triggered based on events. Examples of such events include:
- a regular time-based **schedule**
- an **API** call (*webhook trigger*)
- ad-hoc execution from the **UI**
- a **flow trigger** - flows can be triggered from other flows using a [flow trigger](https://kestra.io/docs/developer-guide/triggers/flow) or a [subflow](https://kestra.io/docs/flow-examples/subflow), enabling highly modular workflows.
- **custom events**, including a new file arrival (*file detection event*), a new message in a message bus, query completion, and more.
5. `Inputs` allow you to pass runtime-specific variables to a flow. They are strongly typed, and allow additional [validation rules](https://kestra.io/docs/developer-guide/inputs#input-validation).
<p align="center">
<img src="https://kestra.io/adding-tasks.gif" alt="Adding new tasks in the UI">
</p>
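To tie these concepts together, here is a minimal illustrative sketch (not taken from the repository) combining a typed input, a task that renders it, and a schedule trigger. The task and trigger types reuse the ones shown elsewhere in this README, and property names such as the input identifier may differ across Kestra versions:
```yaml
id: key_concepts_demo
namespace: dev
# Typed, runtime-specific value passed to the flow at execution time
inputs:
  - name: user     # newer Kestra releases use `id` instead of `name`
    type: STRING
    defaults: World
tasks:
  # Tasks run sequentially by default
  - id: say_hello
    type: io.kestra.core.tasks.log.Log
    message: "Hello, {{ inputs.user }}!"
triggers:
  # Time-based schedule; webhook, flow, and custom event triggers follow the same shape
  - id: every_morning
    type: io.kestra.core.models.triggers.types.Schedule
    cron: "0 9 * * *"
```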
## Extensible platform via plugins
---
Most tasks in Kestra are available as [plugins](https://kestra.io/plugins), but many types of tasks are available in the core library, including, among others, script tasks supporting various programming languages (e.g., Python, Node, Bash) and the ability to orchestrate your business logic packaged into Docker container images.
## 🚀 Quick Start
To create your own plugins, check the [plugin developer guide](https://kestra.io/docs/plugin-developer-guide).
### Get Started Locally in 5 Minutes
## Rich orchestration capabilities
#### Launch Kestra in Docker
Kestra provides a variety of tasks to handle both simple and complex business logic, including:
Make sure that Docker is running. Then, start Kestra in a single command:
```bash
docker run --pull=always --rm -it -p 8080:8080 --user=root \
-v /var/run/docker.sock:/var/run/docker.sock \
-v /tmp:/tmp kestra/kestra:latest server local
```
If you're on Windows and use PowerShell:
```powershell
docker run --pull=always --rm -it -p 8080:8080 --user=root `
-v "/var/run/docker.sock:/var/run/docker.sock" `
-v "C:/Temp:/tmp" kestra/kestra:latest server local
```
If you're on Windows and use Command Prompt (CMD):
```cmd
docker run --pull=always --rm -it -p 8080:8080 --user=root ^
-v "/var/run/docker.sock:/var/run/docker.sock" ^
-v "C:/Temp:/tmp" kestra/kestra:latest server local
```
If you're on Windows and use WSL (Linux-based environment in Windows):
```bash
docker run --pull=always --rm -it -p 8080:8080 --user=root \
-v "/var/run/docker.sock:/var/run/docker.sock" \
-v "C:/Temp:/tmp" kestra/kestra:latest server local
```
Check our [Installation Guide](https://kestra.io/docs/installation) for other deployment options (Docker Compose, Podman, Kubernetes, AWS, GCP, Azure, and more).
Access the Kestra UI at [http://localhost:8080](http://localhost:8080) and start building your first flow!
#### Your First Hello World Flow
Create a new flow with the following content:
- subflows
- retries
- timeout
- error handling
- conditional branching
- dynamic tasks
- sequential and parallel tasks
- skipping tasks or triggers when needed by setting the flag `disabled` to `true`.
- configuring dependencies between tasks, flows and triggers
- advanced scheduling and trigger conditions
- backfills
- blueprints
- documenting your flows, tasks and triggers by adding a markdown description to any component
- adding labels to add additional metadata to your flows such as the flow owner or team:
```yaml
id: hello_world
id: getting_started
namespace: dev
description: |
# Getting Started
Let's `write` some **markdown** - [first flow](https://t.ly/Vemr0) 🚀
labels:
owner: rick.astley
project: never-gonna-give-you-up
tasks:
- id: hello
type: io.kestra.core.tasks.log.Log
message: Hello world!
description: a *very* important task
disabled: false
timeout: PT10M
retry:
type: constant # type: string
interval: PT15M # type: Duration
maxDuration: PT1H # type: Duration
maxAttempt: 5 # type: int
warningOnRetry: true # type: boolean, default is false
- id: parallel
type: io.kestra.core.tasks.flows.Parallel
concurrent: 3
tasks:
- id: task1
type: io.kestra.plugin.scripts.shell.Commands
commands:
- 'echo "running {{task.id}}"'
- 'sleep 2'
- id: task2
type: io.kestra.plugin.scripts.shell.Commands
commands:
- 'echo "running {{task.id}}"'
- 'sleep 1'
- id: task3
type: io.kestra.plugin.scripts.shell.Commands
commands:
- 'echo "running {{task.id}}"'
- 'sleep 3'
triggers:
- id: schedule
type: io.kestra.core.models.triggers.types.Schedule
cron: "*/15 * * * *"
backfill:
start: 2023-10-05T14:00:00Z
```
## Built-in code editor
You can write workflows directly from the UI. When writing your workflows, the UI provides:
- autocompletion
- syntax validation
- embedded plugin documentation
- example flows provided as blueprints
- topology view (a view of your dependencies as a Directed Acyclic Graph) that gets updated live as you modify and add new tasks.
## Stay up to date
We release new versions every month. Give the repository a star to stay up to date with the latest releases and get notified about future updates.
![Star the repo](https://kestra.io/star.gif)
## Getting Started
Follow the steps below to start local development.
### Prerequisites
Make sure that Docker is installed and running on your system. The default installation requires the following:
- [Docker](https://docs.docker.com/engine/install/)
- [Docker Compose](https://docs.docker.com/compose/install/)
### Launch Kestra
Download the Docker Compose file:
```sh
curl -o docker-compose.yml https://raw.githubusercontent.com/kestra-io/kestra/develop/docker-compose.yml
```
Alternatively, you can use `wget https://raw.githubusercontent.com/kestra-io/kestra/develop/docker-compose.yml`.
Start Kestra:
```sh
docker compose up -d
```
Open `http://localhost:8080` in your browser and create your first flow.
### Hello-World flow
Here is a simple example logging a hello world message to the terminal:
```yaml
id: getting_started
namespace: dev
tasks:
- id: say_hello
type: io.kestra.plugin.core.log.Log
message: "Hello, World!"
```
Run the flow and see the output in the UI!
---
## 🧩 Plugin Ecosystem
Kestra's functionality is extended through a rich [ecosystem of plugins](https://kestra.io/plugins) that empower you to run tasks anywhere and code in any language, including Python, Node.js, R, Go, Shell, and more. Here's how Kestra plugins enhance your workflows:
- **Run Anywhere:**
- **Local or Remote Execution:** Execute tasks on your local machine, remote servers via SSH, or scale out to serverless containers using [Task Runners](https://kestra.io/docs/task-runners).
- **Docker and Kubernetes Support:** Seamlessly run Docker containers within your workflows or launch Kubernetes jobs to handle compute-intensive workloads.
- **Code in Any Language:**
- **Scripting Support:** Write scripts in your preferred programming language. Kestra supports Python, Node.js, R, Go, Shell, and others, allowing you to integrate existing codebases and deployment patterns.
- **Flexible Automation:** Execute shell commands, run SQL queries against various databases, and make HTTP requests to interact with APIs.
- **Event-Driven and Real-Time Processing:**
- **Real-Time Triggers:** React to events from external systems in real-time, such as file arrivals, new messages in message buses (Kafka, Redis, Pulsar, AMQP, MQTT, NATS, AWS SQS, Google Pub/Sub, Azure Event Hubs), and more.
- **Custom Events:** Define custom events to trigger flows based on specific conditions or external signals, enabling highly responsive workflows.
- **Cloud Integrations:**
- **AWS, Google Cloud, Azure:** Integrate with a variety of cloud services to interact with storage solutions, messaging systems, compute resources, and more.
- **Big Data Processing:** Run big data processing tasks using tools like Apache Spark or interact with analytics platforms like Google BigQuery.
- **Monitoring and Notifications:**
- **Stay Informed:** Send messages to Slack channels, email notifications, or trigger alerts in PagerDuty to keep your team updated on workflow statuses.
Kestra's plugin ecosystem is continually expanding, allowing you to tailor the platform to your specific needs. Whether you're orchestrating complex data pipelines, automating scripts across multiple environments, or integrating with cloud services, there's likely a plugin to assist. And if not, you can always [build your own plugins](https://kestra.io/docs/plugin-developer-guide/) to extend Kestra's capabilities.
🧑‍💻 **Note:** This is just a glimpse of what Kestra plugins can do. Explore the full list on our [Plugins Page](https://kestra.io/plugins).
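As an illustrative sketch only (the plugin type names below, such as the HTTP download task and the Python script task, are assumptions that may differ by plugin version), a flow combining a few plugins might look like this:
```yaml
id: plugin_ecosystem_example
namespace: dev
tasks:
  - id: download
    # assumed HTTP download task from the HTTP plugin
    type: io.kestra.plugin.fs.http.Download
    uri: https://example.com/data.csv
  - id: analyze
    # assumed Python script task; the downloaded file is referenced through templating
    type: io.kestra.plugin.scripts.python.Script
    script: |
      print("processing file: {{outputs.download.uri}}")
  - id: log_result
    type: io.kestra.core.tasks.log.Log
    message: "Pipeline finished"
```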
---
## 📚 Key Concepts
- **Flows:** the core unit in Kestra, representing a workflow composed of tasks.
- **Tasks:** individual units of work, such as running a script, moving data, or calling an API.
- **Namespaces:** logical grouping of flows for organization and isolation.
- **Triggers:** schedules or events that initiate the execution of flows.
- **Inputs & Variables:** parameters and dynamic data passed into flows and tasks.
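For instance, inputs declared on a flow are exposed to tasks through templated variables. The following is a minimal sketch (the exact input declaration syntax, e.g. `name` vs. `id`, varies between Kestra versions):
```yaml
id: inputs_example
namespace: dev
inputs:
  - name: user        # input declaration; newer releases use `id` instead of `name`
    type: STRING
    defaults: World
tasks:
  - id: greet
    type: io.kestra.core.tasks.log.Log
    # the input value is resolved by the templating engine at execution time
    message: "Hello, {{inputs.user}}!"
```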
---
## 🎨 Build Workflows Visually
Kestra provides an intuitive UI that allows you to interactively build and visualize your workflows:
- **Drag-and-Drop Interface:** add and rearrange tasks from the Topology Editor.
- **Real-Time Validation:** instant feedback on your workflow's syntax and structure to catch errors early.
- **Auto-Completion:** smart suggestions as you type to write flow code quickly and without syntax errors.
- **Live Topology View:** see your workflow as a Directed Acyclic Graph (DAG) that updates in real-time.
---
For more information:
- Follow the [getting started tutorial](https://kestra.io/docs/getting-started/).
- Read the [documentation](https://kestra.io/docs/) to understand how to:
- [Develop your flows](https://kestra.io/docs/developer-guide/)
- [Deploy Kestra](https://kestra.io/docs/administrator-guide/)
- Use our [Terraform provider](https://kestra.io/docs/terraform/) to deploy your flows
- Develop your [own plugins](https://kestra.io/docs/plugin-developer-guide/).
## 🔧 Extensible and Developer-Friendly
### Plugin Development
Create custom plugins to extend Kestra's capabilities. Check out our [Plugin Developer Guide](https://kestra.io/docs/plugin-developer-guide/) to get started.
### Infrastructure as Code
- **Version Control:** store your flows in Git repositories.
- **CI/CD Integration:** automate deployment of flows using CI/CD pipelines (see the sketch after this list).
- **Terraform Provider:** manage Kestra resources with the [official Terraform provider](https://kestra.io/docs/terraform/).
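As a rough, illustrative sketch only (the CLI sub-command, flags, and paths below are assumptions; check `kestra flow --help` and the CI/CD documentation for the exact syntax), a GitHub Actions job could validate flows stored in Git against a Kestra server on every push:
```yaml
# .github/workflows/validate-flows.yml (hypothetical example)
name: Validate Kestra flows
on:
  push:
    branches: [main]
jobs:
  validate:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Validate flows with the Kestra CLI
        # runs the CLI packaged in the official Docker image;
        # adapt the sub-command and flags to your setup
        run: |
          docker run --rm -v "$PWD/flows:/flows" kestra/kestra:latest \
            flow validate /flows --server https://kestra.example.com
```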
---
## Plugins
Kestra is built on a [plugin system](https://kestra.io/plugins/). You can find a plugin to interact with your provider; alternatively, you can follow [these steps](https://kestra.io/docs/plugin-developer-guide/) to develop your own plugin.
For a full list of plugins, check the [plugins page](https://kestra.io/plugins/).
Here are some examples of the available plugins:
<table>
<tr>
<td><a href="https://kestra.io/plugins/plugin-airbyte#cloudjobs">Airbyte Cloud</a></td>
<td><a href="https://kestra.io/plugins/plugin-airbyte#connections">Airbyte OSS</a></td>
<td><a href="https://kestra.io/plugins/plugin-aws#athena">Amazon Athena</a></td>
</tr>
<tr>
<td><a href="https://kestra.io/plugins/plugin-aws#cli">Amazon CLI</a></td>
<td><a href="https://kestra.io/plugins/plugin-aws#dynamodb">Amazon DynamoDb</a></td>
<td><a href="https://kestra.io/plugins/plugin-jdbc-redshift">Amazon Redshift</a></td>
</tr>
<tr>
<td><a href="https://kestra.io/plugins/plugin-aws#s3">Amazon S3</a></td>
<td><a href="https://kestra.io/plugins/plugin-aws#sns">Amazon SNS</a></td>
<td><a href="https://kestra.io/plugins/plugin-aws#sqs">Amazon SQS</a></td>
</tr>
<tr>
<td><a href="https://kestra.io/plugins/plugin-amqp">AMQP</a></td>
<td><a href="https://kestra.io/plugins/plugin-serdes#avro">Apache Avro</a></td>
<td><a href="https://kestra.io/plugins/plugin-cassandra">Apache Cassandra</a></td>
</tr>
<tr>
<td><a href="https://kestra.io/plugins/plugin-kafka">Apache Kafka</a></td>
<td><a href="https://kestra.io/plugins/plugin-jdbc-pinot">Apache Pinot</a></td>
<td><a href="https://kestra.io/plugins/plugin-serdes#parquet">Apache Parquet</a></td>
</tr>
<tr>
<td><a href="https://kestra.io/plugins/plugin-pulsar">Apache Pulsar</a></td>
<td><a href="https://kestra.io/plugins/plugin-spark">Apache Spark</a></td>
<td><a href="https://kestra.io/plugins/plugin-tika">Apache Tika</a></td>
</tr>
<tr>
<td><a href="https://kestra.io/plugins/plugin-azure/#batchjob">Azure Batch</a></td>
<td><a href="https://kestra.io/plugins/plugin-azure/#storage-blob">Azure Blob Storage</a></td>
<td><a href="https://kestra.io/plugins/plugin-azure/#storagetable">Azure Blob Table</a></td>
</tr>
<tr>
<td><a href="https://kestra.io/plugins/plugin-serdes#csv">CSV</a></td>
<td><a href="https://kestra.io/plugins/plugin-jdbc-clickhouse">ClickHouse</a></td>
<td><a href="https://kestra.io/plugins/plugin-compress">Compression</a></td>
</tr>
<tr>
<td><a href="https://kestra.io/plugins/plugin-couchbase">Couchbase</a></td>
<td><a href="https://kestra.io/plugins/plugin-databricks">Databricks</a></td>
<td><a href="https://kestra.io/plugins/plugin-dbt#cloud">dbt cloud</a></td>
</tr>
<tr>
<td><a href="https://kestra.io/plugins/plugin-dbt#cli">dbt core</a></td>
<td><a href="https://kestra.io/plugins/plugin-debezium-sqlserver">Debezium Microsoft SQL Server</a></td>
<td><a href="https://kestra.io/plugins/plugin-debezium-mysql">Debezium MYSQL</a></td>
</tr>
<tr>
<td><a href="https://kestra.io/plugins/plugin-debezium-postgres">Debezium Postgres</a></td>
<td><a href="https://kestra.io/plugins/plugin-jdbc-duckdb">DuckDb</a></td>
<td><a href="https://kestra.io/plugins/plugin-elasticsearch">ElasticSearch</a></td>
</tr>
<tr>
<td><a href="https://kestra.io/plugins/plugin-notifications#mail">Email</a></td>
<td><a href="https://kestra.io/plugins/plugin-fivetran">Fivetran</a></td>
<td><a href="https://kestra.io/plugins/plugin-fs#ftp">FTP</a></td>
</tr>
<tr>
<td><a href="https://kestra.io/plugins/plugin-fs#ftps">FTPS</a></td>
<td><a href="https://kestra.io/plugins/plugin-git">Git</a></td>
<td><a href="https://kestra.io/plugins/plugin-gcp#bigquery">Google Big Query</a></td>
</tr>
<tr>
<td><a href="https://kestra.io/plugins/plugin-gcp#pubsub">Google Pub/Sub</a></td>
<td><a href="https://kestra.io/plugins/plugin-gcp#gcs">Google Cloud Storage</a></td>
<td><a href="https://kestra.io/plugins/plugin-gcp#dataproc">Google DataProc</a></td>
</tr>
<tr>
<td><a href="https://kestra.io/plugins/plugin-gcp#firestore">Google Firestore</a></td>
<td><a href="https://kestra.io/plugins/plugin-gcp#cli">Google Cli</a></td>
<td><a href="https://kestra.io/plugins/plugin-gcp#vertexai/">Google Vertex AI</a></td>
</tr>
<tr>
<td><a href="https://kestra.io/plugins/plugin-gcp#gke">Google Kubernetes Engines</a></td>
<td><a href="https://kestra.io/plugins/plugin-googleworkspace#drive">Google Drive</a></td>
<td><a href="https://kestra.io/plugins/plugin-googleworkspace#sheets">Google Sheets</a></td>
</tr>
<tr>
<td><a href="https://kestra.io/plugins/plugin-script-groovy">Groovy</a></td>
<td><a href="https://kestra.io/plugins/plugin-fs#http">Http</a></td>
<td><a href="https://kestra.io/plugins/plugin-serdes#json">JSON</a></td>
</tr>
<tr>
<td><a href="https://kestra.io/plugins/plugin-script-julia">Julia</a></td>
<td><a href="https://kestra.io/plugins/plugin-script-jython">Jython</a></td>
<td><a href="https://kestra.io/plugins/plugin-kubernetes">Kubernetes</a></td>
</tr>
<tr>
<td><a href="https://kestra.io/plugins/plugin-jdbc-sqlserver">Microsoft SQL Server</a></td>
<td><a href="https://kestra.io/plugins/plugin-notifications#teams">Microsoft Teams</a></td>
<td><a href="https://kestra.io/plugins/plugin-mongodb">MongoDb</a></td>
</tr>
<tr>
<td><a href="https://kestra.io/plugins/plugin-mqtt">MQTT</a></td>
<td><a href="https://kestra.io/plugins/plugin-jdbc-mysql">MySQL</a></td>
<td><a href="https://kestra.io/plugins/plugin-script-nashorn">Nashorn</a></td>
</tr>
<tr>
<td><a href="https://kestra.io/plugins/plugin-nats">NATS</a></td>
<td><a href="https://kestra.io/plugins/plugin-neo4j">Neo4j</a></td>
<td><a href="https://kestra.io/plugins/plugin-script-node">Node</a></td>
</tr>
<tr>
<td><a href="https://kestra.io/plugins/plugin-openai">OpenAI</a></td>
<td><a href="https://kestra.io/plugins/plugin-crypto#openpgp">Open PGP</a></td>
<td><a href="https://kestra.io/plugins/plugin-jdbc-oracle">Oracle</a></td>
</tr>
<tr>
<td><a href="https://kestra.io/plugins/plugin-jdbc-postgres">PostgreSQL</a></td>
<td><a href="https://kestra.io/plugins/plugin-powerbi">Power BI</a></td>
<td><a href="https://kestra.io/plugins/plugin-script-powershell">PowerShell</a></td>
</tr>
<tr>
<td><a href="https://kestra.io/plugins/plugin-script-python">Python</a></td>
<td><a href="https://kestra.io/plugins/plugin-jdbc-rockset">Rockset</a></td>
<td><a href="https://kestra.io/plugins/plugin-script-r">RScript</a></td>
</tr>
<tr>
<td><a href="https://kestra.io/plugins/plugin-fs#sftp">SFTP</a></td>
<td><a href="https://kestra.io/plugins/plugin-servicenow">ServiceNow</a></td>
<td><a href="https://kestra.io/plugins/plugin-singer">Singer</a></td>
</tr>
<tr>
<td><a href="https://kestra.io/plugins/plugin-script-shell">Shell</a></td>
<td><a href="https://kestra.io/plugins/plugin-notifications#slack">Slack</a></td>
<td><a href="https://kestra.io/plugins/plugin-jdbc-snowflake">Snowflake</a></td>
</tr>
<tr>
<td><a href="https://kestra.io/plugins/plugin-soda">Soda</a></td>
<td><a href="https://kestra.io/plugins/plugin-fs#ssh">SSH</a></td>
<td><a href="https://kestra.io/plugins/plugin-notifications#telegram">Telegram</a></td>
</tr>
<tr>
<td><a href="https://kestra.io/plugins/plugin-jdbc-trino">Trino</a></td>
<td><a href="https://kestra.io/plugins/plugin-serdes#xml">XML</a></td>
<td><a href="https://kestra.io/plugins/plugin-jdbc-vertica">Vertica</a></td>
</tr>
</table>
This list is growing quickly and we welcome contributions.
## 🌐 Join the Community
Stay connected and get support. If you need help or have any questions, reach out using one of the following channels:
- **Slack:** Join our [Slack community](https://kestra.io/slack) to ask questions and share ideas.
- **GitHub Discussions:** Use [GitHub discussions](https://github.com/kestra-io/kestra/discussions) to start a conversation that is not a bug or feature request.
- **LinkedIn:** Follow us on [LinkedIn](https://www.linkedin.com/company/kestra/) — next to Slack and GitHub, this is our main channel to share updates and product announcements.
- **YouTube:** Subscribe to our [YouTube channel](https://www.youtube.com/@kestra-io) for educational video content. We publish new videos every week!
- **X:** Follow us on [X](https://x.com/kestra_io) if you're still active there.
---
## 🤝 Contributing
We welcome contributions of all kinds!
- **Report Issues:** Found a bug or have a feature request? Open an [issue on GitHub](https://github.com/kestra-io/kestra/issues).
- **Contribute Code:** Check out our [Contributor Guide](https://kestra.io/docs/getting-started/contributing) for initial guidelines, and explore our [good first issues](https://go.kestra.io/contributing) for beginner-friendly tasks to tackle first.
- **Develop Plugins:** Build and share plugins using our [Plugin Developer Guide](https://kestra.io/docs/plugin-developer-guide/).
- **Contribute to our Docs:** Contribute edits or updates to keep our [documentation](https://github.com/kestra-io/docs) top-notch.
We love contributions, big or small. Check out [our contributor guide](https://github.com/kestra-io/kestra/blob/develop/.github/CONTRIBUTING.md) for details on how to contribute to Kestra, and see our [Plugin Developer Guide](https://kestra.io/docs/plugin-developer-guide/) for details on developing and publishing Kestra plugins.
---
## ⭐️ Stay Updated
Give our repository a star to stay informed about the latest features and updates!
[![Star the Repo](https://kestra.io/star.gif)](https://github.com/kestra-io/kestra)
---
## 📄 License
Kestra is licensed under the Apache 2.0 License © [Kestra Technologies](https://kestra.io).
Thank you for considering Kestra for your workflow orchestration needs. We can't wait to see what you'll build!

@@ -1,33 +0,0 @@
# Security Policy
## Supported Versions
We provide security updates for the following versions of Kestra:
- The `latest` release
- Up to two previous minor versions, with fixes released as backports upon customer request.
If you are using an unsupported version, we recommend upgrading to the `latest` version to receive security fixes.
## Reporting a Vulnerability
If you discover a security vulnerability in Kestra, please report it to us privately to ensure a responsible disclosure process. You can contact our security team at:
**security@kestra.io**
### Guidelines for Reporting
- Provide a detailed description of the issue, including steps to reproduce it if possible.
- Do not disclose the vulnerability publicly until we have confirmed and patched the issue.
- If you believe the issue has critical severity, please indicate so in your report to help us prioritize.
## Our Commitment
- We will acknowledge your report within **2 business days**.
- We will work to verify and address the issue as quickly as possible.
- Once the issue is resolved, we will notify you of the fix.
## Acknowledgments
We are happy to credit those who report vulnerabilities responsibly in our release notes, unless you prefer to remain anonymous. If you would like to be acknowledged, please include this in your report.
Thank you for helping to make Kestra more secure!

@@ -1,47 +0,0 @@
#!/bin/bash
set -e
# E2E main script that can be run on a dev computer or in the CI
# it will build the backend of the current git repo and the frontend
# create a docker image out of it
# run tests on this image
LOCAL_IMAGE_VERSION="local-e2e-$(date +%s)"
echo "Running E2E"
echo "Start time: $(date '+%Y-%m-%d %H:%M:%S')"
start_time=$(date +%s)
echo ""
echo "Building the image for this current repository"
make clean
make build-docker VERSION=$LOCAL_IMAGE_VERSION
end_time=$(date +%s)
elapsed=$(( end_time - start_time ))
echo ""
echo "building elapsed time: ${elapsed} seconds"
echo ""
echo "Start time: $(date '+%Y-%m-%d %H:%M:%S')"
start_time2=$(date +%s)
echo "cd ./ui"
cd ./ui
echo "npm i"
npm i
echo 'sh ./run-e2e-tests.sh --kestra-docker-image-to-test "kestra/kestra:$LOCAL_IMAGE_VERSION"'
./run-e2e-tests.sh --kestra-docker-image-to-test "kestra/kestra:$LOCAL_IMAGE_VERSION"
end_time2=$(date +%s)
elapsed2=$(( end_time2 - start_time2 ))
echo ""
echo "Tests elapsed time: ${elapsed2} seconds"
echo ""
total_elapsed=$(( elapsed + elapsed2 ))
echo "Total elapsed time: ${total_elapsed} seconds"
echo ""
exit 0

@@ -1,43 +1,29 @@
import net.e175.klaus.zip.ZipPrefixer
import org.owasp.dependencycheck.gradle.extension.AnalyzerExtension
buildscript {
repositories {
mavenCentral()
}
dependencies {
classpath "net.e175.klaus:zip-prefixer:0.3.1"
}
}
plugins {
// micronaut
id "java"
id 'java-library'
id "idea"
id "com.gradleup.shadow" version "8.3.9"
id "com.github.johnrengelman.shadow" version "8.1.1"
id "application"
// test
id "com.adarshr.test-logger" version "4.0.0"
id "org.sonarqube" version "6.3.1.5724"
id "org.sonarqube" version "5.0.0.4638"
id 'jacoco-report-aggregation'
// helper
id "com.github.ben-manes.versions" version "0.52.0"
id "com.github.ben-manes.versions" version "0.51.0"
// front
id 'com.github.node-gradle.node' version '7.1.0'
id 'org.siouan.frontend-jdk11' version '8.0.0' apply false
// release
id 'net.researchgate.release' version '3.1.0'
id "com.gorylenko.gradle-git-properties" version "2.5.3"
id "io.github.gradle-nexus.publish-plugin" version "2.0.0"
id 'net.researchgate.release' version '3.0.2'
id "com.gorylenko.gradle-git-properties" version "2.4.1"
id 'signing'
id "com.vanniktech.maven.publish" version "0.34.0"
// OWASP dependency check
id "org.owasp.dependencycheck" version "12.1.5" apply false
id 'ru.vyarus.pom' version '2.2.2' apply false
id 'ru.vyarus.github-info' version '2.0.0' apply false
}
idea {
@@ -50,17 +36,9 @@ idea {
/**********************************************************************************************************************\
* Main
**********************************************************************************************************************/
final mainClassName = "io.kestra.cli.App"
final targetJavaVersion = JavaVersion.VERSION_21
application {
mainClass = mainClassName
}
java {
sourceCompatibility = targetJavaVersion
targetCompatibility = targetJavaVersion
}
mainClassName = "io.kestra.cli.App"
sourceCompatibility = 17
targetCompatibility = 17
dependencies {
implementation project(":cli")
@@ -71,321 +49,177 @@ dependencies {
* Dependencies
**********************************************************************************************************************/
allprojects {
group "io.kestra"
tasks.withType(GenerateModuleMetadata).configureEach {
suppressedValidationErrors.add('enforced-platform')
sourceCompatibility = 17
targetCompatibility = 17
repositories {
mavenCentral()
}
if (it.name != 'platform') {
group = "io.kestra"
// micronaut
apply plugin: "java"
apply plugin: "java-library"
apply plugin: "idea"
apply plugin: "jacoco"
java {
sourceCompatibility = targetJavaVersion
targetCompatibility = targetJavaVersion
}
configurations {
developmentOnly // for dependencies that are needed for development only
}
repositories {
mavenCentral()
//
configurations.all {
resolutionStrategy {
force("org.slf4j:slf4j-api:$slf4jVersion")
// ugly bug on google cloud plugins
force("com.google.protobuf:protobuf-java:3.25.3")
force("com.google.protobuf:protobuf-java-util:3.25.3")
// ugly bug for elastic plugins
force("org.apache.httpcomponents:httpclient:4.5.14")
// ugly bug on crypto plugin
force('org.bouncycastle:bcprov-jdk18on:1.78')
force('org.bouncycastle:bcpg-jdk18on:1.78')
// ugly bug for jackson
force("com.fasterxml.jackson:jackson-bom:" + jacksonVersion)
force("com.fasterxml.jackson.core:jackson-bom:" + jacksonVersion)
force("com.fasterxml.jackson.core:jackson-core:" + jacksonVersion)
force("com.fasterxml.jackson.core:jackson-databind:" + jacksonVersion)
force("com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:" + jacksonVersion)
force("com.fasterxml.jackson.module:jackson-module-parameter-names:" + jacksonVersion)
force("com.fasterxml.jackson.datatype:jackson-datatype-guava:" + jacksonVersion)
force("com.fasterxml.jackson.core:jackson-annotations:" + jacksonVersion)
force("com.fasterxml.jackson.dataformat:jackson-dataformat-smile:" + jacksonVersion)
force("com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:" + jacksonVersion)
force("com.fasterxml.jackson.datatype:jackson-datatype-jsr310:" + jacksonVersion)
}
}
// dependencies
dependencies {
// lombok
annotationProcessor "org.projectlombok:lombok:" + lombokVersion
compileOnly 'org.projectlombok:lombok:' + lombokVersion
// micronaut
apply plugin: "java"
apply plugin: "java-library"
apply plugin: "idea"
apply plugin: "jacoco"
annotationProcessor platform("io.micronaut.platform:micronaut-platform:$micronautVersion")
annotationProcessor "io.micronaut:micronaut-inject-java"
annotationProcessor "io.micronaut.validation:micronaut-validation-processor"
configurations {
developmentOnly // for dependencies that are needed for development only
micronaut
}
implementation platform("io.micronaut.platform:micronaut-platform:$micronautVersion")
implementation "io.micronaut:micronaut-inject"
implementation "io.micronaut.validation:micronaut-validation"
implementation "io.micronaut:micronaut-runtime"
implementation "io.micronaut:micronaut-retry"
implementation "io.micronaut:micronaut-jackson-databind"
implementation "io.micronaut.data:micronaut-data-model"
implementation "io.micronaut:micronaut-management"
implementation "io.micrometer:micrometer-core"
implementation "io.micronaut.micrometer:micronaut-micrometer-registry-prometheus"
implementation "io.micronaut:micronaut-http-client"
implementation "io.micronaut.reactor:micronaut-reactor-http-client"
// dependencies
dependencies {
// Platform
annotationProcessor enforcedPlatform(project(":platform"))
implementation enforcedPlatform(project(":platform"))
api enforcedPlatform(project(":platform"))
micronaut enforcedPlatform(project(":platform"))
// logs
implementation "org.slf4j:slf4j-api"
implementation "ch.qos.logback:logback-classic"
implementation "org.codehaus.janino:janino:3.1.12"
implementation group: 'org.apache.logging.log4j', name: 'log4j-to-slf4j', version: '2.23.1'
implementation group: 'org.slf4j', name: 'jul-to-slf4j', version: slf4jVersion
implementation group: 'org.slf4j', name: 'jcl-over-slf4j', version: slf4jVersion
implementation group: 'org.fusesource.jansi', name: 'jansi', version: '2.4.1'
// lombok
annotationProcessor "org.projectlombok:lombok"
compileOnly 'org.projectlombok:lombok'
// jackson
implementation group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: jacksonVersion
implementation group: 'com.fasterxml.jackson.dataformat', name: 'jackson-dataformat-yaml', version: jacksonVersion
implementation group: 'com.fasterxml.jackson.module', name: 'jackson-module-parameter-names', version: jacksonVersion
implementation group: 'com.fasterxml.jackson.datatype', name: 'jackson-datatype-guava', version: jacksonVersion
implementation group: 'com.fasterxml.jackson.datatype', name: 'jackson-datatype-jsr310', version: jacksonVersion
// micronaut
annotationProcessor "io.micronaut:micronaut-inject-java"
annotationProcessor "io.micronaut.validation:micronaut-validation-processor"
micronaut "io.micronaut:micronaut-inject"
micronaut "io.micronaut.validation:micronaut-validation"
micronaut "io.micronaut.beanvalidation:micronaut-hibernate-validator"
micronaut "io.micronaut:micronaut-runtime"
micronaut "io.micronaut:micronaut-retry"
micronaut "io.micronaut:micronaut-jackson-databind"
micronaut "io.micronaut.data:micronaut-data-model"
micronaut "io.micronaut:micronaut-management"
micronaut "io.micrometer:micrometer-core"
micronaut "io.micronaut.micrometer:micronaut-micrometer-registry-prometheus"
micronaut "io.micronaut:micronaut-http-client"
micronaut "io.micronaut.reactor:micronaut-reactor-http-client"
micronaut "io.micronaut.tracing:micronaut-tracing-opentelemetry-http"
// kestra
implementation group: 'com.devskiller.friendly-id', name: 'friendly-id', version: '1.1.0'
implementation group: 'net.thisptr', name: 'jackson-jq', version: '1.0.0-preview.20240207'
// logs
implementation "org.slf4j:slf4j-api"
implementation "ch.qos.logback:logback-classic"
implementation "org.codehaus.janino:janino"
implementation group: 'org.apache.logging.log4j', name: 'log4j-to-slf4j'
implementation group: 'org.slf4j', name: 'jul-to-slf4j'
implementation group: 'org.slf4j', name: 'jcl-over-slf4j'
implementation group: 'org.fusesource.jansi', name: 'jansi'
// OTEL
implementation "io.opentelemetry:opentelemetry-exporter-otlp"
// jackson
implementation group: 'com.fasterxml.jackson.core', name: 'jackson-core'
implementation group: 'com.fasterxml.jackson.core', name: 'jackson-databind'
implementation group: 'com.fasterxml.jackson.core', name: 'jackson-annotations'
implementation group: 'com.fasterxml.jackson.dataformat', name: 'jackson-dataformat-yaml'
implementation group: 'com.fasterxml.jackson.module', name: 'jackson-module-parameter-names'
implementation group: 'com.fasterxml.jackson.datatype', name: 'jackson-datatype-guava'
implementation group: 'com.fasterxml.jackson.datatype', name: 'jackson-datatype-jsr310'
implementation group: 'com.fasterxml.uuid', name: 'java-uuid-generator'
// kestra
implementation group: 'com.devskiller.friendly-id', name: 'friendly-id'
implementation (group: 'net.thisptr', name: 'jackson-jq') {
exclude group: 'com.fasterxml.jackson.core'
}
// exposed utils
api group: 'com.google.guava', name: 'guava'
api group: 'commons-io', name: 'commons-io'
api group: 'org.apache.commons', name: 'commons-lang3'
api "io.swagger.core.v3:swagger-annotations"
}
// exposed utils
api group: 'com.google.guava', name: 'guava', version: '33.1.0-jre'
api group: 'commons-io', name: 'commons-io', version: '2.16.1'
api group: 'org.apache.commons', name: 'commons-lang3', version: '3.14.0'
api "io.swagger.core.v3:swagger-annotations"
}
}
/**********************************************************************************************************************\
* Test
**********************************************************************************************************************/
subprojects {subProj ->
if (subProj.name != 'platform' && subProj.name != 'jmh-benchmarks') {
apply plugin: "com.adarshr.test-logger"
java {
sourceCompatibility = targetJavaVersion
targetCompatibility = targetJavaVersion
}
dependencies {
// Platform
testAnnotationProcessor enforcedPlatform(project(":platform"))
testImplementation enforcedPlatform(project(":platform"))
// lombok
testAnnotationProcessor "org.projectlombok:lombok:"
testCompileOnly 'org.projectlombok:lombok'
// micronaut
testAnnotationProcessor "io.micronaut:micronaut-inject-java"
testAnnotationProcessor "io.micronaut.validation:micronaut-validation-processor"
testImplementation "io.micronaut.test:micronaut-test-junit5"
testImplementation "org.junit.jupiter:junit-jupiter-engine"
testImplementation "org.junit.jupiter:junit-jupiter-params"
testImplementation "org.junit-pioneer:junit-pioneer"
testImplementation 'org.mockito:mockito-junit-jupiter'
// hamcrest
testImplementation 'org.hamcrest:hamcrest'
testImplementation 'org.hamcrest:hamcrest-library'
testImplementation 'org.exparity:hamcrest-date'
//assertj
testImplementation 'org.assertj:assertj-core'
}
test {
useJUnitPlatform()
reports {
junitXml.required = true
junitXml.outputPerTestCase = true
junitXml.mergeReruns = true
junitXml.includeSystemErrLog = true;
junitXml.outputLocation = layout.buildDirectory.dir("test-results/test")
}
// set Xmx for test workers
maxHeapSize = '4g'
// configure en_US default locale for tests
systemProperty 'user.language', 'en'
systemProperty 'user.country', 'US'
environment 'SECRET_MY_SECRET', "{\"secretKey\":\"secretValue\"}".bytes.encodeBase64().toString()
environment 'SECRET_NEW_LINE', "cGFzc3dvcmR2ZXJ5dmVyeXZleXJsb25ncGFzc3dvcmR2ZXJ5dmVyeXZleXJsb25ncGFzc3dvcmR2\nZXJ5dmVyeXZleXJsb25ncGFzc3dvcmR2ZXJ5dmVyeXZleXJsb25ncGFzc3dvcmR2ZXJ5dmVyeXZl\neXJsb25n"
environment 'SECRET_WEBHOOK_KEY', "secretKey".bytes.encodeBase64().toString()
environment 'SECRET_NON_B64_SECRET', "some secret value"
environment 'SECRET_PASSWORD', "cGFzc3dvcmQ="
environment 'ENV_TEST1', "true"
environment 'ENV_TEST2', "Pass by env"
if (subProj.name == 'core' || subProj.name == 'jdbc-h2' || subProj.name == 'jdbc-mysql' || subProj.name == 'jdbc-postgres') {
// JUnit 5 parallel settings
systemProperty 'junit.jupiter.execution.parallel.enabled', 'true'
systemProperty 'junit.jupiter.execution.parallel.mode.default', 'concurrent'
systemProperty 'junit.jupiter.execution.parallel.mode.classes.default', 'same_thread'
systemProperty 'junit.jupiter.execution.parallel.config.strategy', 'dynamic'
}
}
testlogger {
theme = 'mocha-parallel'
showExceptions = true
showFullStackTraces = true
showCauses = true
slowThreshold = 2000
showStandardStreams = true
showPassedStandardStreams = false
showSkippedStandardStreams = true
}
}
}
/**********************************************************************************************************************\
* End-to-End Tests
**********************************************************************************************************************/
def e2eTestsCheck = tasks.register('e2eTestsCheck') {
group = 'verification'
description = "Runs the 'check' task for all e2e-tests modules"
doFirst {
project.ext.set("e2e-tests", true)
}
}
subprojects {
// Add e2e-tests modules check tasks to e2eTestsCheck
if (project.name.startsWith("e2e-tests")) {
test {
onlyIf {
project.hasProperty("e2e-tests")
}
}
apply plugin: "com.adarshr.test-logger"
sourceCompatibility = 17
targetCompatibility = 17
dependencies {
// lombok
testAnnotationProcessor "org.projectlombok:lombok:" + lombokVersion
testCompileOnly 'org.projectlombok:lombok:' + lombokVersion
// micronaut
testAnnotationProcessor platform("io.micronaut.platform:micronaut-platform:$micronautVersion")
testAnnotationProcessor "io.micronaut:micronaut-inject-java"
testAnnotationProcessor "io.micronaut.validation:micronaut-validation-processor"
testImplementation platform("io.micronaut.platform:micronaut-platform:$micronautVersion")
testImplementation "io.micronaut.test:micronaut-test-junit5"
testImplementation "org.junit.jupiter:junit-jupiter-engine"
testImplementation "org.junit.jupiter:junit-jupiter-params"
testImplementation "org.junit-pioneer:junit-pioneer:2.2.0"
testImplementation 'org.mockito:mockito-junit-jupiter'
// hamcrest
testImplementation 'org.hamcrest:hamcrest:2.2'
testImplementation 'org.hamcrest:hamcrest-library:2.2'
testImplementation group: 'org.exparity', name: 'hamcrest-date', version: '2.0.8'
}
afterEvaluate {
// Add e2e-tests modules check tasks to e2eTestsCheck
if (project.name.startsWith("e2e-tests")) {
e2eTestsCheck.configure {
finalizedBy(check)
}
}
test {
useJUnitPlatform()
maxHeapSize = "4048m"
// configure en_US default locale for tests
systemProperty 'user.language', 'en'
systemProperty 'user.country', 'US'
environment 'SECRET_MY_SECRET', "{\"secretKey\":\"secretValue\"}".bytes.encodeBase64().toString()
environment 'SECRET_NEW_LINE', "cGFzc3dvcmR2ZXJ5dmVyeXZleXJsb25ncGFzc3dvcmR2ZXJ5dmVyeXZleXJsb25ncGFzc3dvcmR2\nZXJ5dmVyeXZleXJsb25ncGFzc3dvcmR2ZXJ5dmVyeXZleXJsb25ncGFzc3dvcmR2ZXJ5dmVyeXZl\neXJsb25n"
environment 'SECRET_WEBHOOK_KEY', "secretKey".bytes.encodeBase64().toString()
environment 'SECRET_NON_B64_SECRET', "some secret value"
environment 'SECRET_PASSWORD', "cGFzc3dvcmQ="
environment 'KESTRA_TEST1', "true"
environment 'KESTRA_TEST2', "Pass by env"
}
}
/**********************************************************************************************************************\
* Allure Reports
**********************************************************************************************************************/
subprojects {
if (it.name != 'platform' && it.name != 'jmh-benchmarks') {
dependencies {
testImplementation platform("io.qameta.allure:allure-bom")
testImplementation "io.qameta.allure:allure-junit5"
}
configurations {
agent {
canBeResolved = true
canBeConsumed = true
}
}
dependencies {
agent "org.aspectj:aspectjweaver:1.9.24"
}
test {
jvmArgs = ["-javaagent:${configurations.agent.singleFile}"]
}
testlogger {
theme 'mocha-parallel'
showExceptions true
showFullStackTraces true
showCauses true
slowThreshold 2000
showStandardStreams true
showPassedStandardStreams false
showSkippedStandardStreams true
}
}
/**********************************************************************************************************************\
* Jacoco
**********************************************************************************************************************/
subprojects {
if (it.name != 'platform' && it.name != 'jmh-benchmarks') {
apply plugin: 'jacoco'
test {
finalizedBy jacocoTestReport
}
jacocoTestReport {
dependsOn test
}
}
}
tasks.named('check') {
dependsOn tasks.named('testCodeCoverageReport', JacocoReport)
}
tasks.named('testCodeCoverageReport') {
dependsOn ':core:copyGradleProperties'
dependsOn ':ui:assembleFrontend'
}
/**********************************************************************************************************************\
* Sonar
**********************************************************************************************************************/
subprojects {
sonar {
properties {
property "sonar.coverage.jacoco.xmlReportPaths", "$projectDir.parentFile.path/build/reports/jacoco/testCodeCoverageReport/testCodeCoverageReport.xml"
}
}
}
sonar {
properties {
property "sonar.projectKey", "kestra-io_kestra"
property "sonar.organization", "kestra-io"
property "sonar.host.url", "https://sonarcloud.io"
}
}
/**********************************************************************************************************************\
* OWASP Dependency check
**********************************************************************************************************************/
apply plugin: 'org.owasp.dependencycheck'
dependencyCheck {
// fail only on HIGH and CRITICAL vulnerabilities, we may want to lower to 5 (mid-medium) later
failBuildOnCVSS = 7
// disable the .NET assembly analyzer as otherwise it wants to analyze EXE file
analyzers(new Action<AnalyzerExtension>() {
@Override
void execute(AnalyzerExtension analyzerExtension) {
analyzerExtension.assemblyEnabled = false
}
})
// configure a suppression file
suppressionFile = "$projectDir/owasp-dependency-suppressions.xml"
nvd.apiKey = System.getenv("NVD_API_KEY")
}
/**********************************************************************************************************************\
* Micronaut
**********************************************************************************************************************/
allprojects {
gradle.projectsEvaluated {
tasks.withType(JavaCompile).configureEach {
tasks.withType(JavaCompile) {
options.encoding = "UTF-8"
options.compilerArgs.add("-parameters")
options.compilerArgs.add("-Xlint:all")
@@ -394,7 +228,7 @@ allprojects {
}
}
tasks.withType(JavaCompile).configureEach {
tasks.withType(JavaCompile) {
options.encoding = "UTF-8"
options.compilerArgs.add("-parameters")
}
@@ -414,7 +248,7 @@ run.jvmArgs(
**********************************************************************************************************************/
jar {
manifest {
attributes(
attributes (
"Main-Class": mainClassName,
"X-Kestra-Name": project.name,
"X-Kestra-Title": project.name,
@@ -427,7 +261,7 @@ jar {
shadowJar {
archiveClassifier.set(null)
mergeServiceFiles()
zip64 = true
zip64 true
}
distZip.dependsOn shadowJar
@@ -435,233 +269,209 @@ distTar.dependsOn shadowJar
startScripts.dependsOn shadowJar
startShadowScripts.dependsOn jar
shadowJar.dependsOn 'ui:assembleFrontend'
shadowJar.dependsOn jar
/**********************************************************************************************************************\
* Executable Jar
**********************************************************************************************************************/
def executableDir = layout.buildDirectory.dir("executable")
def executable = layout.buildDirectory.file("executable/${project.name}-${project.version}").get().asFile
def executableDir = file("${buildDir}/executable")
def executable = file("${buildDir}/executable/${project.name}-${project.version}")
tasks.register('writeExecutableJar') {
group = "build"
description = "Write an executable jar from shadow jar"
task writeExecutableJar() {
group "build"
description "Write an executable jar from shadow jar"
dependsOn = [shadowJar]
final shadowJarFile = tasks.shadowJar.outputs.files.singleFile
inputs.file shadowJarFile
outputs.file executable
outputs.cacheIf { true }
doFirst {
executableDir.get().asFile.mkdirs()
executableDir.mkdirs()
}
doLast {
executable.setBytes(shadowJarFile.readBytes())
ByteArrayOutputStream executableBytes = new ByteArrayOutputStream()
executableBytes.write("\n: <<END_OF_KESTRA_SELFRUN\r\n".getBytes())
executableBytes.write(file("gradle/jar/selfrun.bat").readBytes())
executableBytes.write("\r\n".getBytes())
executableBytes.write("END_OF_KESTRA_SELFRUN\r\n\n".getBytes())
executableBytes.write(file("gradle/jar/selfrun.sh").readBytes())
ZipPrefixer.applyPrefixBytesToZip(executable.toPath(), executableBytes.toByteArray())
executable.write("")
executable.append("\n: <<END_OF_KESTRA_SELFRUN\r\n")
executable.append(file("gradle/jar/selfrun.bat").readBytes())
executable.append("\r\nEND_OF_KESTRA_SELFRUN\r\n\n")
executable.append(file("gradle/jar/selfrun.sh").readBytes())
executable.append(file("${buildDir}/libs/${project.name}-${project.version}.jar").readBytes())
executable.setExecutable(true)
}
}
tasks.register('executableJar', Zip) {
group = "build"
description = "Zip the executable jar"
task executableJar(type: Zip) {
group "build"
description "Zip the executable jar"
dependsOn = [writeExecutableJar]
archiveFileName = "${project.name}-${project.version}.zip"
destinationDirectory = layout.buildDirectory.dir('archives')
destinationDirectory = file("${buildDir}/archives")
from executableDir
archiveClassifier.set(null)
}
/**********************************************************************************************************************\
* Jacoco
**********************************************************************************************************************/
subprojects {
test {
finalizedBy jacocoTestReport
}
jacoco {
toolVersion = "0.8.11"
}
jacocoTestReport {
getAdditionalSourceDirs().setFrom(files(sourceSets.main.allSource.srcDirs))
getSourceDirectories().setFrom(files(sourceSets.main.allSource.srcDirs))
getClassDirectories().setFrom(files(sourceSets.main.output))
reports {
html.required = true
xml.required = true
}
}
sonar {
properties {
property "sonar.coverage.jacoco.xmlReportPaths", "$projectDir.parentFile.path/build/reports/jacoco/testCodeCoverageReport/testCodeCoverageReport.xml"
}
}
}
task jacoco(type: JacocoReport) {
dependsOn = subprojects.test
def javaProject = subprojects.findAll { it.name != 'ui' };
getAdditionalSourceDirs().setFrom(files(javaProject.sourceSets.main.allSource.srcDirs))
getSourceDirectories().setFrom(files(javaProject.sourceSets.main.allSource.srcDirs))
getClassDirectories().setFrom(files(javaProject.sourceSets.main.output))
getExecutionData().setFrom(files(javaProject.jacocoTestReport.executionData))
}
tasks.named('check') {
dependsOn tasks.named('testCodeCoverageReport', JacocoReport)
}
/**********************************************************************************************************************\
* Sonar
**********************************************************************************************************************/
sonar {
properties {
property "sonar.projectKey", "kestra-io_kestra"
property "sonar.organization", "kestra-io"
property "sonar.host.url", "https://sonarcloud.io"
}
}
/**********************************************************************************************************************\
* Standalone
**********************************************************************************************************************/
tasks.register('runLocal', JavaExec) {
task runLocal(type: JavaExec) {
group = "application"
description = "Run Kestra as server local"
classpath = project(":cli").sourceSets.main.runtimeClasspath
mainClass = mainClassName
environment 'MICRONAUT_ENVIRONMENTS', 'override'
args 'server', 'local', '--plugins', 'local/plugins'
}
tasks.register('runStandalone', JavaExec) {
group = "application"
description = "Run Kestra as server local"
classpath = project(":cli").sourceSets.main.runtimeClasspath
mainClass = mainClassName
environment 'MICRONAUT_ENVIRONMENTS', 'override'
args 'server', 'standalone', '--plugins', 'local/plugins'
}
/**********************************************************************************************************************\
* Publish
**********************************************************************************************************************/
subprojects {subProject ->
if (subProject.name != 'jmh-benchmarks' && subProject.name != rootProject.name) {
apply plugin: 'signing'
apply plugin: "com.vanniktech.maven.publish"
javadoc {
options {
locale = 'en_US'
encoding = 'UTF-8'
addStringOption("Xdoclint:none", "-quiet")
}
}
tasks.register('sourcesJar', Jar) {
dependsOn = [':core:copyGradleProperties']
dependsOn = [':ui:assembleFrontend']
archiveClassifier.set('sources')
from sourceSets.main.allSource
}
sourcesJar.dependsOn ':core:copyGradleProperties'
sourcesJar.dependsOn ':ui:assembleFrontend'
tasks.register('javadocJar', Jar) {
archiveClassifier.set('javadoc')
from javadoc
}
tasks.register('testsJar', Jar) {
group = 'build'
description = 'Build the tests jar'
archiveClassifier.set('tests')
if (sourceSets.matching { it.name == 'test'}) {
from sourceSets.named('test').get().output
}
}
//These modules should not be published
def unpublishedModules = ["jdbc-mysql", "jdbc-postgres", "webserver"]
if (subProject.name in unpublishedModules){
return
}
mavenPublishing {
publishToMavenCentral(true)
signAllPublications()
coordinates(
"${rootProject.group}",
subProject.name == "cli" ? rootProject.name : subProject.name,
"${rootProject.version}"
)
pom {
name = project.name
description = "${project.group}:${project.name}:${rootProject.version}"
url = "https://github.com/kestra-io/${rootProject.name}"
licenses {
license {
name = "The Apache License, Version 2.0"
url = "http://www.apache.org/licenses/LICENSE-2.0.txt"
}
}
developers {
developer {
id = "tchiotludo"
name = "Ludovic Dehon"
email = "ldehon@kestra.io"
}
}
scm {
connection = 'scm:git:'
url = "https://github.com/kestra-io/${rootProject.name}"
}
}
}
afterEvaluate {
publishing {
publications {
withType(MavenPublication).configureEach { publication ->
if (subProject.name == "platform") {
// Clear all artifacts except the BOM
publication.artifacts.clear()
}
}
}
}
}
if (subProject.name == 'cli') {
/* Make sure the special publication is wired *after* every plugin */
subProject.afterEvaluate {
/* 1. Remove the default java component so Gradle stops expecting
the standard cli-*.jar, sources, javadoc, etc. */
components.removeAll { it.name == "java" }
/* 2. Replace the publications artifacts with shadow + exec */
publishing.publications.withType(MavenPublication).configureEach { pub ->
pub.artifacts.clear()
// main shadow JAR built at root
pub.artifact(rootProject.tasks.named("shadowJar").get()) {
extension = "jar"
}
// executable ZIP built at root
pub.artifact(rootProject.tasks.named("executableJar").get().archiveFile) {
classifier = "exec"
extension = "zip"
}
pub.artifact(tasks.named("sourcesJar").get())
pub.artifact(tasks.named("javadocJar").get())
}
/* 3. Disable Gradle-module metadata for this publication to
avoid the “artifact removed from java component” error. */
tasks.withType(GenerateModuleMetadata).configureEach { it.enabled = false }
/* 4. Make every publish task in :cli wait for the two artifacts */
tasks.matching { it.name.startsWith("publish") }.configureEach {
dependsOn rootProject.tasks.named("shadowJar")
dependsOn rootProject.tasks.named("executableJar")
}
}
}
if (subProject.name != 'platform' && subProject.name != 'cli') {
// only if a test source set actually exists (avoids empty artifacts)
def hasTests = subProject.extensions.findByName('sourceSets')?.findByName('test') != null
if (hasTests) {
// wire the artifact onto every Maven publication of this subproject
publishing {
publications {
withType(MavenPublication).configureEach { pub ->
// keep the normal java component + sources/javadoc already configured
pub.artifact(subProject.tasks.named('testsJar').get())
}
}
}
// make sure publish tasks build the tests jar first
tasks.matching { it.name.startsWith('publish') }.configureEach {
dependsOn subProject.tasks.named('testsJar')
}
}
nexusPublishing {
repositoryDescription = "${project.group}:${rootProject.name}:${project.version}"
useStaging = !project.version.endsWith("-SNAPSHOT")
repositories {
sonatype {
nexusUrl.set(uri("https://s01.oss.sonatype.org/service/local/"))
snapshotRepositoryUrl.set(uri("https://s01.oss.sonatype.org/content/repositories/snapshots/"))
}
}
}
subprojects {
apply plugin: "maven-publish"
apply plugin: 'signing'
apply plugin: 'ru.vyarus.pom'
apply plugin: 'ru.vyarus.github-info'
javadoc {
options {
locale = 'en_US'
encoding = 'UTF-8'
addStringOption("Xdoclint:none", "-quiet")
}
}
task sourcesJar(type: Jar) {
dependsOn = [':core:copyGradleProperties']
dependsOn = [':ui:assembleFrontend']
archiveClassifier.set('sources')
from sourceSets.main.allSource
}
sourcesJar.dependsOn ':core:copyGradleProperties'
sourcesJar.dependsOn ':ui:assembleFrontend'
task javadocJar(type: Jar) {
archiveClassifier.set('javadoc')
from javadoc
}
task testsJar(type: Jar) {
archiveClassifier.set('tests')
from sourceSets.test.output
}
github {
user 'kestra-io'
license 'Apache'
repository 'kestra'
site 'https://kestra.io'
}
pom {
description 'The modern, scalable orchestrator & scheduler open source platform'
developers {
developer {
id "tchiotludo"
name "Ludovic Dehon"
}
}
}
publishing {
publications {
sonatypePublication(MavenPublication) {
version project.version
if (project.name.contains('cli')) {
groupId "io.kestra"
artifactId "kestra"
artifact shadowJar
artifact executableJar
} else {
from components.java
groupId project.group
artifactId project.name
artifact sourcesJar
artifact javadocJar
artifact testsJar
}
}
}
}
signing {
// only sign JARs that we publish to Sonatype
required { gradle.taskGraph.hasTask("publishSonatypePublicationPublicationToSonatypeRepository") }
sign publishing.publications.sonatypePublication
}
}
/**********************************************************************************************************************\
@@ -673,17 +483,10 @@ release {
tagCommitMessage = 'tag version'
newVersionCommitMessage = 'update snapshot version'
tagTemplate = 'v${version}'
pushReleaseVersionBranch = 'master'
buildTasks = ['classes']
git {
requireBranch.set('develop')
requireBranch.set('release')
}
// Dynamically set properties with default values
failOnSnapshotDependencies = providers.gradleProperty("release.failOnSnapshotDependencies")
.map(val -> Boolean.parseBoolean(val))
.getOrElse(true)
pushReleaseVersionBranch = providers.gradleProperty("release.pushReleaseVersionBranch")
.getOrElse(null)
}

@@ -1,6 +1,3 @@
configurations {
implementation.extendsFrom(micronaut)
}
dependencies {
// micronaut
implementation "info.picocli:picocli"
@@ -9,18 +6,24 @@ dependencies {
implementation "io.micronaut:micronaut-http-server-netty"
// logs
implementation 'ch.qos.logback.contrib:logback-json-classic'
implementation 'ch.qos.logback.contrib:logback-jackson'
implementation 'ch.qos.logback.contrib:logback-json-classic:0.1.5'
implementation 'ch.qos.logback.contrib:logback-jackson:0.1.5'
// OTLP metrics
implementation "io.micronaut.micrometer:micronaut-micrometer-registry-otlp"
// aether still use javax.inject
compileOnly 'javax.inject:javax.inject:1'
// plugins
implementation 'org.eclipse.aether:aether-api:1.1.0'
implementation 'org.eclipse.aether:aether-spi:1.1.0'
implementation 'org.eclipse.aether:aether-util:1.1.0'
implementation 'org.eclipse.aether:aether-impl:1.1.0'
implementation 'org.eclipse.aether:aether-connector-basic:1.1.0'
implementation 'org.eclipse.aether:aether-transport-file:1.1.0'
implementation 'org.eclipse.aether:aether-transport-http:1.1.0'
implementation('org.apache.maven:maven-aether-provider:3.3.9') {
// sisu dependency injector is not used
exclude group: 'org.eclipse.sisu'
}
// modules
implementation project(":core")
implementation project(":script")
implementation project(":repository-memory")
@@ -33,13 +36,5 @@ dependencies {
implementation project(":storage-local")
// Kestra server components
implementation project(":executor")
implementation project(":scheduler")
implementation project(":webserver")
implementation project(":worker")
//test
testImplementation project(':tests')
testImplementation "org.wiremock:wiremock-jetty12"
}
}

@@ -14,15 +14,13 @@ import io.micronaut.http.netty.body.NettyJsonHandler;
import io.micronaut.json.JsonMapper;
import jakarta.inject.Inject;
import jakarta.inject.Named;
import picocli.CommandLine;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import lombok.Builder;
import lombok.Value;
import lombok.extern.jackson.Jacksonized;
import picocli.CommandLine;
public abstract class AbstractApiCommand extends AbstractCommand {
@CommandLine.Option(names = {"--server"}, description = "Kestra server url", defaultValue = "http://localhost:8080")
@@ -34,7 +32,7 @@ public abstract class AbstractApiCommand extends AbstractCommand {
@CommandLine.Option(names = {"--user"}, paramLabel = "<user:password>", description = "Server user and password")
protected String user;
@CommandLine.Option(names = {"--tenant"}, description = "Tenant identifier (EE only)")
@CommandLine.Option(names = {"--tenant"}, description = "Tenant identifier (EE only, when multi-tenancy is enabled)")
protected String tenantId;
@CommandLine.Option(names = {"--api-token"}, description = "API Token (EE only).")
@@ -45,18 +43,8 @@ public abstract class AbstractApiCommand extends AbstractCommand {
@Nullable
private HttpClientConfiguration httpClientConfiguration;
/**
* {@inheritDoc}
*/
protected boolean loadExternalPlugins() {
return false;
}
protected DefaultHttpClient client() throws URISyntaxException {
DefaultHttpClient defaultHttpClient = DefaultHttpClient.builder()
.uri(server.toURI())
.configuration(httpClientConfiguration != null ? httpClientConfiguration : new DefaultHttpClientConfiguration())
.build();
DefaultHttpClient defaultHttpClient = new DefaultHttpClient(server.toURI(), httpClientConfiguration != null ? httpClientConfiguration : new DefaultHttpClientConfiguration());
MessageBodyHandlerRegistry defaultHandlerRegistry = defaultHttpClient.getHandlerRegistry();
if (defaultHandlerRegistry instanceof ContextlessMessageBodyHandlerRegistry modifiableRegistry) {
modifiableRegistry.add(MediaType.TEXT_JSON_TYPE, new NettyJsonHandler<>(JsonMapper.createDefault()));
@@ -71,7 +59,7 @@ public abstract class AbstractApiCommand extends AbstractCommand {
if (this.user != null) {
List<String> split = Arrays.asList(this.user.split(":"));
String user = split.getFirst();
String user = split.get(0);
String password = String.join(":", split.subList(1, split.size()));
request.basicAuth(user, password);
@@ -84,19 +72,11 @@ public abstract class AbstractApiCommand extends AbstractCommand {
return request;
}
protected String apiUri(String path, String tenantId) {
protected String apiUri(String path) {
if (path == null || !path.startsWith("/")) {
throw new IllegalArgumentException("'path' must be non-null and start with '/'");
}
return "/api/v1/" + tenantId + path;
}
@Builder
@Value
@Jacksonized
public static class UpdateResult {
String id;
String namespace;
return tenantId == null ? "/api/v1" + path : "/api/v1/" + tenantId + path;
}
}

@@ -4,17 +4,17 @@ import ch.qos.logback.classic.LoggerContext;
import com.google.common.collect.ImmutableMap;
import io.kestra.cli.commands.servers.ServerCommandInterface;
import io.kestra.cli.services.StartupHookInterface;
import io.kestra.core.plugins.PluginManager;
import io.kestra.core.plugins.PluginRegistry;
import io.kestra.webserver.services.FlowAutoLoaderService;
import io.micronaut.context.ApplicationContext;
import io.micronaut.context.env.yaml.YamlPropertySourceLoader;
import io.micronaut.core.annotation.Introspected;
import io.micronaut.http.uri.UriBuilder;
import io.micronaut.management.endpoint.EndpointDefaultConfiguration;
import io.micronaut.runtime.server.EmbeddedServer;
import jakarta.inject.Provider;
import lombok.extern.slf4j.Slf4j;
import org.apache.http.client.utils.URIBuilder;
import io.kestra.core.contexts.KestraClassLoader;
import io.kestra.core.plugins.PluginRegistry;
import io.kestra.core.plugins.PluginScanner;
import io.kestra.core.plugins.RegisteredPlugin;
import io.kestra.core.utils.Rethrow;
import picocli.CommandLine;
@@ -25,22 +25,18 @@ import java.net.URISyntaxException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.text.MessageFormat;
import java.time.temporal.ChronoUnit;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.Callable;
import jakarta.inject.Inject;
import picocli.CommandLine.Command;
import picocli.CommandLine.Option;
@Command(
versionProvider = VersionProvider.class,
@CommandLine.Command(
mixinStandardHelpOptions = true,
showDefaultValues = true
)
@Slf4j
@Introspected
public abstract class AbstractCommand implements Callable<Integer> {
abstract public class AbstractCommand implements Callable<Integer> {
@Inject
private ApplicationContext applicationContext;
@@ -50,31 +46,20 @@ public abstract class AbstractCommand implements Callable<Integer> {
@Inject
private StartupHookInterface startupHook;
@Inject
private io.kestra.core.utils.VersionProvider versionProvider;
@Inject
protected Provider<PluginRegistry> pluginRegistryProvider;
@Inject
protected Provider<PluginManager> pluginManagerProvider;
private PluginRegistry pluginRegistry;
@Option(names = {"-v", "--verbose"}, description = "Change log level. Multiple -v options increase the verbosity.", showDefaultValue = CommandLine.Help.Visibility.NEVER)
@CommandLine.Option(names = {"-v", "--verbose"}, description = "Change log level. Multiple -v options increase the verbosity.", showDefaultValue = CommandLine.Help.Visibility.NEVER)
private boolean[] verbose = new boolean[0];
@Option(names = {"-l", "--log-level"}, description = "Change log level (values: ${COMPLETION-CANDIDATES})")
@CommandLine.Option(names = {"-l", "--log-level"}, description = "Change log level (values: ${COMPLETION-CANDIDATES})")
private LogLevel logLevel = LogLevel.INFO;
@Option(names = {"--internal-log"}, description = "Change also log level for internal log")
@CommandLine.Option(names = {"--internal-log"}, description = "Change also log level for internal log")
private boolean internalLog = false;
@Option(names = {"-c", "--config"}, description = "Path to a configuration file")
@CommandLine.Option(names = {"-c", "--config"}, description = "Path to a configuration file")
private Path config = Paths.get(System.getProperty("user.home"), ".kestra/config.yml");
@Option(names = {"-p", "--plugins"}, description = "Path to plugins directory")
protected Path pluginsPath = Optional.ofNullable(System.getenv("KESTRA_PLUGINS_PATH")).map(Paths::get).orElse(null);
@CommandLine.Option(names = {"-p", "--plugins"}, description = "Path to plugins directory")
protected Path pluginsPath = System.getenv("KESTRA_PLUGINS_PATH") != null ? Paths.get(System.getenv("KESTRA_PLUGINS_PATH")) : null;
public enum LogLevel {
TRACE,
@@ -86,49 +71,17 @@ public abstract class AbstractCommand implements Callable<Integer> {
@Override
public Integer call() throws Exception {
Thread.currentThread().setName(this.getClass().getDeclaredAnnotation(Command.class).name());
Thread.currentThread().setName(this.getClass().getDeclaredAnnotation(CommandLine.Command.class).name());
startLogger();
sendServerLog();
if (this.startupHook != null) {
this.startupHook.start(this);
}
if (pluginRegistryProvider != null && this.pluginsPath != null && loadExternalPlugins()) {
pluginRegistry = pluginRegistryProvider.get();
pluginRegistry.registerIfAbsent(pluginsPath);
// PluginManager must only be initialized if a registry is also instantiated
if (isPluginManagerEnabled()) {
PluginManager manager = pluginManagerProvider.get();
manager.start();
}
}
startWebserver();
return 0;
}
/**
* Specifies whether external plugins must be loaded.
* This method can be overridden by concrete commands.
*
* @return {@code true} if external plugins must be loaded.
*/
protected boolean loadExternalPlugins() {
return true;
}
/**
* Specifies whether the {@link PluginManager} service must be initialized.
* <p>
* This method can be overridden by concrete commands.
*
* @return {@code true} if the {@link PluginManager} service must be initialized.
*/
protected boolean isPluginManagerEnabled() {
return true;
}
private static String message(String message, Object... format) {
return CommandLine.Help.Ansi.AUTO.string(
format.length == 0 ? message : MessageFormat.format(message, format)
@@ -150,25 +103,8 @@ public abstract class AbstractCommand implements Callable<Integer> {
this.logLevel = LogLevel.TRACE;
}
if (this instanceof ServerCommandInterface) {
String buildInfo = "";
if (versionProvider.getRevision() != null) {
buildInfo += " [revision " + versionProvider.getRevision();
if (versionProvider.getDate() != null) {
buildInfo += " / " + versionProvider.getDate().toLocalDateTime().truncatedTo(ChronoUnit.MINUTES);
}
buildInfo += "]";
}
log.info(
"Starting Kestra {} with environments {}{}",
versionProvider.getVersion(),
applicationContext.getEnvironment().getActiveNames(),
buildInfo
);
log.info("Starting Kestra with environments {}", applicationContext.getEnvironment().getActiveNames());
}
((LoggerContext) org.slf4j.LoggerFactory.getILoggerFactory())
@@ -180,6 +116,7 @@ public abstract class AbstractCommand implements Callable<Integer> {
logger.getName().startsWith("io.kestra") &&
!logger.getName().startsWith("io.kestra.ee.runner.kafka.services"))
)
|| logger.getName().startsWith("flow")
)
.forEach(
logger -> logger.setLevel(ch.qos.logback.classic.Level.valueOf(this.logLevel.name()))
@@ -187,8 +124,11 @@ public abstract class AbstractCommand implements Callable<Integer> {
}
private void sendServerLog() {
if (log.isTraceEnabled() && pluginRegistry != null) {
pluginRegistry.plugins().forEach(c -> log.trace(c.toString()));
if (log.isTraceEnabled() && KestraClassLoader.instance().getPluginRegistry() != null) {
KestraClassLoader.instance()
.getPluginRegistry()
.getPlugins()
.forEach(c -> log.trace(c.toString()));
}
}
@@ -205,9 +145,9 @@ public abstract class AbstractCommand implements Callable<Integer> {
if (this.endpointConfiguration.getPort().isPresent()) {
URI endpoint = null;
try {
endpoint = UriBuilder.of(server.getURL().toURI())
.port(this.endpointConfiguration.getPort().get())
.path("/health")
endpoint = new URIBuilder(server.getURL().toURI())
.setPort(this.endpointConfiguration.getPort().get())
.setPath("/health")
.build();
} catch (URISyntaxException e) {
e.printStackTrace();
@@ -216,25 +156,13 @@ public abstract class AbstractCommand implements Callable<Integer> {
} else {
log.info("Server Running: {}", server.getURL());
}
if (isFlowAutoLoadEnabled()) {
applicationContext
.findBean(FlowAutoLoaderService.class)
.ifPresent(FlowAutoLoaderService::load);
}
});
}
public boolean isFlowAutoLoadEnabled() {
return false;
}
protected void shutdownHook(boolean logShutdown, Rethrow.RunnableChecked<Exception> run) {
protected void shutdownHook(Rethrow.RunnableChecked<Exception> run) {
Runtime.getRuntime().addShutdownHook(new Thread(
() -> {
if (logShutdown) {
log.warn("Receiving shutdown ! Try to graceful exit");
}
log.warn("Receiving shutdown ! Try to graceful exit");
try {
run.run();
} catch (Exception e) {
@@ -259,4 +187,19 @@ public abstract class AbstractCommand implements Callable<Integer> {
return ImmutableMap.of();
}
@SuppressWarnings("unused")
public PluginRegistry initPluginRegistry() {
if (this.pluginsPath == null || !this.pluginsPath.toFile().exists()) {
return null;
}
PluginScanner pluginScanner = new PluginScanner(KestraClassLoader.instance());
List<RegisteredPlugin> scan = pluginScanner.scan(this.pluginsPath);
PluginRegistry pluginRegistry = new PluginRegistry(scan);
KestraClassLoader.instance().setPluginRegistry(pluginRegistry);
return pluginRegistry;
}
}
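The javadoc in this hunk documents two hooks, loadExternalPlugins() and isPluginManagerEnabled(), that concrete commands can override to skip external plugin loading or PluginManager start-up. A hedged, self-contained sketch of that override pattern (both class names are invented; the real base class is AbstractCommand):

// Hypothetical stand-in for the base class: defaults match the hunk above.
abstract class PluginAwareCommandSketch {
    protected boolean loadExternalPlugins() { return true; }
    protected boolean isPluginManagerEnabled() { return true; }
}

// Hypothetical lightweight command that opts out of both, as the javadoc allows.
final class LightweightCommandSketch extends PluginAwareCommandSketch {
    @Override
    protected boolean loadExternalPlugins() { return false; }    // no external plugins needed

    @Override
    protected boolean isPluginManagerEnabled() { return false; } // keep start-up minimal
}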

View File

@@ -1,16 +1,15 @@
package io.kestra.cli;
import io.kestra.cli.services.TenantIdSelectorService;
import io.kestra.cli.commands.flows.FlowValidateCommand;
import io.kestra.core.models.validations.ModelValidator;
import io.kestra.core.models.validations.ValidateConstraintViolation;
import io.kestra.core.serializers.YamlParser;
import io.kestra.core.serializers.YamlFlowParser;
import io.micronaut.core.type.Argument;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.MediaType;
import io.micronaut.http.MutableHttpRequest;
import io.micronaut.http.client.exceptions.HttpClientResponseException;
import io.micronaut.http.client.netty.DefaultHttpClient;
import jakarta.inject.Inject;
import picocli.CommandLine;
import java.io.IOException;
@@ -33,15 +32,6 @@ public abstract class AbstractValidateCommand extends AbstractApiCommand {
@CommandLine.Parameters(index = "0", description = "the directory containing files to check")
protected Path directory;
@Inject
private TenantIdSelectorService tenantService;
/** {@inheritDoc} **/
@Override
protected boolean loadExternalPlugins() {
return local;
}
public static void handleException(ConstraintViolationException e, String resource) {
stdErr("\t@|fg(red) Unable to parse {0} due to the following error(s):|@", resource);
e.getConstraintViolations()
@@ -73,19 +63,19 @@ public abstract class AbstractValidateCommand extends AbstractApiCommand {
public static String buildYamlBody(Path directory) throws IOException {
try(var files = Files.walk(directory)) {
return files.filter(Files::isRegularFile)
.filter(YamlParser::isValidExtension)
.filter(YamlFlowParser::isValidExtension)
.map(throwFunction(path -> Files.readString(path, Charset.defaultCharset())))
.collect(Collectors.joining("\n---\n"));
}
}
// bug in micronaut, we can't inject ModelValidator, so we inject from implementation
// bug in micronaut, we can't inject YamlFlowParser & ModelValidator, so we inject from implementation
public Integer call(
Class<?> cls,
YamlFlowParser yamlFlowParser,
ModelValidator modelValidator,
Function<Object, String> identity,
Function<Object, List<String>> warningsFunction,
Function<Object, List<String>> infosFunction
Function<Object, List<String>> warningsFunction
) throws Exception {
super.call();
@@ -95,29 +85,27 @@ public abstract class AbstractValidateCommand extends AbstractApiCommand {
if(this.local) {
try(var files = Files.walk(directory)) {
files.filter(Files::isRegularFile)
.filter(YamlParser::isValidExtension)
.filter(YamlFlowParser::isValidExtension)
.forEach(path -> {
try {
Object parse = YamlParser.parse(path.toFile(), cls);
Object parse = yamlFlowParser.parse(path.toFile(), cls);
modelValidator.validate(parse);
stdOut("@|green \u2713|@ - " + identity.apply(parse));
List<String> warnings = warningsFunction.apply(parse);
warnings.forEach(warning -> stdOut("@|bold,yellow \u26A0|@ - " + warning));
List<String> infos = infosFunction.apply(parse);
infos.forEach(info -> stdOut("@|bold,blue \u2139|@ - " + info));
} catch (ConstraintViolationException e) {
stdErr("@|red \u2718|@ - " + path);
AbstractValidateCommand.handleException(e, clsName);
FlowValidateCommand.handleException(e, clsName);
returnCode.set(1);
}
});
}
} else {
String body = AbstractValidateCommand.buildYamlBody(directory);
String body = FlowValidateCommand.buildYamlBody(directory);
try(DefaultHttpClient client = client()) {
MutableHttpRequest<String> request = HttpRequest
.POST(apiUri("/flows/validate", tenantService.getTenantId(tenantId)), body).contentType(MediaType.APPLICATION_YAML);
.POST(apiUri("/flows/validate"), body).contentType(MediaType.APPLICATION_YAML);
List<ValidateConstraintViolation> validations = client.toBlocking().retrieve(
this.requestOptions(request),
@@ -130,12 +118,12 @@ public abstract class AbstractValidateCommand extends AbstractApiCommand {
stdOut("@|green \u2713|@ - " + validation.getIdentity());
} else {
stdErr("@|red \u2718|@ - " + validation.getIdentity(directory));
AbstractValidateCommand.handleValidateConstraintViolation(validation, clsName);
FlowValidateCommand.handleValidateConstraintViolation(validation, clsName);
returnCode.set(1);
}
}));
} catch (HttpClientResponseException e){
AbstractValidateCommand.handleHttpException(e, clsName);
FlowValidateCommand.handleHttpException(e, clsName);
return 1;
}
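buildYamlBody above turns a directory of flow files into a single multi-document YAML payload, joining each file's content with a --- separator. A small stand-alone sketch of that step; the extension filter here is an assumption standing in for the parser's isValidExtension check:

import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.stream.Collectors;

final class YamlBodySketch {
    // Walk the directory, keep regular YAML files, and join their contents with "---".
    static String buildYamlBody(Path directory) throws IOException {
        try (var files = Files.walk(directory)) {
            return files.filter(Files::isRegularFile)
                .filter(p -> p.toString().endsWith(".yml") || p.toString().endsWith(".yaml")) // assumption
                .map(p -> {
                    try {
                        return Files.readString(p, Charset.defaultCharset());
                    } catch (IOException e) {
                        throw new UncheckedIOException(e);
                    }
                })
                .collect(Collectors.joining("\n---\n"));
        }
    }
}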

View File

@@ -2,16 +2,16 @@ package io.kestra.cli;
import io.kestra.cli.commands.configs.sys.ConfigCommand;
import io.kestra.cli.commands.flows.FlowCommand;
import io.kestra.cli.commands.migrations.MigrationCommand;
import io.kestra.cli.commands.namespaces.NamespaceCommand;
import io.kestra.cli.commands.plugins.PluginCommand;
import io.kestra.cli.commands.servers.ServerCommand;
import io.kestra.cli.commands.sys.SysCommand;
import io.kestra.cli.commands.templates.TemplateCommand;
import io.kestra.core.contexts.KestraClassLoader;
import io.micronaut.configuration.picocli.MicronautFactory;
import io.micronaut.configuration.picocli.PicocliRunner;
import io.micronaut.context.ApplicationContext;
import io.micronaut.context.ApplicationContextBuilder;
import io.kestra.core.contexts.KestraApplicationContextBuilder;
import io.micronaut.context.env.Environment;
import io.micronaut.core.annotation.Introspected;
import org.slf4j.bridge.SLF4JBridgeHandler;
@@ -43,13 +43,12 @@ import java.util.concurrent.Callable;
SysCommand.class,
ConfigCommand.class,
NamespaceCommand.class,
MigrationCommand.class,
}
)
@Introspected
public class App implements Callable<Integer> {
public static void main(String[] args) {
execute(App.class, new String [] { Environment.CLI }, args);
execute(App.class, args);
}
@Override
@@ -57,30 +56,26 @@ public class App implements Callable<Integer> {
return PicocliRunner.call(App.class, "--help");
}
protected static void execute(Class<?> cls, String[] environments, String... args) {
protected static void execute(Class<?> cls, String... args) {
// Log Bridge
SLF4JBridgeHandler.removeHandlersForRootLogger();
SLF4JBridgeHandler.install();
// Register a ClassLoader with isolation for plugins
Thread.currentThread().setContextClassLoader(KestraClassLoader.create(Thread.currentThread().getContextClassLoader()));
// Init ApplicationContext
ApplicationContext applicationContext = App.applicationContext(cls, environments, args);
ApplicationContext applicationContext = App.applicationContext(cls, args);
// Call Picocli command
int exitCode = 0;
try {
exitCode = new CommandLine(cls, new MicronautFactory(applicationContext)).execute(args);
} catch (CommandLine.InitializationException e){
System.err.println("Could not initialize picocli CommandLine, err: " + e.getMessage());
e.printStackTrace();
exitCode = 1;
}
int exitCode = new CommandLine(cls, new MicronautFactory(applicationContext)).execute(args);
applicationContext.close();
// exit code
System.exit(Objects.requireNonNullElse(exitCode, 0));
}
/**
* Create an {@link ApplicationContext} with additional properties based on configuration files (--config) and
* forced Properties from current command.
@@ -88,22 +83,17 @@ public class App implements Callable<Integer> {
* @param args args passed to java app
* @return the application context created
*/
protected static ApplicationContext applicationContext(Class<?> mainClass,
String[] environments,
String[] args) {
ApplicationContextBuilder builder = ApplicationContext
.builder()
protected static ApplicationContext applicationContext(Class<?> mainClass, String[] args) {
KestraApplicationContextBuilder builder = (KestraApplicationContextBuilder) new KestraApplicationContextBuilder()
.mainClass(mainClass)
.environments(environments);
.environments(Environment.CLI);
CommandLine cmd = new CommandLine(mainClass, CommandLine.defaultFactory());
continueOnParsingErrors(cmd);
CommandLine.ParseResult parseResult = cmd.parseArgs(args);
List<CommandLine> parsedCommands = parseResult.asCommandLineList();
CommandLine commandLine = parsedCommands.getLast();
CommandLine commandLine = parsedCommands.get(parsedCommands.size() - 1);
Class<?> cls = commandLine.getCommandSpec().userObject().getClass();
if (AbstractCommand.class.isAssignableFrom(cls)) {
@@ -114,7 +104,7 @@ public class App implements Callable<Integer> {
// if class have propertiesOverrides, add force properties for this class
Map<String, Object> propertiesOverrides = getPropertiesFromMethod(cls, "propertiesOverrides", null);
if (propertiesOverrides != null && isPracticalCommand(commandLine)) {
if (propertiesOverrides != null) {
properties.putAll(propertiesOverrides);
}
@@ -125,15 +115,17 @@ public class App implements Callable<Integer> {
.stream()
.filter(argSpec -> ((Field) argSpec.userObject()).getName().equals("serverPort"))
.findFirst()
.ifPresent(argSpec -> properties.put("micronaut.server.port", argSpec.getValue()));
.ifPresent(argSpec -> {
properties.put("micronaut.server.port", argSpec.getValue());
});
builder.properties(properties);
}
return builder.build();
}
private static void continueOnParsingErrors(CommandLine cmd) {
cmd.getCommandSpec().parser().collectErrors(true);
// add plugins registry if plugin path defined
builder.pluginRegistry(getPropertiesFromMethod(cls, "initPluginRegistry", commandLine.getCommandSpec().userObject()));
}
return builder.build();
}
@SuppressWarnings("unchecked")
@@ -152,12 +144,4 @@ public class App implements Callable<Integer> {
return null;
}
/**
* @param commandLine parsed command
* @return false if the command is a help or version request, true otherwise
*/
private static boolean isPracticalCommand(CommandLine commandLine) {
return !(commandLine.isUsageHelpRequested() || commandLine.isVersionHelpRequested());
}
}
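One side of this hunk documents isPracticalCommand and only applies a command's propertiesOverrides when the invocation is not a plain --help or --version request. A plain-Java sketch of that property assembly, with all names invented for illustration:

import java.util.HashMap;
import java.util.Map;

final class PropertyAssemblySketch {
    // Merge config-file properties with per-command overrides, skipping the overrides
    // for help/version-only invocations ("non-practical" commands).
    static Map<String, Object> assemble(Map<String, Object> fromConfigFile,
                                        Map<String, Object> propertiesOverrides,
                                        boolean usageHelpRequested,
                                        boolean versionHelpRequested) {
        Map<String, Object> properties = new HashMap<>(fromConfigFile);
        boolean practical = !(usageHelpRequested || versionHelpRequested);
        if (propertiesOverrides != null && practical) {
            properties.putAll(propertiesOverrides);
        }
        return properties;
    }
}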

View File

@@ -1,96 +0,0 @@
package io.kestra.cli;
import io.kestra.core.runners.*;
import io.kestra.core.server.Service;
import io.kestra.core.utils.Await;
import io.kestra.core.utils.ExecutorsUtils;
import io.kestra.worker.DefaultWorker;
import io.micronaut.context.ApplicationContext;
import io.micronaut.context.annotation.Value;
import jakarta.annotation.PreDestroy;
import jakarta.inject.Inject;
import lombok.Setter;
import lombok.extern.slf4j.Slf4j;
import java.time.Duration;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.UUID;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
@SuppressWarnings("try")
@Slf4j
public class StandAloneRunner implements Runnable, AutoCloseable {
@Setter protected int workerThread = Math.max(3, Runtime.getRuntime().availableProcessors());
@Setter protected boolean schedulerEnabled = true;
@Setter protected boolean workerEnabled = true;
@Setter protected boolean indexerEnabled = true;
@Inject
private ExecutorsUtils executorsUtils;
@Inject
private ApplicationContext applicationContext;
@Value("${kestra.server.standalone.running.timeout:PT1M}")
private Duration runningTimeout;
private final List<Service> servers = new ArrayList<>();
private final AtomicBoolean running = new AtomicBoolean(false);
private ExecutorService poolExecutor;
@Override
public void run() {
running.set(true);
poolExecutor = executorsUtils.cachedThreadPool("standalone-runner");
poolExecutor.execute(applicationContext.getBean(ExecutorInterface.class));
if (workerEnabled) {
// FIXME: For backward-compatibility with Kestra 0.15.x and earlier we still use a UUID for the Worker ID instead of IdUtils
String workerID = UUID.randomUUID().toString();
Worker worker = applicationContext.createBean(DefaultWorker.class, workerID, workerThread, null);
applicationContext.registerSingleton(worker); //
poolExecutor.execute(worker);
servers.add(worker);
}
if (schedulerEnabled) {
Scheduler scheduler = applicationContext.getBean(Scheduler.class);
poolExecutor.execute(scheduler);
servers.add(scheduler);
}
if (indexerEnabled) {
Indexer indexer = applicationContext.getBean(Indexer.class);
poolExecutor.execute(indexer);
servers.add(indexer);
}
try {
Await.until(() -> servers.stream().allMatch(s -> Optional.ofNullable(s.getState()).orElse(Service.ServiceState.RUNNING).isRunning()), null, runningTimeout);
} catch (TimeoutException e) {
throw new RuntimeException(
servers.stream().filter(s -> !Optional.ofNullable(s.getState()).orElse(Service.ServiceState.RUNNING).isRunning())
.map(Service::getClass)
.toList() + " not started in time");
}
}
public boolean isRunning() {
return this.running.get();
}
@PreDestroy
@Override
public void close() throws Exception {
if (this.poolExecutor != null) {
this.poolExecutor.shutdown();
}
}
}
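The StandAloneRunner above starts the executor, worker, scheduler and indexer, then blocks until every registered service reports a running state or a timeout elapses, failing with the list of services that never came up. A plain-Java sketch of that wait loop (the ServiceLike interface and the 100 ms poll interval are assumptions; the real code uses Await.until with Service.ServiceState):

import java.time.Duration;
import java.time.Instant;
import java.util.List;
import java.util.concurrent.TimeoutException;
import java.util.function.Predicate;

final class AwaitRunningSketch {
    interface ServiceLike { boolean isRunning(); String name(); }

    // Poll until all services run, or throw listing the ones that never started.
    static void awaitAllRunning(List<ServiceLike> servers, Duration timeout)
            throws TimeoutException, InterruptedException {
        Instant deadline = Instant.now().plus(timeout);
        Predicate<ServiceLike> notRunning = s -> !s.isRunning();
        while (servers.stream().anyMatch(notRunning)) {
            if (Instant.now().isAfter(deadline)) {
                throw new TimeoutException(
                    servers.stream().filter(notRunning).map(ServiceLike::name).toList()
                        + " not started in time");
            }
            Thread.sleep(100);
        }
    }
}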

View File

@@ -1,18 +1,28 @@
package io.kestra.cli.commands;
import io.kestra.cli.AbstractApiCommand;
import lombok.Builder;
import lombok.Value;
import lombok.extern.jackson.Jacksonized;
import picocli.CommandLine;
import java.nio.file.Path;
public abstract class AbstractServiceNamespaceUpdateCommand extends AbstractApiCommand {
@CommandLine.Parameters(index = "0", description = "The namespace to update")
@CommandLine.Parameters(index = "0", description = "the namespace to update")
public String namespace;
@CommandLine.Parameters(index = "1", description = "The directory containing flow files for current namespace")
@CommandLine.Parameters(index = "1", description = "the directory containing files for current namespace")
public Path directory;
@CommandLine.Option(names = {"--delete"}, negatable = true, description = "Whether missing should be deleted")
@CommandLine.Option(names = {"--delete"}, negatable = true, description = "if missing should be deleted")
public boolean delete = false;
@Builder
@Value
@Jacksonized
public static class UpdateResult {
String id;
String namespace;
}
}

View File

@@ -8,7 +8,7 @@ import picocli.CommandLine;
@CommandLine.Command(
name = "configs",
description = "Manage configuration",
description = "handle configs",
mixinStandardHelpOptions = true,
subcommands = {
ConfigPropertiesCommand.class,

View File

@@ -10,7 +10,7 @@ import picocli.CommandLine;
@CommandLine.Command(
name = "properties",
description = {"Display current configuration properties."}
description = {"Display actual configurations properties."}
)
@Slf4j
public class ConfigPropertiesCommand extends AbstractCommand {

View File

@@ -10,7 +10,7 @@ import picocli.CommandLine;
@CommandLine.Command(
name = "flow",
description = "Manage flows",
description = "handle flows",
mixinStandardHelpOptions = true,
subcommands = {
FlowValidateCommand.class,
@@ -18,8 +18,6 @@ import picocli.CommandLine;
FlowNamespaceCommand.class,
FlowDotCommand.class,
FlowExportCommand.class,
FlowUpdateCommand.class,
FlowUpdatesCommand.class
}
)
@Slf4j

View File

@@ -1,64 +0,0 @@
package io.kestra.cli.commands.flows;
import io.kestra.cli.AbstractApiCommand;
import io.kestra.cli.AbstractValidateCommand;
import io.kestra.cli.services.TenantIdSelectorService;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.MediaType;
import io.micronaut.http.MutableHttpRequest;
import io.micronaut.http.client.exceptions.HttpClientResponseException;
import io.micronaut.http.client.netty.DefaultHttpClient;
import jakarta.inject.Inject;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
import java.nio.file.Files;
import java.nio.file.Path;
@CommandLine.Command(
name = "create",
description = "Create a single flow",
mixinStandardHelpOptions = true
)
@Slf4j
public class FlowCreateCommand extends AbstractApiCommand {
@CommandLine.Parameters(index = "0", description = "The file containing the flow")
public Path flowFile;
@Inject
private TenantIdSelectorService tenantService;
@SuppressWarnings("deprecation")
@Override
public Integer call() throws Exception {
super.call();
checkFile();
String body = Files.readString(flowFile);
try(DefaultHttpClient client = client()) {
MutableHttpRequest<String> request = HttpRequest
.POST(apiUri("/flows", tenantService.getTenantId(tenantId)), body).contentType(MediaType.APPLICATION_YAML);
client.toBlocking().retrieve(
this.requestOptions(request),
String.class
);
stdOut("Flow successfully created !");
} catch (HttpClientResponseException e){
AbstractValidateCommand.handleHttpException(e, "flow");
return 1;
}
return 0;
}
protected void checkFile() {
if (!Files.isRegularFile(flowFile)) {
throw new IllegalArgumentException("The file '" + flowFile.toFile().getAbsolutePath() + "' is not a file");
}
}
}

View File

@@ -1,52 +0,0 @@
package io.kestra.cli.commands.flows;
import io.kestra.cli.AbstractApiCommand;
import io.kestra.cli.AbstractValidateCommand;
import io.kestra.cli.services.TenantIdSelectorService;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.MutableHttpRequest;
import io.micronaut.http.client.exceptions.HttpClientResponseException;
import io.micronaut.http.client.netty.DefaultHttpClient;
import jakarta.inject.Inject;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
@CommandLine.Command(
name = "delete",
description = "Delete a single flow",
mixinStandardHelpOptions = true
)
@Slf4j
public class FlowDeleteCommand extends AbstractApiCommand {
@CommandLine.Parameters(index = "0", description = "The namespace of the flow")
public String namespace;
@CommandLine.Parameters(index = "1", description = "The ID of the flow")
public String id;
@Inject
private TenantIdSelectorService tenantService;
@SuppressWarnings("deprecation")
@Override
public Integer call() throws Exception {
super.call();
try(DefaultHttpClient client = client()) {
MutableHttpRequest<String> request = HttpRequest
.DELETE(apiUri("/flows/" + namespace + "/" + id, tenantService.getTenantId(tenantId)));
client.toBlocking().exchange(
this.requestOptions(request)
);
stdOut("Flow successfully deleted !");
} catch (HttpClientResponseException e){
AbstractValidateCommand.handleHttpException(e, "flow");
return 1;
}
return 0;
}
}

View File

@@ -3,7 +3,7 @@ package io.kestra.cli.commands.flows;
import io.kestra.cli.AbstractCommand;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.hierarchies.GraphCluster;
import io.kestra.core.serializers.YamlParser;
import io.kestra.core.serializers.YamlFlowParser;
import io.kestra.core.services.Graph2DotService;
import io.kestra.core.utils.GraphUtils;
import io.micronaut.context.ApplicationContext;
@@ -15,21 +15,22 @@ import java.nio.file.Path;
@CommandLine.Command(
name = "dot",
description = "Generate a DOT graph from a file"
description = "generate a dot graph from a file"
)
@Slf4j
public class FlowDotCommand extends AbstractCommand {
@Inject
private ApplicationContext applicationContext;
@CommandLine.Parameters(index = "0", description = "The flow file to display")
@CommandLine.Parameters(index = "0", description = "the flow file to display")
private Path file;
@Override
public Integer call() throws Exception {
super.call();
Flow flow = YamlParser.parse(file.toFile(), Flow.class);
YamlFlowParser parser = applicationContext.getBean(YamlFlowParser.class);
Flow flow = parser.parse(file.toFile(), Flow.class);
GraphCluster graph = GraphUtils.of(flow, null);

View File

@@ -3,7 +3,7 @@ package io.kestra.cli.commands.flows;
import io.kestra.cli.AbstractCommand;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.validations.ModelValidator;
import io.kestra.core.serializers.YamlParser;
import io.kestra.core.serializers.YamlFlowParser;
import jakarta.inject.Inject;
import picocli.CommandLine;
@@ -12,14 +12,17 @@ import java.nio.file.Path;
@CommandLine.Command(
name = "expand",
description = "Deprecated - expand a flow"
description = "deprecated - expand a flow"
)
@Deprecated
public class FlowExpandCommand extends AbstractCommand {
@CommandLine.Parameters(index = "0", description = "The flow file to expand")
@CommandLine.Parameters(index = "0", description = "the flow file to expand")
private Path file;
@Inject
private YamlFlowParser yamlFlowParser;
@Inject
private ModelValidator modelValidator;
@@ -28,7 +31,7 @@ public class FlowExpandCommand extends AbstractCommand {
super.call();
stdErr("Warning, this functionality is deprecated and will be removed at some point.");
String content = IncludeHelperExpander.expand(Files.readString(file), file.getParent());
Flow flow = YamlParser.parse(content, Flow.class);
Flow flow = yamlFlowParser.parse(content, Flow.class);
modelValidator.validate(flow);
stdOut(content);
return 0;

View File

@@ -1,8 +1,7 @@
package io.kestra.cli.commands.flows;
import io.kestra.cli.AbstractApiCommand;
import io.kestra.cli.AbstractValidateCommand;
import io.kestra.cli.services.TenantIdSelectorService;
import io.micronaut.context.ApplicationContext;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.HttpResponse;
import io.micronaut.http.MediaType;
@@ -18,20 +17,21 @@ import java.nio.file.Path;
@CommandLine.Command(
name = "export",
description = "Export flows to a ZIP file",
description = "export flows to a zip file",
mixinStandardHelpOptions = true
)
@Slf4j
public class FlowExportCommand extends AbstractApiCommand {
private static final String DEFAULT_FILE_NAME = "flows.zip";
// @FIXME: Keep it for a bug in Micronaut that requires an @Inject on the top-level command in order to inject into abstract classes
@Inject
private TenantIdSelectorService tenantService;
private ApplicationContext applicationContext;
@CommandLine.Option(names = {"--namespace"}, description = "The namespace of flows to export")
@CommandLine.Option(names = {"--namespace"}, description = "the namespace of flows to export")
public String namespace;
@CommandLine.Parameters(index = "0", description = "The directory to export the ZIP file to")
@CommandLine.Parameters(index = "0", description = "the directory to export the file to")
public Path directory;
@Override
@@ -40,7 +40,7 @@ public class FlowExportCommand extends AbstractApiCommand {
try(DefaultHttpClient client = client()) {
MutableHttpRequest<Object> request = HttpRequest
.GET(apiUri("/flows/export/by-query", tenantService.getTenantId(tenantId)) + (namespace != null ? "?namespace=" + namespace : ""))
.GET(apiUri("/flows/export/by-query") + (namespace != null ? "?namespace=" + namespace : ""))
.accept(MediaType.APPLICATION_OCTET_STREAM);
HttpResponse<byte[]> response = client.toBlocking().exchange(this.requestOptions(request), byte[].class);
@@ -50,7 +50,7 @@ public class FlowExportCommand extends AbstractApiCommand {
stdOut("Exporting flow(s) for namespace '" + namespace + "' successfully done !");
} catch (HttpClientResponseException e) {
AbstractValidateCommand.handleHttpException(e, "flow");
FlowValidateCommand.handleHttpException(e, "flow");
return 1;
}
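The export command above requests /flows/export/by-query as an octet stream and, per DEFAULT_FILE_NAME, saves the archive as flows.zip inside the target directory. The write itself is not visible in this hunk, so the following is only a plausible sketch of that last step:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

final class ExportWriteSketch {
    // Persist the byte[] body returned by the export endpoint as <directory>/flows.zip.
    static Path writeArchive(Path directory, byte[] zipBody) throws IOException {
        return Files.write(directory.resolve("flows.zip"), zipBody);
    }
}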

View File

@@ -1,17 +1,16 @@
package io.kestra.cli.commands.flows;
import com.google.common.collect.ImmutableMap;
import io.kestra.cli.AbstractApiCommand;
import io.kestra.cli.services.TenantIdSelectorService;
import io.kestra.cli.AbstractCommand;
import io.kestra.core.exceptions.MissingRequiredArgument;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.repositories.LocalFlowRepositoryLoader;
import io.kestra.core.runners.FlowInputOutput;
import io.kestra.core.runners.RunnerUtils;
import io.kestra.cli.StandAloneRunner;
import io.kestra.runner.memory.MemoryRunner;
import io.micronaut.context.ApplicationContext;
import jakarta.inject.Inject;
import jakarta.validation.ConstraintViolationException;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.FileUtils;
import picocli.CommandLine;
@@ -25,22 +24,23 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeoutException;
import jakarta.validation.ConstraintViolationException;
@CommandLine.Command(
name = "test",
description = "Test a flow"
description = "test a flow"
)
@Slf4j
public class FlowTestCommand extends AbstractApiCommand {
public class FlowTestCommand extends AbstractCommand {
@Inject
private ApplicationContext applicationContext;
@CommandLine.Parameters(index = "0", description = "The flow file to test")
@CommandLine.Parameters(index = "0", description = "the flow file to test")
private Path file;
@CommandLine.Parameters(
index = "1..*",
description = "The inputs to pass as key pair value separated by space, " +
description = "the inputs to pass as key pair value separated by space, " +
"for input type file, you need to pass an absolute path."
)
private List<String> inputs = new ArrayList<>();
@@ -72,11 +72,11 @@ public class FlowTestCommand extends AbstractApiCommand {
public Integer call() throws Exception {
super.call();
MemoryRunner runner = applicationContext.getBean(MemoryRunner.class);
LocalFlowRepositoryLoader repositoryLoader = applicationContext.getBean(LocalFlowRepositoryLoader.class);
FlowRepositoryInterface flowRepository = applicationContext.getBean(FlowRepositoryInterface.class);
FlowInputOutput flowInputOutput = applicationContext.getBean(FlowInputOutput.class);
RunnerUtils runnerUtils = applicationContext.getBean(RunnerUtils.class);
TenantIdSelectorService tenantService = applicationContext.getBean(TenantIdSelectorService.class);
Map<String, Object> inputs = new HashMap<>();
@@ -88,9 +88,9 @@ public class FlowTestCommand extends AbstractApiCommand {
inputs.put(this.inputs.get(i), this.inputs.get(i+1));
}
try (StandAloneRunner runner = applicationContext.createBean(StandAloneRunner.class);){
try {
runner.run();
repositoryLoader.load(tenantService.getTenantId(tenantId), file.toFile());
repositoryLoader.load(file.toFile());
List<Flow> all = flowRepository.findAllForAllTenants();
if (all.size() != 1) {
@@ -98,14 +98,18 @@ public class FlowTestCommand extends AbstractApiCommand {
}
runnerUtils.runOne(
all.getFirst(),
(flow, execution) -> flowInputOutput.readExecutionInputs(flow, execution, inputs),
all.get(0),
(flow, execution) -> flowInputOutput.typedInputs(flow, execution, inputs),
Duration.ofHours(1)
);
} catch (ConstraintViolationException e) {
runner.close();
} catch (MissingRequiredArgument e) {
throw new CommandLine.ParameterException(this.spec.commandLine(), e.getMessage());
} catch (IOException | TimeoutException e) {
throw new IllegalStateException(e);
} catch (ConstraintViolationException e) {
throw new CommandLine.ParameterException(this.spec.commandLine(), "Invalid flow", e);
} finally {
applicationContext.getProperty("kestra.storage.local.base-path", Path.class)
.ifPresent(path -> {
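FlowTestCommand above accepts inputs as a flat list of alternating keys and values and pairs them up two by two before running the flow. A small sketch of that pairing; rejecting an odd number of arguments is an assumption added here for clarity:

import java.util.HashMap;
import java.util.List;
import java.util.Map;

final class InputPairingSketch {
    static Map<String, Object> pair(List<String> args) {
        if (args.size() % 2 != 0) {
            throw new IllegalArgumentException("Inputs must be passed as 'key value' pairs"); // assumption
        }
        Map<String, Object> inputs = new HashMap<>();
        for (int i = 0; i < args.size(); i += 2) {
            inputs.put(args.get(i), args.get(i + 1));
        }
        return inputs;
    }

    public static void main(String[] args) {
        // Illustrative values only.
        System.out.println(pair(List.of("myInput", "hello", "myFile", "/tmp/data.csv")));
    }
}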

View File

@@ -1,69 +0,0 @@
package io.kestra.cli.commands.flows;
import io.kestra.cli.AbstractApiCommand;
import io.kestra.cli.AbstractValidateCommand;
import io.kestra.cli.services.TenantIdSelectorService;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.MediaType;
import io.micronaut.http.MutableHttpRequest;
import io.micronaut.http.client.exceptions.HttpClientResponseException;
import io.micronaut.http.client.netty.DefaultHttpClient;
import jakarta.inject.Inject;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
import java.nio.file.Files;
import java.nio.file.Path;
@CommandLine.Command(
name = "update",
description = "Update a single flow",
mixinStandardHelpOptions = true
)
@Slf4j
public class FlowUpdateCommand extends AbstractApiCommand {
@CommandLine.Parameters(index = "0", description = "The file containing the flow")
public Path flowFile;
@CommandLine.Parameters(index = "1", description = "The namespace of the flow")
public String namespace;
@CommandLine.Parameters(index = "2", description = "The ID of the flow")
public String id;
@Inject
private TenantIdSelectorService tenantService;
@SuppressWarnings("deprecation")
@Override
public Integer call() throws Exception {
super.call();
checkFile();
String body = Files.readString(flowFile);
try(DefaultHttpClient client = client()) {
MutableHttpRequest<String> request = HttpRequest
.PUT(apiUri("/flows/" + namespace + "/" + id, tenantService.getTenantId(tenantId)), body).contentType(MediaType.APPLICATION_YAML);
client.toBlocking().retrieve(
this.requestOptions(request),
String.class
);
stdOut("Flow successfully updated !");
} catch (HttpClientResponseException e){
AbstractValidateCommand.handleHttpException(e, "flow");
return 1;
}
return 0;
}
protected void checkFile() {
if (!Files.isRegularFile(flowFile)) {
throw new IllegalArgumentException("The file '" + flowFile.toFile().getAbsolutePath() + "' is not a file");
}
}
}

View File

@@ -1,100 +0,0 @@
package io.kestra.cli.commands.flows;
import io.kestra.cli.AbstractApiCommand;
import io.kestra.cli.AbstractValidateCommand;
import io.kestra.cli.services.TenantIdSelectorService;
import io.kestra.core.serializers.YamlParser;
import io.micronaut.core.type.Argument;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.MediaType;
import io.micronaut.http.MutableHttpRequest;
import io.micronaut.http.client.exceptions.HttpClientResponseException;
import io.micronaut.http.client.netty.DefaultHttpClient;
import jakarta.inject.Inject;
import jakarta.validation.ConstraintViolationException;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
@CommandLine.Command(
name = "updates",
description = "Create or update flows from a folder, and optionally delete the ones not present",
mixinStandardHelpOptions = true
)
@Slf4j
public class FlowUpdatesCommand extends AbstractApiCommand {
@CommandLine.Parameters(index = "0", description = "The directory containing files")
public Path directory;
@CommandLine.Option(names = {"--delete"}, negatable = true, description = "Whether missing should be deleted")
public boolean delete = false;
@CommandLine.Option(names = {"--namespace"}, description = "The parent namespace of the flows, if not set, every namespace are allowed.")
public String namespace;
@Inject
private TenantIdSelectorService tenantIdSelectorService;
@SuppressWarnings("deprecation")
@Override
public Integer call() throws Exception {
super.call();
try (var files = Files.walk(directory)) {
List<String> flows = files
.filter(Files::isRegularFile)
.filter(YamlParser::isValidExtension)
.map(path -> {
try {
return IncludeHelperExpander.expand(Files.readString(path, Charset.defaultCharset()), path.getParent());
} catch (IOException e) {
throw new RuntimeException(e);
}
})
.toList();
String body = "";
if (flows.isEmpty()) {
stdOut("No flow found on '{}'", directory.toFile().getAbsolutePath());
} else {
body = String.join("\n---\n", flows);
}
try(DefaultHttpClient client = client()) {
String namespaceQuery = "";
if (namespace != null) {
namespaceQuery = "&namespace=" + namespace;
}
MutableHttpRequest<String> request = HttpRequest
.POST(apiUri("/flows/bulk", tenantIdSelectorService.getTenantId(tenantId)) + "?allowNamespaceChild=true&delete=" + delete + namespaceQuery, body).contentType(MediaType.APPLICATION_YAML);
List<UpdateResult> updated = client.toBlocking().retrieve(
this.requestOptions(request),
Argument.listOf(UpdateResult.class)
);
stdOut(updated.size() + " flow(s) successfully updated !");
updated.forEach(flow -> stdOut("- " + flow.getNamespace() + "." + flow.getId()));
} catch (HttpClientResponseException e){
AbstractValidateCommand.handleHttpException(e, "flow");
return 1;
}
} catch (ConstraintViolationException e) {
AbstractValidateCommand.handleException(e, "flow");
return 1;
}
return 0;
}
@Override
protected boolean loadExternalPlugins() {
return false;
}
}

View File

@@ -1,9 +1,9 @@
package io.kestra.cli.commands.flows;
import io.kestra.cli.AbstractValidateCommand;
import io.kestra.cli.services.TenantIdSelectorService;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.validations.ModelValidator;
import io.kestra.core.serializers.YamlFlowParser;
import io.kestra.core.services.FlowService;
import jakarta.inject.Inject;
import picocli.CommandLine;
@@ -13,9 +13,11 @@ import java.util.List;
@CommandLine.Command(
name = "validate",
description = "Validate a flow"
description = "validate a flow"
)
public class FlowValidateCommand extends AbstractValidateCommand {
@Inject
private YamlFlowParser yamlFlowParser;
@Inject
private ModelValidator modelValidator;
@@ -23,28 +25,22 @@ public class FlowValidateCommand extends AbstractValidateCommand {
@Inject
private FlowService flowService;
@Inject
private TenantIdSelectorService tenantService;
@Override
public Integer call() throws Exception {
return this.call(
FlowWithSource.class,
Flow.class,
yamlFlowParser,
modelValidator,
(Object object) -> {
FlowWithSource flow = (FlowWithSource) object;
Flow flow = (Flow) object;
return flow.getNamespace() + " / " + flow.getId();
},
(Object object) -> {
FlowWithSource flow = (FlowWithSource) object;
Flow flow = (Flow) object;
List<String> warnings = new ArrayList<>();
warnings.addAll(flowService.deprecationPaths(flow).stream().map(deprecation -> deprecation + " is deprecated").toList());
warnings.addAll(flowService.warnings(flow, tenantService.getTenantId(tenantId)));
warnings.addAll(flowService.warnings(flow));
return warnings;
},
(Object object) -> {
FlowWithSource flow = (FlowWithSource) object;
return flowService.relocations(flow.sourceOrGenerateIfNull()).stream().map(relocation -> relocation.from() + " is replaced by " + relocation.to()).toList();
}
);
}

View File

@@ -28,7 +28,7 @@ public abstract class IncludeHelperExpander {
// handle single line directly with the suffix (should be between quotes or double-quotes)
if(include.size() == 1) {
String singleInclude = include.getFirst();
String singleInclude = include.get(0);
return prefix + singleInclude + suffix;
}

View File

@@ -9,7 +9,7 @@ import picocli.CommandLine;
@CommandLine.Command(
name = "namespace",
description = "Manage namespace flows",
description = "handle namespace flows",
mixinStandardHelpOptions = true,
subcommands = {
FlowNamespaceUpdateCommand.class,

View File

@@ -1,10 +1,9 @@
package io.kestra.cli.commands.flows.namespaces;
import io.kestra.cli.AbstractValidateCommand;
import io.kestra.cli.commands.AbstractServiceNamespaceUpdateCommand;
import io.kestra.cli.commands.flows.FlowValidateCommand;
import io.kestra.cli.commands.flows.IncludeHelperExpander;
import io.kestra.cli.services.TenantIdSelectorService;
import io.kestra.core.serializers.YamlParser;
import io.kestra.core.serializers.YamlFlowParser;
import io.micronaut.core.type.Argument;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.MediaType;
@@ -12,30 +11,27 @@ import io.micronaut.http.MutableHttpRequest;
import io.micronaut.http.client.exceptions.HttpClientResponseException;
import io.micronaut.http.client.netty.DefaultHttpClient;
import jakarta.inject.Inject;
import jakarta.validation.ConstraintViolationException;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
import jakarta.validation.ConstraintViolationException;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.util.List;
import java.util.stream.Collectors;
@CommandLine.Command(
name = "update",
description = "Update flows in namespace",
description = "handle namespace flows",
mixinStandardHelpOptions = true
)
@Slf4j
public class FlowNamespaceUpdateCommand extends AbstractServiceNamespaceUpdateCommand {
@CommandLine.Option(names = {"--override-namespaces"}, negatable = true, description = "Replace namespace of all flows by the one provided")
public boolean override = false;
@Inject
private TenantIdSelectorService tenantService;
public YamlFlowParser yamlFlowParser;
@SuppressWarnings("deprecation")
@Override
public Integer call() throws Exception {
super.call();
@@ -43,7 +39,7 @@ public class FlowNamespaceUpdateCommand extends AbstractServiceNamespaceUpdateCo
try (var files = Files.walk(directory)) {
List<String> flows = files
.filter(Files::isRegularFile)
.filter(YamlParser::isValidExtension)
.filter(YamlFlowParser::isValidExtension)
.map(path -> {
try {
return IncludeHelperExpander.expand(Files.readString(path, Charset.defaultCharset()), path.getParent());
@@ -51,7 +47,7 @@ public class FlowNamespaceUpdateCommand extends AbstractServiceNamespaceUpdateCo
throw new RuntimeException(e);
}
})
.toList();
.collect(Collectors.toList());
String body = "";
if (flows.isEmpty()) {
@@ -59,12 +55,9 @@ public class FlowNamespaceUpdateCommand extends AbstractServiceNamespaceUpdateCo
} else {
body = String.join("\n---\n", flows);
}
if (override) {
body = body.replaceAll("(?m)^namespace:.+", "namespace: " + namespace);
}
try(DefaultHttpClient client = client()) {
MutableHttpRequest<String> request = HttpRequest
.POST(apiUri("/flows/", tenantService.getTenantId(tenantId)) + namespace + "?delete=" + delete, body).contentType(MediaType.APPLICATION_YAML);
.POST(apiUri("/flows/") + namespace + "?delete=" + delete, body).contentType(MediaType.APPLICATION_YAML);
List<UpdateResult> updated = client.toBlocking().retrieve(
this.requestOptions(request),
@@ -74,11 +67,11 @@ public class FlowNamespaceUpdateCommand extends AbstractServiceNamespaceUpdateCo
stdOut(updated.size() + " flow(s) for namespace '" + namespace + "' successfully updated !");
updated.forEach(flow -> stdOut("- " + flow.getNamespace() + "." + flow.getId()));
} catch (HttpClientResponseException e){
AbstractValidateCommand.handleHttpException(e, "flow");
FlowValidateCommand.handleHttpException(e, "flow");
return 1;
}
} catch (ConstraintViolationException e) {
AbstractValidateCommand.handleException(e, "flow");
FlowValidateCommand.handleException(e, "flow");
return 1;
}
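The --override-namespaces option above rewrites every namespace: line in the multi-document YAML body with the namespace given on the command line, using a multiline regex. A tiny sketch of that rewrite (the flow snippet is invented):

final class NamespaceOverrideSketch {
    // Replace every line starting with "namespace:" across the whole body ((?m) = multiline mode).
    static String overrideNamespace(String yamlBody, String namespace) {
        return yamlBody.replaceAll("(?m)^namespace:.+", "namespace: " + namespace);
    }

    public static void main(String[] args) {
        String body = "id: my-flow\nnamespace: dev.team\ntasks: []";
        System.out.println(overrideNamespace(body, "prod.team"));
        // id: my-flow
        // namespace: prod.team
        // tasks: []
    }
}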

View File

@@ -1,29 +0,0 @@
package io.kestra.cli.commands.migrations;
import io.kestra.cli.AbstractCommand;
import io.kestra.cli.App;
import io.micronaut.configuration.picocli.PicocliRunner;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
@CommandLine.Command(
name = "migrate",
description = "handle migrations",
mixinStandardHelpOptions = true,
subcommands = {
TenantMigrationCommand.class,
}
)
@Slf4j
public class MigrationCommand extends AbstractCommand {
@SneakyThrows
@Override
public Integer call() throws Exception {
super.call();
PicocliRunner.call(App.class, "migrate", "--help");
return 0;
}
}

View File

@@ -1,49 +0,0 @@
package io.kestra.cli.commands.migrations;
import io.kestra.cli.AbstractCommand;
import io.kestra.core.repositories.TenantMigrationInterface;
import io.micronaut.context.ApplicationContext;
import jakarta.inject.Inject;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
import picocli.CommandLine.Option;
@CommandLine.Command(
name = "default-tenant",
description = "migrate every element from no tenant to the main tenant"
)
@Slf4j
public class TenantMigrationCommand extends AbstractCommand {
@Inject
private ApplicationContext applicationContext;
@Option(names = "--tenant-id", description = "tenant identifier")
String tenantId;
@Option(names = "--tenant-name", description = "tenant name")
String tenantName;
@Option(names = "--dry-run", description = "Preview only, do not update")
boolean dryRun;
@Override
public Integer call() throws Exception {
super.call();
if (dryRun) {
System.out.println("🧪 Dry-run mode enabled. No changes will be applied.");
}
TenantMigrationService migrationService = this.applicationContext.getBean(TenantMigrationService.class);
try {
migrationService.migrateTenant(tenantId, tenantName, dryRun);
System.out.println("✅ Tenant migration complete.");
} catch (Exception e) {
System.err.println("❌ Tenant migration failed: " + e.getMessage());
e.printStackTrace();
return 1;
}
return 0;
}
}

View File

@@ -1,56 +0,0 @@
package io.kestra.cli.commands.migrations;
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
import com.github.javaparser.utils.Log;
import io.kestra.core.exceptions.KestraRuntimeException;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.queues.QueueException;
import io.kestra.core.queues.QueueFactoryInterface;
import io.kestra.core.queues.QueueInterface;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.repositories.TenantMigrationInterface;
import jakarta.inject.Inject;
import jakarta.inject.Named;
import jakarta.inject.Singleton;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
@Singleton
@Slf4j
public class TenantMigrationService {
@Inject
private TenantMigrationInterface tenantMigrationInterface;
@Inject
private FlowRepositoryInterface flowRepository;
@Inject
@Named(QueueFactoryInterface.FLOW_NAMED)
private QueueInterface<FlowInterface> flowQueue;
public void migrateTenant(String tenantId, String tenantName, boolean dryRun) {
if (StringUtils.isNotBlank(tenantId) && !MAIN_TENANT.equals(tenantId)){
throw new KestraRuntimeException("Tenant configuration is an enterprise feature. Only the main tenant is available in OSS");
}
Log.info("🔁 Starting tenant migration...");
tenantMigrationInterface.migrateTenant(MAIN_TENANT, dryRun);
migrateQueue(dryRun);
}
protected void migrateQueue(boolean dryRun) {
if (!dryRun){
log.info("🔁 Starting restoring queue...");
flowRepository.findAllWithSourceForAllTenants().forEach(flow -> {
try {
flowQueue.emit(flow);
} catch (QueueException e) {
log.warn("Unable to send the flow {} to the queue", flow.uid(), e);
}
});
}
}
}

View File

@@ -3,7 +3,6 @@ package io.kestra.cli.commands.namespaces;
import io.kestra.cli.AbstractCommand;
import io.kestra.cli.App;
import io.kestra.cli.commands.namespaces.files.NamespaceFilesCommand;
import io.kestra.cli.commands.namespaces.kv.KvCommand;
import io.micronaut.configuration.picocli.PicocliRunner;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
@@ -11,11 +10,10 @@ import picocli.CommandLine;
@CommandLine.Command(
name = "namespace",
description = "Manage namespaces",
description = "handle namespaces",
mixinStandardHelpOptions = true,
subcommands = {
NamespaceFilesCommand.class,
KvCommand.class
NamespaceFilesCommand.class
}
)
@Slf4j

View File

@@ -9,7 +9,7 @@ import picocli.CommandLine;
@CommandLine.Command(
name = "files",
description = "Manage namespace files",
description = "handle namespace files",
mixinStandardHelpOptions = true,
subcommands = {
NamespaceFilesUpdateCommand.class,

View File

@@ -2,14 +2,12 @@ package io.kestra.cli.commands.namespaces.files;
import io.kestra.cli.AbstractApiCommand;
import io.kestra.cli.AbstractValidateCommand;
import io.kestra.cli.services.TenantIdSelectorService;
import io.kestra.core.utils.KestraIgnore;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.MediaType;
import io.micronaut.http.client.exceptions.HttpClientResponseException;
import io.micronaut.http.client.multipart.MultipartBody;
import io.micronaut.http.client.netty.DefaultHttpClient;
import jakarta.inject.Inject;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
@@ -19,26 +17,23 @@ import java.util.List;
@CommandLine.Command(
name = "update",
description = "Update namespace files",
description = "update namespace files",
mixinStandardHelpOptions = true
)
@Slf4j
public class NamespaceFilesUpdateCommand extends AbstractApiCommand {
@CommandLine.Parameters(index = "0", description = "The namespace to update")
@CommandLine.Parameters(index = "0", description = "the namespace to update")
public String namespace;
@CommandLine.Parameters(index = "1", description = "The local directory containing files for current namespace")
@CommandLine.Parameters(index = "1", description = "the local directory containing files for current namespace")
public Path from;
@CommandLine.Parameters(index = "2", description = "The remote namespace path to upload files to", defaultValue = "/")
@CommandLine.Parameters(index = "2", description = "the remote namespace path to upload files to", defaultValue = "/")
public String to;
@CommandLine.Option(names = {"--delete"}, negatable = true, description = "Whether missing should be deleted")
@CommandLine.Option(names = {"--delete"}, negatable = true, description = "if missing should be deleted")
public boolean delete = false;
@Inject
private TenantIdSelectorService tenantService;
private static final String KESTRA_IGNORE_FILE = ".kestraignore";
@Override
@@ -49,7 +44,7 @@ public class NamespaceFilesUpdateCommand extends AbstractApiCommand {
try (var files = Files.walk(from); DefaultHttpClient client = client()) {
if (delete) {
client.toBlocking().exchange(this.requestOptions(HttpRequest.DELETE(apiUri("/namespaces/", tenantService.getTenantId(tenantId)) + namespace + "/files?path=" + to, null)));
client.toBlocking().exchange(this.requestOptions(HttpRequest.DELETE(apiUri("/namespaces/") + namespace + "/files?path=" + to, null)));
}
KestraIgnore kestraIgnore = new KestraIgnore(from);
@@ -67,7 +62,7 @@ public class NamespaceFilesUpdateCommand extends AbstractApiCommand {
client.toBlocking().exchange(
this.requestOptions(
HttpRequest.POST(
apiUri("/namespaces/", tenantService.getTenantId(tenantId)) + namespace + "/files?path=" + destination,
apiUri("/namespaces/") + namespace + "/files?path=" + destination,
body
).contentType(MediaType.MULTIPART_FORM_DATA)
)

View File

@@ -1,29 +0,0 @@
package io.kestra.cli.commands.namespaces.kv;
import io.kestra.cli.AbstractCommand;
import io.kestra.cli.App;
import io.micronaut.configuration.picocli.PicocliRunner;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
@CommandLine.Command(
name = "kv",
description = "Manage KV Store",
mixinStandardHelpOptions = true,
subcommands = {
KvUpdateCommand.class,
}
)
@Slf4j
public class KvCommand extends AbstractCommand {
@SneakyThrows
@Override
public Integer call() throws Exception {
super.call();
PicocliRunner.call(App.class, "namespace", "kv", "--help");
return 0;
}
}

View File

@@ -1,95 +0,0 @@
package io.kestra.cli.commands.namespaces.kv;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.kestra.cli.AbstractApiCommand;
import io.kestra.cli.services.TenantIdSelectorService;
import io.kestra.core.serializers.JacksonMapper;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.MediaType;
import io.micronaut.http.MutableHttpRequest;
import io.micronaut.http.client.netty.DefaultHttpClient;
import jakarta.inject.Inject;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
import picocli.CommandLine.Option;
import java.nio.file.Files;
import java.nio.file.Path;
import java.time.Duration;
@CommandLine.Command(
name = "update",
description = "Update value for a KV Store key",
mixinStandardHelpOptions = true
)
@Slf4j
public class KvUpdateCommand extends AbstractApiCommand {
@CommandLine.Parameters(index = "0", description = "The namespace to update")
public String namespace;
@CommandLine.Parameters(index = "1", description = "The key to update")
public String key;
@CommandLine.Parameters(index = "2", description = "The value to assign to the key. If the value is an object, it must be in JSON format. If the value must be read from file, use -f parameter.")
public String value;
@Option(names = {"-e", "--expiration"}, description = "The duration after which the key should expire.")
public String expiration;
@Option(names = {"-t", "--type"}, description = "The type of the value. Optional and useful to override the deduced type (eg. numbers, booleans or JSON as full string). Valid values: ${COMPLETION-CANDIDATES}.")
public Type type;
@Option(names = {"-f", "--file-value"}, description = "The file from which to read the value to set. If this is provided, it will take precedence over any specified value.")
public Path fileValue;
@Inject
private TenantIdSelectorService tenantService;
@Override
public Integer call() throws Exception {
super.call();
if (fileValue != null) {
value = Files.readString(Path.of(fileValue.toString().trim()));
}
if (isLiteral(value) || type == Type.STRING) {
value = wrapAsJsonLiteral(value);
}
Duration ttl = expiration == null ? null : Duration.parse(expiration);
MutableHttpRequest<String> request = HttpRequest
.PUT(apiUri("/namespaces/", tenantService.getTenantId(tenantId)) + namespace + "/kv/" + key, value)
.contentType(MediaType.APPLICATION_JSON_TYPE);
if (ttl != null) {
request.header("ttl", ttl.toString());
}
try (DefaultHttpClient client = client()) {
client.toBlocking().exchange(this.requestOptions(request));
}
return 0;
}
private static boolean isLiteral(final String input) {
// use ION mapper to properly handle timestamp
ObjectMapper mapper = JacksonMapper.ofIon();
try {
mapper.readTree(input);
return false;
} catch (JsonProcessingException e) {
return true;
}
}
public static String wrapAsJsonLiteral(final String input) {
return "\"" + input.replace("\"", "\\\"") + "\"";
}
enum Type {
STRING, NUMBER, BOOLEAN, DATETIME, DATE, DURATION, JSON;
}
}
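KvUpdateCommand above decides whether the given value is already valid JSON (isLiteral) and, if not, wraps it as a quoted JSON string before sending it to the KV endpoint. The sketch below uses a plain Jackson ObjectMapper instead of the ION mapper the command relies on, which is the one real difference to keep in mind:

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;

final class KvValueSketch {
    private static final ObjectMapper MAPPER = new ObjectMapper(); // the real code uses JacksonMapper.ofIon()

    static boolean isLiteral(String input) {
        try {
            MAPPER.readTree(input); // parses fine: already JSON (object, array, number, boolean, ...)
            return false;
        } catch (JsonProcessingException e) {
            return true;            // not parseable: treat as a plain string literal
        }
    }

    static String wrapAsJsonLiteral(String input) {
        return "\"" + input.replace("\"", "\\\"") + "\"";
    }

    public static void main(String[] args) {
        System.out.println(isLiteral("{\"a\": 1}"));         // false - already JSON
        System.out.println(isLiteral("plain text"));         // true  - needs wrapping
        System.out.println(wrapAsJsonLiteral("say \"hi\"")); // "say \"hi\""
    }
}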

View File

@@ -1,37 +1,31 @@
package io.kestra.cli.commands.plugins;
import io.kestra.cli.AbstractCommand;
import io.kestra.cli.App;
import io.micronaut.configuration.picocli.PicocliRunner;
import lombok.SneakyThrows;
import picocli.CommandLine.Command;
import lombok.extern.slf4j.Slf4j;
import io.kestra.cli.AbstractCommand;
import io.kestra.cli.App;
import picocli.CommandLine;
@Command(
@CommandLine.Command(
name = "plugins",
description = "Manage plugins",
description = "handle plugins",
mixinStandardHelpOptions = true,
subcommands = {
PluginInstallCommand.class,
PluginUninstallCommand.class,
PluginListCommand.class,
PluginDocCommand.class,
PluginSearchCommand.class
PluginDocCommand.class
}
)
@Slf4j
public class PluginCommand extends AbstractCommand {
@SneakyThrows
@Override
public Integer call() throws Exception {
super.call();
PicocliRunner.call(App.class, "plugins", "--help");
PicocliRunner.call(App.class, "plugins", "--help");
return 0;
}
@Override
protected boolean loadExternalPlugins() {
return false;
}
}

View File

@@ -1,11 +1,10 @@
package io.kestra.cli.commands.plugins;
import com.google.common.io.Files;
import com.google.common.base.Charsets;
import io.kestra.cli.AbstractCommand;
import io.kestra.core.docs.DocumentationGenerator;
import io.kestra.core.plugins.PluginRegistry;
import io.kestra.core.plugins.PluginScanner;
import io.kestra.core.plugins.RegisteredPlugin;
import io.kestra.core.serializers.JacksonMapper;
import io.micronaut.context.ApplicationContext;
import jakarta.inject.Inject;
import picocli.CommandLine;
@@ -15,20 +14,19 @@ import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Base64;
import java.util.List;
import static io.kestra.core.models.Plugin.isDeprecated;
@CommandLine.Command(
name = "doc",
description = "Generate documentation for all plugins currently installed"
description = "write documentation for all plugins currently installed"
)
public class PluginDocCommand extends AbstractCommand {
@Inject
private ApplicationContext applicationContext;
@CommandLine.Parameters(index = "0", description = "Path to write documentation files")
@CommandLine.Parameters(index = "0", description = "Path to write documentations files")
private Path output = Paths.get(System.getProperty("user.dir"), "docs");
@CommandLine.Option(names = {"--core"}, description = "Also write core tasks docs files")
@@ -37,86 +35,59 @@ public class PluginDocCommand extends AbstractCommand {
@CommandLine.Option(names = {"--icons"}, description = "Also write icon for each task")
private boolean icons = false;
@CommandLine.Option(names = {"--schema"}, description = "Also write JSON Schema for each task")
private boolean schema = false;
@CommandLine.Option(names = {"--skip-deprecated"},description = "Skip deprecated plugins when generating documentations")
private boolean skipDeprecated = false;
@Override
public Integer call() throws Exception {
super.call();
PluginScanner pluginScanner = new PluginScanner(PluginDocCommand.class.getClassLoader());
List<RegisteredPlugin> scan = new ArrayList<>(pluginScanner.scan(this.pluginsPath));
if (core) {
PluginScanner corePluginScanner = new PluginScanner(PluginDocCommand.class.getClassLoader());
scan.add(corePluginScanner.scan());
}
DocumentationGenerator documentationGenerator = applicationContext.getBean(DocumentationGenerator.class);
PluginRegistry registry = pluginRegistryProvider.get();
List<RegisteredPlugin> plugins = core ? registry.plugins() : registry.externalPlugins();
if (skipDeprecated) {
plugins = plugins.stream()
.filter(plugin -> !isDeprecated(plugin.getClass()))
.toList();
}
boolean hasFailures = false;
for (RegisteredPlugin registeredPlugin : scan) {
documentationGenerator
.generate(registeredPlugin)
.forEach(s -> {
File file = Paths.get(output.toAbsolutePath().toString(), s.getPath()).toFile();
for (RegisteredPlugin registeredPlugin : plugins) {
try {
documentationGenerator
.generate(registeredPlugin)
.forEach(s -> {
File file = Paths.get(output.toAbsolutePath().toString(), s.getPath()).toFile();
if (!file.getParentFile().exists()) {
//noinspection ResultOfMethodCallIgnored
file.getParentFile().mkdirs();
}
try {
Files
.asCharSink(
file,
StandardCharsets.UTF_8
).write(s.getBody());
stdOut("Generate doc in: {0}", file);
if (s.getIcon() != null && this.icons) {
File iconFile = new File(
file.getParent(),
file.getName().substring(0, file.getName().lastIndexOf(".")) + ".svg"
);
Files
.asByteSink(iconFile)
.write(Base64.getDecoder().decode(s.getIcon().getBytes(StandardCharsets.UTF_8)));
stdOut("Generate icon in: {0}", iconFile);
}
if (this.schema && s.getSchema() != null) {
File jsonSchemaFile = new File(
file.getParent(),
file.getName().substring(0, file.getName().lastIndexOf(".")) + ".json"
);
Files
.asByteSink(jsonSchemaFile)
.write(JacksonMapper.ofJson().writeValueAsBytes(s.getSchema()));
stdOut("Generate json schema in: {0}", jsonSchemaFile);
}
} catch (IOException e) {
throw new RuntimeException(e);
}
if (!file.getParentFile().exists()) {
//noinspection ResultOfMethodCallIgnored
file.getParentFile().mkdirs();
}
);
} catch (Error e) {
stdErr("Failure to generate documentation for plugin {0}: {1}", registeredPlugin.name(), e);
hasFailures = true;
}
try {
com.google.common.io.Files
.asCharSink(
file,
Charsets.UTF_8
).write(s.getBody());
stdOut("Generate doc in: {0}", file);
if (s.getIcon() != null && this.icons) {
File iconFile = new File(
file.getParent(),
file.getName().substring(0, file.getName().lastIndexOf(".")) + ".svg"
);
com.google.common.io.Files
.asByteSink(iconFile)
.write(Base64.getDecoder().decode(s.getIcon().getBytes(StandardCharsets.UTF_8)));
stdOut("Generate icon in: {0}", iconFile);
}
stdOut("Generate doc in: {0}", file);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
);
}
return hasFailures ? 1 : 0;
}
/** {@inheritDoc} **/
@Override
protected boolean isPluginManagerEnabled() {
return false;
return 0;
}
}
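The documentation generator above writes the markdown body for each plugin and, when --icons is set, decodes a Base64-encoded SVG next to it with the same base name. A standalone sketch of that icon-writing step using only the JDK (the Guava sink used in the diff is swapped for java.nio.file.Files here; the paths and sample payload are made up):

import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Base64;

// Sketch: decode a Base64 icon string and write it next to the generated doc file.
public class IconWriteSketch {
    public static void main(String[] args) throws Exception {
        // Stand-in for the Base64 icon returned by the doc generator.
        String base64Svg = Base64.getEncoder()
            .encodeToString("<svg xmlns=\"http://www.w3.org/2000/svg\"/>".getBytes(StandardCharsets.UTF_8));

        Path docFile = Path.of("docs", "io.kestra.plugin.example.md");
        Files.createDirectories(docFile.getParent());

        // Same base name as the doc file, with a .svg extension.
        String baseName = docFile.getFileName().toString();
        baseName = baseName.substring(0, baseName.lastIndexOf('.'));
        Path iconFile = docFile.resolveSibling(baseName + ".svg");

        Files.write(iconFile, Base64.getDecoder().decode(base64Svg));
        System.out.println("Generate icon in: " + iconFile);
    }
}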

View File

@@ -1,137 +1,100 @@
package io.kestra.cli.commands.plugins;
import io.kestra.core.contexts.MavenPluginRepositoryConfig;
import io.kestra.core.exceptions.KestraRuntimeException;
import io.kestra.core.plugins.LocalPluginManager;
import io.kestra.core.plugins.MavenPluginDownloader;
import io.kestra.core.plugins.PluginArtifact;
import io.kestra.core.plugins.PluginCatalogService;
import io.kestra.core.plugins.PluginManager;
import io.micronaut.http.client.HttpClient;
import io.micronaut.http.client.annotation.Client;
import io.micronaut.http.uri.UriBuilder;
import org.apache.commons.io.FilenameUtils;
import io.kestra.cli.AbstractCommand;
import io.kestra.cli.plugins.PluginDownloader;
import io.kestra.cli.plugins.RepositoryConfig;
import io.kestra.core.utils.IdUtils;
import jakarta.inject.Provider;
import org.apache.http.client.utils.URIBuilder;
import picocli.CommandLine;
import java.net.URI;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import jakarta.inject.Inject;
import picocli.CommandLine.Command;
import picocli.CommandLine.Parameters;
import picocli.CommandLine.Option;
import picocli.CommandLine.Spec;
@Command(
import static io.kestra.core.utils.Rethrow.throwConsumer;
@CommandLine.Command(
name = "install",
description = "Install plugins"
description = "install a plugin"
)
public class PluginInstallCommand extends AbstractCommand {
@Option(names = {"--locally"}, description = "Specifies if plugins must be installed locally. If set to false the installation depends on your Kestra configuration.")
boolean locally = true;
@Option(names = {"--all"}, description = "Install all available plugins")
boolean all = false;
@Parameters(index = "0..*", description = "Plugins to install. Represented as Maven artifact coordinates (i.e., <groupId>:<artifactId>:(<version>|LATEST)")
@CommandLine.Parameters(index = "0..*", description = "the plugins to install")
List<String> dependencies = new ArrayList<>();
@Option(names = {"--repositories"}, description = "URL to additional Maven repositories")
@CommandLine.Option(names = {"--repositories"}, description = "url to additional maven repositories")
private URI[] repositories;
@Spec
@CommandLine.Spec
CommandLine.Model.CommandSpec spec;
@Inject
Provider<MavenPluginDownloader> mavenPluginRepositoryProvider;
@Inject
Provider<PluginCatalogService> pluginCatalogService;
private PluginDownloader pluginDownloader;
@Override
public Integer call() throws Exception {
super.call();
if (this.locally && this.pluginsPath == null) {
if (this.pluginsPath == null) {
throw new CommandLine.ParameterException(this.spec.commandLine(), "Missing required options '--plugins' " +
"or environment variable 'KESTRA_PLUGINS_PATH"
);
}
List<MavenPluginRepositoryConfig> repositoryConfigs = List.of();
if (repositories != null) {
repositoryConfigs = Arrays.stream(repositories)
.map(uri -> {
MavenPluginRepositoryConfig.MavenPluginRepositoryConfigBuilder builder = MavenPluginRepositoryConfig
.builder()
.id(IdUtils.create());
String userInfo = uri.getUserInfo();
if (userInfo != null) {
String[] userInfoParts = userInfo.split(":");
builder = builder.basicAuth(new MavenPluginRepositoryConfig.BasicAuth(
userInfoParts[0],
userInfoParts[1]
));
}
builder.url(UriBuilder.of(uri).userInfo(null).build().toString());
return builder.build();
}).toList();
}
if (all) {
PluginCatalogService service = pluginCatalogService.get();
dependencies = service.get().stream().map(Objects::toString).toList();
}
if (dependencies.isEmpty()) {
stdErr("Error: No plugin to install.");
return CommandLine.ExitCode.OK;
}
final List<PluginArtifact> pluginArtifacts;
try {
pluginArtifacts = dependencies.stream().map(PluginArtifact::fromCoordinates).toList();
} catch (IllegalArgumentException e) {
stdErr(e.getMessage());
return CommandLine.ExitCode.USAGE;
}
try (final PluginManager pluginManager = getPluginManager()) {
List<PluginArtifact> installed;
if (all) {
installed = new ArrayList<>(pluginArtifacts.size());
for (PluginArtifact pluginArtifact : pluginArtifacts) {
try {
installed.add(pluginManager.install(pluginArtifact, repositoryConfigs, false, pluginsPath));
} catch (KestraRuntimeException e) {
String cause = e.getCause() != null ? e.getCause().getMessage() : e.getMessage();
stdErr("Failed to install plugin {0}. Cause: {1}", pluginArtifact, cause);
}
}
} else {
installed = pluginManager.install(pluginArtifacts, repositoryConfigs, false, pluginsPath);
if (!pluginsPath.toFile().exists()) {
if (!pluginsPath.toFile().mkdir()) {
throw new RuntimeException("Cannot create directory: " + pluginsPath.toFile().getAbsolutePath());
}
List<URI> uris = installed.stream().map(PluginArtifact::uri).toList();
stdOut("Successfully installed plugins {0} into {1}", dependencies, uris);
return CommandLine.ExitCode.OK;
}
}
private PluginManager getPluginManager() {
return locally ? new LocalPluginManager(mavenPluginRepositoryProvider.get()) : this.pluginManagerProvider.get();
}
if (repositories != null) {
Arrays.stream(repositories)
.forEach(throwConsumer(s -> {
URIBuilder uriBuilder = new URIBuilder(s);
@Override
protected boolean loadExternalPlugins() {
return false;
RepositoryConfig.RepositoryConfigBuilder builder = RepositoryConfig.builder()
.id(IdUtils.create())
.type("default");
if (uriBuilder.getUserInfo() != null) {
int index = uriBuilder.getUserInfo().indexOf(":");
builder.basicAuth(new RepositoryConfig.BasicAuth(
uriBuilder.getUserInfo().substring(0, index),
uriBuilder.getUserInfo().substring(index + 1)
));
uriBuilder.setUserInfo(null);
}
builder.url(uriBuilder.build().toString());
pluginDownloader.addRepository(builder.build());
}));
}
List<URL> resolveUrl = pluginDownloader.resolve(dependencies);
stdOut("Resolved Plugin(s) with {0}", resolveUrl);
for (URL url: resolveUrl) {
Files.copy(
Paths.get(url.toURI()),
Paths.get(pluginsPath.toString(), FilenameUtils.getName(url.toString())),
StandardCopyOption.REPLACE_EXISTING
);
}
stdOut("Successfully installed plugins {0} into {1}", dependencies, pluginsPath);
return 0;
}
}
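Both versions of the install command accept --repositories URIs that may carry basic-auth credentials in the user-info part; the credentials are split out and the stored repository URL is rebuilt without them. A minimal sketch of that handling with only java.net.URI (the host and credentials are illustrative, and the printed line just stands in for the real repository config object):

import java.net.URI;

// Sketch: extract basic-auth credentials from a repository URI and strip them
// from the URL that gets stored.
public class RepositoryUriSketch {
    public static void main(String[] args) throws Exception {
        URI uri = URI.create("https://user:s3cret@repo.example.com/maven");

        String username = null;
        String password = null;
        String userInfo = uri.getUserInfo();          // "user:s3cret"
        if (userInfo != null) {
            int idx = userInfo.indexOf(':');
            username = userInfo.substring(0, idx);
            password = userInfo.substring(idx + 1);
        }

        // Rebuild the URI without the credentials before storing it.
        URI stripped = new URI(uri.getScheme(), null, uri.getHost(), uri.getPort(),
            uri.getPath(), uri.getQuery(), uri.getFragment());

        System.out.println("url=" + stripped + " username=" + username + " password=" + password);
    }
}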

View File

@@ -1,31 +1,24 @@
package io.kestra.cli.commands.plugins;
import io.kestra.cli.AbstractCommand;
import io.kestra.core.plugins.PluginRegistry;
import io.kestra.core.contexts.KestraClassLoader;
import io.kestra.core.plugins.PluginScanner;
import io.kestra.core.plugins.RegisteredPlugin;
import jakarta.inject.Inject;
import jakarta.inject.Provider;
import picocli.CommandLine;
import picocli.CommandLine.Command;
import picocli.CommandLine.Option;
import picocli.CommandLine.Spec;
import java.util.List;
@Command(
@CommandLine.Command(
name = "list",
description = "List all plugins already installed"
description = "list all plugins already installed"
)
public class PluginListCommand extends AbstractCommand {
@Spec
@CommandLine.Spec
CommandLine.Model.CommandSpec spec;
@Option(names = {"--core"}, description = "Also write core tasks plugins")
@CommandLine.Option(names = {"--core"}, description = "Also write core tasks plugins")
private boolean core = false;
@Inject
private PluginRegistry registry;
@Override
public Integer call() throws Exception {
super.call();
@@ -36,9 +29,15 @@ public class PluginListCommand extends AbstractCommand {
);
}
List<RegisteredPlugin> plugins = core ? registry.plugins() : registry.externalPlugins();
PluginScanner pluginScanner = new PluginScanner(KestraClassLoader.instance());
List<RegisteredPlugin> scan = pluginScanner.scan(this.pluginsPath);
plugins.forEach(registeredPlugin -> stdOut(registeredPlugin.toString()));
if (core) {
PluginScanner corePluginScanner = new PluginScanner(PluginDocCommand.class.getClassLoader());
scan.add(corePluginScanner.scan());
}
scan.forEach(registeredPlugin -> stdOut(registeredPlugin.toString()));
return 0;
}

View File

@@ -1,149 +0,0 @@
package io.kestra.cli.commands.plugins;
import io.kestra.cli.AbstractCommand;
import io.micronaut.core.type.Argument;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.client.HttpClient;
import io.micronaut.http.client.annotation.Client;
import jakarta.inject.Inject;
import picocli.CommandLine.Command;
import picocli.CommandLine.Parameters;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.ArrayList;
import java.util.List;
@Command(
name = "search",
description = "Search for available Kestra plugins"
)
public class PluginSearchCommand extends AbstractCommand {
@Inject
@Client("api")
private HttpClient httpClient;
private static final ObjectMapper MAPPER = new ObjectMapper();
private static final char SPACE = ' ';
@Parameters(index = "0", description = "Search term (optional)", defaultValue = "")
private String searchTerm;
@Override
public Integer call() throws Exception {
super.call();
try {
JsonNode root = fetchPlugins();
List<PluginInfo> plugins = findPlugins(root);
printResults(plugins);
return 0;
} catch (Exception e) {
stdOut("Error processing plugins: {0}", e.getMessage());
return 1;
}
}
private JsonNode fetchPlugins() throws Exception {
String response = httpClient.toBlocking()
.retrieve(
HttpRequest.GET("/v1/plugins")
.header("Accept", "application/json")
);
return MAPPER.readTree(response);
}
private List<PluginInfo> findPlugins(JsonNode root) {
String searchTermLower = searchTerm.toLowerCase();
List<PluginInfo> plugins = new ArrayList<>();
for (JsonNode plugin : root) {
if (matchesSearch(plugin, searchTermLower)) {
plugins.add(new PluginInfo(
plugin.path("name").asText(),
plugin.path("title").asText(),
plugin.path("group").asText(),
plugin.path("version").asText("")
));
}
}
plugins.sort((p1, p2) -> p1.name.compareToIgnoreCase(p2.name));
return plugins;
}
private boolean matchesSearch(JsonNode plugin, String term) {
if (term.isEmpty()) {
return true;
}
return plugin.path("name").asText().toLowerCase().contains(term) ||
plugin.path("title").asText().toLowerCase().contains(term) ||
plugin.path("group").asText().toLowerCase().contains(term);
}
private void printResults(List<PluginInfo> plugins) {
if (plugins.isEmpty()) {
stdOut("No plugins found{0}",
searchTerm.isEmpty() ? "" : " matching '" + searchTerm + "'");
return;
}
stdOut("\nFound {0} plugins{1}",
plugins.size(),
searchTerm.isEmpty() ? "" : " matching '" + searchTerm + "'"
);
printPluginsTable(plugins);
}
private void printPluginsTable(List<PluginInfo> plugins) {
int maxName = 4, maxTitle = 5, maxGroup = 5;
for (PluginInfo plugin : plugins) {
maxName = Math.max(maxName, plugin.name.length());
maxTitle = Math.max(maxTitle, plugin.title.length());
maxGroup = Math.max(maxGroup, plugin.group.length());
}
StringBuilder namePad = new StringBuilder(maxName);
StringBuilder titlePad = new StringBuilder(maxTitle);
StringBuilder groupPad = new StringBuilder(maxGroup);
stdOut("");
printRow(namePad, titlePad, groupPad, "NAME", "TITLE", "GROUP", "VERSION",
maxName, maxTitle, maxGroup);
for (PluginInfo plugin : plugins) {
printRow(namePad, titlePad, groupPad, plugin.name, plugin.title, plugin.group, plugin.version,
maxName, maxTitle, maxGroup);
}
stdOut("");
}
private void printRow(StringBuilder namePad, StringBuilder titlePad, StringBuilder groupPad,
String name, String title, String group, String version,
int maxName, int maxTitle, int maxGroup) {
stdOut("{0} {1} {2} {3}",
pad(namePad, name, maxName),
pad(titlePad, title, maxTitle),
pad(groupPad, group, maxGroup),
version
);
}
private String pad(StringBuilder sb, String str, int length) {
sb.setLength(0);
sb.append(str);
while (sb.length() < length) {
sb.append(SPACE);
}
return sb.toString();
}
private record PluginInfo(String name, String title, String group, String version) {}
@Override
protected boolean loadExternalPlugins() {
return false;
}
}
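The removed search command renders its results as a fixed-width text table, reusing a StringBuilder to pad each cell to the widest value in its column. The same layout can be sketched with String.format padding; the plugin rows below are invented sample data:

// Sketch: fixed-width column padding equivalent to the pad() helper above,
// using String.format instead of a reusable StringBuilder.
public class TablePadSketch {
    static String pad(String value, int width) {
        return String.format("%-" + width + "s", value);
    }

    public static void main(String[] args) {
        String[][] rows = {
            {"NAME", "TITLE", "VERSION"},
            {"plugin-jdbc", "JDBC tasks", "0.16.2"},
            {"plugin-gcp", "Google Cloud tasks", "0.16.2"},
        };

        int nameWidth = 0;
        int titleWidth = 0;
        for (String[] row : rows) {
            nameWidth = Math.max(nameWidth, row[0].length());
            titleWidth = Math.max(titleWidth, row[1].length());
        }

        for (String[] row : rows) {
            System.out.println(pad(row[0], nameWidth) + "  " + pad(row[1], titleWidth) + "  " + row[2]);
        }
    }
}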

View File

@@ -1,69 +0,0 @@
package io.kestra.cli.commands.plugins;
import io.kestra.cli.AbstractCommand;
import io.kestra.core.plugins.LocalPluginManager;
import io.kestra.core.plugins.MavenPluginDownloader;
import io.kestra.core.plugins.PluginArtifact;
import io.kestra.core.plugins.PluginManager;
import jakarta.inject.Inject;
import jakarta.inject.Provider;
import picocli.CommandLine;
import picocli.CommandLine.Parameters;
import picocli.CommandLine.Spec;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;
@CommandLine.Command(
name = "uninstall",
description = "Uninstall plugins"
)
public class PluginUninstallCommand extends AbstractCommand {
@Parameters(index = "0..*", description = "The plugins to uninstall. Represented as Maven artifact coordinates (i.e., <groupId>:<artifactId>:(<version>|LATEST)")
List<String> dependencies = new ArrayList<>();
@Spec
CommandLine.Model.CommandSpec spec;
@Inject
Provider<MavenPluginDownloader> mavenPluginRepositoryProvider;
@Override
public Integer call() throws Exception {
super.call();
List<PluginArtifact> pluginArtifacts;
try {
pluginArtifacts = dependencies.stream().map(PluginArtifact::fromCoordinates).toList();
} catch (IllegalArgumentException e) {
stdErr(e.getMessage());
return CommandLine.ExitCode.USAGE;
}
final PluginManager pluginManager;
// If a PLUGIN_PATH is provided, then use the LocalPluginManager
if (pluginsPath != null) {
pluginManager = new LocalPluginManager(mavenPluginRepositoryProvider.get());
} else {
// Otherwise, we delegate to the configured plugin-manager.
pluginManager = this.pluginManagerProvider.get();
}
List<PluginArtifact> uninstalled = pluginManager.uninstall(
pluginArtifacts,
false,
pluginsPath
);
List<URI> uris = uninstalled.stream().map(PluginArtifact::uri).toList();
stdOut("Successfully uninstalled plugins {0} from {1}", dependencies, uris);
return CommandLine.ExitCode.OK;
}
@Override
protected boolean loadExternalPlugins() {
return false;
}
}

View File

@@ -1,29 +1,9 @@
package io.kestra.cli.commands.servers;
import io.kestra.cli.AbstractCommand;
import io.kestra.core.contexts.KestraContext;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
@Slf4j
public abstract class AbstractServerCommand extends AbstractCommand implements ServerCommandInterface {
@CommandLine.Option(names = {"--port"}, description = "The port to bind")
abstract public class AbstractServerCommand extends AbstractCommand implements ServerCommandInterface {
@CommandLine.Option(names = {"--port"}, description = "the port to bind")
Integer serverPort;
@Override
public Integer call() throws Exception {
log.info("Machine information: {} available cpu(s), {}MB max memory, Java version {}", Runtime.getRuntime().availableProcessors(), maxMemoryInMB(), Runtime.version());
this.shutdownHook(true, () -> KestraContext.getContext().shutdown());
return super.call();
}
private long maxMemoryInMB() {
return Runtime.getRuntime().maxMemory() / 1024 / 1024;
}
protected static int defaultWorkerThread() {
return Runtime.getRuntime().availableProcessors() * 8;
}
}
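The server base command above logs basic machine information at startup and derives the default worker-thread count as eight times the number of available processors. A tiny standalone sketch of those Runtime calls (the class name is illustrative):

// Sketch: the Runtime calls behind the startup log line and the default worker-thread count.
public class MachineInfoSketch {
    public static void main(String[] args) {
        int cpus = Runtime.getRuntime().availableProcessors();
        long maxMemoryMb = Runtime.getRuntime().maxMemory() / 1024 / 1024;

        System.out.printf("Machine information: %d available cpu(s), %dMB max memory, Java version %s%n",
            cpus, maxMemoryMb, Runtime.version());
        System.out.println("Default worker threads: " + (cpus * 8));
    }
}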

View File

@@ -1,13 +1,14 @@
package io.kestra.cli.commands.servers;
import com.google.common.collect.ImmutableMap;
import io.kestra.core.contexts.KestraContext;
import io.kestra.core.models.ServerType;
import io.kestra.core.runners.ExecutorInterface;
import io.kestra.executor.SkipExecutionService;
import io.kestra.core.services.StartExecutorService;
import io.kestra.core.services.SkipExecutionService;
import io.kestra.core.utils.Await;
import io.micronaut.context.ApplicationContext;
import jakarta.inject.Inject;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
import java.util.Collections;
@@ -16,8 +17,9 @@ import java.util.Map;
@CommandLine.Command(
name = "executor",
description = "Start the Kestra executor"
description = "start an executor"
)
@Slf4j
public class ExecutorCommand extends AbstractServerCommand {
@Inject
private ApplicationContext applicationContext;
@@ -25,27 +27,9 @@ public class ExecutorCommand extends AbstractServerCommand {
@Inject
private SkipExecutionService skipExecutionService;
@Inject
private StartExecutorService startExecutorService;
@CommandLine.Option(names = {"--skip-executions"}, split=",", description = "The list of execution identifiers to skip, separated by a coma; for troubleshooting purpose only")
@CommandLine.Option(names = {"--skip-executions"}, split=",", description = "a list of execution identifiers to skip, separated by a coma; for troubleshooting purpose only")
private List<String> skipExecutions = Collections.emptyList();
@CommandLine.Option(names = {"--skip-flows"}, split=",", description = "The list of flow identifiers (tenant|namespace|flowId) to skip, separated by a coma; for troubleshooting purpose only")
private List<String> skipFlows = Collections.emptyList();
@CommandLine.Option(names = {"--skip-namespaces"}, split=",", description = "The list of namespace identifiers (tenant|namespace) to skip, separated by a coma; for troubleshooting purpose only")
private List<String> skipNamespaces = Collections.emptyList();
@CommandLine.Option(names = {"--skip-tenants"}, split=",", description = "The list of tenants to skip, separated by a coma; for troubleshooting purpose only")
private List<String> skipTenants = Collections.emptyList();
@CommandLine.Option(names = {"--start-executors"}, split=",", description = "The list of Kafka Stream executors to start, separated by a command. Use it only with the Kafka queue, for debugging purpose.")
private List<String> startExecutors = Collections.emptyList();
@CommandLine.Option(names = {"--not-start-executors"}, split=",", description = "The list of Kafka Stream executors to not start, separated by a command. Use it only with the Kafka queue, for debugging purpose.")
private List<String> notStartExecutors = Collections.emptyList();
@SuppressWarnings("unused")
public static Map<String, Object> propertiesOverrides() {
return ImmutableMap.of(
@@ -56,17 +40,15 @@ public class ExecutorCommand extends AbstractServerCommand {
@Override
public Integer call() throws Exception {
this.skipExecutionService.setSkipExecutions(skipExecutions);
this.skipExecutionService.setSkipFlows(skipFlows);
this.skipExecutionService.setSkipNamespaces(skipNamespaces);
this.skipExecutionService.setSkipTenants(skipTenants);
this.startExecutorService.applyOptions(startExecutors, notStartExecutors);
super.call();
this.shutdownHook(() -> KestraContext.getContext().shutdown());
ExecutorInterface executorService = applicationContext.getBean(ExecutorInterface.class);
executorService.run();
log.info("Executor started");
Await.until(() -> !this.applicationContext.isRunning());
return 0;

View File

@@ -1,19 +1,22 @@
package io.kestra.cli.commands.servers;
import com.google.common.collect.ImmutableMap;
import io.kestra.core.contexts.KestraContext;
import io.kestra.core.models.ServerType;
import io.kestra.core.runners.Indexer;
import io.kestra.core.runners.IndexerInterface;
import io.kestra.core.utils.Await;
import io.micronaut.context.ApplicationContext;
import jakarta.inject.Inject;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
import java.util.Map;
@CommandLine.Command(
name = "indexer",
description = "Start the Kestra indexer"
description = "start an indexer"
)
@Slf4j
public class IndexerCommand extends AbstractServerCommand {
@Inject
private ApplicationContext applicationContext;
@@ -28,10 +31,13 @@ public class IndexerCommand extends AbstractServerCommand {
@Override
public Integer call() throws Exception {
super.call();
this.shutdownHook(() -> KestraContext.getContext().shutdown());
Indexer indexer = applicationContext.getBean(Indexer.class);
IndexerInterface indexer = applicationContext.getBean(IndexerInterface.class);
indexer.run();
log.info("Indexer started");
Await.until(() -> !this.applicationContext.isRunning());
return 0;

View File

@@ -12,7 +12,7 @@ import java.util.Map;
@CommandLine.Command(
name = "local",
description = "Start the local development server"
description = "start a local server"
)
public class LocalCommand extends StandAloneCommand {
// @FIXME: Keep it for bug in micronaut that need to have inject on top level command to inject on abstract classe
@@ -32,7 +32,7 @@ public class LocalCommand extends StandAloneCommand {
"kestra.queue.type", "h2",
"kestra.storage.type", "local",
"kestra.storage.local.base-path", data.toString(),
"datasources.h2.url", "jdbc:h2:file:" + data.resolve("database") + ";TIME ZONE=UTC;DB_CLOSE_DELAY=-1;DB_CLOSE_ON_EXIT=FALSE;LOCK_TIMEOUT=30000",
"datasources.h2.url", "jdbc:h2:file:" + data.resolve("database") + ";DB_CLOSE_DELAY=-1;DB_CLOSE_ON_EXIT=FALSE;LOCK_TIMEOUT=30000",
"datasources.h2.username", "sa",
"datasources.h2.password", "",
"datasources.h2.driverClassName", "org.h2.Driver",

View File

@@ -1,8 +1,9 @@
package io.kestra.cli.commands.servers;
import com.google.common.collect.ImmutableMap;
import io.kestra.core.contexts.KestraContext;
import io.kestra.core.models.ServerType;
import io.kestra.scheduler.AbstractScheduler;
import io.kestra.core.schedulers.AbstractScheduler;
import io.kestra.core.utils.Await;
import io.micronaut.context.ApplicationContext;
import jakarta.inject.Inject;
@@ -13,7 +14,7 @@ import java.util.Map;
@CommandLine.Command(
name = "scheduler",
description = "Start the Kestra scheduler"
description = "start an scheduler"
)
@Slf4j
public class SchedulerCommand extends AbstractServerCommand {
@@ -30,10 +31,12 @@ public class SchedulerCommand extends AbstractServerCommand {
@Override
public Integer call() throws Exception {
super.call();
this.shutdownHook(() -> KestraContext.getContext().shutdown());
AbstractScheduler scheduler = applicationContext.getBean(AbstractScheduler.class);
scheduler.run();
log.info("Scheduler started");
Await.until(() -> !this.applicationContext.isRunning());
return 0;

View File

@@ -9,7 +9,7 @@ import picocli.CommandLine;
@CommandLine.Command(
name = "server",
description = "Manage servers",
description = "handle servers",
mixinStandardHelpOptions = true,
subcommands = {
ExecutorCommand.class,

View File

@@ -1,18 +1,15 @@
package io.kestra.cli.commands.servers;
import com.google.common.collect.ImmutableMap;
import io.kestra.cli.services.FileChangedEventListener;
import io.kestra.cli.services.TenantIdSelectorService;
import io.kestra.core.contexts.KestraContext;
import io.kestra.core.models.ServerType;
import io.kestra.core.repositories.LocalFlowRepositoryLoader;
import io.kestra.cli.StandAloneRunner;
import io.kestra.executor.SkipExecutionService;
import io.kestra.core.services.StartExecutorService;
import io.kestra.core.runners.StandAloneRunner;
import io.kestra.core.services.SkipExecutionService;
import io.kestra.core.utils.Await;
import io.micronaut.context.ApplicationContext;
import jakarta.annotation.Nullable;
import jakarta.inject.Inject;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
import java.io.File;
@@ -23,8 +20,9 @@ import java.util.Map;
@CommandLine.Command(
name = "standalone",
description = "Start the standalone all-in-one server"
description = "start a standalone server"
)
@Slf4j
public class StandAloneCommand extends AbstractServerCommand {
@CommandLine.Spec
CommandLine.Model.CommandSpec spec;
@@ -35,51 +33,15 @@ public class StandAloneCommand extends AbstractServerCommand {
@Inject
private SkipExecutionService skipExecutionService;
@Inject
private StartExecutorService startExecutorService;
@Inject
@Nullable
private FileChangedEventListener fileWatcher;
@CommandLine.Option(names = {"-f", "--flow-path"}, description = "the flow path containing flow to inject at startup (when running with a memory flow repository)")
private File flowPath;
@CommandLine.Option(names = "--tenant", description = "Tenant identifier, Required to load flows from path with the enterprise edition")
private String tenantId;
@CommandLine.Option(names = {"--worker-thread"}, description = "the number of worker threads, defaults to eight times the number of available processors. Set it to 0 to avoid starting a worker.")
private int workerThread = defaultWorkerThread();
@CommandLine.Option(names = {"--worker-thread"}, description = "the number of worker thread")
private Integer workerThread;
@CommandLine.Option(names = {"--skip-executions"}, split=",", description = "a list of execution identifiers to skip, separated by a coma; for troubleshooting purpose only")
private List<String> skipExecutions = Collections.emptyList();
@CommandLine.Option(names = {"--skip-flows"}, split=",", description = "a list of flow identifiers (namespace.flowId) to skip, separated by a coma; for troubleshooting purpose only")
private List<String> skipFlows = Collections.emptyList();
@CommandLine.Option(names = {"--skip-namespaces"}, split=",", description = "a list of namespace identifiers (tenant|namespace) to skip, separated by a coma; for troubleshooting purpose only")
private List<String> skipNamespaces = Collections.emptyList();
@CommandLine.Option(names = {"--skip-tenants"}, split=",", description = "a list of tenants to skip, separated by a coma; for troubleshooting purpose only")
private List<String> skipTenants = Collections.emptyList();
@CommandLine.Option(names = {"--no-tutorials"}, description = "Flag to disable auto-loading of tutorial flows.")
boolean tutorialsDisabled = false;
@CommandLine.Option(names = {"--start-executors"}, split=",", description = "a list of Kafka Stream executors to start, separated by a command. Use it only with the Kafka queue, for debugging purpose.")
private List<String> startExecutors = Collections.emptyList();
@CommandLine.Option(names = {"--not-start-executors"}, split=",", description = "a list of Kafka Stream executors to not start, separated by a command. Use it only with the Kafka queue, for debugging purpose.")
private List<String> notStartExecutors = Collections.emptyList();
@CommandLine.Option(names = {"--no-indexer"}, description = "Flag to disable starting an embedded indexer.")
boolean indexerDisabled = false;
@Override
public boolean isFlowAutoLoadEnabled() {
return !tutorialsDisabled;
}
@SuppressWarnings("unused")
public static Map<String, Object> propertiesOverrides() {
return ImmutableMap.of(
@@ -90,46 +52,30 @@ public class StandAloneCommand extends AbstractServerCommand {
@Override
public Integer call() throws Exception {
this.skipExecutionService.setSkipExecutions(skipExecutions);
this.skipExecutionService.setSkipFlows(skipFlows);
this.skipExecutionService.setSkipNamespaces(skipNamespaces);
this.skipExecutionService.setSkipTenants(skipTenants);
this.startExecutorService.applyOptions(startExecutors, notStartExecutors);
KestraContext.getContext().injectWorkerConfigs(workerThread, null);
super.call();
this.shutdownHook(() -> KestraContext.getContext().shutdown());
if (flowPath != null) {
try {
LocalFlowRepositoryLoader localFlowRepositoryLoader = applicationContext.getBean(LocalFlowRepositoryLoader.class);
TenantIdSelectorService tenantIdSelectorService = applicationContext.getBean(TenantIdSelectorService.class);
localFlowRepositoryLoader.load(tenantIdSelectorService.getTenantId(this.tenantId), this.flowPath);
localFlowRepositoryLoader.load(this.flowPath, true);
} catch (IOException e) {
throw new CommandLine.ParameterException(this.spec.commandLine(), "Invalid flow path", e);
}
}
try (StandAloneRunner standAloneRunner = applicationContext.getBean(StandAloneRunner.class)) {
StandAloneRunner standAloneRunner = applicationContext.getBean(StandAloneRunner.class);
if (this.workerThread == 0) {
standAloneRunner.setWorkerEnabled(false);
} else {
standAloneRunner.setWorkerThread(this.workerThread);
}
if (this.indexerDisabled) {
standAloneRunner.setIndexerEnabled(false);
}
standAloneRunner.run();
if (fileWatcher != null) {
fileWatcher.startListeningFromConfig();
}
Await.until(() -> !this.applicationContext.isRunning());
if (this.workerThread != null && this.workerThread == 0) {
standAloneRunner.setWorkerEnabled(false);
} else if (this.workerThread != null) {
standAloneRunner.setWorkerThread(this.workerThread);
}
standAloneRunner.run();
Await.until(() -> !this.applicationContext.isRunning());
return 0;
}
}

View File

@@ -1,44 +1,25 @@
package io.kestra.cli.commands.servers;
import com.google.common.collect.ImmutableMap;
import io.kestra.core.contexts.KestraContext;
import io.kestra.core.models.ServerType;
import io.kestra.core.runners.Indexer;
import io.kestra.core.utils.Await;
import io.kestra.core.utils.ExecutorsUtils;
import io.micronaut.context.ApplicationContext;
import jakarta.inject.Inject;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
import picocli.CommandLine.Option;
import java.util.Map;
import java.util.concurrent.ExecutorService;
@CommandLine.Command(
name = "webserver",
description = "Start the Kestra webserver"
description = "start the webserver"
)
@Slf4j
public class WebServerCommand extends AbstractServerCommand {
private ExecutorService poolExecutor;
@Inject
private ApplicationContext applicationContext;
@Inject
private ExecutorsUtils executorsUtils;
@Option(names = {"--no-tutorials"}, description = "Flag to disable auto-loading of tutorial flows.")
boolean tutorialsDisabled = false;
@Option(names = {"--no-indexer"}, description = "Flag to disable starting an embedded indexer.")
boolean indexerDisabled = false;
@Override
public boolean isFlowAutoLoadEnabled() {
return !tutorialsDisabled;
}
@SuppressWarnings("unused")
public static Map<String, Object> propertiesOverrides() {
return ImmutableMap.of(
@@ -49,16 +30,8 @@ public class WebServerCommand extends AbstractServerCommand {
@Override
public Integer call() throws Exception {
super.call();
// start the indexer
if (!indexerDisabled) {
log.info("Starting an embedded indexer, this can be disabled by using `--no-indexer`.");
poolExecutor = executorsUtils.cachedThreadPool("webserver-indexer");
poolExecutor.execute(applicationContext.getBean(Indexer.class));
shutdownHook(false, () -> poolExecutor.shutdown());
}
log.info("Webserver started");
this.shutdownHook(() -> KestraContext.getContext().shutdown());
Await.until(() -> !this.applicationContext.isRunning());
return 0;
}

View File

@@ -7,25 +7,26 @@ import io.kestra.core.runners.Worker;
import io.kestra.core.utils.Await;
import io.micronaut.context.ApplicationContext;
import jakarta.inject.Inject;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
import picocli.CommandLine.Option;
import java.util.Map;
import java.util.UUID;
@CommandLine.Command(
name = "worker",
description = "Start the Kestra worker"
description = "start a worker"
)
@Slf4j
public class WorkerCommand extends AbstractServerCommand {
@Inject
private ApplicationContext applicationContext;
@Option(names = {"-t", "--thread"}, description = "The max number of worker threads, defaults to eight times the number of available processors")
private int thread = defaultWorkerThread();
@CommandLine.Option(names = {"-t", "--thread"}, description = "the max number of concurrent threads to launch")
private int thread = Runtime.getRuntime().availableProcessors() * 2;
@Option(names = {"-g", "--worker-group"}, description = "The worker group key, must match the regex [a-zA-Z0-9_-]+ (EE only)")
@CommandLine.Option(names = {"-g", "--worker-group"}, description = "the worker group key, must match the regex [a-zA-Z0-9_-]+ (EE only)")
private String workerGroupKey = null;
@SuppressWarnings("unused")
@@ -37,11 +38,8 @@ public class WorkerCommand extends AbstractServerCommand {
@Override
public Integer call() throws Exception {
KestraContext.getContext().injectWorkerConfigs(thread, workerGroupKey);
super.call();
this.shutdownHook(() -> KestraContext.getContext().shutdown());
if (this.workerGroupKey != null && !this.workerGroupKey.matches("[a-zA-Z0-9_-]+")) {
throw new IllegalArgumentException("The --worker-group option must match the [a-zA-Z0-9_-]+ pattern");
}
@@ -53,12 +51,15 @@ public class WorkerCommand extends AbstractServerCommand {
worker.run();
if (this.workerGroupKey != null) {
log.info("Worker started with {} thread(s) in group '{}'", this.thread, this.workerGroupKey);
}
else {
log.info("Worker started with {} thread(s)", this.thread);
}
Await.until(() -> !this.applicationContext.isRunning());
return 0;
}
public String workerGroupKey() {
return workerGroupKey;
}
}
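The worker command rejects any --worker-group key that does not match [a-zA-Z0-9_-]+ before starting. A minimal sketch of that validation (the helper method and sample keys are illustrative):

// Sketch: the worker-group key validation applied before the worker starts.
public class WorkerGroupKeySketch {
    static boolean isValidWorkerGroupKey(String key) {
        return key != null && key.matches("[a-zA-Z0-9_-]+");
    }

    public static void main(String[] args) {
        System.out.println(isValidWorkerGroupKey("gpu-workers_01")); // true
        System.out.println(isValidWorkerGroupKey("gpu workers"));    // false: space is not allowed
    }
}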

View File

@@ -2,7 +2,6 @@ package io.kestra.cli.commands.sys;
import io.kestra.cli.AbstractCommand;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.micronaut.context.ApplicationContext;
import jakarta.inject.Inject;
@@ -10,11 +9,10 @@ import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
import java.util.List;
import java.util.Objects;
@CommandLine.Command(
name = "reindex",
description = "Reindex all records of a type: read them from the database then update them",
description = "reindex all records of a type: read them from the database then update them",
mixinStandardHelpOptions = true
)
@Slf4j
@@ -35,8 +33,8 @@ public class ReindexCommand extends AbstractCommand {
List<Flow> allFlow = flowRepository.findAllForAllTenants();
allFlow.stream()
.map(flow -> flowRepository.findByIdWithSource(flow.getTenantId(), flow.getNamespace(), flow.getId()).orElse(null))
.filter(Objects::nonNull)
.forEach(flow -> flowRepository.update(GenericFlow.of(flow), flow));
.filter(flow -> flow != null)
.forEach(flow -> flowRepository.update(flow.toFlow(), flow.toFlow(), flow.getSource(), flow.toFlow()));
stdOut("Successfully reindex " + allFlow.size() + " flow(s).");
}

View File

@@ -15,8 +15,6 @@ import picocli.CommandLine;
import java.util.Optional;
import static io.kestra.core.utils.Rethrow.throwConsumer;
@CommandLine.Command(
name = "submit-queued-execution",
description = {"Submit all queued execution to the executor",
@@ -51,7 +49,7 @@ public class SubmitQueuedCommand extends AbstractCommand {
var executionQueuedStorage = applicationContext.getBean(AbstractJdbcExecutionQueuedStorage.class);
for (ExecutionQueued queued : executionQueuedStorage.getAllForAllTenants()) {
executionQueuedStorage.pop(queued.getTenantId(), queued.getNamespace(), queued.getFlowId(), throwConsumer(execution -> executionQueue.emit(execution.withState(State.Type.CREATED))));
executionQueuedStorage.pop(queued.getTenantId(), queued.getNamespace(), queued.getFlowId(), execution -> executionQueue.emit(execution.withState(State.Type.CREATED)));
cpt++;
}
}

View File

@@ -1,7 +1,6 @@
package io.kestra.cli.commands.sys;
import io.kestra.cli.commands.sys.database.DatabaseCommand;
import io.kestra.cli.commands.sys.statestore.StateStoreCommand;
import io.micronaut.configuration.picocli.PicocliRunner;
import lombok.extern.slf4j.Slf4j;
import io.kestra.cli.AbstractCommand;
@@ -10,13 +9,12 @@ import picocli.CommandLine;
@CommandLine.Command(
name = "sys",
description = "Manage system maintenance mode",
description = "handle systems maintenance",
mixinStandardHelpOptions = true,
subcommands = {
ReindexCommand.class,
DatabaseCommand.class,
SubmitQueuedCommand.class,
StateStoreCommand.class
SubmitQueuedCommand.class
}
)
@Slf4j

View File

@@ -8,7 +8,7 @@ import picocli.CommandLine;
@CommandLine.Command(
name = "database",
description = "Manage Kestra database",
description = "manage Kestra database",
mixinStandardHelpOptions = true,
subcommands = {
DatabaseMigrateCommand.class,

View File

@@ -1,5 +1,6 @@
package io.kestra.cli.commands.sys.database;
import ch.qos.logback.classic.Level;
import io.kestra.cli.AbstractCommand;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
@@ -13,6 +14,13 @@ import java.util.Map;
)
@Slf4j
public class DatabaseMigrateCommand extends AbstractCommand {
static {
// Force enable Flyway logs
ch.qos.logback.classic.Logger flywayLogger = (ch.qos.logback.classic.Logger) org.slf4j.LoggerFactory.getLogger("org.flywaydb");
flywayLogger.setLevel(Level.INFO);
}
@Override
public Integer call() throws Exception {
// Flyway will run automatically

View File

@@ -1,27 +0,0 @@
package io.kestra.cli.commands.sys.statestore;
import io.kestra.cli.AbstractCommand;
import io.kestra.cli.App;
import io.micronaut.configuration.picocli.PicocliRunner;
import lombok.SneakyThrows;
import picocli.CommandLine;
@CommandLine.Command(
name = "state-store",
description = "Manage Kestra State Store",
mixinStandardHelpOptions = true,
subcommands = {
StateStoreMigrateCommand.class,
}
)
public class StateStoreCommand extends AbstractCommand {
@SneakyThrows
@Override
public Integer call() throws Exception {
super.call();
PicocliRunner.call(App.class, "sys", "state-store", "--help");
return 0;
}
}

View File

@@ -1,81 +0,0 @@
package io.kestra.cli.commands.sys.statestore;
import io.kestra.cli.AbstractCommand;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.runners.RunContext;
import io.kestra.core.runners.RunContextFactory;
import io.kestra.core.storages.StateStore;
import io.kestra.core.storages.StorageInterface;
import io.kestra.core.utils.Slugify;
import io.micronaut.context.ApplicationContext;
import jakarta.inject.Inject;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Stream;
@CommandLine.Command(
name = "migrate",
description = "Migrate old state store files to use the new KV Store implementation.",
mixinStandardHelpOptions = true
)
@Slf4j
public class StateStoreMigrateCommand extends AbstractCommand {
@Inject
private ApplicationContext applicationContext;
@Override
public Integer call() throws Exception {
super.call();
FlowRepositoryInterface flowRepository = this.applicationContext.getBean(FlowRepositoryInterface.class);
StorageInterface storageInterface = this.applicationContext.getBean(StorageInterface.class);
RunContextFactory runContextFactory = this.applicationContext.getBean(RunContextFactory.class);
flowRepository.findAllForAllTenants().stream().map(flow -> Map.entry(flow, List.of(
URI.create("/" + flow.getNamespace().replace(".", "/") + "/" + Slugify.of(flow.getId()) + "/states"),
URI.create("/" + flow.getNamespace().replace(".", "/") + "/states")
))).map(potentialStateStoreUrisForAFlow -> Map.entry(potentialStateStoreUrisForAFlow.getKey(), potentialStateStoreUrisForAFlow.getValue().stream().flatMap(uri -> {
try {
return storageInterface.allByPrefix(potentialStateStoreUrisForAFlow.getKey().getTenantId(), potentialStateStoreUrisForAFlow.getKey().getNamespace(), uri, false).stream();
} catch (IOException e) {
return Stream.empty();
}
}).toList())).forEach(stateStoreFileUrisForAFlow -> stateStoreFileUrisForAFlow.getValue().forEach(stateStoreFileUri -> {
Flow flow = stateStoreFileUrisForAFlow.getKey();
String[] flowQualifierWithStateQualifiers = stateStoreFileUri.getPath().split("/states/");
String[] statesUriPart = flowQualifierWithStateQualifiers[1].split("/");
String stateName = statesUriPart[0];
String taskRunValue = statesUriPart.length > 2 ? statesUriPart[1] : null;
String stateSubName = statesUriPart[statesUriPart.length - 1];
boolean flowScoped = flowQualifierWithStateQualifiers[0].endsWith("/" + flow.getId());
StateStore stateStore = new StateStore(runContext(runContextFactory, flow), false);
try (InputStream is = storageInterface.get(flow.getTenantId(), flow.getNamespace(), stateStoreFileUri)) {
stateStore.putState(flowScoped, stateName, stateSubName, taskRunValue, is.readAllBytes());
storageInterface.delete(flow.getTenantId(), flow.getNamespace(), stateStoreFileUri);
} catch (IOException e) {
throw new RuntimeException(e);
}
}));
stdOut("Successfully ran the state-store migration.");
return 0;
}
private RunContext runContext(RunContextFactory runContextFactory, Flow flow) {
Map<String, String> flowVariables = new HashMap<>();
flowVariables.put("tenantId", flow.getTenantId());
flowVariables.put("id", flow.getId());
flowVariables.put("namespace", flow.getNamespace());
return runContextFactory.of(flow, Map.of("flow", flowVariables));
}
}
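The migration above decomposes each legacy state-store URI: the segment after "/states/" yields the state name, an optional task-run value, and the sub-name, while the prefix tells whether the state was scoped to the flow. A standalone parsing sketch on a made-up path (the flow id and URI are illustrative):

// Sketch: how a legacy state-store URI is decomposed during the migration.
public class StateUriParseSketch {
    public static void main(String[] args) {
        String flowId = "my-flow";
        String path = "/company/team/my-flow/states/mystate/2024-04-22/data.ion";

        String[] parts = path.split("/states/");
        String[] stateParts = parts[1].split("/");

        String stateName = stateParts[0];                                   // "mystate"
        String taskRunValue = stateParts.length > 2 ? stateParts[1] : null; // "2024-04-22"
        String stateSubName = stateParts[stateParts.length - 1];            // "data.ion"
        boolean flowScoped = parts[0].endsWith("/" + flowId);               // true

        System.out.println(stateName + " " + taskRunValue + " " + stateSubName + " flowScoped=" + flowScoped);
    }
}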

View File

@@ -11,7 +11,7 @@ import picocli.CommandLine;
@CommandLine.Command(
name = "template",
description = "Manage templates",
description = "handle templates",
mixinStandardHelpOptions = true,
subcommands = {
TemplateNamespaceCommand.class,

View File

@@ -1,9 +1,8 @@
package io.kestra.cli.commands.templates;
import io.kestra.cli.AbstractApiCommand;
import io.kestra.cli.AbstractValidateCommand;
import io.kestra.cli.services.TenantIdSelectorService;
import io.kestra.core.models.templates.TemplateEnabled;
import io.micronaut.context.ApplicationContext;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.HttpResponse;
import io.micronaut.http.MediaType;
@@ -19,7 +18,7 @@ import java.nio.file.Path;
@CommandLine.Command(
name = "export",
description = "Export templates to a ZIP file",
description = "export templates to a zip file",
mixinStandardHelpOptions = true
)
@Slf4j
@@ -27,13 +26,14 @@ import java.nio.file.Path;
public class TemplateExportCommand extends AbstractApiCommand {
private static final String DEFAULT_FILE_NAME = "templates.zip";
// @FIXME: Keep it for bug in micronaut that need to have inject on top level command to inject on abstract classe
@Inject
private TenantIdSelectorService tenantService;
private ApplicationContext applicationContext;
@CommandLine.Option(names = {"--namespace"}, description = "The namespace of templates to export")
@CommandLine.Option(names = {"--namespace"}, description = "the namespace of templates to export")
public String namespace;
@CommandLine.Parameters(index = "0", description = "The directory to export the file to")
@CommandLine.Parameters(index = "0", description = "the directory to export the file to")
public Path directory;
@Override
@@ -42,7 +42,7 @@ public class TemplateExportCommand extends AbstractApiCommand {
try(DefaultHttpClient client = client()) {
MutableHttpRequest<Object> request = HttpRequest
.GET(apiUri("/templates/export/by-query", tenantService.getTenantId(tenantId)) + (namespace != null ? "?namespace=" + namespace : ""))
.GET(apiUri("/templates/export/by-query") + (namespace != null ? "?namespace=" + namespace : ""))
.accept(MediaType.APPLICATION_OCTET_STREAM);
HttpResponse<byte[]> response = client.toBlocking().exchange(this.requestOptions(request), byte[].class);
@@ -52,7 +52,7 @@ public class TemplateExportCommand extends AbstractApiCommand {
stdOut("Exporting template(s) for namespace '" + namespace + "' successfully done !");
} catch (HttpClientResponseException e) {
AbstractValidateCommand.handleHttpException(e, "template");
TemplateValidateCommand.handleHttpException(e, "template");
return 1;
}

Some files were not shown because too many files have changed in this diff.