Compare commits


46 Commits

Author SHA1 Message Date
Florian Hussonnois
86f7eadb3c chore: update version to v0.19.2 2024-10-08 15:03:27 +02:00
Loïc Mathieu
2def5cf7f8 fix(jdbc): always include deleted in the logs and metrics queries
Even if not needed, to be sure we use the correct index.
2024-10-08 13:11:51 +02:00
Florian Hussonnois
d184858abf feat(core): move service usages 2024-10-08 11:02:11 +02:00
Sachin
dfa5875fa1 feat(ui): add chart visibility toggle to flows and logs page (#5345)
Co-authored-by: Sachin KS <mac@apples-MacBook-Air.local>
2024-10-08 10:08:33 +02:00
Sachin
ac4f7f261d fix(ui): amend translation keys usage (#5346)
Co-authored-by: Sachin KS <mac@apples-MacBook-Air.local>
2024-10-08 09:48:38 +02:00
GitHub Action
ae55685d2e chore(translations): auto generate values for languages other than english 2024-10-08 09:26:20 +02:00
Sai Mounika Peri
dd34317e4f feat(ui): improve page shown when flow has no dependencies (#5340) 2024-10-08 09:26:11 +02:00
riya mustare
f95e3073dd chore(ui): reduced line height on input description (#5344) 2024-10-08 09:18:03 +02:00
Florian Hussonnois
9f20988997 fix(core): use tenant for resolving worker groups 2024-10-07 14:16:00 +02:00
Sachin
5da3ab4f71 fix(ui): add bottom border on debug outputs (#5334)
Co-authored-by: Sachin KS <mac@apples-MacBook-Air.local>
2024-10-07 13:06:30 +02:00
Sachin
243eaab826 fix(ui): prevent removal of empty fields in metadata editor (#5313)
Co-authored-by: Sachin KS <mac@apples-MacBook-Air.local>
2024-10-07 11:25:37 +02:00
Sachin
6d362d688d fix(ui): amend flow disable from low code editor (#5315)
Co-authored-by: Sachin KS <mac@apples-MacBook-Air.local>
2024-10-07 11:20:28 +02:00
brian.mulier
39a01e0e7d fix(core): Windows backslashes in paths were leading to a wrong URI being created, causing an error upon execution deletion 2024-10-07 11:19:35 +02:00
Sachin
a44b2ef7cb fix(ui): persisting flow metadata from low code editor (#5316)
Co-authored-by: Sachin KS <mac@apples-MacBook-Air.local>
2024-10-07 11:15:16 +02:00
Sachin
6bcad13444 feat(ui): added executions tab to single namespace (#5322)
Co-authored-by: Sachin KS <mac@apples-MacBook-Air.local>
2024-10-07 11:05:02 +02:00
Antoine Gauthier
02acf01ea5 chore(ui): update button conditions based on flow states (#5319) 2024-10-07 10:39:06 +02:00
Sai Mounika Peri
55193361b8 chore(ui): improve validation for kv store (#5321)
* Validation error of previous type should be cleared once the KV type is changed

* chore(ui): remove comment as code is self-explanatory

---------

Co-authored-by: MilosPaunovic <paun992@hotmail.com>
2024-10-07 09:28:38 +02:00
brian.mulier
8d509a3ba5 fix(core): path matcher for Windows was not working 2024-10-04 19:41:30 +02:00
GitHub Action
500680bcf7 chore(translations): auto generate values for languages other than english 2024-10-04 15:47:10 +02:00
Miloš Paunović
412c27cb12 chore(ui): improve the dashboard ratios calculation (#5311) 2024-10-04 15:46:59 +02:00
Sachin
8d7d9a356f chore(ui): use improved chart for flow executions (#5309)
* Replace the Flows Execution barchart with the barchart used on the main dashboard

* chore(ui): added bottom margin

---------

Co-authored-by: Sachin KS <mac@apples-MacBook-Air.local>
Co-authored-by: MilosPaunovic <paun992@hotmail.com>
2024-10-04 15:01:14 +02:00
Miloš Paunović
d2ab2e97b4 fix(ui): prevent cases where dashboard totals show NaN instead of a value (#5308) 2024-10-04 11:01:41 +02:00
Miloš Paunović
6a0f360fc6 fix(ui): amend end date on dashboard refresh (#5303) 2024-10-04 09:14:07 +02:00
Vivek Gangwani
0484fd389a chore(ui): making the color scheme the same for gantt and topology (#5280) 2024-10-04 09:13:14 +02:00
Miloš Paunović
e92aac3b39 chore(ui): re-calculate translation strings for left menu after language change (#5302) 2024-10-04 08:04:02 +02:00
Miloš Paunović
39b8ac8804 chore(ci): add check for translation keys matching (#5301) 2024-10-04 07:37:15 +02:00
Miloš Paunović
f928ed5876 chore(ui): uniform translation keys across languages (#5298) 2024-10-04 07:37:06 +02:00
Miloš Paunović
54856af0a8 fix(ui): amend logs scrolling for the last task (#5294) 2024-10-03 16:28:02 +02:00
MilosPaunovic
8bd79e82ab chore(ci): exit workflow with success if no changes are present 2024-10-03 16:27:53 +02:00
MilosPaunovic
104a491b92 chore(ci): separate direct pull requests and the ones from forked repositories 2024-10-03 16:27:44 +02:00
MilosPaunovic
5f46a0dd16 chore(ci): expose paste to editor function globally for testing 2024-10-03 16:27:35 +02:00
Loïc Mathieu
24c3703418 fix(core): hide secret inputs in logs
Fixes #5259
2024-10-03 10:34:27 +02:00
yuri
e5af245855 fix(ui): enable keyboard shortcut to launch execution (#5288) 2024-10-03 08:19:06 +02:00
Vivek Gangwani
d58e8f98a2 fix (ui): Unable to unselect the currently chosen log level (#5287)
* Update root.scss to Fix Topology View for Light Mode

* Update root-dark.scss to unify Gantt and Topology View Colors

* Added deselect button for Log Levels
2024-10-03 08:18:49 +02:00
MilosPaunovic
ce2f1bfdb3 chore(ui): uniform using import class 2024-10-02 15:15:48 +02:00
Miloš Paunović
b619f88eff chore(ci): generate translation values as a commit to existing pull request (#5278) 2024-10-02 12:48:39 +02:00
Sai Mounika Peri
1f1775752b chore(ui): update parent from metadata editor (#5265) 2024-10-02 11:10:03 +02:00
AbdurRahman2004
b2475e53a2 chore(ui): move the delete logs button to top (#5266)
* Move 'Delete logs' button to top right corner of navigation

---------

Co-authored-by: Miloš Paunović <paun992@hotmail.com>
2024-10-02 10:49:59 +02:00
Antoine Gauthier
7e8956a0b7 fix(ui): amend typos in french translations (#5272) 2024-10-02 10:48:14 +02:00
brian.mulier
6537ee984b chore(version): update to version 'v0.19.1'. 2024-10-01 22:32:48 +02:00
brian.mulier
573aa48237 fix(ci): add back datahub plugin to ci build 2024-10-01 22:32:07 +02:00
brian.mulier
66ddeaa219 chore(version): update to version 'v0.19.0'. 2024-10-01 18:15:40 +02:00
brian.mulier
02c5e8a1a2 fix(ci): remove datahub plugin for now as it's not finished 2024-10-01 18:15:40 +02:00
brian.mulier
733c7897b9 fix(ci): restore github release on main workflow in case of skipped e2e 2024-10-01 15:33:36 +02:00
brian.mulier
c051287688 fix(ci): publish maven even if E2E were skipped 2024-10-01 14:26:02 +02:00
brian.mulier
1af8de6bce fix(ci): no more docker build & E2E for tags build 2024-10-01 13:43:35 +02:00
2006 changed files with 47518 additions and 125698 deletions

View File

@@ -1,82 +0,0 @@
FROM ubuntu:24.04
ARG BUILDPLATFORM
ARG DEBIAN_FRONTEND=noninteractive
USER root
WORKDIR /root
RUN apt update && apt install -y \
apt-transport-https ca-certificates gnupg curl wget git zip unzip less zsh net-tools iputils-ping jq lsof
ENV HOME="/root"
# --------------------------------------
# Git
# --------------------------------------
# Need to add the devcontainer workspace folder as a safe directory so that the git
# version control system works inside the container's file system.
RUN git config --global --add safe.directory "/workspaces/kestra"
# --------------------------------------
# --------------------------------------
# Oh my zsh
# --------------------------------------
RUN sh -c "$(curl -fsSL https://raw.githubusercontent.com/ohmyzsh/ohmyzsh/master/tools/install.sh)" -- \
-t robbyrussell \
-p git -p node -p npm
ENV SHELL=/bin/zsh
# --------------------------------------
# --------------------------------------
# Java
# --------------------------------------
ARG OS_ARCHITECTURE
RUN mkdir -p /usr/java
RUN echo "Building on platform: $BUILDPLATFORM"
RUN case "$BUILDPLATFORM" in \
"linux/amd64") OS_ARCHITECTURE="linux-x64" ;; \
"linux/arm64") OS_ARCHITECTURE="linux-aarch64" ;; \
"darwin/amd64") OS_ARCHITECTURE="macos-x64" ;; \
"darwin/arm64") OS_ARCHITECTURE="macos-aarch64" ;; \
*) echo "Unsupported BUILDPLATFORM: $BUILDPLATFORM" && exit 1 ;; \
esac && \
wget "https://aka.ms/download-jdk/microsoft-jdk-21.0.6-$OS_ARCHITECTURE.tar.gz" && \
mv "microsoft-jdk-21.0.6-$OS_ARCHITECTURE.tar.gz" microsoft-jdk-21.0.6.tar.gz
RUN tar -xzvf microsoft-jdk-21.0.6.tar.gz && \
mv jdk-21.0.6+7 jdk-21 && \
mv jdk-21 /usr/java/
ENV JAVA_HOME=/usr/java/jdk-21
ENV PATH="$PATH:$JAVA_HOME/bin"
# Will load a custom configuration file for Micronaut
ENV MICRONAUT_ENVIRONMENTS=local,override
# Sets the path where you save plugins as JARs; they are loaded during the startup process
ENV KESTRA_PLUGINS_PATH="/workspaces/kestra/local/plugins"
# --------------------------------------
# --------------------------------------
# Node.js
# --------------------------------------
RUN curl -fsSL https://deb.nodesource.com/setup_22.x -o nodesource_setup.sh \
&& bash nodesource_setup.sh && apt install -y nodejs
# Increases the JavaScript heap memory to 4GB to prevent heap out of memory errors during startup
ENV NODE_OPTIONS=--max-old-space-size=4096
# --------------------------------------
# --------------------------------------
# Python
# --------------------------------------
RUN apt install -y python3 pip python3-venv
# --------------------------------------
# --------------------------------------
# SSH
# --------------------------------------
RUN mkdir -p ~/.ssh
RUN touch ~/.ssh/config
RUN echo "Host github.com" >> ~/.ssh/config \
&& echo " IdentityFile ~/.ssh/id_ed25519" >> ~/.ssh/config
RUN touch ~/.ssh/id_ed25519
# --------------------------------------
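For reference, the `BUILDPLATFORM` build argument consumed above is normally injected automatically by BuildKit/Buildx and reflects the machine doing the build, which is what selects the JDK archive to download. A minimal sketch of building this image by hand from the repository root (the `kestra-devcontainer` tag is just an illustrative name; normally the Dev Containers extension builds it for you):
```bash
# Manual build of the devcontainer image; BUILDPLATFORM is filled in
# automatically by BuildKit/Buildx from the platform of the building machine.
docker buildx build -t kestra-devcontainer .devcontainer
```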

View File

@@ -1,149 +0,0 @@
# Kestra Devcontainer
This devcontainer provides a quick and easy way for anyone using VSCode to get up and running with this project and start development on either the frontend or backend. It bootstraps a Docker container for you to develop inside of, without the need to manually set up the environment.
---
## INSTRUCTIONS
### Setup:
Take a look at this guide to get an idea of what the setup is like, as this devcontainer follows the same approach: https://kestra.io/docs/getting-started/contributing
Once you have this repo cloned to your local system, you will need to install the VSCode extension [Remote Development](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.vscode-remote-extensionpack).
Then run the following command from the command palette:
`Dev Containers: Open Folder in Container...` and select your Kestra root folder.
This will then put you inside a docker container ready for development.
NOTE: you'll need to wait for the Gradle build to finish compiling the Java files, but this process happens automatically within VSCode.
In the meantime, you can move onto the next step...
---
### Development:
- Create a `.env.development.local` file in the `ui` folder and paste the following:
```bash
# This lets the frontend know what the backend URL is, but you are free to change this to your actual server URL, e.g. a hosted version of Kestra.
VITE_APP_API_URL=http://localhost:8080
```
- Navigate into the `ui` folder and run `npm install` to install the dependencies for the frontend project.
- Now go to the `cli/src/main/resources` folder and create an `application-override.yml` file.
Now you have two choices:
`Local mode`:
Runs the Kestra server in local mode, which uses an H2 database, so this is the only config you'd need:
```yaml
micronaut:
  server:
    cors:
      enabled: true
      configurations:
        all:
          allowedOrigins:
            - http://localhost:5173
```
You can then open a new terminal and run the following command to start the backend server: `./gradlew runLocal`
`Standalone mode`:
Runs the Kestra server in standalone mode, which uses Postgres. Make sure a local Postgres instance is already running on localhost (a Docker one-liner for spinning one up is sketched after the configuration below):
```yaml
kestra:
  repository:
    type: postgres
  storage:
    type: local
    local:
      base-path: "/app/storage"
  queue:
    type: postgres
  tasks:
    tmp-dir:
      path: /tmp/kestra-wd/tmp
  anonymous-usage-report:
    enabled: false
  server:
    basic-auth:
      enabled: false
datasources:
  postgres:
    # It is important to note that you must use the "host.docker.internal" host when connecting to a docker container outside of your devcontainer as attempting to use localhost will only point back to this devcontainer.
    url: jdbc:postgresql://host.docker.internal:5432/kestra
    driverClassName: org.postgresql.Driver
    username: kestra
    password: k3str4
flyway:
  datasources:
    postgres:
      enabled: true
      locations:
        - classpath:migrations/postgres
      # We must ignore missing migrations as we may delete the wrong ones or delete those that are not used anymore.
      ignore-migration-patterns: "*:missing,*:future"
      out-of-order: true
micronaut:
  server:
    cors:
      enabled: true
      configurations:
        all:
          allowedOrigins:
            - http://localhost:5173
```
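If you don't already have a Postgres instance running on the host, a throwaway one matching the credentials in the configuration above can be started with Docker. This is only a sketch; the container name and image version are arbitrary:
```bash
docker run -d --name kestra-postgres \
  -e POSTGRES_DB=kestra \
  -e POSTGRES_USER=kestra \
  -e POSTGRES_PASSWORD=k3str4 \
  -p 5432:5432 \
  postgres:16
```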
Then add the following settings to the `.vscode/launch.json` file:
```json
{
    "version": "0.2.0",
    "configurations": [
        {
            "type": "java",
            "name": "Kestra Standalone",
            "request": "launch",
            "mainClass": "io.kestra.cli.App",
            "projectName": "cli",
            "args": "server standalone"
        }
    ]
}
```
You can then use the VSCode `Run and Debug` extension to start the Kestra server.
Additionally, if you're doing frontend development, you can run `npm run dev` from the `ui` folder after having the above running (which will provide a backend) to access your application from `localhost:5173`. This has the benefit of watching your changes and hot-reloading whenever you make frontend changes.
#### Plugins
If you want your plugins to be loaded inside your devcontainer, point the `source` field in the following `devcontainer.json` snippet to a folder containing the JARs of the plugins you want to embed:
```
"mounts": [
{
"source": "/absolute/path/to/your/local/jar/plugins/folder",
"target": "/workspaces/kestra/local/plugins",
"type": "bind"
}
],
```
---
### GIT
If you want to commit to GitHub, navigate to the `~/.ssh` folder and either create a new SSH key or overwrite the existing `id_ed25519` file by pasting an existing private key from your local machine into it. You will then need to restrict the file's permissions by running `chmod 600 id_ed25519`. This will allow you to push to GitHub, as sketched below.
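A minimal sketch of that setup from inside the devcontainer, assuming you reuse an existing ed25519 key from your host machine:
```bash
cd ~/.ssh
# Paste the private key from your host machine into id_ed25519 (the file the
# Dockerfile's SSH config already points the github.com host entry at), then:
chmod 600 id_ed25519
# Verify that GitHub accepts the key
ssh -T git@github.com
```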
---

View File

@@ -1,46 +0,0 @@
{
"name": "kestra",
"build": {
"context": ".",
"dockerfile": "Dockerfile"
},
"workspaceFolder": "/workspaces/kestra",
"forwardPorts": [5173, 8080],
"customizations": {
"vscode": {
"settings": {
"terminal.integrated.profiles.linux": {
"zsh": {
"path": "/bin/zsh"
}
},
"workbench.iconTheme": "vscode-icons",
"editor.tabSize": 4,
"editor.formatOnSave": true,
"files.insertFinalNewline": true,
"editor.defaultFormatter": "esbenp.prettier-vscode",
"telemetry.telemetryLevel": "off",
"editor.bracketPairColorization.enabled": true,
"editor.guides.bracketPairs": "active"
},
"extensions": [
"redhat.vscode-yaml",
"dbaeumer.vscode-eslint",
"vscode-icons-team.vscode-icons",
"eamodio.gitlens",
"esbenp.prettier-vscode",
"aaron-bond.better-comments",
"codeandstuff.package-json-upgrade",
"andys8.jest-snippets",
"oderwat.indent-rainbow",
"evondev.indent-rainbow-palettes",
"formulahendry.auto-rename-tag",
"IronGeek.vscode-env",
"yoavbls.pretty-ts-errors",
"github.vscode-github-actions",
"vscjava.vscode-java-pack",
"ms-azuretools.vscode-docker"
]
}
}
}

View File

@@ -31,16 +31,12 @@ Watch out for duplicates! If you are creating a new issue, please check existing
#### Requirements
The following dependencies are required to build Kestra locally:
- Java 21+
- Node 18+ and npm
- Java 17+, Kestra runs on Java 11 but we hit a Java compiler bug fixed in Java 17
- Node 14+ and npm
- Python 3, pip and python venv
- Docker & Docker Compose
- an IDE (Intellij IDEA, Eclipse or VS Code)
Thanks to the Kestra community, if using VSCode, you can also start development on either the frontend or backend with a bootstrapped docker container without the need to manually set up the environment.
Check out the [README](../.devcontainer/README.md) for set-up instructions and the associated [Dockerfile](../.devcontainer/Dockerfile) in the repository to get started.
To start contributing:
- [Fork](https://docs.github.com/en/github/getting-started-with-github/fork-a-repo) the repository
- Clone the fork on your workstation:
@@ -50,23 +46,20 @@ git clone git@github.com:{YOUR_USERNAME}/kestra.git
cd kestra
```
#### Develop on the backend
#### Develop backend
The backend is made with [Micronaut](https://micronaut.io).
Open the cloned repository in your favorite IDE. In most decent IDEs, the Gradle build will be detected and all dependencies will be downloaded.
You can also build it from a terminal using `./gradlew build`; the Gradle wrapper will download the right Gradle version to use.
- You may need to enable java annotation processors since we are using them.
- On IntelliJ IDEA, click on **Run -> Edit Configurations -> + Add new Configuration** to create a run configuration to start Kestra.
- The main class is `io.kestra.cli.App` from module `kestra.cli.main`.
- Pass as program arguments the server you want to work with, for example `server local` will start the [standalone local](https://kestra.io/docs/administrator-guide/server-cli#kestra-local-development-server-with-no-dependencies). You can also use `server standalone` and use the provided `docker-compose-ci.yml` Docker compose file to start a standalone server with a real database as a backend that would need to be configured properly.
- Configure the following environment variables:
- `MICRONAUT_ENVIRONMENTS`: can be set to any string and will load a custom configuration file in `cli/src/main/resources/application-{env}.yml`.
- `KESTRA_PLUGINS_PATH`: is the path where you will save plugins as JARs; they will be loaded on startup.
- See the screenshot below for an example: ![Intellij IDEA Configuration ](run-app.png)
- If you encounter **JavaScript memory heap out** error during startup, configure `NODE_OPTIONS` environment variable with some large value.
- Example `NODE_OPTIONS: --max-old-space-size=4096` or `NODE_OPTIONS: --max-old-space-size=8192` ![Intellij IDEA Configuration ](node_option_env_var.png)
- The server starts by default on port 8080 and is reachable on `http://localhost:8080`
- You may need to enable java annotation processors since we are using it a lot.
- The main class is `io.kestra.cli.App` from module `kestra.cli.main`
- Pass as program arguments the server you want to develop, for example `server local` will start the [standalone local](https://kestra.io/docs/administrator-guide/server-cli#kestra-local-development-server-with-no-dependencies)
- ![Intellij Idea Configuration ](https://user-images.githubusercontent.com/2064609/161399626-1b681add-cfa8-4e0e-a843-2631cc59758d.png) Intellij Idea configuration can be found in screenshot below.
- `MICRONAUT_ENVIRONMENTS`: can be set any string and will load a custom configuration file in `cli/src/main/resources/application-{env}.yml`
- `KESTRA_PLUGINS_PATH`: is the path where you will save plugins as Jar and will be load on the startup.
- You can also use the gradle task `./gradlew runLocal` that will run a standalone server with `MICRONAUT_ENVIRONMENTS=override` and plugins path `local/plugins`
- The server start by default on port 8080 and is reachable on `http://localhost:8080`
If you want to launch all tests, you need Python and some packages installed on your machine, on Ubuntu you can install them with:
@@ -76,17 +69,17 @@ python3 -m pip install virtualenv
```
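With those packages in place, the full test suite can be run through the Gradle wrapper, mirroring what the CI workflows in this repository do. A sketch, assuming the services from `docker-compose-ci.yml` are available:
```bash
# Start the backing services used by the tests (the same file the CI uses)
docker compose -f docker-compose-ci.yml up -d

# Run the whole check suite (unit and integration tests plus static checks)
./gradlew check --parallel
```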
#### Develop on the frontend
#### Develop frontend
The frontend is made with [Vue.js](https://vuejs.org/) and located on the `/ui` folder.
- `npm install`
- create a file `ui/.env.development.local` with content `VITE_APP_API_URL=http://localhost:8080` (or your actual server url)
- `npm install --force` (force is needed because of some conflicting packages)
- create a files `ui/.env.development.local` with content `VITE_APP_API_URL=http://localhost:8080` (or your actual server url)
- `npm run dev` will start the development server with hot reload.
- The server start by default on port 5173 and is reachable on `http://localhost:5173`
- The server start by default on port 8090 and is reachable on `http://localhost:5173`
- You can run `npm run build` in order to build the front-end that will be delivered from the backend (without running the `npm run dev` above).
Now, you need to start a backend server. You could:
- start a [local server](https://kestra.io/docs/administrator-guide/server-cli#kestra-local-development-server-with-no-dependencies) without a database using this docker-compose file already configured with CORS enabled:
- start a [local server](https://kestra.io/docs/administrator-guide/server-cli#kestra-local-development-server-with-no-dependencies) without database using this docker-compose file already configured with CORS enabled:
```yaml
services:
kestra:
@@ -106,7 +99,7 @@ services:
ports:
- "8080:8080"
```
- start the [Develop backend](#develop-backend) from your IDE, you need to configure CORS restrictions when using the local development npm server, changing the backend configuration allowing the http://localhost:5173 origin in `cli/src/main/resources/application-override.yml`
- start the [Develop backend](#develop-backend) from your IDE and you need to configure CORS restrictions when using the local development npm server, changing the backend configuration allowing the http://localhost:5173 origin in `cli/src/main/resources/application-override.yml`
```yaml
micronaut:
@@ -140,4 +133,4 @@ A complete documentation for developing plugin can be found [here](https://kestr
### Improving The Documentation
The main documentation is located in a separate [repository](https://github.com/kestra-io/kestra.io).
For tasks documentation, they are located directly in the Java source, using [Swagger annotations](https://github.com/swagger-api/swagger-core/wiki/Swagger-2.X---Annotations) (Example: [for Bash tasks](https://github.com/kestra-io/kestra/blob/develop/core/src/main/java/io/kestra/core/tasks/scripts/AbstractBash.java))
For tasks documentation, they are located directly on Java source using [Swagger annotations](https://github.com/swagger-api/swagger-core/wiki/Swagger-2.X---Annotations) (Example: [for Bash tasks](https://github.com/kestra-io/kestra/blob/develop/core/src/main/java/io/kestra/core/tasks/scripts/AbstractBash.java))

54
.github/ISSUE_TEMPLATE/blueprint.yml vendored Normal file
View File

@@ -0,0 +1,54 @@
name: Blueprint
description: Add a new blueprint
body:
- type: markdown
attributes:
value: |
Please fill out all the fields listed below. This will help us review and add your blueprint faster.
- type: textarea
attributes:
label: Blueprint title
description: A title briefly describing what the blueprint does, ideally in a verb phrase + noun format.
placeholder: E.g. "Upload a file to service X, then run Y and Z"
validations:
required: true
- type: textarea
attributes:
label: Source code
description: Flow code that will appear on the Blueprint page.
placeholder: |
```yaml
id: yourFlowId
namespace: blueprint
tasks:
- id: taskName
type: task_type
```
validations:
required: true
- type: textarea
attributes:
label: About this blueprint
description: "A concise markdown documentation about the blueprint's configuration and usage."
placeholder: |
E.g. "This flow downloads a file and uploads it to an S3 bucket. This flow assumes AWS credentials stored as environment variables `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY`."
validations:
required: false
- type: textarea
attributes:
label: Tags (optional)
description: Blueprint categories such as Ingest, Transform, Analyze, Python, Docker, AWS, GCP, Azure, etc.
placeholder: |
- Ingest
- Transform
- AWS
validations:
required: false
labels:
- blueprint

View File

@@ -4,7 +4,9 @@ body:
- type: markdown
attributes:
value: |
Thanks for reporting an issue! Please provide a [Minimal Reproducible Example](https://stackoverflow.com/help/minimal-reproducible-example) and share any additional information that may help reproduce, troubleshoot, and hopefully fix the issue, including screenshots, error traceback, and your Kestra server logs. For quick questions, you can contact us directly on [Slack](https://kestra.io/slack).
Thanks for reporting an issue! Please provide a [Minimal Reproducible Example](https://stackoverflow.com/help/minimal-reproducible-example)
and share any additional information that may help reproduce, troubleshoot, and hopefully fix the issue, including screenshots, error traceback, and your Kestra server logs.
NOTE: If your issue is more of a question, please ping us directly on [Slack](https://kestra.io/slack).
- type: textarea
attributes:
label: Describe the issue
@@ -17,10 +19,10 @@ body:
label: Environment
description: Environment information where the problem occurs.
value: |
- Kestra Version: develop
- Kestra Version:
- Operating System (OS/Docker/Kubernetes):
- Java Version (if you don't run kestra in Docker):
validations:
required: false
labels:
- bug
- area/backend
- area/frontend
- bug

View File

@@ -1,13 +1,15 @@
name: Feature request
description: Create a new feature request
body:
- type: markdown
attributes:
value: |
Please describe the feature you want Kestra to implement. Before that, check if there is already an existing issue for it.
- type: textarea
attributes:
label: Feature description
placeholder: Tell us more about your feature request
placeholder: Tell us what feature you would like Kestra to have and what problem it is going to solve
validations:
required: true
labels:
- enhancement
- area/backend
- area/frontend

8
.github/ISSUE_TEMPLATE/other.yml vendored Normal file
View File

@@ -0,0 +1,8 @@
name: Other
description: Something different
body:
- type: textarea
attributes:
label: Issue description
validations:
required: true

View File

@@ -13,9 +13,6 @@ outputs:
plugins:
description: "List of all Kestra plugins"
value: ${{ steps.plugins.outputs.plugins }}
repositories:
description: "List of all Kestra repositories of plugins"
value: ${{ steps.plugins.outputs.repositories }}
runs:
using: composite
steps:
@@ -23,7 +20,5 @@ runs:
id: plugins
shell: bash
run: |
PLUGINS=$([ -f ${{ inputs.plugin-file }} ] && cat ${{ inputs.plugin-file }} | grep "io\\.kestra\\." | sed -e '/#/s/^.//' | sed -e "s/LATEST/${{ inputs.plugin-version }}/g" | cut -d':' -f2- | xargs || echo '');
REPOSITORIES=$([ -f ${{ inputs.plugin-file }} ] && cat ${{ inputs.plugin-file }} | grep "io\\.kestra\\." | sed -e '/#/s/^.//' | cut -d':' -f1 | uniq | sort | xargs || echo '')
PLUGINS=$([ -f ${{ inputs.plugin-file }} ] && cat ${{ inputs.plugin-file }} | grep "io\\.kestra\\." | sed -e '/#/s/^.//' | sed -e "s/LATEST/${{ inputs.plugin-version }}/g" | xargs || echo '');
echo "plugins=$PLUGINS" >> $GITHUB_OUTPUT
echo "repositories=$REPOSITORIES" >> $GITHUB_OUTPUT

View File

@@ -32,7 +32,11 @@ updates:
timezone: "Europe/Paris"
open-pull-requests-limit: 50
labels: ["dependency-upgrade"]
reviewers: ["MilosPaunovic"]
ignore:
# Ignore updates of version 1.x, as we're using beta of 2.x
- dependency-name: "vue-virtual-scroller"
versions: ["1.x"]
# Ignore major versions greater than 8, as it's still known to be flaky
- dependency-name: "eslint"
versions: [">8"]

Binary image file removed (was 130 KiB).

BIN
.github/run-app.png vendored

Binary image file removed (was 210 KiB).

View File

@@ -1,67 +0,0 @@
name: Auto-Translate UI keys and create PR
on:
schedule:
- cron: "0 9-21 * * *" # Every hour from 9 AM to 9 PM
workflow_dispatch:
inputs:
retranslate_modified_keys:
description: "Whether to re-translate modified keys even if they already have translations."
type: choice
options:
- "false"
- "true"
default: "false"
required: false
jobs:
translations:
name: Translations
runs-on: ubuntu-latest
timeout-minutes: 10
steps:
- uses: actions/checkout@v4
name: Checkout
with:
fetch-depth: 0
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: "3.x"
- name: Install Python dependencies
run: pip install gitpython openai
- name: Generate translations
run: python ui/src/translations/generate_translations.py ${{ github.event.inputs.retranslate_modified_keys }}
env:
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
- name: Set up Node
uses: actions/setup-node@v4
with:
node-version: "20.x"
- name: Check keys matching
run: node ui/src/translations/check.js
- name: Set up Git
run: |
git config --global user.name "GitHub Action"
git config --global user.email "actions@github.com"
- name: Commit and create PR
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
BRANCH_NAME="chore/update-translations-$(date +%s)"
git checkout -b $BRANCH_NAME
git add ui/src/translations/*.json
if git diff --cached --quiet; then
echo "No changes to commit. Exiting with success."
exit 0
fi
git commit -m "chore(core): localize to languages other than english" -m "Extended localization support by adding translations for multiple languages using English as the base. This enhances accessibility and usability for non-English-speaking users while keeping English as the source reference."
git push -u origin $BRANCH_NAME || (git push origin --delete $BRANCH_NAME && git push -u origin $BRANCH_NAME)
gh pr create --title "Translations from en.json" --body "This PR was created automatically by a GitHub Action." --base develop --head $BRANCH_NAME --assignee anna-geller --reviewer anna-geller

117
.github/workflows/check.yml vendored Normal file
View File

@@ -0,0 +1,117 @@
name: Daily Core check
on:
schedule:
- cron: '0 5 * * *'
jobs:
check:
env:
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
name: Check & Publish
runs-on: ubuntu-latest
timeout-minutes: 60
steps:
# Checkout
- uses: actions/checkout@v4
with:
fetch-depth: 0
# Checkout GitHub Actions
- uses: actions/checkout@v4
with:
repository: kestra-io/actions
path: actions
ref: main
# Setup build
- uses: ./actions/.github/actions/setup-build
id: build
with:
java-enabled: true
node-enabled: true
python-enabled: true
caches-enabled: true
# Services
- name: Build the docker-compose stack
run: docker compose -f docker-compose-ci.yml up -d
if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}
# Gradle check
- name: Build with Gradle
if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}
env:
GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
run: |
echo $GOOGLE_SERVICE_ACCOUNT | base64 -d > ~/.gcp-service-account.json
export GOOGLE_APPLICATION_CREDENTIALS=$HOME/.gcp-service-account.json
./gradlew check javadoc --parallel --refresh-dependencies
# Sonar
- name: Analyze with Sonar
if: ${{ env.SONAR_TOKEN != 0 && (github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '') }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
run: ./gradlew sonar --info
# Allure check
- name: Auth to Google Cloud
id: auth
if: ${{ always() && env.GOOGLE_SERVICE_ACCOUNT != 0 }}
uses: 'google-github-actions/auth@v2'
with:
credentials_json: '${{ secrets.GOOGLE_SERVICE_ACCOUNT }}'
- uses: rlespinasse/github-slug-action@v4
- name: Publish allure report
uses: andrcuns/allure-publish-action@v2.7.1
if: ${{ always() && env.GOOGLE_SERVICE_ACCOUNT != 0 && (github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '') }}
env:
GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
JAVA_HOME: /usr/lib/jvm/default-jvm/
with:
storageType: gcs
resultsGlob: "**/build/allure-results"
bucket: internal-kestra-host
baseUrl: "https://internal.kestra.io"
prefix: ${{ format('{0}/{1}/{2}', github.repository, env.GITHUB_HEAD_REF_SLUG != '' && env.GITHUB_HEAD_REF_SLUG || github.ref_name, 'allure/java') }}
copyLatest: true
ignoreMissingResults: true
# Jacoco
- name: 'Set up Cloud SDK'
if: ${{ env.GOOGLE_SERVICE_ACCOUNT != 0 && (github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '') }}
uses: 'google-github-actions/setup-gcloud@v2'
- name: 'Copy jacoco files'
if: ${{ env.GOOGLE_SERVICE_ACCOUNT != 0 && (github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '') }}
run: |
mv build/reports/jacoco/testCodeCoverageReport build/reports/jacoco/test/
mv build/reports/jacoco/test/testCodeCoverageReport.xml build/reports/jacoco/jacocoTestReport.xml
gsutil -m rsync -d -r build/reports/jacoco/test/ gs://internal-kestra-host/${{ format('{0}/{1}/{2}', github.repository, env.GITHUB_HEAD_REF_SLUG != '' && env.GITHUB_HEAD_REF_SLUG || github.ref_name, 'jacoco') }}
# report test
- name: Test Report
uses: mikepenz/action-junit-report@v4
if: success() || failure()
with:
report_paths: '**/build/test-results/**/TEST-*.xml'
# Codecov
- uses: codecov/codecov-action@v4
if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}
with:
token: ${{ secrets.CODECOV_TOKEN }}
- name: Notify failed CI
id: send-ci-failed
if: always() && job.status != 'success'
uses: kestra-io/actions/.github/actions/send-ci-failed@main
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@@ -6,11 +6,11 @@
name: "CodeQL"
on:
push:
branches: [develop]
schedule:
- cron: '0 5 * * 1'
workflow_dispatch: {}
jobs:
analyze:
name: Analyze
@@ -51,23 +51,13 @@ jobs:
# Set up JDK
- name: Set up JDK
uses: actions/setup-java@v4
if: ${{ matrix.language == 'java' }}
with:
distribution: 'temurin'
java-version: 21
- name: Setup gradle
if: ${{ matrix.language == 'java' }}
uses: gradle/actions/setup-gradle@v4
- name: Build with Gradle
if: ${{ matrix.language == 'java' }}
run: ./gradlew testClasses -x :ui:assembleFrontend
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
if: ${{ matrix.language != 'java' }}
uses: github/codeql-action/autobuild@v3
# Command-line programs to run using the OS shell.

View File

@@ -1,4 +1,4 @@
name: Create Docker images on Release
name: Create Docker images on tag
on:
workflow_dispatch:
@@ -11,10 +11,6 @@ on:
options:
- "true"
- "false"
release-tag:
description: 'Kestra Release Tag'
required: false
type: string
plugin-version:
description: 'Plugin version'
required: false
@@ -42,16 +38,17 @@ jobs:
name: Publish Docker
needs: [ plugins ]
runs-on: ubuntu-latest
if: startsWith(github.ref, 'refs/tags/v')
strategy:
matrix:
image:
- name: "-no-plugins"
plugins: ""
packages: jattach
packages: ""
python-libs: ""
- name: ""
plugins: ${{needs.plugins.outputs.plugins}}
packages: python3 python-is-python3 python3-pip curl jattach
packages: python3 python3-venv python-is-python3 python3-pip nodejs npm curl zip unzip
python-libs: kestra
steps:
- uses: actions/checkout@v4
@@ -60,22 +57,13 @@ jobs:
- name: Set image name
id: vars
run: |
if [[ "${{ inputs.release-tag }}" == "" ]]; then
TAG=${GITHUB_REF#refs/*/}
echo "tag=${TAG}" >> $GITHUB_OUTPUT
else
TAG="${{ inputs.release-tag }}"
echo "tag=${TAG}" >> $GITHUB_OUTPUT
fi
TAG=${GITHUB_REF#refs/*/}
echo "tag=${TAG}" >> $GITHUB_OUTPUT
echo "plugins=${{ matrix.image.plugins }}" >> $GITHUB_OUTPUT
if [[ "${{ env.PLUGIN_VERSION }}" == *"-SNAPSHOT" ]]; then
echo "plugins=--repositories=https://s01.oss.sonatype.org/content/repositories/snapshots ${{ matrix.image.plugins }}" >> $GITHUB_OUTPUT;
else
echo "plugins=${{ matrix.image.plugins }}" >> $GITHUB_OUTPUT
fi
# Download release
- name: Download release
uses: robinraju/release-downloader@v1.12
uses: robinraju/release-downloader@v1.11
with:
tag: ${{steps.vars.outputs.tag}}
fileName: 'kestra-*'
@@ -89,11 +77,6 @@ jobs:
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Docker - Fix Qemu
shell: bash
run: |
docker run --rm --privileged multiarch/qemu-user-static --reset -p yes -c yes
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3

View File

@@ -42,13 +42,19 @@ jobs:
with:
path: kestra
# Checkout GitHub Actions
- uses: actions/checkout@v4
with:
repository: kestra-io/actions
path: actions
ref: main
# Setup build
- uses: kestra-io/actions/.github/actions/setup-build@main
- uses: ./actions/.github/actions/setup-build
id: build
with:
java-enabled: true
node-enabled: true
python-enabled: true
caches-enabled: true
# Get Docker Image
- name: Download Kestra Image
@@ -140,10 +146,10 @@ jobs:
with:
credentials_json: '${{ secrets.GOOGLE_SERVICE_ACCOUNT }}'
- uses: rlespinasse/github-slug-action@v5
- uses: rlespinasse/github-slug-action@v4
- name: Publish allure report
uses: andrcuns/allure-publish-action@v2.9.0
uses: andrcuns/allure-publish-action@v2.7.1
if: ${{ !cancelled() && env.GOOGLE_SERVICE_ACCOUNT != 0 }}
env:
GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_AUTH_TOKEN }}
@@ -152,7 +158,7 @@ jobs:
storageType: gcs
resultsGlob: build/allure-results
bucket: internal-kestra-host
baseUrl: "https://internal.dev.kestra.io"
baseUrl: "https://internal.kestra.io"
prefix: ${{ format('{0}/{1}/{2}', github.repository, env.GITHUB_HEAD_REF_SLUG != '' && env.GITHUB_HEAD_REF_SLUG || github.ref_name, 'allure/playwright') }}
copyLatest: true
ignoreMissingResults: true

View File

@@ -0,0 +1,111 @@
name: Generate Translations
on:
pull_request:
types: [opened, synchronize]
paths:
- "ui/src/translations/en.json"
push:
branches:
- develop
env:
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
jobs:
commit:
name: Commit directly to PR
runs-on: ubuntu-latest
if: ${{ github.event.pull_request.head.repo.fork == false }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 10
ref: ${{ github.head_ref }}
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: "3.x"
- name: Install Python dependencies
run: pip install gitpython openai
- name: Generate translations
run: python ui/src/translations/generate_translations.py
- name: Set up Node
uses: actions/setup-node@v4
with:
node-version: "20.x"
- name: Check keys matching
run: node ui/src/translations/check.js
- name: Set up Git
run: |
git config --global user.name "GitHub Action"
git config --global user.email "actions@github.com"
- name: Check for changes and commit
env:
GH_TOKEN: ${{ github.token }}
run: |
git add ui/src/translations/*.json
if git diff --cached --quiet; then
echo "No changes to commit. Exiting with success."
exit 0
fi
git commit -m "chore(translations): auto generate values for languages other than english"
git push origin ${{ github.head_ref }}
pull_request:
name: Open PR for a forked repository
runs-on: ubuntu-latest
if: ${{ github.event.pull_request.head.repo.fork == true }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 10
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: "3.x"
- name: Install Python dependencies
run: pip install gitpython openai
- name: Generate translations
run: python ui/src/translations/generate_translations.py
- name: Set up Node
uses: actions/setup-node@v4
with:
node-version: "20.x"
- name: Check keys matching
run: node ui/src/translations/check.js
- name: Set up Git
run: |
git config --global user.name "GitHub Action"
git config --global user.email "actions@github.com"
- name: Create and push a new branch
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
BRANCH_NAME="generated-translations-${{ github.event.pull_request.head.repo.name }}"
git checkout -b $BRANCH_NAME
git add ui/src/translations/*.json
if git diff --cached --quiet; then
echo "No changes to commit. Exiting with success."
exit 0
fi
git commit -m "chore(translations): auto generate values for languages other than english"
git push origin $BRANCH_NAME

View File

@@ -1,82 +0,0 @@
name: Run Gradle Release for Kestra Plugins
on:
workflow_dispatch:
inputs:
releaseVersion:
description: 'The release version (e.g., 0.21.0-rc1)'
required: true
type: string
nextVersion:
description: 'The next version (e.g., 0.22.0-SNAPSHOT)'
required: true
type: string
dryRun:
description: 'Use DRY_RUN mode'
required: false
default: 'false'
jobs:
release:
name: Release plugins
runs-on: ubuntu-latest
steps:
# Checkout
- uses: actions/checkout@v4
with:
fetch-depth: 0
# Checkout GitHub Actions
- uses: actions/checkout@v4
with:
repository: kestra-io/actions
path: actions
ref: main
# Setup build
- uses: ./actions/.github/actions/setup-build
id: build
with:
java-enabled: true
node-enabled: true
python-enabled: true
caches-enabled: true
# Get Plugins List
- name: Get Plugins List
uses: ./.github/actions/plugins-list
id: plugins-list
with:
plugin-version: 'LATEST'
- name: 'Configure Git'
run: |
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
git config --global user.name "github-actions[bot]"
# Execute
- name: Run Gradle Release
if: ${{ github.event.inputs.dryRun == 'false' }}
env:
GITHUB_PAT: ${{ secrets.GH_PERSONAL_TOKEN }}
run: |
chmod +x ./dev-tools/release-plugins.sh;
./dev-tools/release-plugins.sh \
--release-version=${{github.event.inputs.releaseVersion}} \
--next-version=${{github.event.inputs.nextVersion}} \
--yes \
${{ steps.plugins-list.outputs.repositories }}
- name: Run Gradle Release (DRY_RUN)
if: ${{ github.event.inputs.dryRun == 'true' }}
env:
GITHUB_PAT: ${{ secrets.GH_PERSONAL_TOKEN }}
run: |
chmod +x ./dev-tools/release-plugins.sh;
./dev-tools/release-plugins.sh \
--release-version=${{github.event.inputs.releaseVersion}} \
--next-version=${{github.event.inputs.nextVersion}} \
--dry-run \
--yes \
${{ steps.plugins-list.outputs.repositories }}

View File

@@ -1,92 +0,0 @@
name: Run Gradle Release
run-name: "Releasing Kestra ${{ github.event.inputs.releaseVersion }} 🚀"
on:
workflow_dispatch:
inputs:
releaseVersion:
description: 'The release version (e.g., 0.21.0-rc1)'
required: true
type: string
nextVersion:
description: 'The next version (e.g., 0.22.0-SNAPSHOT)'
required: true
type: string
env:
RELEASE_VERSION: "${{ github.event.inputs.releaseVersion }}"
NEXT_VERSION: "${{ github.event.inputs.nextVersion }}"
jobs:
release:
name: Release Kestra
runs-on: ubuntu-latest
if: github.ref == 'refs/heads/develop'
steps:
# Checks
- name: Check Inputs
run: |
if ! [[ "$RELEASE_VERSION" =~ ^[0-9]+(\.[0-9]+)\.0-rc[01](-SNAPSHOT)?$ ]]; then
echo "Invalid release version. Must match regex: ^[0-9]+(\.[0-9]+)\.0-rc[01](-SNAPSHOT)?$"
exit 1
fi
if ! [[ "$NEXT_VERSION" =~ ^[0-9]+(\.[0-9]+)\.0-SNAPSHOT$ ]]; then
echo "Invalid next version. Must match regex: ^[0-9]+(\.[0-9]+)\.0-SNAPSHOT$"
exit 1;
fi
# Checkout
- uses: actions/checkout@v4
with:
fetch-depth: 0
path: kestra
# Checkout GitHub Actions
- uses: actions/checkout@v4
with:
repository: kestra-io/actions
path: actions
ref: main
# Setup build
- uses: ./actions/.github/actions/setup-build
id: build
with:
java-enabled: true
node-enabled: true
python-enabled: true
caches-enabled: true
- name: Configure Git
run: |
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
git config --global user.name "github-actions[bot]"
# Execute
- name: Run Gradle Release
env:
GITHUB_PAT: ${{ secrets.GH_PERSONAL_TOKEN }}
run: |
# Extract the major and minor versions
BASE_VERSION=$(echo "$RELEASE_VERSION" | sed -E 's/^([0-9]+\.[0-9]+)\..*/\1/')
PUSH_RELEASE_BRANCH="releases/v${BASE_VERSION}.x"
cd kestra
# Create and push release branch
git checkout -b "$PUSH_RELEASE_BRANCH";
git push -u origin "$PUSH_RELEASE_BRANCH";
# Run gradle release
git checkout develop;
if [[ "$RELEASE_VERSION" == *"-SNAPSHOT" ]]; then
# -SNAPSHOT qualifier may be used to test release-candidates
./gradlew release -Prelease.useAutomaticVersion=true \
-Prelease.releaseVersion="${RELEASE_VERSION}" \
-Prelease.newVersion="${NEXT_VERSION}" \
-Prelease.pushReleaseVersionBranch="${PUSH_RELEASE_BRANCH}" \
-Prelease.failOnSnapshotDependencies=false
else
./gradlew release -Prelease.useAutomaticVersion=true \
-Prelease.releaseVersion="${RELEASE_VERSION}" \
-Prelease.newVersion="${NEXT_VERSION}" \
-Prelease.pushReleaseVersionBranch="${PUSH_RELEASE_BRANCH}"
fi

View File

@@ -1,59 +1,451 @@
name: Main Workflow
name: Main
on:
workflow_dispatch:
inputs:
plugin-version:
description: "Kestra version"
default: 'LATEST'
required: true
type: string
push:
branches:
- master
- main
- releases/*
- develop
tags:
- v*
pull_request:
branches:
- develop
repository_dispatch:
types: [ rebuild ]
workflow_dispatch:
inputs:
skip-test:
description: 'Skip test'
type: choice
required: true
default: 'false'
options:
- "true"
- "false"
plugin-version:
description: 'Plugin version'
required: false
type: string
default: "LATEST"
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}-main
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
JAVA_VERSION: '21'
DOCKER_APT_PACKAGES: python3 python3-venv python-is-python3 python3-pip nodejs npm curl zip unzip
DOCKER_PYTHON_LIBRARIES: kestra
PLUGIN_VERSION: ${{ github.event.inputs.plugin-version != null && github.event.inputs.plugin-version || 'LATEST' }}
jobs:
tests:
name: Execute tests
uses: ./.github/workflows/workflow-test.yml
build-artifacts:
name: Build Artifacts
runs-on: ubuntu-latest
timeout-minutes: 60
outputs:
docker-tag: ${{ steps.vars.outputs.tag }}
docker-artifact-name: ${{ steps.vars.outputs.artifact }}
plugins: ${{ steps.plugins-list.outputs.plugins }}
steps:
# Checkout
- uses: actions/checkout@v4
with:
fetch-depth: 0
# Checkout GitHub Actions
- uses: actions/checkout@v4
with:
repository: kestra-io/actions
path: actions
ref: main
# Setup build
- uses: ./actions/.github/actions/setup-build
id: build
with:
java-enabled: true
node-enabled: true
caches-enabled: true
# Get Plugins List
- name: Get Plugins List
uses: ./.github/actions/plugins-list
if: "!startsWith(github.ref, 'refs/tags/v')"
id: plugins-list
with:
plugin-version: ${{ env.PLUGIN_VERSION }}
# Set Plugins List
- name: Set Plugin List
id: plugins
if: "!startsWith(github.ref, 'refs/tags/v')"
run: |
PLUGINS="${{ steps.plugins-list.outputs.plugins }}"
TAG=${GITHUB_REF#refs/*/}
if [[ $TAG = "master" || $TAG == v* ]]; then
echo "plugins=$PLUGINS" >> $GITHUB_OUTPUT
else
echo "plugins=--repositories=https://s01.oss.sonatype.org/content/repositories/snapshots $PLUGINS" >> $GITHUB_OUTPUT
fi
# Build
- name: Build with Gradle
run: |
./gradlew executableJar
- name: Copy exe to image
run: |
cp build/executable/* docker/app/kestra && chmod +x docker/app/kestra
# Docker Tag
- name: Set up Vars
id: vars
run: |
TAG=${GITHUB_REF#refs/*/}
if [[ $TAG = "master" ]]
then
TAG="latest";
elif [[ $TAG = "develop" ]]
then
TAG="develop";
elif [[ $TAG = v* ]]
then
TAG="${TAG}";
else
TAG="build-${{ github.run_id }}";
fi
echo "tag=${TAG}" >> $GITHUB_OUTPUT
echo "artifact=docker-kestra-${TAG}" >> $GITHUB_OUTPUT
# Docker setup
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
# Docker Build
- name: Build & Export Docker Image
uses: docker/build-push-action@v6
if: "!startsWith(github.ref, 'refs/tags/v')"
with:
context: .
push: false
file: Dockerfile
tags: |
kestra/kestra:${{ steps.vars.outputs.tag }}
build-args: |
KESTRA_PLUGINS=${{ steps.plugins.outputs.plugins }}
APT_PACKAGES=${{ env.DOCKER_APT_PACKAGES }}
PYTHON_LIBRARIES=${{ env.DOCKER_PYTHON_LIBRARIES }}
outputs: type=docker,dest=/tmp/${{ steps.vars.outputs.artifact }}.tar
# Upload artifacts
- name: Upload JAR
uses: actions/upload-artifact@v4
with:
name: jar
path: build/libs/
- name: Upload Executable
uses: actions/upload-artifact@v4
with:
name: exe
path: build/executable/
- name: Upload Docker
uses: actions/upload-artifact@v4
if: "!startsWith(github.ref, 'refs/tags/v')"
with:
name: ${{ steps.vars.outputs.artifact }}
path: /tmp/${{ steps.vars.outputs.artifact }}.tar
# Run Reusable Workflow from QA repository
check-e2e:
name: Check E2E Tests
needs: build-artifacts
if: ${{ (github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '') && !startsWith(github.ref, 'refs/tags/v') }}
uses: ./.github/workflows/e2e.yml
strategy:
fail-fast: false
matrix:
backends: ["postgres"]
with:
report-status: false
tags: oss
docker-artifact-name: ${{ needs.build-artifacts.outputs.docker-artifact-name }}
docker-image-tag: kestra/kestra:${{ needs.build-artifacts.outputs.docker-tag }}
backend: ${{ matrix.backends }}
secrets:
GITHUB_AUTH_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}
GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
check:
env:
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
name: Check & Publish
runs-on: ubuntu-latest
timeout-minutes: 60
steps:
# Checkout
- uses: actions/checkout@v4
with:
fetch-depth: 0
# Checkout GitHub Actions
- uses: actions/checkout@v4
with:
repository: kestra-io/actions
path: actions
ref: main
# Setup build
- uses: ./actions/.github/actions/setup-build
id: build
with:
java-enabled: true
node-enabled: true
python-enabled: true
caches-enabled: true
# Services
- name: Build the docker-compose stack
run: docker compose -f docker-compose-ci.yml up -d
if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}
# Gradle check
- name: Build with Gradle
if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}
env:
GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
run: |
echo $GOOGLE_SERVICE_ACCOUNT | base64 -d > ~/.gcp-service-account.json
export GOOGLE_APPLICATION_CREDENTIALS=$HOME/.gcp-service-account.json
./gradlew check javadoc --parallel
# Sonar
- name: Analyze with Sonar
if: ${{ env.SONAR_TOKEN != 0 && (github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '') }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
run: ./gradlew sonar --info
# Allure check
- name: Auth to Google Cloud
id: auth
if: ${{ always() && env.GOOGLE_SERVICE_ACCOUNT != 0 }}
uses: 'google-github-actions/auth@v2'
with:
credentials_json: '${{ secrets.GOOGLE_SERVICE_ACCOUNT }}'
- uses: rlespinasse/github-slug-action@v4
- name: Publish allure report
uses: andrcuns/allure-publish-action@v2.7.1
if: ${{ always() && env.GOOGLE_SERVICE_ACCOUNT != 0 && (github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '') }}
env:
GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
JAVA_HOME: /usr/lib/jvm/default-jvm/
with:
storageType: gcs
resultsGlob: "**/build/allure-results"
bucket: internal-kestra-host
baseUrl: "https://internal.kestra.io"
prefix: ${{ format('{0}/{1}/{2}', github.repository, env.GITHUB_HEAD_REF_SLUG != '' && env.GITHUB_HEAD_REF_SLUG || github.ref_name, 'allure/java') }}
copyLatest: true
ignoreMissingResults: true
# Jacoco
- name: 'Set up Cloud SDK'
if: ${{ env.GOOGLE_SERVICE_ACCOUNT != 0 && (github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '') }}
uses: 'google-github-actions/setup-gcloud@v2'
- name: 'Copy jacoco files'
if: ${{ env.GOOGLE_SERVICE_ACCOUNT != 0 && (github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '') }}
run: |
mv build/reports/jacoco/testCodeCoverageReport build/reports/jacoco/test/
mv build/reports/jacoco/test/testCodeCoverageReport.xml build/reports/jacoco/jacocoTestReport.xml
gsutil -m rsync -d -r build/reports/jacoco/test/ gs://internal-kestra-host/${{ format('{0}/{1}/{2}', github.repository, env.GITHUB_HEAD_REF_SLUG != '' && env.GITHUB_HEAD_REF_SLUG || github.ref_name, 'jacoco') }}
# report test
- name: Test Report
uses: mikepenz/action-junit-report@v4
if: success() || failure()
with:
report_paths: '**/build/test-results/**/TEST-*.xml'
# Codecov
- uses: codecov/codecov-action@v4
if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}
with:
token: ${{ secrets.CODECOV_TOKEN }}
release:
name: Release
needs: [tests]
if: "!startsWith(github.ref, 'refs/heads/releases')"
uses: ./.github/workflows/workflow-release.yml
with:
plugin-version: ${{ github.event.inputs.plugin-version != null && github.event.inputs.plugin-version || 'LATEST' }}
secrets:
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }}
SONATYPE_USER: ${{ secrets.SONATYPE_USER }}
SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }}
SONATYPE_GPG_KEYID: ${{ secrets.SONATYPE_GPG_KEYID }}
SONATYPE_GPG_PASSWORD: ${{ secrets.SONATYPE_GPG_PASSWORD }}
SONATYPE_GPG_FILE: ${{ secrets.SONATYPE_GPG_FILE }}
name: Github Release
runs-on: ubuntu-latest
needs: [ check, check-e2e ]
if: |
always() &&
startsWith(github.ref, 'refs/tags/v') &&
needs.check.result == 'success' &&
(needs.check-e2e.result == 'skipped' || needs.check-e2e.result == 'success')
steps:
# Download Exec
- name: Download executable
uses: actions/download-artifact@v4
if: startsWith(github.ref, 'refs/tags/v')
with:
name: exe
path: build/executable
# GitHub Release
- name: Create GitHub release
uses: "marvinpinto/action-automatic-releases@latest"
if: startsWith(github.ref, 'refs/tags/v')
continue-on-error: true
with:
repo_token: "${{ secrets.GITHUB_TOKEN }}"
prerelease: false
files: |
build/executable/*
docker:
name: Publish Docker
runs-on: ubuntu-latest
needs: [build-artifacts, check, check-e2e]
if: github.ref == 'refs/heads/develop'
strategy:
matrix:
image:
- tag: ${{needs.build-artifacts.outputs.docker-tag}}-no-plugins
packages: ""
python-libraries: ""
- tag: ${{needs.build-artifacts.outputs.docker-tag}}
plugins: ${{needs.build-artifacts.outputs.plugins}}
packages: python3 python3-venv python-is-python3 python3-pip nodejs npm curl zip unzip
python-libraries: kestra
steps:
- uses: actions/checkout@v4
# Docker setup
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
# Docker Login
- name: Login to DockerHub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_PASSWORD }}
# Vars
- name: Set image name
id: vars
run: |
TAG=${GITHUB_REF#refs/*/}
if [[ $TAG = "master" || $TAG == v* ]]; then
echo "plugins=${{ matrix.image.plugins }}" >> $GITHUB_OUTPUT
else
echo "plugins=--repositories=https://s01.oss.sonatype.org/content/repositories/snapshots ${{ matrix.image.plugins }}" >> $GITHUB_OUTPUT
fi
# Build Docker Image
- name: Download executable
uses: actions/download-artifact@v4
with:
name: exe
path: build/executable
- name: Copy exe to image
run: |
cp build/executable/* docker/app/kestra && chmod +x docker/app/kestra
# Docker Build and push
- name: Build Docker Image
uses: docker/build-push-action@v6
with:
context: .
push: true
tags: kestra/kestra:${{ matrix.image.tag }}
platforms: linux/amd64,linux/arm64
build-args: |
KESTRA_PLUGINS=${{ steps.vars.outputs.plugins }}
APT_PACKAGES=${{matrix.image.packages}}
PYTHON_LIBRARIES=${{matrix.image.python-libraries}}
maven:
name: Publish to Maven
runs-on: ubuntu-latest
needs: [check, check-e2e]
if: |
always() &&
github.ref == 'refs/heads/develop' || startsWith(github.ref, 'refs/tags/v') &&
needs.check.result == 'success' &&
(needs.check-e2e.result == 'skipped' || needs.check-e2e.result == 'success')
steps:
- uses: actions/checkout@v4
# Checkout GitHub Actions
- uses: actions/checkout@v4
with:
repository: kestra-io/actions
path: actions
ref: main
# Setup build
- uses: ./actions/.github/actions/setup-build
id: build
with:
java-enabled: true
node-enabled: true
caches-enabled: true
# Publish
- name: Publish package to Sonatype
if: github.ref == 'refs/heads/develop'
env:
ORG_GRADLE_PROJECT_sonatypeUsername: ${{ secrets.SONATYPE_USER }}
ORG_GRADLE_PROJECT_sonatypePassword: ${{ secrets.SONATYPE_PASSWORD }}
SONATYPE_GPG_KEYID: ${{ secrets.SONATYPE_GPG_KEYID }}
SONATYPE_GPG_PASSWORD: ${{ secrets.SONATYPE_GPG_PASSWORD }}
SONATYPE_GPG_FILE: ${{ secrets.SONATYPE_GPG_FILE }}
run: |
mkdir -p ~/.gradle/
echo "signing.keyId=${SONATYPE_GPG_KEYID}" > ~/.gradle/gradle.properties
echo "signing.password=${SONATYPE_GPG_PASSWORD}" >> ~/.gradle/gradle.properties
echo "signing.secretKeyRingFile=${HOME}/.gradle/secring.gpg" >> ~/.gradle/gradle.properties
echo ${SONATYPE_GPG_FILE} | base64 -d > ~/.gradle/secring.gpg
./gradlew publishToSonatype
# Release
- name: Release package to Maven Central
if: startsWith(github.ref, 'refs/tags/v')
env:
ORG_GRADLE_PROJECT_sonatypeUsername: ${{ secrets.SONATYPE_USER }}
ORG_GRADLE_PROJECT_sonatypePassword: ${{ secrets.SONATYPE_PASSWORD }}
SONATYPE_GPG_KEYID: ${{ secrets.SONATYPE_GPG_KEYID }}
SONATYPE_GPG_PASSWORD: ${{ secrets.SONATYPE_GPG_PASSWORD }}
SONATYPE_GPG_FILE: ${{ secrets.SONATYPE_GPG_FILE }}
run: |
echo "signing.keyId=${SONATYPE_GPG_KEYID}" > ~/.gradle/gradle.properties
echo "signing.password=${SONATYPE_GPG_PASSWORD}" >> ~/.gradle/gradle.properties
echo "signing.secretKeyRingFile=${HOME}/.gradle/secring.gpg" >> ~/.gradle/gradle.properties
echo ${SONATYPE_GPG_FILE} | base64 -d > ~/.gradle/secring.gpg
./gradlew publishToSonatype closeAndReleaseSonatypeStagingRepository
end:
runs-on: ubuntu-latest
needs:
- check-e2e
- check
- maven
- docker
- release
if: always()
env:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
steps:
# Update
- name: Github - Update internal
- name: Update internal
uses: benc-uk/workflow-dispatch@v1
if: github.ref == 'refs/heads/develop' && needs.docker.result == 'success'
with:
@@ -63,12 +455,12 @@ jobs:
token: ${{ secrets.GH_PERSONAL_TOKEN }}
# Slack
- name: Slack - Notification
- name: Slack notification
uses: Gamesight/slack-workflow-status@master
if: ${{ always() && env.SLACK_WEBHOOK_URL != 0 }}
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
slack_webhook_url: ${{ secrets.SLACK_WEBHOOK_URL }}
name: GitHub Actions
icon_emoji: ":github-actions:"
channel: "C02DQ1A7JLR" # _int_git channel
icon_emoji: ':github-actions:'
channel: 'C02DQ1A7JLR' # _int_git channel

View File

@@ -1,74 +0,0 @@
name: Pull Request Workflow
on:
pull_request:
branches:
- develop
concurrency:
group: ${{ github.workflow }}-${{ github.ref_name }}-pr
cancel-in-progress: true
jobs:
# ********************************************************************************************************************
# File changes detection
# ********************************************************************************************************************
file-changes:
if: ${{ github.event.pull_request.draft == false }}
name: File changes detection
runs-on: ubuntu-latest
timeout-minutes: 60
outputs:
ui: ${{ steps.changes.outputs.ui }}
translations: ${{ steps.changes.outputs.translations }}
backend: ${{ steps.changes.outputs.backend }}
steps:
- uses: dorny/paths-filter@v3
id: changes
with:
filters: |
ui:
- 'ui/**'
backend:
- '!{ui,.github}/**'
token: ${{ secrets.GITHUB_TOKEN }}
# ********************************************************************************************************************
# Tests
# ********************************************************************************************************************
frontend:
name: Frontend - Tests
needs: [file-changes]
if: "needs.file-changes.outputs.ui == 'true'"
uses: ./.github/workflows/workflow-frontend-test.yml
secrets:
GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
backend:
name: Backend - Tests
needs: file-changes
if: "needs.file-changes.outputs.backend == 'true'"
uses: ./.github/workflows/workflow-backend-test.yml
secrets:
GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
end:
name: End
runs-on: ubuntu-latest
if: always()
needs: [frontend, backend]
steps:
# Slack
- name: Slack notification
uses: Gamesight/slack-workflow-status@master
if: ${{ always() && env.SLACK_WEBHOOK_URL != 0 }}
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
slack_webhook_url: ${{ secrets.SLACK_WEBHOOK_URL }}
name: GitHub Actions
icon_emoji: ":github-actions:"
channel: "C02DQ1A7JLR"

View File

@@ -1,60 +0,0 @@
name: Set Version and Tag Plugins
on:
workflow_dispatch:
inputs:
releaseVersion:
description: 'The release version (e.g., 0.21.0)'
required: true
type: string
dryRun:
description: 'Use DRY_RUN mode'
required: false
default: 'false'
jobs:
tag:
name: Release plugins
runs-on: ubuntu-latest
steps:
# Checkout
- uses: actions/checkout@v4
with:
fetch-depth: 0
# Get Plugins List
- name: Get Plugins List
uses: ./.github/actions/plugins-list
id: plugins-list
with:
plugin-version: 'LATEST'
- name: 'Configure Git'
run: |
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
git config --global user.name "github-actions[bot]"
# Execute
- name: Set Version and Tag Plugins
if: ${{ github.event.inputs.dryRun == 'false' }}
env:
GITHUB_PAT: ${{ secrets.GH_PERSONAL_TOKEN }}
run: |
chmod +x ./dev-tools/setversion-tag-plugins.sh;
./dev-tools/setversion-tag-plugins.sh \
--release-version=${{github.event.inputs.releaseVersion}} \
--yes \
${{ steps.plugins-list.outputs.repositories }}
- name: Set Version and Tag Plugins (DRY_RUN)
if: ${{ github.event.inputs.dryRun == 'true' }}
env:
GITHUB_PAT: ${{ secrets.GH_PERSONAL_TOKEN }}
run: |
chmod +x ./dev-tools/setversion-tag-plugins.sh;
./dev-tools/setversion-tag-plugins.sh \
--release-version=${{github.event.inputs.releaseVersion}} \
--dry-run \
--yes \
${{ steps.plugins-list.outputs.repositories }}

View File

@@ -1,57 +0,0 @@
name: Set Version and Tag
run-name: "Set version and Tag Kestra to ${{ github.event.inputs.releaseVersion }} 🚀"
on:
workflow_dispatch:
inputs:
releaseVersion:
description: 'The release version (e.g., 0.21.1)'
required: true
type: string
env:
RELEASE_VERSION: "${{ github.event.inputs.releaseVersion }}"
jobs:
release:
name: Release Kestra
runs-on: ubuntu-latest
if: startsWith(github.ref, 'refs/heads/releases/v')
steps:
# Checks
- name: Check Inputs
run: |
if ! [[ "$RELEASE_VERSION" =~ ^[0-9]+(\.[0-9]+)(\.[0-9]+)(-rc[0-9])?(-SNAPSHOT)?$ ]]; then
echo "Invalid release version. Must match regex: ^[0-9]+(\.[0-9]+)(\.[0-9]+)-(rc[0-9])?(-SNAPSHOT)?$"
exit 1
fi
# Extract the major and minor versions
BASE_VERSION=$(echo "$RELEASE_VERSION" | sed -E 's/^([0-9]+\.[0-9]+)\..*/\1/')
RELEASE_BRANCH="refs/heads/releases/v${BASE_VERSION}.x"
CURRENT_BRANCH="$GITHUB_REF"
if ! [[ "$CURRENT_BRANCH" == "$RELEASE_BRANCH" ]]; then
echo "Invalid release branch. Expected $RELEASE_BRANCH, was $CURRENT_BRANCH"
exit 1
fi
# Checkout
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Configure Git
run: |
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
git config --global user.name "github-actions[bot]"
# Execute
- name: Run Gradle Release
env:
GITHUB_PAT: ${{ secrets.GH_PERSONAL_TOKEN }}
run: |
# Update version
sed -i "s/^version=.*/version=$RELEASE_VERSION/" ./gradle.properties
git add ./gradle.properties
git commit -m"chore(version): update to version '$RELEASE_VERSION'"
git push
git tag -a "v$RELEASE_VERSION" -m"v$RELEASE_VERSION"
git push origin "v$RELEASE_VERSION"
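For reference, the version check above can be exercised locally before dispatching the workflow. The sketch below reuses the regex from the Check Inputs step; the sample version strings are hypothetical:
```bash
#!/usr/bin/env bash
# Mirror of the "Check Inputs" validation above, run against a few sample values.
PATTERN='^[0-9]+(\.[0-9]+)(\.[0-9]+)(-rc[0-9])?(-SNAPSHOT)?$'
for v in 0.21.1 0.21.0-rc1 0.22.0-SNAPSHOT 21 v0.21.1; do
  if [[ "$v" =~ $PATTERN ]]; then
    echo "valid:   $v"
  else
    echo "invalid: $v"
  fi
done
```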

View File

@@ -8,9 +8,6 @@ on:
env:
JAVA_VERSION: '21'
permissions:
contents: read
jobs:
dependency-check:
name: Dependency Check
@@ -36,16 +33,8 @@ jobs:
node-enabled: true
caches-enabled: true
# Npm
- name: Npm - Install
shell: bash
working-directory: ui
run: npm ci
# Run OWASP dependency check plugin
- name: Gradle Dependency Check
env:
NVD_API_KEY: ${{ secrets.NIST_APIKEY }}
run: |
./gradlew dependencyCheckAggregate
@@ -60,10 +49,6 @@ jobs:
develop-image-check:
name: Image Check (develop)
runs-on: ubuntu-latest
permissions:
contents: read
security-events: write
actions: read
steps:
# Checkout
- uses: actions/checkout@v4
@@ -87,28 +72,16 @@ jobs:
# Run Trivy image scan for Docker vulnerabilities, see https://github.com/aquasecurity/trivy-action
- name: Docker Vulnerabilities Check
uses: aquasecurity/trivy-action@0.30.0
uses: aquasecurity/trivy-action@0.24.0
with:
image-ref: kestra/kestra:develop
format: 'template'
template: '@/contrib/sarif.tpl'
severity: 'CRITICAL,HIGH'
output: 'trivy-results.sarif'
format: table
skip-dirs: /app/plugins
- name: Upload Trivy scan results to GitHub Security tab
uses: github/codeql-action/upload-sarif@v3
with:
sarif_file: 'trivy-results.sarif'
category: docker-
scanners: vuln
latest-image-check:
name: Image Check (latest)
runs-on: ubuntu-latest
permissions:
contents: read
security-events: write
actions: read
steps:
# Checkout
- uses: actions/checkout@v4
@@ -132,16 +105,9 @@ jobs:
# Run Trivy image scan for Docker vulnerabilities, see https://github.com/aquasecurity/trivy-action
- name: Docker Vulnerabilities Check
uses: aquasecurity/trivy-action@0.30.0
uses: aquasecurity/trivy-action@0.24.0
with:
image-ref: kestra/kestra:latest
format: table
skip-dirs: /app/plugins
scanners: vuln
severity: 'CRITICAL,HIGH'
output: 'trivy-results.sarif'
- name: Upload Trivy scan results to GitHub Security tab
uses: github/codeql-action/upload-sarif@v3
with:
sarif_file: 'trivy-results.sarif'
scanners: vuln

View File

@@ -1,142 +0,0 @@
name: Backend - Tests
on:
workflow_call:
secrets:
GITHUB_AUTH_TOKEN:
description: "The GitHub Token."
required: true
CODECOV_TOKEN:
description: 'Codecov Token'
required: true
SONAR_TOKEN:
description: 'Sonar Token'
required: true
GOOGLE_SERVICE_ACCOUNT:
description: 'Google Service Account'
required: true
permissions:
contents: write
checks: write
actions: read
jobs:
test:
name: Backend - Tests
runs-on: ubuntu-latest
env:
GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
steps:
- uses: actions/checkout@v4
name: Checkout - Current ref
with:
fetch-depth: 0
# Setup build
- uses: kestra-io/actions/.github/actions/setup-build@main
name: Setup - Build
id: build
with:
java-enabled: true
node-enabled: true
python-enabled: true
# Services
- name: Setup - Start docker compose
shell: bash
run: docker compose -f docker-compose-ci.yml up -d
# Gradle check
- name: Gradle - Build
if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}
env:
GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
shell: bash
run: |
echo $GOOGLE_SERVICE_ACCOUNT | base64 -d > ~/.gcp-service-account.json
export GOOGLE_APPLICATION_CREDENTIALS=$HOME/.gcp-service-account.json
./gradlew check javadoc --parallel
# report test
- name: Test - Publish Test Results
uses: dorny/test-reporter@v2
if: always()
with:
name: Java Tests Report
reporter: java-junit
path: '**/build/test-results/test/TEST-*.xml'
list-suites: 'failed'
list-tests: 'failed'
fail-on-error: 'false'
token: ${{ secrets.GITHUB_AUTH_TOKEN }}
# Sonar
- name: Test - Analyze with Sonar
if: env.SONAR_TOKEN != ''
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_AUTH_TOKEN }}
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
shell: bash
run: ./gradlew sonar --info
# GCP
- name: GCP - Auth with unit test account
id: auth
if: always() && env.GOOGLE_SERVICE_ACCOUNT != ''
continue-on-error: true
uses: "google-github-actions/auth@v2"
with:
credentials_json: "${{ secrets.GOOGLE_SERVICE_ACCOUNT }}"
- name: GCP - Setup Cloud SDK
if: env.GOOGLE_SERVICE_ACCOUNT != ''
uses: "google-github-actions/setup-gcloud@v2"
# Allure check
- uses: rlespinasse/github-slug-action@v5
name: Allure - Generate slug variables
- name: Allure - Publish report
uses: andrcuns/allure-publish-action@v2.9.0
if: always() && env.GOOGLE_SERVICE_ACCOUNT != ''
continue-on-error: true
env:
GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_AUTH_TOKEN }}
JAVA_HOME: /usr/lib/jvm/default-jvm/
with:
storageType: gcs
resultsGlob: "**/build/allure-results"
bucket: internal-kestra-host
baseUrl: "https://internal.dev.kestra.io"
prefix: ${{ format('{0}/{1}', github.repository, 'allure/java') }}
copyLatest: true
ignoreMissingResults: true
# Jacoco
- name: Jacoco - Copy reports
if: env.GOOGLE_SERVICE_ACCOUNT != ''
continue-on-error: true
shell: bash
run: |
mv build/reports/jacoco/testCodeCoverageReport build/reports/jacoco/test/
mv build/reports/jacoco/test/testCodeCoverageReport.xml build/reports/jacoco/test/jacocoTestReport.xml
gsutil -m rsync -d -r build/reports/jacoco/test/ gs://internal-kestra-host/${{ format('{0}/{1}', github.repository, 'jacoco') }}
# Codecov
- name: Codecov - Upload coverage reports
uses: codecov/codecov-action@v5
if: ${{ !cancelled() }}
continue-on-error: true
with:
token: ${{ secrets.CODECOV_TOKEN }}
flags: backend
- name: Codecov - Upload test results
uses: codecov/test-results-action@v1
if: ${{ !cancelled() }}
continue-on-error: true
with:
token: ${{ secrets.CODECOV_TOKEN }}
flags: backend

View File

@@ -1,152 +0,0 @@
name: Build Artifacts
on:
workflow_call:
inputs:
plugin-version:
description: "Kestra version"
default: 'LATEST'
required: true
type: string
outputs:
docker-tag:
value: ${{ jobs.build.outputs.docker-tag }}
description: "The Docker image Tag for Kestra"
docker-artifact-name:
value: ${{ jobs.build.outputs.docker-artifact-name }}
description: "The GitHub artifact containing the Kestra docker image name."
plugins:
value: ${{ jobs.build.outputs.plugins }}
description: "The Kestra plugins list used for the build."
jobs:
build:
name: Build - Artifacts
runs-on: ubuntu-latest
outputs:
docker-tag: ${{ steps.vars.outputs.tag }}
docker-artifact-name: ${{ steps.vars.outputs.artifact }}
plugins: ${{ steps.plugins.outputs.plugins }}
env:
PLUGIN_VERSION: ${{ github.event.inputs.plugin-version != null && github.event.inputs.plugin-version || 'LATEST' }}
steps:
- name: Checkout - Current ref
uses: actions/checkout@v4
with:
fetch-depth: 0
# Npm
- name: Setup - Npm install
shell: bash
working-directory: ui
run: npm ci
# Setup build
- uses: kestra-io/actions/.github/actions/setup-build@main
name: Setup - Build
id: build
with:
java-enabled: true
node-enabled: true
# Get Plugins List
- name: Plugins - Get List
uses: ./.github/actions/plugins-list
if: "!startsWith(github.ref, 'refs/tags/v')"
id: plugins-list
with:
plugin-version: ${{ env.PLUGIN_VERSION }}
# Set Plugins List
- name: Plugins - Set List
id: plugins
if: "!startsWith(github.ref, 'refs/tags/v')"
shell: bash
run: |
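# Non-release refs also resolve plugins from the Sonatype snapshots repository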
PLUGINS="${{ steps.plugins-list.outputs.plugins }}"
TAG=${GITHUB_REF#refs/*/}
if [[ $TAG = "master" || $TAG == v* ]]; then
echo "plugins=$PLUGINS" >> $GITHUB_OUTPUT
else
echo "plugins=--repositories=https://s01.oss.sonatype.org/content/repositories/snapshots $PLUGINS" >> $GITHUB_OUTPUT
fi
# Build
- name: Gradle - Build
shell: bash
run: |
./gradlew executableJar
- name: Artifacts - Copy exe to image
shell: bash
run: |
cp build/executable/* docker/app/kestra && chmod +x docker/app/kestra
# Docker Tag
- name: Setup - Docker vars
id: vars
shell: bash
run: |
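# Map the git ref to a Docker tag: master -> latest, develop -> develop, v* tags keep their name, anything else -> build-<run id>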
TAG=${GITHUB_REF#refs/*/}
if [[ $TAG = "master" ]]
then
TAG="latest";
elif [[ $TAG = "develop" ]]
then
TAG="develop";
elif [[ $TAG = v* ]]
then
TAG="${TAG}";
else
TAG="build-${{ github.run_id }}";
fi
echo "tag=${TAG}" >> $GITHUB_OUTPUT
echo "artifact=docker-kestra-${TAG}" >> $GITHUB_OUTPUT
# Docker setup
- name: Docker - Setup QEMU
uses: docker/setup-qemu-action@v3
- name: Docker - Fix Qemu
shell: bash
run: |
docker run --rm --privileged multiarch/qemu-user-static --reset -p yes -c yes
- name: Docker - Setup Buildx
uses: docker/setup-buildx-action@v3
# Docker Build
- name: Docker - Build & export image
uses: docker/build-push-action@v6
if: "!startsWith(github.ref, 'refs/tags/v')"
with:
context: .
push: false
file: Dockerfile
tags: |
kestra/kestra:${{ steps.vars.outputs.tag }}
build-args: |
KESTRA_PLUGINS=${{ steps.plugins.outputs.plugins }}
APT_PACKAGES=${{ env.DOCKER_APT_PACKAGES }}
PYTHON_LIBRARIES=${{ env.DOCKER_PYTHON_LIBRARIES }}
outputs: type=docker,dest=/tmp/${{ steps.vars.outputs.artifact }}.tar
# Upload artifacts
- name: Artifacts - Upload JAR
uses: actions/upload-artifact@v4
with:
name: jar
path: build/libs/
- name: Artifacts - Upload Executable
uses: actions/upload-artifact@v4
with:
name: exe
path: build/executable/
- name: Artifacts - Upload Docker
uses: actions/upload-artifact@v4
if: "!startsWith(github.ref, 'refs/tags/v')"
with:
name: ${{ steps.vars.outputs.artifact }}
path: /tmp/${{ steps.vars.outputs.artifact }}.tar

View File

@@ -1,86 +0,0 @@
name: Frontend - Tests
on:
workflow_call:
secrets:
GITHUB_AUTH_TOKEN:
description: "The GitHub Token."
required: true
CODECOV_TOKEN:
description: 'Codecov Token'
required: true
env:
# to save corepack from itself
COREPACK_INTEGRITY_KEYS: 0
jobs:
test:
name: Frontend - Tests
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Cache Node Modules
id: cache-node-modules
uses: actions/cache@v4
with:
path: |
ui/node_modules
key: modules-${{ hashFiles('ui/package-lock.json') }}
- name: Cache Playwright Binaries
id: cache-playwright
uses: actions/cache@v4
with:
path: |
~/.cache/ms-playwright
key: playwright-${{ hashFiles('ui/package-lock.json') }}
- name: Npm - install
shell: bash
if: steps.cache-node-modules.outputs.cache-hit != 'true'
working-directory: ui
run: npm ci
- name: Npm - lint
uses: reviewdog/action-eslint@v1
with:
github_token: ${{ secrets.GITHUB_AUTH_TOKEN }}
reporter: github-pr-review
workdir: ui
- name: Npm - Run build
shell: bash
working-directory: ui
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
run: npm run build
- name: Storybook - Install Playwright
shell: bash
working-directory: ui
if: steps.cache-playwright.outputs.cache-hit != 'true'
run: npx playwright install --with-deps
- name: Run front-end unit tests
shell: bash
working-directory: ui
run: npm run test:cicd
- name: Codecov - Upload coverage reports
uses: codecov/codecov-action@v5
if: ${{ !cancelled() && github.event.pull_request.head.repo.full_name == github.repository }}
continue-on-error: true
with:
token: ${{ secrets.CODECOV_TOKEN }}
flags: frontend
- name: Codecov - Upload test results
uses: codecov/test-results-action@v1
if: ${{ !cancelled() && github.event.pull_request.head.repo.full_name == github.repository }}
continue-on-error: true
with:
token: ${{ secrets.CODECOV_TOKEN }}
flags: frontend

View File

@@ -1,40 +0,0 @@
name: Github - Release
on:
workflow_call:
secrets:
GH_PERSONAL_TOKEN:
description: "The Github personal token."
required: true
jobs:
publish:
name: Github - Release
runs-on: ubuntu-latest
steps:
# Download Exec
- name: Artifacts - Download executable
uses: actions/download-artifact@v4
if: startsWith(github.ref, 'refs/tags/v')
with:
name: exe
path: build/executable
# Checkout GitHub Actions
- name: Checkout - Actions
uses: actions/checkout@v4
with:
repository: kestra-io/actions
sparse-checkout-cone-mode: true
path: actions
sparse-checkout: |
.github/actions
# GitHub Release
- name: Create GitHub release
uses: ./actions/.github/actions/github-release
if: ${{ startsWith(github.ref, 'refs/tags/v') }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
SLACK_RELEASES_WEBHOOK_URL: ${{ secrets.SLACK_RELEASES_WEBHOOK_URL }}

View File

@@ -1,146 +0,0 @@
name: Publish - Docker
on:
workflow_dispatch:
inputs:
plugin-version:
description: "Kestra version"
default: 'LATEST'
required: false
type: string
force-download-artifact:
description: 'Force download artifact'
required: false
type: string
default: "true"
workflow_call:
inputs:
plugin-version:
description: "Kestra version"
default: 'LATEST'
required: false
type: string
force-download-artifact:
description: 'Force download artifact'
required: false
type: string
default: "true"
secrets:
DOCKERHUB_USERNAME:
description: "The Dockerhub username."
required: true
DOCKERHUB_PASSWORD:
description: "The Dockerhub password."
required: true
jobs:
# ********************************************************************************************************************
# Build
# ********************************************************************************************************************
build-artifacts:
name: Build Artifacts
if: ${{ github.event.inputs.force-download-artifact == 'true' }}
uses: ./.github/workflows/workflow-build-artifacts.yml
with:
plugin-version: ${{ github.event.inputs.plugin-version != null && github.event.inputs.plugin-version || 'LATEST' }}
# ********************************************************************************************************************
# Docker
# ********************************************************************************************************************
publish:
name: Publish - Docker
runs-on: ubuntu-latest
needs: build-artifacts
if: |
always() &&
(needs.build-artifacts.result == 'success' ||
github.event.inputs.force-download-artifact != 'true')
env:
PLUGIN_VERSION: ${{ github.event.inputs.plugin-version != null && github.event.inputs.plugin-version || 'LATEST' }}
strategy:
matrix:
image:
- tag: -no-plugins
packages: jattach
plugins: false
python-libraries: ""
- tag: ""
plugins: true
packages: python3 python3-venv python-is-python3 python3-pip nodejs npm curl zip unzip jattach
python-libraries: kestra
steps:
- name: Checkout - Current ref
uses: actions/checkout@v4
# Docker setup
- name: Docker - Setup QEMU
uses: docker/setup-qemu-action@v3
- name: Docker - Fix Qemu
shell: bash
run: |
docker run --rm --privileged multiarch/qemu-user-static --reset -p yes -c yes
- name: Docker - Setup Docker Buildx
uses: docker/setup-buildx-action@v3
# Docker Login
- name: Docker - Login to DockerHub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_PASSWORD }}
# Get Plugins List
- name: Plugins - Get List
uses: ./.github/actions/plugins-list
id: plugins-list
if: ${{ matrix.image.plugins }}
with:
plugin-version: ${{ env.PLUGIN_VERSION }}
# Vars
- name: Docker - Set variables
shell: bash
id: vars
run: |
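# Pick the Docker tag and plugin repositories from the git ref: v* tags use released plugins, other refs add the Sonatype snapshots repository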
TAG=${GITHUB_REF#refs/*/}
PLUGINS="${{ matrix.image.plugins == true && steps.plugins-list.outputs.plugins || '' }}"
if [[ $TAG == v* ]]; then
TAG="${TAG}";
echo "plugins=${{ matrix.image.plugins }}" >> $GITHUB_OUTPUT
elif [[ $TAG = "develop" ]]; then
TAG="develop";
echo "plugins=--repositories=https://s01.oss.sonatype.org/content/repositories/snapshots $PLUGINS" >> $GITHUB_OUTPUT
else
TAG="build-${{ github.run_id }}";
echo "plugins=--repositories=https://s01.oss.sonatype.org/content/repositories/snapshots $PLUGINS" >> $GITHUB_OUTPUT
fi
echo "tag=${TAG}${{ matrix.image.tag }}" >> $GITHUB_OUTPUT
# Build Docker Image
- name: Artifacts - Download executable
uses: actions/download-artifact@v4
with:
name: exe
path: build/executable
- name: Docker - Copy exe to image
shell: bash
run: |
cp build/executable/* docker/app/kestra && chmod +x docker/app/kestra
# Docker Build and push
- name: Docker - Build image
uses: docker/build-push-action@v6
with:
context: .
push: true
tags: kestra/kestra:${{ steps.vars.outputs.tag }}
platforms: linux/amd64,linux/arm64
build-args: |
KESTRA_PLUGINS=${{ steps.vars.outputs.plugins }}
APT_PACKAGES=${{ matrix.image.packages }}
PYTHON_LIBRARIES=${{ matrix.image.python-libraries }}

View File

@@ -1,57 +0,0 @@
name: Publish - Maven
on:
workflow_call:
secrets:
SONATYPE_USER:
description: "The Sonatype username."
required: true
SONATYPE_PASSWORD:
description: "The Sonatype password."
required: true
SONATYPE_GPG_KEYID:
description: "The Sonatype GPG key id."
required: true
SONATYPE_GPG_PASSWORD:
description: "The Sonatype GPG password."
required: true
SONATYPE_GPG_FILE:
description: "The Sonatype GPG file."
required: true
jobs:
publish:
name: Publish - Maven
runs-on: ubuntu-latest
steps:
- name: Checkout - Current ref
uses: actions/checkout@v4
# Setup build
- name: Setup - Build
uses: kestra-io/actions/.github/actions/setup-build@main
id: build
with:
java-enabled: true
node-enabled: true
# Publish
- name: Publish - Release package to Maven Central
shell: bash
env:
ORG_GRADLE_PROJECT_sonatypeUsername: ${{ secrets.SONATYPE_USER }}
ORG_GRADLE_PROJECT_sonatypePassword: ${{ secrets.SONATYPE_PASSWORD }}
SONATYPE_GPG_KEYID: ${{ secrets.SONATYPE_GPG_KEYID }}
SONATYPE_GPG_PASSWORD: ${{ secrets.SONATYPE_GPG_PASSWORD }}
SONATYPE_GPG_FILE: ${{ secrets.SONATYPE_GPG_FILE }}
run: |
mkdir -p ~/.gradle/
echo "signing.keyId=${SONATYPE_GPG_KEYID}" > ~/.gradle/gradle.properties
echo "signing.password=${SONATYPE_GPG_PASSWORD}" >> ~/.gradle/gradle.properties
echo "signing.secretKeyRingFile=${HOME}/.gradle/secring.gpg" >> ~/.gradle/gradle.properties
echo ${SONATYPE_GPG_FILE} | base64 -d > ~/.gradle/secring.gpg
./gradlew publishToSonatype ${{ startsWith(github.ref, 'refs/tags/v') && 'closeAndReleaseSonatypeStagingRepository' || '' }}
# Gradle dependency
- name: Java - Gradle dependency graph
uses: gradle/actions/dependency-submission@v4

View File

@@ -1,80 +0,0 @@
name: Release
on:
workflow_dispatch:
inputs:
plugin-version:
description: "Kestra version"
default: 'LATEST'
required: false
type: string
publish-docker:
description: "Publish Docker image"
default: 'false'
required: false
type: string
workflow_call:
inputs:
plugin-version:
description: "Kestra version"
default: 'LATEST'
required: false
type: string
secrets:
DOCKERHUB_USERNAME:
description: "The Dockerhub username."
required: true
DOCKERHUB_PASSWORD:
description: "The Dockerhub password."
required: true
SONATYPE_USER:
description: "The Sonatype username."
required: true
SONATYPE_PASSWORD:
description: "The Sonatype password."
required: true
SONATYPE_GPG_KEYID:
description: "The Sonatype GPG key id."
required: true
SONATYPE_GPG_PASSWORD:
description: "The Sonatype GPG password."
required: true
SONATYPE_GPG_FILE:
description: "The Sonatype GPG file."
required: true
jobs:
build-artifacts:
name: Build - Artifacts
uses: ./.github/workflows/workflow-build-artifacts.yml
with:
plugin-version: ${{ github.event.inputs.plugin-version != null && github.event.inputs.plugin-version || 'LATEST' }}
Docker:
name: Publish Docker
needs: build-artifacts
uses: ./.github/workflows/workflow-publish-docker.yml
if: startsWith(github.ref, 'refs/heads/develop') || github.event.inputs.publish-docker == 'true'
with:
force-download-artifact: 'false'
plugin-version: ${{ github.event.inputs.plugin-version != null && github.event.inputs.plugin-version || 'LATEST' }}
secrets:
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }}
Maven:
name: Publish Maven
uses: ./.github/workflows/workflow-publish-maven.yml
secrets:
SONATYPE_USER: ${{ secrets.SONATYPE_USER }}
SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }}
SONATYPE_GPG_KEYID: ${{ secrets.SONATYPE_GPG_KEYID }}
SONATYPE_GPG_PASSWORD: ${{ secrets.SONATYPE_GPG_PASSWORD }}
SONATYPE_GPG_FILE: ${{ secrets.SONATYPE_GPG_FILE }}
Github:
name: Github Release
needs: build-artifacts
if: startsWith(github.ref, 'refs/tags/v')
uses: ./.github/workflows/workflow-github-release.yml
secrets:
GH_PERSONAL_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}

View File

@@ -1,97 +0,0 @@
name: Tests
on:
schedule:
- cron: '0 4 * * 1,2,3,4,5'
workflow_call:
inputs:
report-status:
description: "Report status of the jobs in outputs"
type: string
required: false
default: 'false'
outputs:
frontend_status:
description: "Status of the frontend job"
value: ${{ jobs.set-frontend-status.outputs.frontend_status }}
backend_status:
description: "Status of the backend job"
value: ${{ jobs.set-backend-status.outputs.backend_status }}
jobs:
file-changes:
name: File changes detection
runs-on: ubuntu-latest
timeout-minutes: 60
outputs:
ui: ${{ steps.changes.outputs.ui }}
backend: ${{ steps.changes.outputs.backend }}
steps:
- uses: actions/checkout@v4
if: "!startsWith(github.ref, 'refs/tags/v')"
- uses: dorny/paths-filter@v3
if: "!startsWith(github.ref, 'refs/tags/v')"
id: changes
with:
filters: |
ui:
- 'ui/**'
backend:
- '!{ui,.github}/**'
token: ${{ secrets.GITHUB_TOKEN }}
frontend:
name: Frontend - Tests
needs: file-changes
if: "needs.file-changes.outputs.ui == 'true' || startsWith(github.ref, 'refs/tags/v')"
uses: ./.github/workflows/workflow-frontend-test.yml
secrets:
GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
backend:
name: Backend - Tests
needs: file-changes
if: "needs.file-changes.outputs.backend == 'true' || startsWith(github.ref, 'refs/tags/v')"
uses: ./.github/workflows/workflow-backend-test.yml
secrets:
GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
# Output every job status
# To be used in other workflows
report-status:
name: Report Status
runs-on: ubuntu-latest
needs: [ frontend, backend ]
if: always() && (inputs.report-status == 'true')
outputs:
frontend_status: ${{ steps.set-frontend-status.outputs.frontend_status }}
backend_status: ${{ steps.set-backend-status.outputs.backend_status }}
steps:
- id: set-frontend-status
name: Set frontend job status
run: echo "::set-output name=frontend_status::${{ needs.frontend.result }}"
- id: set-backend-status
name: Set backend job status
run: echo "::set-output name=backend_status::${{ needs.backend.result }}"
notify:
name: Notify - Slack
runs-on: ubuntu-latest
needs: [ frontend, backend ]
if: github.event_name == 'schedule'
steps:
- name: Notify failed CI
id: send-ci-failed
if: |
always() && (needs.frontend.result != 'success' ||
needs.backend.result != 'success')
uses: kestra-io/actions/.github/actions/send-ci-failed@main
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

6
.gitignore vendored
View File

@@ -36,8 +36,6 @@ yarn.lock
ui/coverage
ui/stats.html
ui/.frontend-gradle-plugin
ui/utils/CHANGELOG.md
ui/test-report.junit.xml
### Docker
/.env
@@ -56,7 +54,3 @@ core/src/main/resources/gradle.properties
# Allure Reports
**/allure-results/*
*storybook.log
storybook-static
/jmh-benchmarks/src/main/resources/gradle.properties

207
.plugins
View File

@@ -1,113 +1,102 @@
#
# List of plugins to install locally with: $ make install-plugins
# Format: <RepositoryName>:<GroupId>:<ArtifactId>:<Version>
#
# Uncomment the lines corresponding to the plugins to be installed (a usage sketch follows the list below):
#plugin-airbyte:io.kestra.plugin:plugin-airbyte:LATEST
#plugin-airflow:io.kestra.plugin:plugin-airflow:LATEST
#plugin-amqp:io.kestra.plugin:plugin-amqp:LATEST
#plugin-ansible:io.kestra.plugin:plugin-ansible:LATEST
#plugin-aws:io.kestra.plugin:plugin-aws:LATEST
#plugin-azure:io.kestra.plugin:plugin-azure:LATEST
#plugin-cassandra:io.kestra.plugin:plugin-cassandra:LATEST
#plugin-cloudquery:io.kestra.plugin:plugin-cloudquery:LATEST
#plugin-compress:io.kestra.plugin:plugin-compress:LATEST
#plugin-couchbase:io.kestra.plugin:plugin-couchbase:LATEST
#plugin-crypto:io.kestra.plugin:plugin-crypto:LATEST
#plugin-databricks:io.kestra.plugin:plugin-databricks:LATEST
#plugin-datahub:io.kestra.plugin:plugin-datahub:LATEST
#plugin-dataform:io.kestra.plugin:plugin-dataform:LATEST
#plugin-dbt:io.kestra.plugin:plugin-dbt:LATEST
#plugin-debezium:io.kestra.plugin:plugin-debezium-db2:LATEST
#plugin-debezium:io.kestra.plugin:plugin-debezium-mongodb:LATEST
#plugin-debezium:io.kestra.plugin:plugin-debezium-mysql:LATEST
#plugin-debezium:io.kestra.plugin:plugin-debezium-oracle:LATEST
#plugin-debezium:io.kestra.plugin:plugin-debezium-postgres:LATEST
#plugin-debezium:io.kestra.plugin:plugin-debezium-sqlserver:LATEST
#plugin-docker:io.kestra.plugin:plugin-docker:LATEST
#plugin-elasticsearch:io.kestra.plugin:plugin-elasticsearch:LATEST
#plugin-fivetran:io.kestra.plugin:plugin-fivetran:LATEST
#plugin-fs:io.kestra.plugin:plugin-fs:LATEST
#plugin-gcp:io.kestra.plugin:plugin-gcp:LATEST
#plugin-git:io.kestra.plugin:plugin-git:LATEST
#plugin-github:io.kestra.plugin:plugin-github:LATEST
#plugin-googleworkspace:io.kestra.plugin:plugin-googleworkspace:LATEST
#plugin-graalvm:io.kestra.plugin:plugin-graalvm:LATEST
#plugin-graphql:io.kestra.plugin:plugin-graphql:LATEST
#plugin-hightouch:io.kestra.plugin:plugin-hightouch:LATEST
#plugin-hubspot:io.kestra.plugin:plugin-hubspot:LATEST
#plugin-huggingface:io.kestra.plugin:plugin-huggingface:LATEST
#plugin-influxdb:io.kestra.plugin:plugin-influxdb:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-as400:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-clickhouse:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-db2:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-duckdb:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-druid:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-mariadb:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-mysql:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-oracle:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-pinot:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-postgres:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-redshift:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-snowflake:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-sqlserver:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-trino:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-vectorwise:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-vertica:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-dremio:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-arrow-flight:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-sqlite:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-sybase:LATEST
#plugin-jenkins:io.kestra.plugin:plugin-jenkins:LATEST
#plugin-jira:io.kestra.plugin:plugin-jira:LATEST
#plugin-kafka:io.kestra.plugin:plugin-kafka:LATEST
#plugin-kubernetes:io.kestra.plugin:plugin-kubernetes:LATEST
#plugin-langchain4j:io.kestra.plugin:plugin-langchain4j:LATEST
#plugin-ldap:io.kestra.plugin:plugin-ldap:LATEST
#plugin-linear:io.kestra.plugin:plugin-linear:LATEST
#plugin-malloy:io.kestra.plugin:plugin-malloy:LATEST
#plugin-meilisearch:io.kestra.plugin:plugin-meilisearch:LATEST
#plugin-minio:io.kestra.plugin:plugin-minio:LATEST
#plugin-modal:io.kestra.plugin:plugin-modal:LATEST
#plugin-mongodb:io.kestra.plugin:plugin-mongodb:LATEST
#plugin-mqtt:io.kestra.plugin:plugin-mqtt:LATEST
#plugin-nats:io.kestra.plugin:plugin-nats:LATEST
#plugin-neo4j:io.kestra.plugin:plugin-neo4j:LATEST
#plugin-notifications:io.kestra.plugin:plugin-notifications:LATEST
#plugin-ollama:io.kestra.plugin:plugin-ollama:LATEST
#plugin-openai:io.kestra.plugin:plugin-openai:LATEST
#plugin-opensearch:io.kestra.plugin:plugin-opensearch:LATEST
#plugin-powerbi:io.kestra.plugin:plugin-powerbi:LATEST
#plugin-pulsar:io.kestra.plugin:plugin-pulsar:LATEST
#plugin-redis:io.kestra.plugin:plugin-redis:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-go:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-groovy:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-jbang:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-julia:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-jython:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-nashorn:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-node:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-powershell:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-python:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-r:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-ruby:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-shell:LATEST
#plugin-serdes:io.kestra.plugin:plugin-serdes:LATEST
#plugin-servicenow:io.kestra.plugin:plugin-servicenow:LATEST
#plugin-singer:io.kestra.plugin:plugin-singer:LATEST
#plugin-soda:io.kestra.plugin:plugin-soda:LATEST
#plugin-solace:io.kestra.plugin:plugin-solace:LATEST
#plugin-spark:io.kestra.plugin:plugin-spark:LATEST
#plugin-sqlmesh:io.kestra.plugin:plugin-sqlmesh:LATEST
#plugin-surrealdb:io.kestra.plugin:plugin-surrealdb:LATEST
#plugin-terraform:io.kestra.plugin:plugin-terraform:LATEST
#plugin-transform:io.kestra.plugin:plugin-transform-grok:LATEST
#plugin-transform:io.kestra.plugin:plugin-transform-json:LATEST
#plugin-tika:io.kestra.plugin:plugin-tika:LATEST
#plugin-weaviate:io.kestra.plugin:plugin-weaviate:LATEST
#plugin-zendesk:io.kestra.plugin:plugin-zendesk:LATEST
#plugin-typesense:io.kestra.plugin:plugin-typesense:LATEST
#storage-azure:io.kestra.storage:storage-azure:LATEST
#storage-gcs:io.kestra.storage:storage-gcs:LATEST
#storage-minio:io.kestra.storage:storage-minio:LATEST
#storage-s3:io.kestra.storage:storage-s3:LATEST
#
#io.kestra.plugin:plugin-airbyte:LATEST
#io.kestra.plugin:plugin-airflow:LATEST
#io.kestra.plugin:plugin-amqp:LATEST
#io.kestra.plugin:plugin-ansible:LATEST
#io.kestra.plugin:plugin-aws:LATEST
#io.kestra.plugin:plugin-azure:LATEST
#io.kestra.plugin:plugin-cassandra:LATEST
#io.kestra.plugin:plugin-cloudquery:LATEST
#io.kestra.plugin:plugin-compress:LATEST
#io.kestra.plugin:plugin-couchbase:LATEST
#io.kestra.plugin:plugin-crypto:LATEST
#io.kestra.plugin:plugin-databricks:LATEST
#io.kestra.plugin:plugin-datahub:LATEST
#io.kestra.plugin:plugin-dataform:LATEST
#io.kestra.plugin:plugin-dbt:LATEST
#io.kestra.plugin:plugin-debezium-db2:LATEST
#io.kestra.plugin:plugin-debezium-mongodb:LATEST
#io.kestra.plugin:plugin-debezium-mysql:LATEST
#io.kestra.plugin:plugin-debezium-oracle:LATEST
#io.kestra.plugin:plugin-debezium-postgres:LATEST
#io.kestra.plugin:plugin-debezium-sqlserver:LATEST
#io.kestra.plugin:plugin-docker:LATEST
#io.kestra.plugin:plugin-elasticsearch:LATEST
#io.kestra.plugin:plugin-fivetran:LATEST
#io.kestra.plugin:plugin-fs:LATEST
#io.kestra.plugin:plugin-gcp:LATEST
#io.kestra.plugin:plugin-git:LATEST
#io.kestra.plugin:plugin-github:LATEST
#io.kestra.plugin:plugin-googleworkspace:LATEST
#io.kestra.plugin:plugin-hightouch:LATEST
#io.kestra.plugin:plugin-hubspot:LATEST
#io.kestra.plugin:plugin-jdbc-as400:LATEST
#io.kestra.plugin:plugin-jdbc-clickhouse:LATEST
#io.kestra.plugin:plugin-jdbc-db2:LATEST
#io.kestra.plugin:plugin-jdbc-duckdb:LATEST
#io.kestra.plugin:plugin-jdbc-druid:LATEST
#io.kestra.plugin:plugin-jdbc-mysql:LATEST
#io.kestra.plugin:plugin-jdbc-oracle:LATEST
#io.kestra.plugin:plugin-jdbc-pinot:LATEST
#io.kestra.plugin:plugin-jdbc-postgres:LATEST
#io.kestra.plugin:plugin-jdbc-redshift:LATEST
#io.kestra.plugin:plugin-jdbc-snowflake:LATEST
#io.kestra.plugin:plugin-jdbc-sqlserver:LATEST
#io.kestra.plugin:plugin-jdbc-trino:LATEST
#io.kestra.plugin:plugin-jdbc-vectorwise:LATEST
#io.kestra.plugin:plugin-jdbc-vertica:LATEST
#io.kestra.plugin:plugin-jdbc-dremio:LATEST
#io.kestra.plugin:plugin-jdbc-arrow-flight:LATEST
#io.kestra.plugin:plugin-jdbc-sqlite:LATEST
#io.kestra.plugin:plugin-jdbc-sybase:LATEST
#io.kestra.plugin:plugin-jira:LATEST
#io.kestra.plugin:plugin-kafka:LATEST
#io.kestra.plugin:plugin-kubernetes:LATEST
#io.kestra.plugin:plugin-ldap:LATEST
#io.kestra.plugin:plugin-linear:LATEST
#io.kestra.plugin:plugin-malloy:LATEST
#io.kestra.plugin:plugin-meilisearch:LATEST
#io.kestra.plugin:plugin-minio:LATEST
#io.kestra.plugin:plugin-modal:LATEST
#io.kestra.plugin:plugin-mongodb:LATEST
#io.kestra.plugin:plugin-mqtt:LATEST
#io.kestra.plugin:plugin-nats:LATEST
#io.kestra.plugin:plugin-neo4j:LATEST
#io.kestra.plugin:plugin-notifications:LATEST
#io.kestra.plugin:plugin-openai:LATEST
#io.kestra.plugin:plugin-powerbi:LATEST
#io.kestra.plugin:plugin-pulsar:LATEST
#io.kestra.plugin:plugin-redis:LATEST
#io.kestra.plugin:plugin-script-groovy:LATEST
#io.kestra.plugin:plugin-script-jbang:LATEST
#io.kestra.plugin:plugin-script-julia:LATEST
#io.kestra.plugin:plugin-script-jython:LATEST
#io.kestra.plugin:plugin-script-nashorn:LATEST
#io.kestra.plugin:plugin-script-node:LATEST
#io.kestra.plugin:plugin-script-powershell:LATEST
#io.kestra.plugin:plugin-script-python:LATEST
#io.kestra.plugin:plugin-script-r:LATEST
#io.kestra.plugin:plugin-script-ruby:LATEST
#io.kestra.plugin:plugin-script-shell:LATEST
#io.kestra.plugin:plugin-serdes:LATEST
#io.kestra.plugin:plugin-servicenow:LATEST
#io.kestra.plugin:plugin-singer:LATEST
#io.kestra.plugin:plugin-soda:LATEST
#io.kestra.plugin:plugin-solace:LATEST
#io.kestra.plugin:plugin-spark:LATEST
#io.kestra.plugin:plugin-sqlmesh:LATEST
#io.kestra.plugin:plugin-surrealdb:LATEST
#io.kestra.plugin:plugin-terraform:LATEST
#io.kestra.plugin:plugin-transform-grok:LATEST
#io.kestra.plugin:plugin-transform-json:LATEST
#io.kestra.plugin:plugin-tika:LATEST
#io.kestra.plugin:plugin-weaviate:LATEST
#io.kestra.plugin:plugin-zendesk:LATEST
#io.kestra.storage:storage-azure:LATEST
#io.kestra.storage:storage-gcs:LATEST
#io.kestra.storage:storage-minio:LATEST
#io.kestra.storage:storage-s3:LATEST
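As the header comment explains, each line is a plugin coordinate and `make install-plugins` installs every uncommented one into the local Kestra plugins directory. A minimal sketch of that flow, assuming a local checkout of this repository with make and the required toolchain available:
```bash
# Enable a plugin by removing the leading '#' from its line in .plugins
# (for example the plugin-script-python coordinate), then install it locally.
grep -vE '^\s*(#|$)' .plugins   # show the coordinates currently enabled
make install-plugins            # resolve each enabled coordinate and copy its jar into the plugins directory
```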

View File

@@ -1 +0,0 @@
**/*.*

View File

@@ -16,9 +16,8 @@ RUN apt-get update -y && \
if [ -n "${APT_PACKAGES}" ]; then apt-get install -y --no-install-recommends ${APT_PACKAGES}; fi && \
apt-get clean && \
rm -rf /var/lib/apt/lists/* /var/tmp/* /tmp/* && \
curl -LsSf https://astral.sh/uv/0.6.17/install.sh | sh && mv /root/.local/bin/uv /bin && mv /root/.local/bin/uvx /bin && \
if [ -n "${KESTRA_PLUGINS}" ]; then /app/kestra plugins install ${KESTRA_PLUGINS} && rm -rf /tmp/*; fi && \
if [ -n "${PYTHON_LIBRARIES}" ]; then uv pip install --system ${PYTHON_LIBRARIES}; fi && \
if [ -n "${PYTHON_LIBRARIES}" ]; then pip install ${PYTHON_LIBRARIES}; fi && \
chown -R kestra:kestra /app
USER kestra

View File

@@ -17,8 +17,6 @@ VERSION := $(shell ./gradlew properties -q | awk '/^version:/ {print $$2}')
GIT_COMMIT := $(shell git rev-parse --short HEAD)
GIT_BRANCH := $(shell git rev-parse --abbrev-ref HEAD)
DATE := $(shell date --rfc-3339=seconds)
PLUGIN_GIT_DIR ?= $(pwd)/..
PLUGIN_JARS_DIR ?= $(pwd)/locals/plugins
DOCKER_IMAGE = kestra/kestra
DOCKER_PATH = ./
@@ -69,7 +67,6 @@ install-plugins:
[[ $$plugin =~ ^#.* ]] && continue; \
PLUGINS_PATH="${KESTRA_INSTALL_DIR}/plugins"; \
CURRENT_PLUGIN=$${plugin/LATEST/"${VERSION}"}; \
CURRENT_PLUGIN=$$(echo $$CURRENT_PLUGIN | cut -d':' -f2-); \
PLUGIN_FILE="$$PLUGINS_PATH/$$(echo $$CURRENT_PLUGIN | awk -F':' '{print $$2"-"$$3}').jar"; \
echo "Installing Kestra plugin $$CURRENT_PLUGIN > ${KESTRA_INSTALL_DIR}/plugins"; \
if [ -f "$$PLUGIN_FILE" ]; then \
@@ -89,7 +86,7 @@ build-docker: build-exec
--compress \
--rm \
-f ./Dockerfile \
--build-arg="APT_PACKAGES=python3 python3-venv python-is-python3 python3-pip nodejs npm curl zip unzip jattach" \
--build-arg="APT_PACKAGES=python3 python3-venv python-is-python3 python3-pip nodejs npm curl zip unzip" \
--build-arg="PYTHON_LIBRARIES=kestra" \
-t ${DOCKER_IMAGE}:${VERSION} ${DOCKER_PATH} || exit 1 ;
@@ -176,88 +173,3 @@ start-standalone-postgres: kill --private-start-standalone-postgres health
start-standalone-local: kill --private-start-standalone-local health
#checkout all plugins
clone-plugins:
@echo "Using PLUGIN_GIT_DIR: $(PLUGIN_GIT_DIR)"
@mkdir -p "$(PLUGIN_GIT_DIR)"
@echo "Fetching repository list from GitHub..."
@REPOS=$$(gh repo list kestra-io -L 1000 --json name | jq -r .[].name | sort | grep "^plugin-"); \
for repo in $$REPOS; do \
if [[ $$repo == plugin-* ]]; then \
if [ -d "$(PLUGIN_GIT_DIR)/$$repo" ]; then \
echo "Skipping: $$repo (Already cloned)"; \
else \
echo "Cloning: $$repo using SSH..."; \
git clone "git@github.com:kestra-io/$$repo.git" "$(PLUGIN_GIT_DIR)/$$repo"; \
fi; \
fi; \
done
@echo "Done!"
# Pull every plugins in main or master branch
pull-plugins:
@echo "🔍 Pulling repositories in '$(PLUGIN_GIT_DIR)'..."
@for repo in "$(PLUGIN_GIT_DIR)"/*; do \
if [ -d "$$repo/.git" ]; then \
branch=$$(git -C "$$repo" rev-parse --abbrev-ref HEAD); \
if [[ "$$branch" == "master" || "$$branch" == "main" ]]; then \
echo "🔄 Pulling: $$(basename "$$repo") (branch: $$branch)"; \
git -C "$$repo" pull; \
else \
echo "❌ Skipping: $$(basename "$$repo") (Not on master or main branch, currently on $$branch)"; \
fi; \
fi; \
done
@echo "✅ Done pulling!"
# Update all plugins jar
build-plugins:
@echo "🔍 Scanning repositories in '$(PLUGIN_GIT_DIR)'..."
@MASTER_REPOS=(); \
for repo in "$(PLUGIN_GIT_DIR)"/*; do \
if [ -d "$$repo/.git" ]; then \
branch=$$(git -C "$$repo" rev-parse --abbrev-ref HEAD); \
if [[ "$$branch" == "master" || "$$branch" == "main" ]]; then \
MASTER_REPOS+=("$$repo"); \
else \
echo "❌ Skipping: $$(basename "$$repo") (Not on master or main branch)"; \
fi; \
fi; \
done; \
\
# === STEP 2: Update Repos on Master or Main Branch === \
echo "⬇️ Updating repositories on master or main branch..."; \
for repo in "$${MASTER_REPOS[@]}"; do \
echo "🔄 Updating: $$(basename "$$repo")"; \
git -C "$$repo" pull --rebase; \
done; \
\
# === STEP 3: Build with Gradle === \
echo "⚙️ Building repositories with Gradle..."; \
for repo in "$${MASTER_REPOS[@]}"; do \
echo "🔨 Building: $$(basename "$$repo")"; \
gradle clean build -x test shadowJar -p "$$repo"; \
done; \
\
# === STEP 4: Copy Latest JARs (Ignoring javadoc & sources) === \
echo "📦 Organizing built JARs..."; \
mkdir -p "$(PLUGIN_JARS_DIR)"; \
for repo in "$${MASTER_REPOS[@]}"; do \
REPO_NAME=$$(basename "$$repo"); \
\
JARS=($$(find "$$repo" -type f -name "plugin-*.jar" ! -name "*-javadoc.jar" ! -name "*-sources.jar")); \
if [ $${#JARS[@]} -eq 0 ]; then \
echo "⚠️ Warning: No valid plugin JARs found for $$REPO_NAME"; \
continue; \
fi; \
\
for jar in "$${JARS[@]}"; do \
JAR_NAME=$$(basename "$$jar"); \
BASE_NAME=$$(echo "$$JAR_NAME" | sed -E 's/(-[0-9]+.*)?\.jar$$//'); \
rm -f "$(PLUGIN_JARS_DIR)/$$BASE_NAME"-[0-9]*.jar; \
cp "$$jar" "$(PLUGIN_JARS_DIR)/"; \
echo "✅ Copied JAR: $$JAR_NAME"; \
done; \
done; \
\
echo "🎉 Done! All master and main branch repos updated, built, and organized."

View File

@@ -19,18 +19,11 @@
<br />
<p align="center">
<a href="https://x.com/kestra_io"><img height="25" src="https://kestra.io/twitter.svg" alt="X(formerly Twitter)" /></a> &nbsp;
<a href="https://twitter.com/kestra_io"><img height="25" src="https://kestra.io/twitter.svg" alt="twitter" /></a> &nbsp;
<a href="https://www.linkedin.com/company/kestra/"><img height="25" src="https://kestra.io/linkedin.svg" alt="linkedin" /></a> &nbsp;
<a href="https://www.youtube.com/@kestra-io"><img height="25" src="https://kestra.io/youtube.svg" alt="youtube" /></a> &nbsp;
</p>
<p align="center">
<a href="https://trendshift.io/repositories/2714" target="_blank">
<img src="https://trendshift.io/api/badge/repositories/2714" alt="kestra-io%2Fkestra | Trendshift" width="250" height="55"/>
</a>
<a href="https://www.producthunt.com/posts/kestra?embed=true&utm_source=badge-top-post-badge&utm_medium=badge&utm_souce=badge-kestra" target="_blank"><img src="https://api.producthunt.com/widgets/embed-image/v1/top-post-badge.svg?post_id=612077&theme=light&period=daily&t=1740737506162" alt="Kestra - All&#0045;in&#0045;one&#0032;automation&#0032;&#0038;&#0032;orchestration&#0032;platform | Product Hunt" style="width: 250px; height: 54px;" width="250" height="54" /></a>
</p>
<p align="center">
<a href="https://go.kestra.io/video/product-overview" target="_blank">
<img src="https://kestra.io/startvideo.png" alt="Get started in 4 minutes with Kestra" width="640px" />
@@ -54,7 +47,7 @@ Kestra is an open-source, event-driven orchestration platform that makes both **
- **Structure & Resilience**: tame chaos and bring resilience to your workflows with **namespaces**, **labels**, **subflows**, **retries**, **timeout**, **error handling**, **inputs**, **outputs** that generate artifacts in the UI, **variables**, **conditional branching**, **advanced scheduling**, **event triggers**, **backfills**, **dynamic tasks**, **sequential and parallel tasks**, and skip tasks or triggers when needed by setting the flag `disabled` to `true`.
🧑‍💻 The YAML definition gets automatically adjusted any time you make changes to a workflow from the UI or via an API call. Therefore, the orchestration logic is **always managed declaratively in code**, even if you modify your workflows in other ways (UI, CI/CD, Terraform, API calls).
<p align="center">
@@ -81,27 +74,6 @@ docker run --pull=always --rm -it -p 8080:8080 --user=root \
-v /tmp:/tmp kestra/kestra:latest server local
```
If you're on Windows and use PowerShell:
```powershell
docker run --pull=always --rm -it -p 8080:8080 --user=root `
-v "/var/run/docker.sock:/var/run/docker.sock" `
-v "C:/Temp:/tmp" kestra/kestra:latest server local
```
If you're on Windows and use Command Prompt (CMD):
```cmd
docker run --pull=always --rm -it -p 8080:8080 --user=root ^
-v "/var/run/docker.sock:/var/run/docker.sock" ^
-v "C:/Temp:/tmp" kestra/kestra:latest server local
```
If you're on Windows and use WSL (Linux-based environment in Windows):
```bash
docker run --pull=always --rm -it -p 8080:8080 --user=root \
-v "/var/run/docker.sock:/var/run/docker.sock" \
-v "C:/Temp:/tmp" kestra/kestra:latest server local
```
Check our [Installation Guide](https://kestra.io/docs/installation) for other deployment options (Docker Compose, Podman, Kubernetes, AWS, GCP, Azure, and more).
Access the Kestra UI at [http://localhost:8080](http://localhost:8080) and start building your first flow!
@@ -170,7 +142,7 @@ Kestra provides an intuitive UI that allows you to interactively build and visua
- **Drag-and-Drop Interface:** add and rearrange tasks from the Topology Editor.
- **Real-Time Validation:** instant feedback on your workflow's syntax and structure to catch errors early.
- **Auto-Completion:** smart suggestions as you type to write flow code quickly and without syntax errors.
- **Auto-Completion:** smart suggestions as you type.
- **Live Topology View:** see your workflow as a Directed Acyclic Graph (DAG) that updates in real-time.
---
@@ -195,9 +167,9 @@ Create custom plugins to extend Kestra's capabilities. Check out our [Plugin Dev
Stay connected and get support:
- **Slack:** Join our [Slack community](https://kestra.io/slack) to ask questions and share ideas.
- **LinkedIn:** Follow us on [LinkedIn](https://www.linkedin.com/company/kestra/) — next to Slack and GitHub, this is our main channel to share updates and product announcements.
- **YouTube:** Subscribe to our [YouTube channel](https://www.youtube.com/@kestra-io) for educational video content. We publish new videos every week!
- **X:** Follow us on [X](https://x.com/kestra_io) if you're still active there.
- **Twitter:** Follow us on [Twitter](https://twitter.com/kestra_io) for the latest updates.
- **YouTube:** Subscribe to our [YouTube channel](https://www.youtube.com/@kestra-io) for tutorials and webinars.
- **LinkedIn:** Connect with us on [LinkedIn](https://www.linkedin.com/company/kestra/).
---
@@ -206,9 +178,8 @@ Stay connected and get support:
We welcome contributions of all kinds!
- **Report Issues:** Found a bug or have a feature request? Open an [issue on GitHub](https://github.com/kestra-io/kestra/issues).
- **Contribute Code:** Check out our [Contributor Guide](https://kestra.io/docs/getting-started/contributing) for initial guidelines, and explore our [good first issues](https://go.kestra.io/contributing) for beginner-friendly tasks to tackle first.
- **Contribute Code:** Check out our [Contributor Guide](https://kestra.io/docs/getting-started/contributing) to start contributing.
- **Develop Plugins:** Build and share plugins using our [Plugin Developer Guide](https://kestra.io/docs/plugin-developer-guide/).
- **Contribute to our Docs:** Contribute edits or updates to keep our [documentation](https://github.com/kestra-io/docs) top-notch.
---

View File

@@ -1,33 +0,0 @@
# Security Policy
## Supported Versions
We provide security updates for the following versions of Kestra:
- The `latest` release
- Up to two previous minor versions released as a backport upon customer request.
If you are using an unsupported version, we recommend upgrading to the `latest` version to receive security fixes.
## Reporting a Vulnerability
If you discover a security vulnerability in Kestra, please report it to us privately to ensure a responsible disclosure process. You can contact our security team at:
**security@kestra.io**
### Guidelines for Reporting
- Provide a detailed description of the issue, including steps to reproduce it if possible.
- Do not disclose the vulnerability publicly until we have confirmed and patched the issue.
- If you believe the issue has critical severity, please indicate so in your report to help us prioritize.
## Our Commitment
- We will acknowledge your report within **2 business days**.
- We will work to verify and address the issue as quickly as possible.
- Once the issue is resolved, we will notify you of the fix.
## Acknowledgments
We are happy to credit those who report vulnerabilities responsibly in our release notes, unless you prefer to remain anonymous. If you would like to be acknowledged, please include this in your report.
Thank you for helping to make Kestra more secure!

View File

@@ -1,5 +1,4 @@
import net.e175.klaus.zip.ZipPrefixer
import org.owasp.dependencycheck.gradle.extension.AnalyzerExtension
buildscript {
repositories {
@@ -16,30 +15,30 @@ plugins {
id "java"
id 'java-library'
id "idea"
id "com.gradleup.shadow" version "8.3.6"
id "com.github.johnrengelman.shadow" version "8.1.1"
id "application"
// test
id "com.adarshr.test-logger" version "4.0.0"
id "org.sonarqube" version "6.2.0.5505"
id "org.sonarqube" version "5.1.0.4882"
id 'jacoco-report-aggregation'
// helper
id "com.github.ben-manes.versions" version "0.52.0"
id "com.github.ben-manes.versions" version "0.51.0"
// front
id 'com.github.node-gradle.node' version '7.1.0'
id 'org.siouan.frontend-jdk17' version '8.1.0' apply false
// release
id "io.github.gradle-nexus.publish-plugin" version "2.0.0"
id 'net.researchgate.release' version '3.1.0'
id "com.gorylenko.gradle-git-properties" version "2.5.0"
id 'net.researchgate.release' version '3.0.2'
id "com.gorylenko.gradle-git-properties" version "2.4.2"
id 'signing'
id 'ru.vyarus.pom' version '3.0.0' apply false
id 'ru.vyarus.github-info' version '2.0.0' apply false
// OWASP dependency check
id "org.owasp.dependencycheck" version "12.1.1" apply false
id "org.owasp.dependencycheck" version "10.0.4" apply false
}
idea {
@@ -52,17 +51,9 @@ idea {
/**********************************************************************************************************************\
* Main
**********************************************************************************************************************/
final mainClassName = "io.kestra.cli.App"
final targetJavaVersion = JavaVersion.VERSION_21
application {
mainClass = mainClassName
}
java {
sourceCompatibility = targetJavaVersion
targetCompatibility = targetJavaVersion
}
mainClassName = "io.kestra.cli.App"
sourceCompatibility = 21
targetCompatibility = 21
dependencies {
implementation project(":cli")
@@ -74,12 +65,10 @@ dependencies {
**********************************************************************************************************************/
allprojects {
if (it.name != 'platform') {
group = "io.kestra"
group "io.kestra"
java {
sourceCompatibility = targetJavaVersion
targetCompatibility = targetJavaVersion
}
sourceCompatibility = 21
targetCompatibility = 21
repositories {
mavenCentral()
@@ -123,7 +112,6 @@ allprojects {
micronaut "io.micronaut.micrometer:micronaut-micrometer-registry-prometheus"
micronaut "io.micronaut:micronaut-http-client"
micronaut "io.micronaut.reactor:micronaut-reactor-http-client"
micronaut "io.micronaut.tracing:micronaut-tracing-opentelemetry-http"
// logs
implementation "org.slf4j:slf4j-api"
@@ -134,9 +122,6 @@ allprojects {
implementation group: 'org.slf4j', name: 'jcl-over-slf4j'
implementation group: 'org.fusesource.jansi', name: 'jansi'
// OTEL
implementation "io.opentelemetry:opentelemetry-exporter-otlp"
// jackson
implementation group: 'com.fasterxml.jackson.core', name: 'jackson-core'
implementation group: 'com.fasterxml.jackson.core', name: 'jackson-databind'
@@ -165,13 +150,11 @@ allprojects {
* Test
**********************************************************************************************************************/
subprojects {
if (it.name != 'platform' && it.name != 'jmh-benchmarks') {
if (it.name != 'platform') {
apply plugin: "com.adarshr.test-logger"
java {
sourceCompatibility = targetJavaVersion
targetCompatibility = targetJavaVersion
}
sourceCompatibility = 21
targetCompatibility = 21
dependencies {
// Platform
@@ -196,16 +179,12 @@ subprojects {
testImplementation 'org.hamcrest:hamcrest'
testImplementation 'org.hamcrest:hamcrest-library'
testImplementation 'org.exparity:hamcrest-date'
//assertj
testImplementation 'org.assertj:assertj-core'
}
test {
useJUnitPlatform()
// set Xmx for test workers
maxHeapSize = '4g'
maxHeapSize = "4048m"
// configure en_US default locale for tests
systemProperty 'user.language', 'en'
@@ -216,8 +195,8 @@ subprojects {
environment 'SECRET_WEBHOOK_KEY', "secretKey".bytes.encodeBase64().toString()
environment 'SECRET_NON_B64_SECRET', "some secret value"
environment 'SECRET_PASSWORD', "cGFzc3dvcmQ="
environment 'ENV_TEST1', "true"
environment 'ENV_TEST2', "Pass by env"
environment 'KESTRA_TEST1', "true"
environment 'KESTRA_TEST2', "Pass by env"
}
testlogger {
@@ -268,7 +247,7 @@ subprojects {
* Allure Reports
**********************************************************************************************************************/
subprojects {
if (it.name != 'platform' && it.name != 'jmh-benchmarks') {
if (it.name != 'platform') {
dependencies {
testImplementation platform("io.qameta.allure:allure-bom")
testImplementation "io.qameta.allure:allure-junit5"
@@ -282,7 +261,7 @@ subprojects {
}
dependencies {
agent "org.aspectj:aspectjweaver:1.9.24"
agent "org.aspectj:aspectjweaver:1.9.22.1"
}
test {
@@ -295,7 +274,7 @@ subprojects {
* Jacoco
**********************************************************************************************************************/
subprojects {
if (it.name != 'platform' && it.name != 'jmh-benchmarks') {
if (it.name != 'platform') {
apply plugin: 'jacoco'
test {
@@ -346,17 +325,12 @@ dependencyCheck {
failBuildOnCVSS = 7
// disable the .NET assembly analyzer as otherwise it wants to analyze EXE file
analyzers(new Action<AnalyzerExtension>() {
@Override
void execute(AnalyzerExtension analyzerExtension) {
analyzerExtension.assemblyEnabled = false
}
})
analyzers {
assemblyEnabled = false
}
// configure a suppression file
suppressionFile = "$projectDir/owasp-dependency-suppressions.xml"
nvd.apiKey = System.getenv("NVD_API_KEY")
}
/**********************************************************************************************************************\
@@ -364,7 +338,7 @@ dependencyCheck {
**********************************************************************************************************************/
allprojects {
gradle.projectsEvaluated {
tasks.withType(JavaCompile).configureEach {
tasks.withType(JavaCompile) {
options.encoding = "UTF-8"
options.compilerArgs.add("-parameters")
options.compilerArgs.add("-Xlint:all")
@@ -373,7 +347,7 @@ allprojects {
}
}
tasks.withType(JavaCompile).configureEach {
tasks.withType(JavaCompile) {
options.encoding = "UTF-8"
options.compilerArgs.add("-parameters")
}
@@ -418,25 +392,20 @@ shadowJar.dependsOn 'ui:assembleFrontend'
/**********************************************************************************************************************\
* Executable Jar
**********************************************************************************************************************/
def executableDir = layout.buildDirectory.dir("executable")
def executable = layout.buildDirectory.file("executable/${project.name}-${project.version}").get().asFile
def executableDir = file("${buildDir}/executable")
def executable = file("${buildDir}/executable/${project.name}-${project.version}")
tasks.register('writeExecutableJar') {
task writeExecutableJar() {
group "build"
description "Write an executable jar from shadow jar"
dependsOn = [shadowJar]
final shadowJarFile = tasks.shadowJar.outputs.files.singleFile
inputs.file shadowJarFile
outputs.file executable
outputs.cacheIf { true }
doFirst {
executableDir.get().asFile.mkdirs()
executableDir.mkdirs()
}
doLast {
executable.setBytes(shadowJarFile.readBytes())
executable.setBytes(file("${buildDir}/libs/${project.name}-${project.version}.jar").readBytes())
ByteArrayOutputStream executableBytes = new ByteArrayOutputStream()
executableBytes.write("\n: <<END_OF_KESTRA_SELFRUN\r\n".getBytes())
executableBytes.write(file("gradle/jar/selfrun.bat").readBytes())
@@ -448,13 +417,13 @@ tasks.register('writeExecutableJar') {
}
}
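Note on the technique above: the executable build writes a small launcher script ahead of the shadow jar bytes, relying on the fact that zip readers locate an archive's central directory from the end of the file, so leading script bytes do not invalidate the jar. A minimal Java sketch of that concatenation, with purely illustrative file names (the real task streams gradle/jar/selfrun.bat and the shadow jar via Groovy):

import java.io.ByteArrayOutputStream;
import java.nio.file.Files;
import java.nio.file.Path;

// Illustrative sketch only: concatenate a launcher script with a jar so the result
// is both directly runnable and still a readable zip archive.
class SelfRunJarSketch {
    static void write(Path launcherScript, Path shadowJar, Path target) throws Exception {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        out.write(Files.readAllBytes(launcherScript)); // script preamble first
        out.write(Files.readAllBytes(shadowJar));      // then the original jar (zip) bytes
        Files.write(target, out.toByteArray());
        target.toFile().setExecutable(true);           // mark the result executable
    }
}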
tasks.register('executableJar', Zip) {
task executableJar(type: Zip) {
group "build"
description "Zip the executable jar"
dependsOn = [writeExecutableJar]
archiveFileName = "${project.name}-${project.version}.zip"
destinationDirectory = layout.buildDirectory.dir('archives')
destinationDirectory = file("${buildDir}/archives")
from executableDir
archiveClassifier.set(null)
@@ -463,24 +432,14 @@ tasks.register('executableJar', Zip) {
/**********************************************************************************************************************\
* Standalone
**********************************************************************************************************************/
tasks.register('runLocal', JavaExec) {
task runLocal(type: JavaExec) {
group = "application"
description = "Run Kestra as server local"
classpath = project(":cli").sourceSets.main.runtimeClasspath
mainClass = mainClassName
environment 'MICRONAUT_ENVIRONMENTS', 'override'
args 'server', 'local', '--plugins', 'local/plugins'
}
tasks.register('runStandalone', JavaExec) {
group = "application"
description = "Run Kestra as server local"
classpath = project(":cli").sourceSets.main.runtimeClasspath
mainClass = mainClassName
environment 'MICRONAUT_ENVIRONMENTS', 'override'
args 'server', 'standalone', '--plugins', 'local/plugins'
}
/**********************************************************************************************************************\
* Publish
**********************************************************************************************************************/
@@ -496,101 +455,93 @@ nexusPublishing {
}
subprojects {
apply plugin: "maven-publish"
apply plugin: 'signing'
apply plugin: 'ru.vyarus.pom'
apply plugin: 'ru.vyarus.github-info'
if (it.name != 'jmh-benchmarks') {
apply plugin: "maven-publish"
apply plugin: 'signing'
apply plugin: 'ru.vyarus.pom'
apply plugin: 'ru.vyarus.github-info'
javadoc {
options {
locale = 'en_US'
encoding = 'UTF-8'
addStringOption("Xdoclint:none", "-quiet")
}
}
javadoc {
options {
locale = 'en_US'
encoding = 'UTF-8'
addStringOption("Xdoclint:none", "-quiet")
task sourcesJar(type: Jar) {
dependsOn = [':core:copyGradleProperties']
dependsOn = [':ui:assembleFrontend']
archiveClassifier.set('sources')
from sourceSets.main.allSource
}
sourcesJar.dependsOn ':core:copyGradleProperties'
sourcesJar.dependsOn ':ui:assembleFrontend'
task javadocJar(type: Jar) {
archiveClassifier.set('javadoc')
from javadoc
}
task testsJar(type: Jar) {
archiveClassifier.set('tests')
from sourceSets.test.output
}
github {
user 'kestra-io'
license 'Apache'
repository 'kestra'
site 'https://kestra.io'
}
maven.pom {
description = 'The modern, scalable orchestrator & scheduler open source platform'
developers {
developer {
id = "tchiotludo"
name = "Ludovic Dehon"
}
}
}
tasks.register('sourcesJar', Jar) {
dependsOn = [':core:copyGradleProperties']
dependsOn = [':ui:assembleFrontend']
archiveClassifier.set('sources')
from sourceSets.main.allSource
}
sourcesJar.dependsOn ':core:copyGradleProperties'
sourcesJar.dependsOn ':ui:assembleFrontend'
publishing {
publications {
sonatypePublication(MavenPublication) {
version project.version
tasks.register('javadocJar', Jar) {
archiveClassifier.set('javadoc')
from javadoc
}
if (project.name.contains('cli')) {
groupId "io.kestra"
artifactId "kestra"
tasks.register('testsJar', Jar) {
group = 'build'
description = 'Build the tests jar'
artifact shadowJar
artifact executableJar
} else if (project.name.contains('platform')){
groupId project.group
artifactId project.name
} else {
from components.java
archiveClassifier.set('tests')
if (sourceSets.matching { it.name == 'test'}) {
from sourceSets.named('test').get().output
}
}
groupId project.group
artifactId project.name
github {
user 'kestra-io'
license 'Apache'
repository 'kestra'
site 'https://kestra.io'
}
maven.pom {
description = 'The modern, scalable orchestrator & scheduler open source platform'
developers {
developer {
id = "tchiotludo"
name = "Ludovic Dehon"
artifact sourcesJar
artifact javadocJar
artifact testsJar
}
}
}
}
publishing {
publications {
sonatypePublication(MavenPublication) {
version project.version
signing {
// only sign JARs that we publish to Sonatype
required { gradle.taskGraph.hasTask("publishSonatypePublicationPublicationToSonatypeRepository") }
sign publishing.publications.sonatypePublication
}
if (project.name.contains('cli')) {
groupId "io.kestra"
artifactId "kestra"
artifact shadowJar
artifact executableJar
} else if (project.name.contains('platform')){
groupId project.group
artifactId project.name
} else {
from components.java
groupId project.group
artifactId project.name
artifact sourcesJar
artifact javadocJar
artifact testsJar
}
}
}
}
signing {
// only sign JARs that we publish to Sonatype
required { gradle.taskGraph.hasTask("publishSonatypePublicationPublicationToSonatypeRepository") }
sign publishing.publications.sonatypePublication
}
tasks.withType(GenerateModuleMetadata).configureEach {
// Suppress this validation error as we want to enforce the Kestra platform
suppressedValidationErrors.add('enforced-platform')
}
tasks.withType(GenerateModuleMetadata).configureEach {
// Suppress this validation error as we want to enforce the Kestra platform
suppressedValidationErrors.add('enforced-platform')
}
}
@@ -609,12 +560,4 @@ release {
git {
requireBranch.set('develop')
}
// Dynamically set properties with default values
failOnSnapshotDependencies = providers.gradleProperty("release.failOnSnapshotDependencies")
.map(val -> Boolean.parseBoolean(val))
.getOrElse(true)
pushReleaseVersionBranch = providers.gradleProperty("release.pushReleaseVersionBranch")
.getOrElse(null)
}

View File

@@ -12,9 +12,18 @@ dependencies {
implementation 'ch.qos.logback.contrib:logback-json-classic'
implementation 'ch.qos.logback.contrib:logback-jackson'
// OTLP metrics
implementation "io.micronaut.micrometer:micronaut-micrometer-registry-otlp"
// plugins
implementation 'org.eclipse.aether:aether-api'
implementation 'org.eclipse.aether:aether-spi'
implementation 'org.eclipse.aether:aether-util'
implementation 'org.eclipse.aether:aether-impl'
implementation 'org.eclipse.aether:aether-connector-basic'
implementation 'org.eclipse.aether:aether-transport-file'
implementation 'org.eclipse.aether:aether-transport-http'
implementation('org.apache.maven:maven-aether-provider') {
// sisu dependency injector is not used
exclude group: 'org.eclipse.sisu'
}
// aether still uses javax.inject
compileOnly 'javax.inject:javax.inject:1'
@@ -34,7 +43,4 @@ dependencies {
implementation project(":storage-local")
implementation project(":webserver")
//test
testImplementation "org.wiremock:wiremock-jetty12"
}

View File

@@ -1,7 +1,5 @@
package io.kestra.cli;
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
import io.micronaut.core.annotation.Nullable;
import io.micronaut.http.HttpHeaders;
import io.micronaut.http.HttpRequest;
@@ -16,9 +14,6 @@ import io.micronaut.http.netty.body.NettyJsonHandler;
import io.micronaut.json.JsonMapper;
import jakarta.inject.Inject;
import jakarta.inject.Named;
import lombok.Builder;
import lombok.Value;
import lombok.extern.jackson.Jacksonized;
import picocli.CommandLine;
import java.net.URISyntaxException;
@@ -48,18 +43,8 @@ public abstract class AbstractApiCommand extends AbstractCommand {
@Nullable
private HttpClientConfiguration httpClientConfiguration;
/**
* {@inheritDoc}
*/
protected boolean loadExternalPlugins() {
return false;
}
protected DefaultHttpClient client() throws URISyntaxException {
DefaultHttpClient defaultHttpClient = DefaultHttpClient.builder()
.uri(server.toURI())
.configuration(httpClientConfiguration != null ? httpClientConfiguration : new DefaultHttpClientConfiguration())
.build();
DefaultHttpClient defaultHttpClient = new DefaultHttpClient(server.toURI(), httpClientConfiguration != null ? httpClientConfiguration : new DefaultHttpClientConfiguration());
MessageBodyHandlerRegistry defaultHandlerRegistry = defaultHttpClient.getHandlerRegistry();
if (defaultHandlerRegistry instanceof ContextlessMessageBodyHandlerRegistry modifiableRegistry) {
modifiableRegistry.add(MediaType.TEXT_JSON_TYPE, new NettyJsonHandler<>(JsonMapper.createDefault()));
@@ -92,14 +77,6 @@ public abstract class AbstractApiCommand extends AbstractCommand {
throw new IllegalArgumentException("'path' must be non-null and start with '/'");
}
return tenantId == null ? "/api/v1/" + MAIN_TENANT + path : "/api/v1/" + tenantId + path;
}
@Builder
@Value
@Jacksonized
public static class UpdateResult {
String id;
String namespace;
return tenantId == null ? "/api/v1" + path : "/api/v1/" + tenantId + path;
}
}
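For context on the apiUri change above: when no tenant id is supplied, requests now default to the main tenant path segment instead of the bare /api/v1 prefix. A small hedged sketch (the literal value of TenantService.MAIN_TENANT is assumed to be "main"):

class ApiUriSketch {
    static final String MAIN_TENANT = "main"; // assumed value of TenantService.MAIN_TENANT

    static String apiUri(String tenantId, String path) {
        return tenantId == null ? "/api/v1/" + MAIN_TENANT + path : "/api/v1/" + tenantId + path;
    }

    public static void main(String[] args) {
        System.out.println(apiUri(null, "/flows"));   // -> /api/v1/main/flows
        System.out.println(apiUri("acme", "/flows")); // -> /api/v1/acme/flows
    }
}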

View File

@@ -4,17 +4,16 @@ import ch.qos.logback.classic.LoggerContext;
import com.google.common.collect.ImmutableMap;
import io.kestra.cli.commands.servers.ServerCommandInterface;
import io.kestra.cli.services.StartupHookInterface;
import io.kestra.core.plugins.PluginManager;
import io.kestra.core.contexts.KestraContext;
import io.kestra.core.plugins.PluginRegistry;
import io.kestra.webserver.services.FlowAutoLoaderService;
import io.micronaut.context.ApplicationContext;
import io.micronaut.context.env.yaml.YamlPropertySourceLoader;
import io.micronaut.core.annotation.Introspected;
import io.micronaut.http.uri.UriBuilder;
import io.micronaut.management.endpoint.EndpointDefaultConfiguration;
import io.micronaut.runtime.server.EmbeddedServer;
import jakarta.inject.Provider;
import lombok.extern.slf4j.Slf4j;
import org.apache.http.client.utils.URIBuilder;
import io.kestra.core.utils.Rethrow;
import picocli.CommandLine;
@@ -27,13 +26,10 @@ import java.nio.file.Paths;
import java.text.MessageFormat;
import java.time.temporal.ChronoUnit;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.Callable;
import jakarta.inject.Inject;
import picocli.CommandLine.Command;
import picocli.CommandLine.Option;
@Command(
@CommandLine.Command(
versionProvider = VersionProvider.class,
mixinStandardHelpOptions = true,
showDefaultValues = true
@@ -53,28 +49,22 @@ abstract public class AbstractCommand implements Callable<Integer> {
@Inject
private io.kestra.core.utils.VersionProvider versionProvider;
@Inject
protected Provider<PluginRegistry> pluginRegistryProvider;
@Inject
protected Provider<PluginManager> pluginManagerProvider;
private PluginRegistry pluginRegistry;
@Option(names = {"-v", "--verbose"}, description = "Change log level. Multiple -v options increase the verbosity.", showDefaultValue = CommandLine.Help.Visibility.NEVER)
@CommandLine.Option(names = {"-v", "--verbose"}, description = "Change log level. Multiple -v options increase the verbosity.", showDefaultValue = CommandLine.Help.Visibility.NEVER)
private boolean[] verbose = new boolean[0];
@Option(names = {"-l", "--log-level"}, description = "Change log level (values: ${COMPLETION-CANDIDATES})")
@CommandLine.Option(names = {"-l", "--log-level"}, description = "Change log level (values: ${COMPLETION-CANDIDATES})")
private LogLevel logLevel = LogLevel.INFO;
@Option(names = {"--internal-log"}, description = "Change also log level for internal log")
@CommandLine.Option(names = {"--internal-log"}, description = "Change also log level for internal log")
private boolean internalLog = false;
@Option(names = {"-c", "--config"}, description = "Path to a configuration file")
@CommandLine.Option(names = {"-c", "--config"}, description = "Path to a configuration file")
private Path config = Paths.get(System.getProperty("user.home"), ".kestra/config.yml");
@Option(names = {"-p", "--plugins"}, description = "Path to plugins directory")
protected Path pluginsPath = Optional.ofNullable(System.getenv("KESTRA_PLUGINS_PATH")).map(Paths::get).orElse(null);
@CommandLine.Option(names = {"-p", "--plugins"}, description = "Path to plugins directory")
protected Path pluginsPath = System.getenv("KESTRA_PLUGINS_PATH") != null ? Paths.get(System.getenv("KESTRA_PLUGINS_PATH")) : null;
public enum LogLevel {
TRACE,
@@ -86,7 +76,7 @@ abstract public class AbstractCommand implements Callable<Integer> {
@Override
public Integer call() throws Exception {
Thread.currentThread().setName(this.getClass().getDeclaredAnnotation(Command.class).name());
Thread.currentThread().setName(this.getClass().getDeclaredAnnotation(CommandLine.Command.class).name());
startLogger();
sendServerLog();
if (this.startupHook != null) {
@@ -94,14 +84,8 @@ abstract public class AbstractCommand implements Callable<Integer> {
}
if (this.pluginsPath != null && loadExternalPlugins()) {
pluginRegistry = pluginRegistryProvider.get();
pluginRegistry = pluginRegistry();
pluginRegistry.registerIfAbsent(pluginsPath);
// PluginManager must only be initialized if a registry is also instantiated
if (isPluginManagerEnabled()) {
PluginManager manager = pluginManagerProvider.get();
manager.start();
}
}
startWebserver();
@@ -118,15 +102,8 @@ abstract public class AbstractCommand implements Callable<Integer> {
return true;
}
/**
* Specifies whether the {@link PluginManager} service must be initialized.
* <p>
* This method can be overridden by concrete commands.
*
* @return {@code true} if the {@link PluginManager} service must be initialized.
*/
protected boolean isPluginManagerEnabled() {
return true;
protected PluginRegistry pluginRegistry() {
return KestraContext.getContext().getPluginRegistry(); // Lazy init
}
private static String message(String message, Object... format) {
@@ -180,6 +157,7 @@ abstract public class AbstractCommand implements Callable<Integer> {
logger.getName().startsWith("io.kestra") &&
!logger.getName().startsWith("io.kestra.ee.runner.kafka.services"))
)
|| logger.getName().startsWith("flow")
)
.forEach(
logger -> logger.setLevel(ch.qos.logback.classic.Level.valueOf(this.logLevel.name()))
@@ -205,9 +183,9 @@ abstract public class AbstractCommand implements Callable<Integer> {
if (this.endpointConfiguration.getPort().isPresent()) {
URI endpoint = null;
try {
endpoint = UriBuilder.of(server.getURL().toURI())
.port(this.endpointConfiguration.getPort().get())
.path("/health")
endpoint = new URIBuilder(server.getURL().toURI())
.setPort(this.endpointConfiguration.getPort().get())
.setPath("/health")
.build();
} catch (URISyntaxException e) {
e.printStackTrace();
@@ -229,12 +207,10 @@ abstract public class AbstractCommand implements Callable<Integer> {
return false;
}
protected void shutdownHook(boolean logShutdown, Rethrow.RunnableChecked<Exception> run) {
protected void shutdownHook(Rethrow.RunnableChecked<Exception> run) {
Runtime.getRuntime().addShutdownHook(new Thread(
() -> {
if (logShutdown) {
log.warn("Receiving shutdown ! Try to graceful exit");
}
log.warn("Receiving shutdown ! Try to graceful exit");
try {
run.run();
} catch (Exception e) {

View File

@@ -1,8 +1,9 @@
package io.kestra.cli;
import io.kestra.cli.commands.flows.FlowValidateCommand;
import io.kestra.core.models.validations.ModelValidator;
import io.kestra.core.models.validations.ValidateConstraintViolation;
import io.kestra.core.serializers.YamlParser;
import io.kestra.core.serializers.YamlFlowParser;
import io.micronaut.core.type.Argument;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.MediaType;
@@ -31,12 +32,6 @@ public abstract class AbstractValidateCommand extends AbstractApiCommand {
@CommandLine.Parameters(index = "0", description = "the directory containing files to check")
protected Path directory;
/** {@inheritDoc} **/
@Override
protected boolean loadExternalPlugins() {
return local;
}
public static void handleException(ConstraintViolationException e, String resource) {
stdErr("\t@|fg(red) Unable to parse {0} due to the following error(s):|@", resource);
e.getConstraintViolations()
@@ -68,19 +63,19 @@ public abstract class AbstractValidateCommand extends AbstractApiCommand {
public static String buildYamlBody(Path directory) throws IOException {
try(var files = Files.walk(directory)) {
return files.filter(Files::isRegularFile)
.filter(YamlParser::isValidExtension)
.filter(YamlFlowParser::isValidExtension)
.map(throwFunction(path -> Files.readString(path, Charset.defaultCharset())))
.collect(Collectors.joining("\n---\n"));
}
}
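As a quick illustration of buildYamlBody above: each flow file becomes one YAML document, and the documents are joined with the standard --- separator before being sent as a single request body. A minimal, self-contained sketch with made-up flow sources:

import java.util.List;
import java.util.stream.Collectors;

class YamlBodySketch {
    public static void main(String[] args) {
        List<String> flowSources = List.of(
            "id: flow-a\nnamespace: demo",
            "id: flow-b\nnamespace: demo"
        );
        // join the individual flow definitions into one multi-document YAML body
        String body = flowSources.stream().collect(Collectors.joining("\n---\n"));
        System.out.println(body);
    }
}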
// bug in micronaut, we can't inject ModelValidator, so we inject from implementation
// bug in micronaut, we can't inject YamlFlowParser & ModelValidator, so we inject from implementation
public Integer call(
Class<?> cls,
YamlFlowParser yamlFlowParser,
ModelValidator modelValidator,
Function<Object, String> identity,
Function<Object, List<String>> warningsFunction,
Function<Object, List<String>> infosFunction
Function<Object, List<String>> warningsFunction
) throws Exception {
super.call();
@@ -90,16 +85,14 @@ public abstract class AbstractValidateCommand extends AbstractApiCommand {
if(this.local) {
try(var files = Files.walk(directory)) {
files.filter(Files::isRegularFile)
.filter(YamlParser::isValidExtension)
.filter(YamlFlowParser::isValidExtension)
.forEach(path -> {
try {
Object parse = YamlParser.parse(path.toFile(), cls);
Object parse = yamlFlowParser.parse(path.toFile(), cls);
modelValidator.validate(parse);
stdOut("@|green \u2713|@ - " + identity.apply(parse));
List<String> warnings = warningsFunction.apply(parse);
warnings.forEach(warning -> stdOut("@|bold,yellow \u26A0|@ - " + warning));
List<String> infos = infosFunction.apply(parse);
infos.forEach(info -> stdOut("@|bold,blue \u2139|@ - " + info));
} catch (ConstraintViolationException e) {
stdErr("@|red \u2718|@ - " + path);
AbstractValidateCommand.handleException(e, clsName);

View File

@@ -2,7 +2,6 @@ package io.kestra.cli;
import io.kestra.cli.commands.configs.sys.ConfigCommand;
import io.kestra.cli.commands.flows.FlowCommand;
import io.kestra.cli.commands.migrations.MigrationCommand;
import io.kestra.cli.commands.namespaces.NamespaceCommand;
import io.kestra.cli.commands.plugins.PluginCommand;
import io.kestra.cli.commands.servers.ServerCommand;
@@ -43,7 +42,6 @@ import java.util.concurrent.Callable;
SysCommand.class,
ConfigCommand.class,
NamespaceCommand.class,
MigrationCommand.class,
}
)
@Introspected
@@ -90,12 +88,11 @@ public class App implements Callable<Integer> {
.environments(Environment.CLI);
CommandLine cmd = new CommandLine(mainClass, CommandLine.defaultFactory());
continueOnParsingErrors(cmd);
CommandLine.ParseResult parseResult = cmd.parseArgs(args);
List<CommandLine> parsedCommands = parseResult.asCommandLineList();
CommandLine commandLine = parsedCommands.getLast();
CommandLine commandLine = parsedCommands.get(parsedCommands.size() - 1);
Class<?> cls = commandLine.getCommandSpec().userObject().getClass();
if (AbstractCommand.class.isAssignableFrom(cls)) {
@@ -117,17 +114,15 @@ public class App implements Callable<Integer> {
.stream()
.filter(argSpec -> ((Field) argSpec.userObject()).getName().equals("serverPort"))
.findFirst()
.ifPresent(argSpec -> properties.put("micronaut.server.port", argSpec.getValue()));
.ifPresent(argSpec -> {
properties.put("micronaut.server.port", argSpec.getValue());
});
builder.properties(properties);
}
return builder.build();
}
private static void continueOnParsingErrors(CommandLine cmd) {
cmd.getCommandSpec().parser().collectErrors(true);
}
@SuppressWarnings("unchecked")
private static <T> T getPropertiesFromMethod(Class<?> cls, String methodName, Object instance) {
try {

View File

@@ -1,18 +1,28 @@
package io.kestra.cli.commands;
import io.kestra.cli.AbstractApiCommand;
import lombok.Builder;
import lombok.Value;
import lombok.extern.jackson.Jacksonized;
import picocli.CommandLine;
import java.nio.file.Path;
public abstract class AbstractServiceNamespaceUpdateCommand extends AbstractApiCommand {
@CommandLine.Parameters(index = "0", description = "The namespace to update")
@CommandLine.Parameters(index = "0", description = "the namespace to update")
public String namespace;
@CommandLine.Parameters(index = "1", description = "The directory containing flow files for current namespace")
@CommandLine.Parameters(index = "1", description = "the directory containing files for current namespace")
public Path directory;
@CommandLine.Option(names = {"--delete"}, negatable = true, description = "Whether missing should be deleted")
@CommandLine.Option(names = {"--delete"}, negatable = true, description = "if missing should be deleted")
public boolean delete = false;
@Builder
@Value
@Jacksonized
public static class UpdateResult {
String id;
String namespace;
}
}

View File

@@ -8,7 +8,7 @@ import picocli.CommandLine;
@CommandLine.Command(
name = "configs",
description = "Manage configuration",
description = "handle configs",
mixinStandardHelpOptions = true,
subcommands = {
ConfigPropertiesCommand.class,

View File

@@ -10,7 +10,7 @@ import picocli.CommandLine;
@CommandLine.Command(
name = "properties",
description = {"Display current configuration properties."}
description = {"Display actual configurations properties."}
)
@Slf4j
public class ConfigPropertiesCommand extends AbstractCommand {

View File

@@ -10,7 +10,7 @@ import picocli.CommandLine;
@CommandLine.Command(
name = "flow",
description = "Manage flows",
description = "handle flows",
mixinStandardHelpOptions = true,
subcommands = {
FlowValidateCommand.class,
@@ -18,8 +18,6 @@ import picocli.CommandLine;
FlowNamespaceCommand.class,
FlowDotCommand.class,
FlowExportCommand.class,
FlowUpdateCommand.class,
FlowUpdatesCommand.class
}
)
@Slf4j

View File

@@ -1,59 +0,0 @@
package io.kestra.cli.commands.flows;
import io.kestra.cli.AbstractApiCommand;
import io.kestra.cli.AbstractValidateCommand;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.MediaType;
import io.micronaut.http.MutableHttpRequest;
import io.micronaut.http.client.exceptions.HttpClientResponseException;
import io.micronaut.http.client.netty.DefaultHttpClient;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
import java.nio.file.Files;
import java.nio.file.Path;
@CommandLine.Command(
name = "create",
description = "Create a single flow",
mixinStandardHelpOptions = true
)
@Slf4j
public class FlowCreateCommand extends AbstractApiCommand {
@CommandLine.Parameters(index = "0", description = "The file containing the flow")
public Path flowFile;
@SuppressWarnings("deprecation")
@Override
public Integer call() throws Exception {
super.call();
checkFile();
String body = Files.readString(flowFile);
try(DefaultHttpClient client = client()) {
MutableHttpRequest<String> request = HttpRequest
.POST(apiUri("/flows"), body).contentType(MediaType.APPLICATION_YAML);
client.toBlocking().retrieve(
this.requestOptions(request),
String.class
);
stdOut("Flow successfully created !");
} catch (HttpClientResponseException e){
AbstractValidateCommand.handleHttpException(e, "flow");
return 1;
}
return 0;
}
protected void checkFile() {
if (!Files.isRegularFile(flowFile)) {
throw new IllegalArgumentException("The file '" + flowFile.toFile().getAbsolutePath() + "' is not a file");
}
}
}

View File

@@ -1,47 +0,0 @@
package io.kestra.cli.commands.flows;
import io.kestra.cli.AbstractApiCommand;
import io.kestra.cli.AbstractValidateCommand;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.MutableHttpRequest;
import io.micronaut.http.client.exceptions.HttpClientResponseException;
import io.micronaut.http.client.netty.DefaultHttpClient;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
@CommandLine.Command(
name = "delete",
description = "Delete a single flow",
mixinStandardHelpOptions = true
)
@Slf4j
public class FlowDeleteCommand extends AbstractApiCommand {
@CommandLine.Parameters(index = "0", description = "The namespace of the flow")
public String namespace;
@CommandLine.Parameters(index = "1", description = "The ID of the flow")
public String id;
@SuppressWarnings("deprecation")
@Override
public Integer call() throws Exception {
super.call();
try(DefaultHttpClient client = client()) {
MutableHttpRequest<String> request = HttpRequest
.DELETE(apiUri("/flows/" + namespace + "/" + id ));
client.toBlocking().exchange(
this.requestOptions(request)
);
stdOut("Flow successfully deleted !");
} catch (HttpClientResponseException e){
AbstractValidateCommand.handleHttpException(e, "flow");
return 1;
}
return 0;
}
}

View File

@@ -3,7 +3,7 @@ package io.kestra.cli.commands.flows;
import io.kestra.cli.AbstractCommand;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.hierarchies.GraphCluster;
import io.kestra.core.serializers.YamlParser;
import io.kestra.core.serializers.YamlFlowParser;
import io.kestra.core.services.Graph2DotService;
import io.kestra.core.utils.GraphUtils;
import io.micronaut.context.ApplicationContext;
@@ -15,21 +15,22 @@ import java.nio.file.Path;
@CommandLine.Command(
name = "dot",
description = "Generate a DOT graph from a file"
description = "generate a dot graph from a file"
)
@Slf4j
public class FlowDotCommand extends AbstractCommand {
@Inject
private ApplicationContext applicationContext;
@CommandLine.Parameters(index = "0", description = "The flow file to display")
@CommandLine.Parameters(index = "0", description = "the flow file to display")
private Path file;
@Override
public Integer call() throws Exception {
super.call();
Flow flow = YamlParser.parse(file.toFile(), Flow.class);
YamlFlowParser parser = applicationContext.getBean(YamlFlowParser.class);
Flow flow = parser.parse(file.toFile(), Flow.class);
GraphCluster graph = GraphUtils.of(flow, null);

View File

@@ -3,7 +3,7 @@ package io.kestra.cli.commands.flows;
import io.kestra.cli.AbstractCommand;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.validations.ModelValidator;
import io.kestra.core.serializers.YamlParser;
import io.kestra.core.serializers.YamlFlowParser;
import jakarta.inject.Inject;
import picocli.CommandLine;
@@ -12,14 +12,17 @@ import java.nio.file.Path;
@CommandLine.Command(
name = "expand",
description = "Deprecated - expand a flow"
description = "deprecated - expand a flow"
)
@Deprecated
public class FlowExpandCommand extends AbstractCommand {
@CommandLine.Parameters(index = "0", description = "The flow file to expand")
@CommandLine.Parameters(index = "0", description = "the flow file to expand")
private Path file;
@Inject
private YamlFlowParser yamlFlowParser;
@Inject
private ModelValidator modelValidator;
@@ -28,7 +31,7 @@ public class FlowExpandCommand extends AbstractCommand {
super.call();
stdErr("Warning, this functionality is deprecated and will be removed at some point.");
String content = IncludeHelperExpander.expand(Files.readString(file), file.getParent());
Flow flow = YamlParser.parse(content, Flow.class);
Flow flow = yamlFlowParser.parse(content, Flow.class);
modelValidator.validate(flow);
stdOut(content);
return 0;

View File

@@ -18,7 +18,7 @@ import java.nio.file.Path;
@CommandLine.Command(
name = "export",
description = "Export flows to a ZIP file",
description = "export flows to a zip file",
mixinStandardHelpOptions = true
)
@Slf4j
@@ -29,10 +29,10 @@ public class FlowExportCommand extends AbstractApiCommand {
@Inject
private ApplicationContext applicationContext;
@CommandLine.Option(names = {"--namespace"}, description = "The namespace of flows to export")
@CommandLine.Option(names = {"--namespace"}, description = "the namespace of flows to export")
public String namespace;
@CommandLine.Parameters(index = "0", description = "The directory to export the ZIP file to")
@CommandLine.Parameters(index = "0", description = "the directory to export the file to")
public Path directory;
@Override

View File

@@ -27,19 +27,19 @@ import java.util.concurrent.TimeoutException;
@CommandLine.Command(
name = "test",
description = "Test a flow"
description = "test a flow"
)
@Slf4j
public class FlowTestCommand extends AbstractCommand {
@Inject
private ApplicationContext applicationContext;
@CommandLine.Parameters(index = "0", description = "The flow file to test")
@CommandLine.Parameters(index = "0", description = "the flow file to test")
private Path file;
@CommandLine.Parameters(
index = "1..*",
description = "The inputs to pass as key pair value separated by space, " +
description = "the inputs to pass as key pair value separated by space, " +
"for input type file, you need to pass an absolute path."
)
private List<String> inputs = new ArrayList<>();

View File

@@ -1,64 +0,0 @@
package io.kestra.cli.commands.flows;
import io.kestra.cli.AbstractApiCommand;
import io.kestra.cli.AbstractValidateCommand;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.MediaType;
import io.micronaut.http.MutableHttpRequest;
import io.micronaut.http.client.exceptions.HttpClientResponseException;
import io.micronaut.http.client.netty.DefaultHttpClient;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
import java.nio.file.Files;
import java.nio.file.Path;
@CommandLine.Command(
name = "update",
description = "Update a single flow",
mixinStandardHelpOptions = true
)
@Slf4j
public class FlowUpdateCommand extends AbstractApiCommand {
@CommandLine.Parameters(index = "0", description = "The file containing the flow")
public Path flowFile;
@CommandLine.Parameters(index = "1", description = "The namespace of the flow")
public String namespace;
@CommandLine.Parameters(index = "2", description = "The ID of the flow")
public String id;
@SuppressWarnings("deprecation")
@Override
public Integer call() throws Exception {
super.call();
checkFile();
String body = Files.readString(flowFile);
try(DefaultHttpClient client = client()) {
MutableHttpRequest<String> request = HttpRequest
.PUT(apiUri("/flows/" + namespace + "/" + id ), body).contentType(MediaType.APPLICATION_YAML);
client.toBlocking().retrieve(
this.requestOptions(request),
String.class
);
stdOut("Flow successfully updated !");
} catch (HttpClientResponseException e){
AbstractValidateCommand.handleHttpException(e, "flow");
return 1;
}
return 0;
}
protected void checkFile() {
if (!Files.isRegularFile(flowFile)) {
throw new IllegalArgumentException("The file '" + flowFile.toFile().getAbsolutePath() + "' is not a file");
}
}
}

View File

@@ -1,95 +0,0 @@
package io.kestra.cli.commands.flows;
import io.kestra.cli.AbstractApiCommand;
import io.kestra.cli.AbstractValidateCommand;
import io.kestra.core.serializers.YamlParser;
import io.micronaut.core.type.Argument;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.MediaType;
import io.micronaut.http.MutableHttpRequest;
import io.micronaut.http.client.exceptions.HttpClientResponseException;
import io.micronaut.http.client.netty.DefaultHttpClient;
import jakarta.validation.ConstraintViolationException;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
@CommandLine.Command(
name = "updates",
description = "Create or update flows from a folder, and optionally delete the ones not present",
mixinStandardHelpOptions = true
)
@Slf4j
public class FlowUpdatesCommand extends AbstractApiCommand {
@CommandLine.Parameters(index = "0", description = "The directory containing files")
public Path directory;
@CommandLine.Option(names = {"--delete"}, negatable = true, description = "Whether missing should be deleted")
public boolean delete = false;
@CommandLine.Option(names = {"--namespace"}, description = "The parent namespace of the flows, if not set, every namespace are allowed.")
public String namespace;
@SuppressWarnings("deprecation")
@Override
public Integer call() throws Exception {
super.call();
try (var files = Files.walk(directory)) {
List<String> flows = files
.filter(Files::isRegularFile)
.filter(YamlParser::isValidExtension)
.map(path -> {
try {
return IncludeHelperExpander.expand(Files.readString(path, Charset.defaultCharset()), path.getParent());
} catch (IOException e) {
throw new RuntimeException(e);
}
})
.toList();
String body = "";
if (flows.isEmpty()) {
stdOut("No flow found on '{}'", directory.toFile().getAbsolutePath());
} else {
body = String.join("\n---\n", flows);
}
try(DefaultHttpClient client = client()) {
String namespaceQuery = "";
if (namespace != null) {
namespaceQuery = "&namespace=" + namespace;
}
MutableHttpRequest<String> request = HttpRequest
.POST(apiUri("/flows/bulk") + "?allowNamespaceChild=true&delete=" + delete + namespaceQuery, body).contentType(MediaType.APPLICATION_YAML);
List<UpdateResult> updated = client.toBlocking().retrieve(
this.requestOptions(request),
Argument.listOf(UpdateResult.class)
);
stdOut(updated.size() + " flow(s) successfully updated !");
updated.forEach(flow -> stdOut("- " + flow.getNamespace() + "." + flow.getId()));
} catch (HttpClientResponseException e){
AbstractValidateCommand.handleHttpException(e, "flow");
return 1;
}
} catch (ConstraintViolationException e) {
AbstractValidateCommand.handleException(e, "flow");
return 1;
}
return 0;
}
@Override
protected boolean loadExternalPlugins() {
return false;
}
}

View File

@@ -1,8 +1,9 @@
package io.kestra.cli.commands.flows;
import io.kestra.cli.AbstractValidateCommand;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.validations.ModelValidator;
import io.kestra.core.serializers.YamlFlowParser;
import io.kestra.core.services.FlowService;
import jakarta.inject.Inject;
import picocli.CommandLine;
@@ -12,9 +13,11 @@ import java.util.List;
@CommandLine.Command(
name = "validate",
description = "Validate a flow"
description = "validate a flow"
)
public class FlowValidateCommand extends AbstractValidateCommand {
@Inject
private YamlFlowParser yamlFlowParser;
@Inject
private ModelValidator modelValidator;
@@ -25,22 +28,20 @@ public class FlowValidateCommand extends AbstractValidateCommand {
@Override
public Integer call() throws Exception {
return this.call(
FlowWithSource.class,
Flow.class,
yamlFlowParser,
modelValidator,
(Object object) -> {
FlowWithSource flow = (FlowWithSource) object;
Flow flow = (Flow) object;
return flow.getNamespace() + " / " + flow.getId();
},
(Object object) -> {
FlowWithSource flow = (FlowWithSource) object;
Flow flow = (Flow) object;
List<String> warnings = new ArrayList<>();
warnings.addAll(flowService.deprecationPaths(flow).stream().map(deprecation -> deprecation + " is deprecated").toList());
warnings.addAll(flowService.warnings(flow, this.tenantId));
warnings.addAll(flowService.relocations(flow.generateSource()).stream().map(relocation -> relocation.from() + " is replaced by " + relocation.to()).toList());
warnings.addAll(flowService.warnings(flow));
return warnings;
},
(Object object) -> {
FlowWithSource flow = (FlowWithSource) object;
return flowService.relocations(flow.sourceOrGenerateIfNull()).stream().map(relocation -> relocation.from() + " is replaced by " + relocation.to()).toList();
}
);
}

View File

@@ -9,7 +9,7 @@ import picocli.CommandLine;
@CommandLine.Command(
name = "namespace",
description = "Manage namespace flows",
description = "handle namespace flows",
mixinStandardHelpOptions = true,
subcommands = {
FlowNamespaceUpdateCommand.class,

View File

@@ -2,18 +2,20 @@ package io.kestra.cli.commands.flows.namespaces;
import io.kestra.cli.AbstractValidateCommand;
import io.kestra.cli.commands.AbstractServiceNamespaceUpdateCommand;
import io.kestra.cli.commands.flows.FlowValidateCommand;
import io.kestra.cli.commands.flows.IncludeHelperExpander;
import io.kestra.core.serializers.YamlParser;
import io.kestra.core.serializers.YamlFlowParser;
import io.micronaut.core.type.Argument;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.MediaType;
import io.micronaut.http.MutableHttpRequest;
import io.micronaut.http.client.exceptions.HttpClientResponseException;
import io.micronaut.http.client.netty.DefaultHttpClient;
import jakarta.validation.ConstraintViolationException;
import jakarta.inject.Inject;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
import jakarta.validation.ConstraintViolationException;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.file.Files;
@@ -21,14 +23,13 @@ import java.util.List;
@CommandLine.Command(
name = "update",
description = "Update flows in namespace",
description = "handle namespace flows",
mixinStandardHelpOptions = true
)
@Slf4j
public class FlowNamespaceUpdateCommand extends AbstractServiceNamespaceUpdateCommand {
@CommandLine.Option(names = {"--override-namespaces"}, negatable = true, description = "Replace namespace of all flows by the one provided")
public boolean override = false;
@Inject
public YamlFlowParser yamlFlowParser;
@SuppressWarnings("deprecation")
@Override
@@ -38,7 +39,7 @@ public class FlowNamespaceUpdateCommand extends AbstractServiceNamespaceUpdateCo
try (var files = Files.walk(directory)) {
List<String> flows = files
.filter(Files::isRegularFile)
.filter(YamlParser::isValidExtension)
.filter(YamlFlowParser::isValidExtension)
.map(path -> {
try {
return IncludeHelperExpander.expand(Files.readString(path, Charset.defaultCharset()), path.getParent());
@@ -54,9 +55,6 @@ public class FlowNamespaceUpdateCommand extends AbstractServiceNamespaceUpdateCo
} else {
body = String.join("\n---\n", flows);
}
if (override) {
body = body.replaceAll("(?m)^namespace:.+", "namespace: " + namespace);
}
try(DefaultHttpClient client = client()) {
MutableHttpRequest<String> request = HttpRequest
.POST(apiUri("/flows/") + namespace + "?delete=" + delete, body).contentType(MediaType.APPLICATION_YAML);
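To make the --override-namespaces option above concrete: the (?m) flag makes ^ match at every line start, so every namespace: line in the combined YAML body is rewritten to the namespace given on the command line. A tiny, hypothetical example:

class NamespaceOverrideSketch {
    public static void main(String[] args) {
        String body = "id: my-flow\nnamespace: company.team\ntasks: []";
        // rewrite every line that starts with "namespace:" to the CLI-provided namespace
        String overridden = body.replaceAll("(?m)^namespace:.+", "namespace: sandbox");
        System.out.println(overridden);
    }
}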

View File

@@ -1,29 +0,0 @@
package io.kestra.cli.commands.migrations;
import io.kestra.cli.AbstractCommand;
import io.kestra.cli.App;
import io.micronaut.configuration.picocli.PicocliRunner;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
@CommandLine.Command(
name = "migrate",
description = "handle migrations",
mixinStandardHelpOptions = true,
subcommands = {
TenantMigrationCommand.class,
}
)
@Slf4j
public class MigrationCommand extends AbstractCommand {
@SneakyThrows
@Override
public Integer call() throws Exception {
super.call();
PicocliRunner.call(App.class, "migrate", "--help");
return 0;
}
}

View File

@@ -1,49 +0,0 @@
package io.kestra.cli.commands.migrations;
import io.kestra.cli.AbstractCommand;
import io.kestra.core.repositories.TenantMigrationInterface;
import io.micronaut.context.ApplicationContext;
import jakarta.inject.Inject;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
import picocli.CommandLine.Option;
@CommandLine.Command(
name = "default-tenant",
description = "migrate every elements from no tenant to the main tenant"
)
@Slf4j
public class TenantMigrationCommand extends AbstractCommand {
@Inject
private ApplicationContext applicationContext;
@Option(names = "--tenant-id", description = "tenant identifier")
String tenantId;
@Option(names = "--tenant-name", description = "tenant name")
String tenantName;
@Option(names = "--dry-run", description = "Preview only, do not update")
boolean dryRun;
@Override
public Integer call() throws Exception {
super.call();
if (dryRun) {
System.out.println("🧪 Dry-run mode enabled. No changes will be applied.");
}
TenantMigrationService migrationService = this.applicationContext.getBean(TenantMigrationService.class);
try {
migrationService.migrateTenant(tenantId, tenantName, dryRun);
System.out.println("✅ Tenant migration complete.");
} catch (Exception e) {
System.err.println("❌ Tenant migration failed: " + e.getMessage());
e.printStackTrace();
return 1;
}
return 0;
}
}

View File

@@ -1,56 +0,0 @@
package io.kestra.cli.commands.migrations;
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
import com.github.javaparser.utils.Log;
import io.kestra.core.exceptions.KestraRuntimeException;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.queues.QueueException;
import io.kestra.core.queues.QueueFactoryInterface;
import io.kestra.core.queues.QueueInterface;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.repositories.TenantMigrationInterface;
import jakarta.inject.Inject;
import jakarta.inject.Named;
import jakarta.inject.Singleton;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
@Singleton
@Slf4j
public class TenantMigrationService {
@Inject
private TenantMigrationInterface tenantMigrationInterface;
@Inject
private FlowRepositoryInterface flowRepository;
@Inject
@Named(QueueFactoryInterface.FLOW_NAMED)
private QueueInterface<FlowInterface> flowQueue;
public void migrateTenant(String tenantId, String tenantName, boolean dryRun) {
if (StringUtils.isNotBlank(tenantId) && !MAIN_TENANT.equals(tenantId)){
throw new KestraRuntimeException("Tenant configuration is an enterprise feature. It can only be main in OSS");
}
Log.info("🔁 Starting tenant migration...");
tenantMigrationInterface.migrateTenant(MAIN_TENANT, dryRun);
migrateQueue(dryRun);
}
protected void migrateQueue(boolean dryRun) {
if (!dryRun){
log.info("🔁 Starting restoring queue...");
flowRepository.findAllWithSourceForAllTenants().forEach(flow -> {
try {
flowQueue.emit(flow);
} catch (QueueException e) {
log.warn("Unable to send the flow {} to the queue", flow.uid(), e);
}
});
}
}
}

View File

@@ -11,7 +11,7 @@ import picocli.CommandLine;
@CommandLine.Command(
name = "namespace",
description = "Manage namespaces",
description = "handle namespaces",
mixinStandardHelpOptions = true,
subcommands = {
NamespaceFilesCommand.class,

View File

@@ -9,7 +9,7 @@ import picocli.CommandLine;
@CommandLine.Command(
name = "files",
description = "Manage namespace files",
description = "handle namespace files",
mixinStandardHelpOptions = true,
subcommands = {
NamespaceFilesUpdateCommand.class,

View File

@@ -17,21 +17,21 @@ import java.util.List;
@CommandLine.Command(
name = "update",
description = "Update namespace files",
description = "update namespace files",
mixinStandardHelpOptions = true
)
@Slf4j
public class NamespaceFilesUpdateCommand extends AbstractApiCommand {
@CommandLine.Parameters(index = "0", description = "The namespace to update")
@CommandLine.Parameters(index = "0", description = "the namespace to update")
public String namespace;
@CommandLine.Parameters(index = "1", description = "The local directory containing files for current namespace")
@CommandLine.Parameters(index = "1", description = "the local directory containing files for current namespace")
public Path from;
@CommandLine.Parameters(index = "2", description = "The remote namespace path to upload files to", defaultValue = "/")
@CommandLine.Parameters(index = "2", description = "the remote namespace path to upload files to", defaultValue = "/")
public String to;
@CommandLine.Option(names = {"--delete"}, negatable = true, description = "Whether missing should be deleted")
@CommandLine.Option(names = {"--delete"}, negatable = true, description = "if missing should be deleted")
public boolean delete = false;
private static final String KESTRA_IGNORE_FILE = ".kestraignore";

View File

@@ -9,7 +9,7 @@ import picocli.CommandLine;
@CommandLine.Command(
name = "kv",
description = "Manage KV Store",
description = "handle KV Store",
mixinStandardHelpOptions = true,
subcommands = {
KvUpdateCommand.class,

View File

@@ -18,28 +18,28 @@ import java.time.Duration;
@CommandLine.Command(
name = "update",
description = "Update value for a KV Store key",
description = "update value for a KV Store key",
mixinStandardHelpOptions = true
)
@Slf4j
public class KvUpdateCommand extends AbstractApiCommand {
@CommandLine.Parameters(index = "0", description = "The namespace to update")
@CommandLine.Parameters(index = "0", description = "the namespace to update")
public String namespace;
@CommandLine.Parameters(index = "1", description = "The key to update")
@CommandLine.Parameters(index = "1", description = "the key to update")
public String key;
@CommandLine.Parameters(index = "2", description = "The value to assign to the key. If the value is an object, it must be in JSON format. If the value must be read from file, use -f parameter.")
@CommandLine.Parameters(index = "2", description = "the value to assign to the key. If the value is an object, it must be in JSON format. If the value must be read from file, use -f parameter.")
public String value;
@Option(names = {"-e", "--expiration"}, description = "The duration after which the key should expire.")
@Option(names = {"-e", "--expiration"}, description = "the duration after which the key should expire.")
public String expiration;
@Option(names = {"-t", "--type"}, description = "The type of the value. Optional and useful to override the deduced type (eg. numbers, booleans or JSON as full string). Valid values: ${COMPLETION-CANDIDATES}.")
@Option(names = {"-t", "--type"}, description = "the type of the value. Optional and useful to override the deduced type (eg. numbers, booleans or JSON as full string). Valid values: ${COMPLETION-CANDIDATES}.")
public Type type;
@Option(names = {"-f", "--file-value"}, description = "The file from which to read the value to set. If this is provided, it will take precedence over any specified value.")
@Option(names = {"-f", "--file-value"}, description = "the file from which to read the value to set. If this is provided, it will take precedence over any specified value.")
public Path fileValue;
@Override

View File

@@ -1,37 +1,31 @@
package io.kestra.cli.commands.plugins;
import io.kestra.cli.AbstractCommand;
import io.kestra.cli.App;
import io.micronaut.configuration.picocli.PicocliRunner;
import lombok.SneakyThrows;
import picocli.CommandLine.Command;
import lombok.extern.slf4j.Slf4j;
import io.kestra.cli.AbstractCommand;
import io.kestra.cli.App;
import picocli.CommandLine;
@Command(
@CommandLine.Command(
name = "plugins",
description = "Manage plugins",
description = "handle plugins",
mixinStandardHelpOptions = true,
subcommands = {
PluginInstallCommand.class,
PluginUninstallCommand.class,
PluginListCommand.class,
PluginDocCommand.class,
PluginSearchCommand.class
PluginDocCommand.class
}
)
@Slf4j
public class PluginCommand extends AbstractCommand {
@SneakyThrows
@Override
public Integer call() throws Exception {
super.call();
PicocliRunner.call(App.class, "plugins", "--help");
PicocliRunner.call(App.class, "plugins", "--help");
return 0;
}
@Override
protected boolean loadExternalPlugins() {
return false;
}
}

View File

@@ -1,11 +1,10 @@
package io.kestra.cli.commands.plugins;
import com.google.common.io.Files;
import com.google.common.base.Charsets;
import io.kestra.cli.AbstractCommand;
import io.kestra.core.docs.DocumentationGenerator;
import io.kestra.core.plugins.PluginRegistry;
import io.kestra.core.plugins.RegisteredPlugin;
import io.kestra.core.serializers.JacksonMapper;
import io.micronaut.context.ApplicationContext;
import jakarta.inject.Inject;
import picocli.CommandLine;
@@ -20,13 +19,13 @@ import java.util.List;
@CommandLine.Command(
name = "doc",
description = "Generate documentation for all plugins currently installed"
description = "write documentation for all plugins currently installed"
)
public class PluginDocCommand extends AbstractCommand {
@Inject
private ApplicationContext applicationContext;
@CommandLine.Parameters(index = "0", description = "Path to write documentation files")
@CommandLine.Parameters(index = "0", description = "Path to write documentations files")
private Path output = Paths.get(System.getProperty("user.dir"), "docs");
@CommandLine.Option(names = {"--core"}, description = "Also write core tasks docs files")
@@ -35,78 +34,51 @@ public class PluginDocCommand extends AbstractCommand {
@CommandLine.Option(names = {"--icons"}, description = "Also write icon for each task")
private boolean icons = false;
@CommandLine.Option(names = {"--schema"}, description = "Also write JSON Schema for each task")
private boolean schema = false;
@Override
public Integer call() throws Exception {
super.call();
DocumentationGenerator documentationGenerator = applicationContext.getBean(DocumentationGenerator.class);
PluginRegistry registry = pluginRegistryProvider.get();
List<RegisteredPlugin> plugins = core ? registry.plugins() : registry.externalPlugins();
boolean hasFailures = false;
List<RegisteredPlugin> plugins = core ? pluginRegistry().plugins() : pluginRegistry().externalPlugins();
for (RegisteredPlugin registeredPlugin : plugins) {
try {
documentationGenerator
.generate(registeredPlugin)
.forEach(s -> {
File file = Paths.get(output.toAbsolutePath().toString(), s.getPath()).toFile();
documentationGenerator
.generate(registeredPlugin)
.forEach(s -> {
File file = Paths.get(output.toAbsolutePath().toString(), s.getPath()).toFile();
if (!file.getParentFile().exists()) {
//noinspection ResultOfMethodCallIgnored
file.getParentFile().mkdirs();
}
try {
Files
.asCharSink(
file,
StandardCharsets.UTF_8
).write(s.getBody());
stdOut("Generate doc in: {0}", file);
if (s.getIcon() != null && this.icons) {
File iconFile = new File(
file.getParent(),
file.getName().substring(0, file.getName().lastIndexOf(".")) + ".svg"
);
Files
.asByteSink(iconFile)
.write(Base64.getDecoder().decode(s.getIcon().getBytes(StandardCharsets.UTF_8)));
stdOut("Generate icon in: {0}", iconFile);
}
if (this.schema && s.getSchema() != null) {
File jsonSchemaFile = new File(
file.getParent(),
file.getName().substring(0, file.getName().lastIndexOf(".")) + ".json"
);
Files
.asByteSink(jsonSchemaFile)
.write(JacksonMapper.ofJson().writeValueAsBytes(s.getSchema()));
stdOut("Generate json schema in: {0}", jsonSchemaFile);
}
} catch (IOException e) {
throw new RuntimeException(e);
}
if (!file.getParentFile().exists()) {
//noinspection ResultOfMethodCallIgnored
file.getParentFile().mkdirs();
}
);
} catch (Error e) {
stdErr("Failure to generate documentation for plugin {0}: {1}", registeredPlugin.name(), e);
hasFailures = true;
}
try {
com.google.common.io.Files
.asCharSink(
file,
Charsets.UTF_8
).write(s.getBody());
stdOut("Generate doc in: {0}", file);
if (s.getIcon() != null && this.icons) {
File iconFile = new File(
file.getParent(),
file.getName().substring(0, file.getName().lastIndexOf(".")) + ".svg"
);
com.google.common.io.Files
.asByteSink(iconFile)
.write(Base64.getDecoder().decode(s.getIcon().getBytes(StandardCharsets.UTF_8)));
stdOut("Generate icon in: {0}", iconFile);
}
stdOut("Generate doc in: {0}", file);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
);
}
return hasFailures ? 1 : 0;
}
/** {@inheritDoc} **/
@Override
protected boolean isPluginManagerEnabled() {
return false;
return 0;
}
}

View File

@@ -1,133 +1,100 @@
package io.kestra.cli.commands.plugins;
import io.kestra.core.contexts.MavenPluginRepositoryConfig;
import io.kestra.core.exceptions.KestraRuntimeException;
import io.kestra.core.plugins.LocalPluginManager;
import io.kestra.core.plugins.MavenPluginDownloader;
import io.kestra.core.plugins.PluginArtifact;
import io.kestra.core.plugins.PluginCatalogService;
import io.kestra.core.plugins.PluginManager;
import io.micronaut.http.client.HttpClient;
import io.micronaut.http.client.annotation.Client;
import io.micronaut.http.uri.UriBuilder;
import org.apache.commons.io.FilenameUtils;
import io.kestra.cli.AbstractCommand;
import io.kestra.cli.plugins.PluginDownloader;
import io.kestra.cli.plugins.RepositoryConfig;
import io.kestra.core.utils.IdUtils;
import jakarta.inject.Provider;
import org.apache.http.client.utils.URIBuilder;
import picocli.CommandLine;
import java.net.URI;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import jakarta.inject.Inject;
import picocli.CommandLine.Command;
import picocli.CommandLine.Parameters;
import picocli.CommandLine.Option;
import picocli.CommandLine.Spec;
@Command(
import static io.kestra.core.utils.Rethrow.throwConsumer;
@CommandLine.Command(
name = "install",
description = "Install plugins"
description = "install a plugin"
)
public class PluginInstallCommand extends AbstractCommand {
@Option(names = {"--locally"}, description = "Specifies if plugins must be installed locally. If set to false the installation depends on your Kestra configuration.")
boolean locally = true;
@Option(names = {"--all"}, description = "Install all available plugins")
boolean all = false;
@Parameters(index = "0..*", description = "Plugins to install. Represented as Maven artifact coordinates (i.e., <groupId>:<artifactId>:(<version>|LATEST)")
@CommandLine.Parameters(index = "0..*", description = "the plugins to install")
List<String> dependencies = new ArrayList<>();
@Option(names = {"--repositories"}, description = "URL to additional Maven repositories")
@CommandLine.Option(names = {"--repositories"}, description = "url to additional maven repositories")
private URI[] repositories;
@Spec
@CommandLine.Spec
CommandLine.Model.CommandSpec spec;
@Inject
Provider<MavenPluginDownloader> mavenPluginRepositoryProvider;
@Inject
Provider<PluginCatalogService> pluginCatalogService;
private PluginDownloader pluginDownloader;
@Override
public Integer call() throws Exception {
super.call();
if (this.locally && this.pluginsPath == null) {
if (this.pluginsPath == null) {
throw new CommandLine.ParameterException(this.spec.commandLine(), "Missing required options '--plugins' " +
"or environment variable 'KESTRA_PLUGINS_PATH"
);
}
List<MavenPluginRepositoryConfig> repositoryConfigs = List.of();
if (!pluginsPath.toFile().exists()) {
if (!pluginsPath.toFile().mkdir()) {
throw new RuntimeException("Cannot create directory: " + pluginsPath.toFile().getAbsolutePath());
}
}
if (repositories != null) {
repositoryConfigs = Arrays.stream(repositories)
.map(uri -> {
MavenPluginRepositoryConfig.MavenPluginRepositoryConfigBuilder builder = MavenPluginRepositoryConfig
.builder()
Arrays.stream(repositories)
.forEach(throwConsumer(s -> {
URIBuilder uriBuilder = new URIBuilder(s);
RepositoryConfig.RepositoryConfigBuilder builder = RepositoryConfig.builder()
.id(IdUtils.create());
String userInfo = uri.getUserInfo();
if (userInfo != null) {
String[] userInfoParts = userInfo.split(":");
builder = builder.basicAuth(new MavenPluginRepositoryConfig.BasicAuth(
userInfoParts[0],
userInfoParts[1]
if (uriBuilder.getUserInfo() != null) {
int index = uriBuilder.getUserInfo().indexOf(":");
builder.basicAuth(new RepositoryConfig.BasicAuth(
uriBuilder.getUserInfo().substring(0, index),
uriBuilder.getUserInfo().substring(index + 1)
));
uriBuilder.setUserInfo(null);
}
builder.url(UriBuilder.of(uri).userInfo(null).build().toString());
return builder.build();
}).toList();
builder.url(uriBuilder.build().toString());
pluginDownloader.addRepository(builder.build());
}));
}
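Both variants above pull basic-auth credentials out of the repository URI's user-info part and keep a credential-free URL. A rough, illustrative Java sketch of that split (names are not the project's own):

import java.net.URI;

class RepositoryAuthSketch {
    record BasicAuth(String username, String password) {}

    // extract "user:password" from a URI such as https://user:password@repo.example.com/maven
    static BasicAuth extractAuth(URI uri) {
        String userInfo = uri.getUserInfo();
        if (userInfo == null) {
            return null;
        }
        int idx = userInfo.indexOf(':');
        return new BasicAuth(userInfo.substring(0, idx), userInfo.substring(idx + 1));
    }

    // rebuild the repository URL without the embedded credentials
    static String urlWithoutUserInfo(URI uri) {
        return uri.getScheme() + "://" + uri.getHost()
            + (uri.getPort() != -1 ? ":" + uri.getPort() : "")
            + (uri.getRawPath() == null ? "" : uri.getRawPath());
    }

    public static void main(String[] args) {
        URI uri = URI.create("https://admin:s3cret@repo.example.com/maven");
        System.out.println(extractAuth(uri).username()); // admin
        System.out.println(urlWithoutUserInfo(uri));     // https://repo.example.com/maven
    }
}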
if (all) {
PluginCatalogService service = pluginCatalogService.get();
dependencies = service.get().stream().map(Objects::toString).toList();
List<URL> resolveUrl = pluginDownloader.resolve(dependencies);
stdOut("Resolved Plugin(s) with {0}", resolveUrl);
for (URL url: resolveUrl) {
Files.copy(
Paths.get(url.toURI()),
Paths.get(pluginsPath.toString(), FilenameUtils.getName(url.toString())),
StandardCopyOption.REPLACE_EXISTING
);
}
if (dependencies.isEmpty()) {
stdErr("Error: No plugin to install.");
return CommandLine.ExitCode.OK;
}
stdOut("Successfully installed plugins {0} into {1}", dependencies, pluginsPath);
final List<PluginArtifact> pluginArtifacts;
try {
pluginArtifacts = dependencies.stream().map(PluginArtifact::fromCoordinates).toList();
} catch (IllegalArgumentException e) {
stdErr(e.getMessage());
return CommandLine.ExitCode.USAGE;
}
try (final PluginManager pluginManager = getPluginManager()) {
List<PluginArtifact> installed;
if (all) {
installed = new ArrayList<>(pluginArtifacts.size());
for (PluginArtifact pluginArtifact : pluginArtifacts) {
try {
installed.add(pluginManager.install(pluginArtifact, repositoryConfigs, false, pluginsPath));
} catch (KestraRuntimeException e) {
String cause = e.getCause() != null ? e.getCause().getMessage() : e.getMessage();
stdErr("Failed to install plugin {0}. Cause: {1}", pluginArtifact, cause);
}
}
} else {
installed = pluginManager.install(pluginArtifacts, repositoryConfigs, false, pluginsPath);
}
List<URI> uris = installed.stream().map(PluginArtifact::uri).toList();
stdOut("Successfully installed plugins {0} into {1}", dependencies, uris);
return CommandLine.ExitCode.OK;
}
}
private PluginManager getPluginManager() {
return locally ? new LocalPluginManager(mavenPluginRepositoryProvider.get()) : this.pluginManagerProvider.get();
return 0;
}
@Override
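The install command above accepts plugins as Maven coordinates of the form <groupId>:<artifactId>:<version> (or LATEST). A minimal, purely illustrative sketch of splitting such a coordinate, assuming a strict three-part form (this is not the actual PluginArtifact.fromCoordinates implementation):
// Illustration only: a naive parser for the <groupId>:<artifactId>:<version> form
// accepted by the install command; PluginArtifact.fromCoordinates is assumed to do
// something similar, plus validation.
record Coordinate(String groupId, String artifactId, String version) {
    static Coordinate parse(String raw) {
        String[] parts = raw.split(":");
        if (parts.length != 3) {
            throw new IllegalArgumentException("Invalid coordinates: " + raw);
        }
        return new Coordinate(parts[0], parts[1], parts[2]);
    }
}
// Example: Coordinate.parse("com.example:some-plugin:1.0.0")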

View File

@@ -1,31 +1,22 @@
package io.kestra.cli.commands.plugins;
import io.kestra.cli.AbstractCommand;
import io.kestra.core.plugins.PluginRegistry;
import io.kestra.core.plugins.RegisteredPlugin;
import jakarta.inject.Inject;
import jakarta.inject.Provider;
import picocli.CommandLine;
import picocli.CommandLine.Command;
import picocli.CommandLine.Option;
import picocli.CommandLine.Spec;
import java.util.List;
@Command(
@CommandLine.Command(
name = "list",
description = "List all plugins already installed"
description = "list all plugins already installed"
)
public class PluginListCommand extends AbstractCommand {
@Spec
@CommandLine.Spec
CommandLine.Model.CommandSpec spec;
@Option(names = {"--core"}, description = "Also write core tasks plugins")
@CommandLine.Option(names = {"--core"}, description = "Also write core tasks plugins")
private boolean core = false;
@Inject
private PluginRegistry registry;
@Override
public Integer call() throws Exception {
super.call();
@@ -35,9 +26,8 @@ public class PluginListCommand extends AbstractCommand {
"or environment variable 'KESTRA_PLUGINS_PATH"
);
}
List<RegisteredPlugin> plugins = core ? registry.plugins() : registry.externalPlugins();
List<RegisteredPlugin> plugins = core ? pluginRegistry().plugins() : pluginRegistry().externalPlugins();
plugins.forEach(registeredPlugin -> stdOut(registeredPlugin.toString()));
return 0;

View File

@@ -1,149 +0,0 @@
package io.kestra.cli.commands.plugins;
import io.kestra.cli.AbstractCommand;
import io.micronaut.core.type.Argument;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.client.HttpClient;
import io.micronaut.http.client.annotation.Client;
import jakarta.inject.Inject;
import picocli.CommandLine.Command;
import picocli.CommandLine.Parameters;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.ArrayList;
import java.util.List;
@Command(
name = "search",
description = "Search for available Kestra plugins"
)
public class PluginSearchCommand extends AbstractCommand {
@Inject
@Client("api")
private HttpClient httpClient;
private static final ObjectMapper MAPPER = new ObjectMapper();
private static final char SPACE = ' ';
@Parameters(index = "0", description = "Search term (optional)", defaultValue = "")
private String searchTerm;
@Override
public Integer call() throws Exception {
super.call();
try {
JsonNode root = fetchPlugins();
List<PluginInfo> plugins = findPlugins(root);
printResults(plugins);
return 0;
} catch (Exception e) {
stdOut("Error processing plugins: {0}", e.getMessage());
return 1;
}
}
private JsonNode fetchPlugins() throws Exception {
String response = httpClient.toBlocking()
.retrieve(
HttpRequest.GET("/v1/plugins")
.header("Accept", "application/json")
);
return MAPPER.readTree(response);
}
private List<PluginInfo> findPlugins(JsonNode root) {
String searchTermLower = searchTerm.toLowerCase();
List<PluginInfo> plugins = new ArrayList<>();
for (JsonNode plugin : root) {
if (matchesSearch(plugin, searchTermLower)) {
plugins.add(new PluginInfo(
plugin.path("name").asText(),
plugin.path("title").asText(),
plugin.path("group").asText(),
plugin.path("version").asText("")
));
}
}
plugins.sort((p1, p2) -> p1.name.compareToIgnoreCase(p2.name));
return plugins;
}
private boolean matchesSearch(JsonNode plugin, String term) {
if (term.isEmpty()) {
return true;
}
return plugin.path("name").asText().toLowerCase().contains(term) ||
plugin.path("title").asText().toLowerCase().contains(term) ||
plugin.path("group").asText().toLowerCase().contains(term);
}
private void printResults(List<PluginInfo> plugins) {
if (plugins.isEmpty()) {
stdOut("No plugins found{0}",
searchTerm.isEmpty() ? "" : " matching '" + searchTerm + "'");
return;
}
stdOut("\nFound {0} plugins{1}",
plugins.size(),
searchTerm.isEmpty() ? "" : " matching '" + searchTerm + "'"
);
printPluginsTable(plugins);
}
private void printPluginsTable(List<PluginInfo> plugins) {
int maxName = 4, maxTitle = 5, maxGroup = 5;
for (PluginInfo plugin : plugins) {
maxName = Math.max(maxName, plugin.name.length());
maxTitle = Math.max(maxTitle, plugin.title.length());
maxGroup = Math.max(maxGroup, plugin.group.length());
}
StringBuilder namePad = new StringBuilder(maxName);
StringBuilder titlePad = new StringBuilder(maxTitle);
StringBuilder groupPad = new StringBuilder(maxGroup);
stdOut("");
printRow(namePad, titlePad, groupPad, "NAME", "TITLE", "GROUP", "VERSION",
maxName, maxTitle, maxGroup);
for (PluginInfo plugin : plugins) {
printRow(namePad, titlePad, groupPad, plugin.name, plugin.title, plugin.group, plugin.version,
maxName, maxTitle, maxGroup);
}
stdOut("");
}
private void printRow(StringBuilder namePad, StringBuilder titlePad, StringBuilder groupPad,
String name, String title, String group, String version,
int maxName, int maxTitle, int maxGroup) {
stdOut("{0} {1} {2} {3}",
pad(namePad, name, maxName),
pad(titlePad, title, maxTitle),
pad(groupPad, group, maxGroup),
version
);
}
private String pad(StringBuilder sb, String str, int length) {
sb.setLength(0);
sb.append(str);
while (sb.length() < length) {
sb.append(SPACE);
}
return sb.toString();
}
private record PluginInfo(String name, String title, String group, String version) {}
@Override
protected boolean loadExternalPlugins() {
return false;
}
}
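The table rendering above computes column widths and pads each cell with a reusable StringBuilder. As a design note, a hypothetical self-contained alternative is to fold the computed widths into a format string and let printf handle left-justified padding:
// Sketch of an alternative padding approach, assuming rows of two-column String arrays.
import java.util.List;

class TableSketch {
    static void print(List<String[]> rows) {
        int w0 = 0, w1 = 0;
        for (String[] r : rows) {
            w0 = Math.max(w0, r[0].length());
            w1 = Math.max(w1, r[1].length());
        }
        // Build one format string from the computed widths, e.g. "%-12s  %-20s%n"
        String fmt = "%-" + w0 + "s  %-" + w1 + "s%n";
        for (String[] r : rows) {
            System.out.printf(fmt, r[0], r[1]);
        }
    }
}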

View File

@@ -1,69 +0,0 @@
package io.kestra.cli.commands.plugins;
import io.kestra.cli.AbstractCommand;
import io.kestra.core.plugins.LocalPluginManager;
import io.kestra.core.plugins.MavenPluginDownloader;
import io.kestra.core.plugins.PluginArtifact;
import io.kestra.core.plugins.PluginManager;
import jakarta.inject.Inject;
import jakarta.inject.Provider;
import picocli.CommandLine;
import picocli.CommandLine.Parameters;
import picocli.CommandLine.Spec;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;
@CommandLine.Command(
name = "uninstall",
description = "Uninstall plugins"
)
public class PluginUninstallCommand extends AbstractCommand {
@Parameters(index = "0..*", description = "The plugins to uninstall. Represented as Maven artifact coordinates (i.e., <groupId>:<artifactId>:(<version>|LATEST)")
List<String> dependencies = new ArrayList<>();
@Spec
CommandLine.Model.CommandSpec spec;
@Inject
Provider<MavenPluginDownloader> mavenPluginRepositoryProvider;
@Override
public Integer call() throws Exception {
super.call();
List<PluginArtifact> pluginArtifacts;
try {
pluginArtifacts = dependencies.stream().map(PluginArtifact::fromCoordinates).toList();
} catch (IllegalArgumentException e) {
stdErr(e.getMessage());
return CommandLine.ExitCode.USAGE;
}
final PluginManager pluginManager;
// If a PLUGIN_PATH is provided, then use the LocalPluginManager
if (pluginsPath != null) {
pluginManager = new LocalPluginManager(mavenPluginRepositoryProvider.get());
} else {
// Otherwise, we delegate to the configured plugin-manager.
pluginManager = this.pluginManagerProvider.get();
}
List<PluginArtifact> uninstalled = pluginManager.uninstall(
pluginArtifacts,
false,
pluginsPath
);
List<URI> uris = uninstalled.stream().map(PluginArtifact::uri).toList();
stdOut("Successfully uninstalled plugins {0} from {1}", dependencies, uris);
return CommandLine.ExitCode.OK;
}
@Override
protected boolean loadExternalPlugins() {
return false;
}
}

View File

@@ -1,20 +1,12 @@
package io.kestra.cli.commands.servers;
import io.kestra.cli.AbstractCommand;
import io.kestra.core.contexts.KestraContext;
import jakarta.annotation.PostConstruct;
import picocli.CommandLine;
abstract public class AbstractServerCommand extends AbstractCommand implements ServerCommandInterface {
@CommandLine.Option(names = {"--port"}, description = "The port to bind")
@CommandLine.Option(names = {"--port"}, description = "the port to bind")
Integer serverPort;
@Override
public Integer call() throws Exception {
this.shutdownHook(true, () -> KestraContext.getContext().shutdown());
return super.call();
}
protected static int defaultWorkerThread() {
return Runtime.getRuntime().availableProcessors() * 4;
}

View File

@@ -1,6 +1,7 @@
package io.kestra.cli.commands.servers;
import com.google.common.collect.ImmutableMap;
import io.kestra.core.contexts.KestraContext;
import io.kestra.core.models.ServerType;
import io.kestra.core.runners.ExecutorInterface;
import io.kestra.core.services.SkipExecutionService;
@@ -8,6 +9,7 @@ import io.kestra.core.services.StartExecutorService;
import io.kestra.core.utils.Await;
import io.micronaut.context.ApplicationContext;
import jakarta.inject.Inject;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
import java.util.Collections;
@@ -16,8 +18,9 @@ import java.util.Map;
@CommandLine.Command(
name = "executor",
description = "Start the Kestra executor"
description = "start an executor"
)
@Slf4j
public class ExecutorCommand extends AbstractServerCommand {
@Inject
private ApplicationContext applicationContext;
@@ -28,22 +31,22 @@ public class ExecutorCommand extends AbstractServerCommand {
@Inject
private StartExecutorService startExecutorService;
@CommandLine.Option(names = {"--skip-executions"}, split=",", description = "The list of execution identifiers to skip, separated by a coma; for troubleshooting purpose only")
@CommandLine.Option(names = {"--skip-executions"}, split=",", description = "a list of execution identifiers to skip, separated by a coma; for troubleshooting purpose only")
private List<String> skipExecutions = Collections.emptyList();
@CommandLine.Option(names = {"--skip-flows"}, split=",", description = "The list of flow identifiers (tenant|namespace|flowId) to skip, separated by a coma; for troubleshooting purpose only")
@CommandLine.Option(names = {"--skip-flows"}, split=",", description = "a list of flow identifiers (tenant|namespace|flowId) to skip, separated by a coma; for troubleshooting purpose only")
private List<String> skipFlows = Collections.emptyList();
@CommandLine.Option(names = {"--skip-namespaces"}, split=",", description = "The list of namespace identifiers (tenant|namespace) to skip, separated by a coma; for troubleshooting purpose only")
@CommandLine.Option(names = {"--skip-namespaces"}, split=",", description = "a list of namespace identifiers (tenant|namespace) to skip, separated by a coma; for troubleshooting purpose only")
private List<String> skipNamespaces = Collections.emptyList();
@CommandLine.Option(names = {"--skip-tenants"}, split=",", description = "The list of tenants to skip, separated by a coma; for troubleshooting purpose only")
@CommandLine.Option(names = {"--skip-tenants"}, split=",", description = "a list of tenants to skip, separated by a coma; for troubleshooting purpose only")
private List<String> skipTenants = Collections.emptyList();
@CommandLine.Option(names = {"--start-executors"}, split=",", description = "The list of Kafka Stream executors to start, separated by a command. Use it only with the Kafka queue, for debugging purpose.")
@CommandLine.Option(names = {"--start-executors"}, split=",", description = "a list of Kafka Stream executors to start, separated by a command. Use it only with the Kafka queue, for debugging purpose.")
private List<String> startExecutors = Collections.emptyList();
@CommandLine.Option(names = {"--not-start-executors"}, split=",", description = "The list of Kafka Stream executors to not start, separated by a command. Use it only with the Kafka queue, for debugging purpose.")
@CommandLine.Option(names = {"--not-start-executors"}, split=",", description = "a list of Kafka Stream executors to not start, separated by a command. Use it only with the Kafka queue, for debugging purpose.")
private List<String> notStartExecutors = Collections.emptyList();
@SuppressWarnings("unused")
@@ -63,10 +66,13 @@ public class ExecutorCommand extends AbstractServerCommand {
this.startExecutorService.applyOptions(startExecutors, notStartExecutors);
super.call();
this.shutdownHook(() -> KestraContext.getContext().shutdown());
ExecutorInterface executorService = applicationContext.getBean(ExecutorInterface.class);
executorService.run();
log.info("Executor started");
Await.until(() -> !this.applicationContext.isRunning());
return 0;

View File

@@ -1,19 +1,22 @@
package io.kestra.cli.commands.servers;
import com.google.common.collect.ImmutableMap;
import io.kestra.core.contexts.KestraContext;
import io.kestra.core.models.ServerType;
import io.kestra.core.runners.IndexerInterface;
import io.kestra.core.utils.Await;
import io.micronaut.context.ApplicationContext;
import jakarta.inject.Inject;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
import java.util.Map;
@CommandLine.Command(
name = "indexer",
description = "Start the Kestra indexer"
description = "start an indexer"
)
@Slf4j
public class IndexerCommand extends AbstractServerCommand {
@Inject
private ApplicationContext applicationContext;
@@ -28,10 +31,13 @@ public class IndexerCommand extends AbstractServerCommand {
@Override
public Integer call() throws Exception {
super.call();
this.shutdownHook(() -> KestraContext.getContext().shutdown());
IndexerInterface indexer = applicationContext.getBean(IndexerInterface.class);
indexer.run();
log.info("Indexer started");
Await.until(() -> !this.applicationContext.isRunning());
return 0;

View File

@@ -12,7 +12,7 @@ import java.util.Map;
@CommandLine.Command(
name = "local",
description = "Start the local development server"
description = "start a local server"
)
public class LocalCommand extends StandAloneCommand {
// @FIXME: Keep it for a Micronaut bug that requires @Inject on the top-level command so injection works in abstract classes

View File

@@ -1,6 +1,7 @@
package io.kestra.cli.commands.servers;
import com.google.common.collect.ImmutableMap;
import io.kestra.core.contexts.KestraContext;
import io.kestra.core.models.ServerType;
import io.kestra.core.schedulers.AbstractScheduler;
import io.kestra.core.utils.Await;
@@ -13,7 +14,7 @@ import java.util.Map;
@CommandLine.Command(
name = "scheduler",
description = "Start the Kestra scheduler"
description = "start an scheduler"
)
@Slf4j
public class SchedulerCommand extends AbstractServerCommand {
@@ -30,10 +31,12 @@ public class SchedulerCommand extends AbstractServerCommand {
@Override
public Integer call() throws Exception {
super.call();
this.shutdownHook(() -> KestraContext.getContext().shutdown());
AbstractScheduler scheduler = applicationContext.getBean(AbstractScheduler.class);
scheduler.run();
log.info("Scheduler started");
Await.until(() -> !this.applicationContext.isRunning());
return 0;

View File

@@ -9,7 +9,7 @@ import picocli.CommandLine;
@CommandLine.Command(
name = "server",
description = "Manage servers",
description = "handle servers",
mixinStandardHelpOptions = true,
subcommands = {
ExecutorCommand.class,

View File

@@ -1,7 +1,6 @@
package io.kestra.cli.commands.servers;
import com.google.common.collect.ImmutableMap;
import io.kestra.cli.services.FileChangedEventListener;
import io.kestra.core.contexts.KestraContext;
import io.kestra.core.models.ServerType;
import io.kestra.core.repositories.LocalFlowRepositoryLoader;
@@ -10,8 +9,8 @@ import io.kestra.core.services.SkipExecutionService;
import io.kestra.core.services.StartExecutorService;
import io.kestra.core.utils.Await;
import io.micronaut.context.ApplicationContext;
import jakarta.annotation.Nullable;
import jakarta.inject.Inject;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
import java.io.File;
@@ -22,8 +21,9 @@ import java.util.Map;
@CommandLine.Command(
name = "standalone",
description = "Start the standalone all-in-one server"
description = "start a standalone server"
)
@Slf4j
public class StandAloneCommand extends AbstractServerCommand {
@CommandLine.Spec
CommandLine.Model.CommandSpec spec;
@@ -37,14 +37,10 @@ public class StandAloneCommand extends AbstractServerCommand {
@Inject
private StartExecutorService startExecutorService;
@Inject
@Nullable
private FileChangedEventListener fileWatcher;
@CommandLine.Option(names = {"-f", "--flow-path"}, description = "the flow path containing flow to inject at startup (when running with a memory flow repository)")
private File flowPath;
@CommandLine.Option(names = {"--worker-thread"}, description = "the number of worker threads, defaults to four times the number of available processors. Set it to 0 to avoid starting a worker.")
@CommandLine.Option(names = {"--worker-thread"}, description = "the number of worker threads, defaults to two times the number of available processors. Set it to 0 to avoid starting a worker.")
private int workerThread = defaultWorkerThread();
@CommandLine.Option(names = {"--skip-executions"}, split=",", description = "a list of execution identifiers to skip, separated by a coma; for troubleshooting purpose only")
@@ -68,9 +64,6 @@ public class StandAloneCommand extends AbstractServerCommand {
@CommandLine.Option(names = {"--not-start-executors"}, split=",", description = "a list of Kafka Stream executors to not start, separated by a command. Use it only with the Kafka queue, for debugging purpose.")
private List<String> notStartExecutors = Collections.emptyList();
@CommandLine.Option(names = {"--no-indexer"}, description = "Flag to disable starting an embedded indexer.")
boolean indexerDisabled = false;
@Override
public boolean isFlowAutoLoadEnabled() {
return !tutorialsDisabled;
@@ -89,16 +82,16 @@ public class StandAloneCommand extends AbstractServerCommand {
this.skipExecutionService.setSkipFlows(skipFlows);
this.skipExecutionService.setSkipNamespaces(skipNamespaces);
this.skipExecutionService.setSkipTenants(skipTenants);
this.startExecutorService.applyOptions(startExecutors, notStartExecutors);
KestraContext.getContext().injectWorkerConfigs(workerThread, null);
super.call();
this.shutdownHook(() -> KestraContext.getContext().shutdown());
if (flowPath != null) {
try {
LocalFlowRepositoryLoader localFlowRepositoryLoader = applicationContext.getBean(LocalFlowRepositoryLoader.class);
localFlowRepositoryLoader.load(null, this.flowPath);
localFlowRepositoryLoader.load(this.flowPath);
} catch (IOException e) {
throw new CommandLine.ParameterException(this.spec.commandLine(), "Invalid flow path", e);
}
@@ -112,16 +105,8 @@ public class StandAloneCommand extends AbstractServerCommand {
standAloneRunner.setWorkerThread(this.workerThread);
}
if (this.indexerDisabled) {
standAloneRunner.setIndexerEnabled(false);
}
standAloneRunner.run();
if (fileWatcher != null) {
fileWatcher.startListeningFromConfig();
}
Await.until(() -> !this.applicationContext.isRunning());
return 0;

View File

@@ -1,10 +1,9 @@
package io.kestra.cli.commands.servers;
import com.google.common.collect.ImmutableMap;
import io.kestra.core.contexts.KestraContext;
import io.kestra.core.models.ServerType;
import io.kestra.core.runners.IndexerInterface;
import io.kestra.core.utils.Await;
import io.kestra.core.utils.ExecutorsUtils;
import io.micronaut.context.ApplicationContext;
import jakarta.inject.Inject;
import lombok.extern.slf4j.Slf4j;
@@ -12,27 +11,19 @@ import picocli.CommandLine;
import picocli.CommandLine.Option;
import java.util.Map;
import java.util.concurrent.ExecutorService;
@CommandLine.Command(
name = "webserver",
description = "Start the Kestra webserver"
description = "start the webserver"
)
@Slf4j
public class WebServerCommand extends AbstractServerCommand {
private ExecutorService poolExecutor;
@Inject
private ApplicationContext applicationContext;
@Inject
private ExecutorsUtils executorsUtils;
@Option(names = {"--no-tutorials"}, description = "Flag to disable auto-loading of tutorial flows.")
boolean tutorialsDisabled = false;
@Option(names = {"--no-indexer"}, description = "Flag to disable starting an embedded indexer.")
boolean indexerDisabled = false;
@Override
public boolean isFlowAutoLoadEnabled() {
@@ -49,16 +40,8 @@ public class WebServerCommand extends AbstractServerCommand {
@Override
public Integer call() throws Exception {
super.call();
// start the indexer
if (!indexerDisabled) {
log.info("Starting an embedded indexer, this can be disabled by using `--no-indexer`.");
poolExecutor = executorsUtils.cachedThreadPool("webserver-indexer");
poolExecutor.execute(applicationContext.getBean(IndexerInterface.class));
shutdownHook(false, () -> poolExecutor.shutdown());
}
log.info("Webserver started");
this.shutdownHook(() -> KestraContext.getContext().shutdown());
Await.until(() -> !this.applicationContext.isRunning());
return 0;
}

View File

@@ -7,6 +7,7 @@ import io.kestra.core.runners.Worker;
import io.kestra.core.utils.Await;
import io.micronaut.context.ApplicationContext;
import jakarta.inject.Inject;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
import picocli.CommandLine.Option;
@@ -15,17 +16,18 @@ import java.util.UUID;
@CommandLine.Command(
name = "worker",
description = "Start the Kestra worker"
description = "start a worker"
)
@Slf4j
public class WorkerCommand extends AbstractServerCommand {
@Inject
private ApplicationContext applicationContext;
@Option(names = {"-t", "--thread"}, description = "The max number of worker threads, defaults to four times the number of available processors")
@Option(names = {"-t", "--thread"}, description = "the max number of worker threads, defaults to two times the number of available processors")
private int thread = defaultWorkerThread();
@Option(names = {"-g", "--worker-group"}, description = "The worker group key, must match the regex [a-zA-Z0-9_-]+ (EE only)")
@Option(names = {"-g", "--worker-group"}, description = "the worker group key, must match the regex [a-zA-Z0-9_-]+ (EE only)")
private String workerGroupKey = null;
@SuppressWarnings("unused")
@@ -37,11 +39,8 @@ public class WorkerCommand extends AbstractServerCommand {
@Override
public Integer call() throws Exception {
KestraContext.getContext().injectWorkerConfigs(thread, workerGroupKey);
super.call();
this.shutdownHook(() -> KestraContext.getContext().shutdown());
if (this.workerGroupKey != null && !this.workerGroupKey.matches("[a-zA-Z0-9_-]+")) {
throw new IllegalArgumentException("The --worker-group option must match the [a-zA-Z0-9_-]+ pattern");
}
@@ -53,6 +52,13 @@ public class WorkerCommand extends AbstractServerCommand {
worker.run();
if (this.workerGroupKey != null) {
log.info("Worker started with {} thread(s) in group '{}'", this.thread, this.workerGroupKey);
}
else {
log.info("Worker started with {} thread(s)", this.thread);
}
Await.until(() -> !this.applicationContext.isRunning());
return 0;

View File

@@ -2,7 +2,6 @@ package io.kestra.cli.commands.sys;
import io.kestra.cli.AbstractCommand;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.micronaut.context.ApplicationContext;
import jakarta.inject.Inject;
@@ -10,11 +9,10 @@ import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
import java.util.List;
import java.util.Objects;
@CommandLine.Command(
name = "reindex",
description = "Reindex all records of a type: read them from the database then update them",
description = "reindex all records of a type: read them from the database then update them",
mixinStandardHelpOptions = true
)
@Slf4j
@@ -35,8 +33,8 @@ public class ReindexCommand extends AbstractCommand {
List<Flow> allFlow = flowRepository.findAllForAllTenants();
allFlow.stream()
.map(flow -> flowRepository.findByIdWithSource(flow.getTenantId(), flow.getNamespace(), flow.getId()).orElse(null))
.filter(Objects::nonNull)
.forEach(flow -> flowRepository.update(GenericFlow.of(flow), flow));
.filter(flow -> flow != null)
.forEach(flow -> flowRepository.update(flow.toFlow(), flow.toFlow(), flow.getSource(), flow.toFlow()));
stdOut("Successfully reindex " + allFlow.size() + " flow(s).");
}

View File

@@ -10,7 +10,7 @@ import picocli.CommandLine;
@CommandLine.Command(
name = "sys",
description = "Manage system maintenance mode",
description = "handle systems maintenance",
mixinStandardHelpOptions = true,
subcommands = {
ReindexCommand.class,

View File

@@ -1,5 +1,6 @@
package io.kestra.cli.commands.sys.database;
import ch.qos.logback.classic.Level;
import io.kestra.cli.AbstractCommand;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;

View File

@@ -44,7 +44,7 @@ public class StateStoreMigrateCommand extends AbstractCommand {
URI.create("/" + flow.getNamespace().replace(".", "/") + "/states")
))).map(potentialStateStoreUrisForAFlow -> Map.entry(potentialStateStoreUrisForAFlow.getKey(), potentialStateStoreUrisForAFlow.getValue().stream().flatMap(uri -> {
try {
return storageInterface.allByPrefix(potentialStateStoreUrisForAFlow.getKey().getTenantId(), potentialStateStoreUrisForAFlow.getKey().getNamespace(), uri, false).stream();
return storageInterface.allByPrefix(potentialStateStoreUrisForAFlow.getKey().getTenantId(), uri, false).stream();
} catch (IOException e) {
return Stream.empty();
}
@@ -59,9 +59,9 @@ public class StateStoreMigrateCommand extends AbstractCommand {
boolean flowScoped = flowQualifierWithStateQualifiers[0].endsWith("/" + flow.getId());
StateStore stateStore = new StateStore(runContext(runContextFactory, flow), false);
try (InputStream is = storageInterface.get(flow.getTenantId(), flow.getNamespace(), stateStoreFileUri)) {
try (InputStream is = storageInterface.get(flow.getTenantId(), stateStoreFileUri)) {
stateStore.putState(flowScoped, stateName, stateSubName, taskRunValue, is.readAllBytes());
storageInterface.delete(flow.getTenantId(), flow.getNamespace(), stateStoreFileUri);
storageInterface.delete(flow.getTenantId(), stateStoreFileUri);
} catch (IOException e) {
throw new RuntimeException(e);
}

View File

@@ -11,7 +11,7 @@ import picocli.CommandLine;
@CommandLine.Command(
name = "template",
description = "Manage templates",
description = "handle templates",
mixinStandardHelpOptions = true,
subcommands = {
TemplateNamespaceCommand.class,

View File

@@ -19,7 +19,7 @@ import java.nio.file.Path;
@CommandLine.Command(
name = "export",
description = "Export templates to a ZIP file",
description = "export templates to a zip file",
mixinStandardHelpOptions = true
)
@Slf4j
@@ -31,10 +31,10 @@ public class TemplateExportCommand extends AbstractApiCommand {
@Inject
private ApplicationContext applicationContext;
@CommandLine.Option(names = {"--namespace"}, description = "The namespace of templates to export")
@CommandLine.Option(names = {"--namespace"}, description = "the namespace of templates to export")
public String namespace;
@CommandLine.Parameters(index = "0", description = "The directory to export the file to")
@CommandLine.Parameters(index = "0", description = "the directory to export the file to")
public Path directory;
@Override

View File

@@ -4,6 +4,7 @@ import io.kestra.cli.AbstractValidateCommand;
import io.kestra.core.models.templates.Template;
import io.kestra.core.models.templates.TemplateEnabled;
import io.kestra.core.models.validations.ModelValidator;
import io.kestra.core.serializers.YamlFlowParser;
import jakarta.inject.Inject;
import picocli.CommandLine;
@@ -11,10 +12,12 @@ import java.util.Collections;
@CommandLine.Command(
name = "validate",
description = "Validate a template"
description = "validate a template"
)
@TemplateEnabled
public class TemplateValidateCommand extends AbstractValidateCommand {
@Inject
private YamlFlowParser yamlFlowParser;
@Inject
private ModelValidator modelValidator;
@@ -23,12 +26,12 @@ public class TemplateValidateCommand extends AbstractValidateCommand {
public Integer call() throws Exception {
return this.call(
Template.class,
yamlFlowParser,
modelValidator,
(Object object) -> {
Template template = (Template) object;
return template.getNamespace() + " / " + template.getId();
},
(Object object) -> Collections.emptyList(),
(Object object) -> Collections.emptyList()
);
}

View File

@@ -10,7 +10,7 @@ import picocli.CommandLine;
@CommandLine.Command(
name = "namespace",
description = "Manage namespace templates",
description = "handle namespace templates",
mixinStandardHelpOptions = true,
subcommands = {
TemplateNamespaceUpdateCommand.class,

View File

@@ -2,30 +2,34 @@ package io.kestra.cli.commands.templates.namespaces;
import io.kestra.cli.AbstractValidateCommand;
import io.kestra.cli.commands.AbstractServiceNamespaceUpdateCommand;
import io.kestra.cli.commands.templates.TemplateValidateCommand;
import io.kestra.core.models.templates.Template;
import io.kestra.core.models.templates.TemplateEnabled;
import io.kestra.core.serializers.YamlParser;
import io.kestra.core.serializers.YamlFlowParser;
import io.micronaut.core.type.Argument;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.MutableHttpRequest;
import io.micronaut.http.client.exceptions.HttpClientResponseException;
import io.micronaut.http.client.netty.DefaultHttpClient;
import jakarta.inject.Inject;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
import java.nio.file.Files;
import java.util.List;
import java.util.stream.Collectors;
import jakarta.validation.ConstraintViolationException;
@CommandLine.Command(
name = "update",
description = "Update namespace templates",
description = "handle namespace templates",
mixinStandardHelpOptions = true
)
@Slf4j
@TemplateEnabled
public class TemplateNamespaceUpdateCommand extends AbstractServiceNamespaceUpdateCommand {
@Inject
public YamlFlowParser yamlFlowParser;
@Override
public Integer call() throws Exception {
@@ -34,8 +38,8 @@ public class TemplateNamespaceUpdateCommand extends AbstractServiceNamespaceUpda
try (var files = Files.walk(directory)) {
List<Template> templates = files
.filter(Files::isRegularFile)
.filter(YamlParser::isValidExtension)
.map(path -> YamlParser.parse(path.toFile(), Template.class))
.filter(YamlFlowParser::isValidExtension)
.map(path -> yamlFlowParser.parse(path.toFile(), Template.class))
.toList();
if (templates.isEmpty()) {

View File

@@ -0,0 +1,153 @@
package io.kestra.cli.plugins;
import com.google.common.collect.ImmutableList;
import io.micronaut.context.annotation.Value;
import io.micronaut.core.annotation.Nullable;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.FileUtils;
import org.apache.maven.repository.internal.MavenRepositorySystemUtils;
import org.eclipse.aether.DefaultRepositorySystemSession;
import org.eclipse.aether.RepositorySystem;
import org.eclipse.aether.RepositorySystemSession;
import org.eclipse.aether.artifact.Artifact;
import org.eclipse.aether.artifact.DefaultArtifact;
import org.eclipse.aether.connector.basic.BasicRepositoryConnectorFactory;
import org.eclipse.aether.impl.DefaultServiceLocator;
import org.eclipse.aether.repository.LocalRepository;
import org.eclipse.aether.repository.RemoteRepository;
import org.eclipse.aether.resolution.*;
import org.eclipse.aether.spi.connector.RepositoryConnectorFactory;
import org.eclipse.aether.spi.connector.transport.TransporterFactory;
import org.eclipse.aether.transport.file.FileTransporterFactory;
import org.eclipse.aether.transport.http.HttpTransporterFactory;
import org.eclipse.aether.util.repository.AuthenticationBuilder;
import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
@Singleton
@Slf4j
public class PluginDownloader {
private final List<RepositoryConfig> repositoryConfigs;
private final RepositorySystem system;
private final RepositorySystemSession session;
@Inject
public PluginDownloader(
List<RepositoryConfig> repositoryConfigs,
@Nullable @Value("${kestra.plugins.local-repository-path}") String localRepositoryPath
) {
this.repositoryConfigs = repositoryConfigs;
this.system = repositorySystem();
this.session = repositorySystemSession(system, localRepositoryPath);
}
public void addRepository(RepositoryConfig repositoryConfig) {
this.repositoryConfigs.add(repositoryConfig);
}
public List<URL> resolve(List<String> dependencies) throws MalformedURLException, ArtifactResolutionException, VersionRangeResolutionException {
List<RemoteRepository> repositories = remoteRepositories();
List<ArtifactResult> artifactResults = resolveArtifacts(repositories, dependencies);
List<URL> localUrls = resolveUrls(artifactResults);
log.debug("Resolved Plugin {} with {}", dependencies, localUrls);
return localUrls;
}
private List<RemoteRepository> remoteRepositories() {
return repositoryConfigs
.stream()
.map(repositoryConfig -> {
var build = new RemoteRepository.Builder(
repositoryConfig.getId(),
"default",
repositoryConfig.getUrl()
);
if (repositoryConfig.getBasicAuth() != null) {
var authenticationBuilder = new AuthenticationBuilder();
authenticationBuilder.addUsername(repositoryConfig.getBasicAuth().getUsername());
authenticationBuilder.addPassword(repositoryConfig.getBasicAuth().getPassword());
build.setAuthentication(authenticationBuilder.build());
}
return build.build();
})
.toList();
}
private static RepositorySystem repositorySystem() {
DefaultServiceLocator locator = MavenRepositorySystemUtils.newServiceLocator();
locator.addService(RepositoryConnectorFactory.class, BasicRepositoryConnectorFactory.class);
locator.addService(TransporterFactory.class, FileTransporterFactory.class);
locator.addService(TransporterFactory.class, HttpTransporterFactory.class);
return locator.getService(RepositorySystem.class);
}
private RepositorySystemSession repositorySystemSession(RepositorySystem system, String localRepositoryPath) {
DefaultRepositorySystemSession session = MavenRepositorySystemUtils.newSession();
if (localRepositoryPath == null) {
try {
final String tempDirectory = Files.createTempDirectory(this.getClass().getSimpleName().toLowerCase())
.toAbsolutePath()
.toString();
localRepositoryPath = tempDirectory;
Runtime.getRuntime().addShutdownHook(new Thread(() -> {
try {
FileUtils.deleteDirectory(new File(tempDirectory));
} catch (IOException e) {
throw new RuntimeException(e);
}
}));
} catch (IOException e) {
throw new RuntimeException(e);
}
}
LocalRepository localRepo = new LocalRepository(localRepositoryPath);
session.setLocalRepositoryManager(system.newLocalRepositoryManager(session, localRepo));
return session;
}
private List<ArtifactResult> resolveArtifacts(List<RemoteRepository> repositories, List<String> dependencies) throws ArtifactResolutionException, VersionRangeResolutionException {
List<ArtifactResult> results = new ArrayList<>(dependencies.size());
for (String dependency: dependencies) {
var artifact = new DefaultArtifact(dependency);
var version = system.resolveVersionRange(session, new VersionRangeRequest(artifact, repositories, null));
var artifactRequest = new ArtifactRequest(
new DefaultArtifact(artifact.getGroupId(), artifact.getArtifactId(), "jar", version.getHighestVersion().toString()),
repositories,
null
);
var artifactResult = system.resolveArtifact(session, artifactRequest);
results.add(artifactResult);
}
return results;
}
private List<URL> resolveUrls(List<ArtifactResult> artifactResults) throws MalformedURLException {
ImmutableList.Builder<URL> urls = ImmutableList.builder();
for (ArtifactResult artifactResult : artifactResults) {
URL url;
url = artifactResult.getArtifact().getFile().toPath().toUri().toURL();
urls.add(url);
}
return urls.build();
}
}
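A minimal usage sketch of the PluginDownloader above, constructed directly rather than through Micronaut injection; the repository URL and plugin coordinates are hypothetical placeholders, and the classes are assumed to be on the classpath in the same package:
// Sketch only: resolve hypothetical plugin coordinates against Maven Central.
import java.net.URL;
import java.util.ArrayList;
import java.util.List;

public class PluginDownloaderSketch {
    public static void main(String[] args) throws Exception {
        // null local-repository path: a temporary directory is created (see constructor above)
        PluginDownloader downloader = new PluginDownloader(new ArrayList<>(), null);
        downloader.addRepository(RepositoryConfig.builder()
            .id("central")
            .url("https://repo.maven.apache.org/maven2/")
            .build());
        // Hypothetical coordinates; real plugins use <groupId>:<artifactId>:<version>
        List<URL> urls = downloader.resolve(List.of("com.example:some-plugin:1.0.0"));
        urls.forEach(url -> System.out.println("Resolved: " + url));
    }
}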

View File

@@ -0,0 +1,30 @@
package io.kestra.cli.plugins;
import io.micronaut.context.annotation.EachProperty;
import io.micronaut.context.annotation.Parameter;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Getter;
@EachProperty("kestra.plugins.repositories")
@Getter
@AllArgsConstructor
@Builder
public class RepositoryConfig {
String id;
String url;
BasicAuth basicAuth;
@Getter
@AllArgsConstructor
public static class BasicAuth {
private String username;
private String password;
}
public RepositoryConfig(@Parameter String id) {
this.id = id;
}
}
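Each entry under kestra.plugins.repositories is bound to one RepositoryConfig instance via @EachProperty. As a sketch, the programmatic equivalent of such an entry, with a hypothetical URL and credentials, would look like this:
// Sketch only: building a RepositoryConfig by hand, equivalent to one configured
// repository entry. The URL and credentials are placeholders.
class RepositoryConfigSketch {
    static RepositoryConfig internalRepository() {
        return RepositoryConfig.builder()
            .id("internal")
            .url("https://nexus.example.com/repository/maven-releases/")
            .basicAuth(new RepositoryConfig.BasicAuth("deploy-user", "change-me"))
            .build();
    }
}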

View File

@@ -1,260 +0,0 @@
package io.kestra.cli.services;
import io.kestra.core.exceptions.FlowProcessingException;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithPath;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.models.validations.ModelValidator;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.services.FlowListenersInterface;
import io.kestra.core.services.PluginDefaultService;
import io.micronaut.context.annotation.Requires;
import io.micronaut.context.annotation.Value;
import io.micronaut.scheduling.io.watch.FileWatchConfiguration;
import jakarta.inject.Inject;
import jakarta.annotation.Nullable;
import jakarta.inject.Singleton;
import jakarta.validation.ConstraintViolationException;
import lombok.extern.slf4j.Slf4j;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.file.*;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
@Singleton
@Slf4j
@Requires(property = "micronaut.io.watch.enabled", value = "true")
public class FileChangedEventListener {
@Nullable
private final FileWatchConfiguration fileWatchConfiguration;
@Nullable
private final WatchService watchService;
@Inject
private FlowRepositoryInterface flowRepositoryInterface;
@Inject
private PluginDefaultService pluginDefaultService;
@Inject
private ModelValidator modelValidator;
@Inject
protected FlowListenersInterface flowListeners;
@Nullable
@Value("${micronaut.io.watch.tenantId}")
private String tenantId;
FlowFilesManager flowFilesManager;
private List<FlowWithPath> flows = new ArrayList<>();
private boolean isStarted = false;
@Inject
public FileChangedEventListener(@Nullable FileWatchConfiguration fileWatchConfiguration, @Nullable WatchService watchService) {
this.fileWatchConfiguration = fileWatchConfiguration;
this.watchService = watchService;
}
public void startListeningFromConfig() throws IOException, InterruptedException {
if (fileWatchConfiguration != null && fileWatchConfiguration.isEnabled()) {
this.flowFilesManager = new LocalFlowFileWatcher(flowRepositoryInterface);
List<Path> paths = fileWatchConfiguration.getPaths();
this.setup(paths);
flowListeners.run();
// Init existing flows not already in files
flowListeners.listen(flows -> {
if (!isStarted) {
for (FlowInterface flow : flows) {
if (this.flows.stream().noneMatch(flowWithPath -> flowWithPath.uidWithoutRevision().equals(flow.uidWithoutRevision()))) {
flowToFile(flow, this.buildPath(flow));
this.flows.add(FlowWithPath.of(flow, this.buildPath(flow).toString()));
}
}
this.isStarted = true;
}
});
// Listen for new/updated/deleted flows
flowListeners.listen((current, previous) -> {
// If deleted
if (current.isDeleted()) {
this.flows.stream().filter(flowWithPath -> flowWithPath.uidWithoutRevision().equals(current.uidWithoutRevision())).findFirst()
.ifPresent(flowWithPath -> {
deleteFile(Paths.get(flowWithPath.getPath()));
});
this.flows.removeIf(flowWithPath -> flowWithPath.uidWithoutRevision().equals(current.uidWithoutRevision()));
} else {
// if updated/created
Optional<FlowWithPath> flowWithPath = this.flows.stream().filter(fwp -> fwp.uidWithoutRevision().equals(current.uidWithoutRevision())).findFirst();
if (flowWithPath.isPresent()) {
flowToFile(current, Paths.get(flowWithPath.get().getPath()));
} else {
flows.add(FlowWithPath.of(current, this.buildPath(current).toString()));
flowToFile(current, null);
}
}
});
this.startListening(paths);
} else {
log.info("File watching is disabled.");
}
}
public void startListening(List<Path> paths) throws IOException, InterruptedException {
for (Path path : paths) {
path.register(watchService, StandardWatchEventKinds.ENTRY_CREATE, StandardWatchEventKinds.ENTRY_DELETE, StandardWatchEventKinds.ENTRY_MODIFY);
}
WatchKey key;
while ((key = watchService.take()) != null) {
for (WatchEvent<?> watchEvent : key.pollEvents()) {
try {
WatchEvent.Kind<?> kind = watchEvent.kind();
Path entry = (Path) watchEvent.context();
if (entry.toString().endsWith(".yml") || entry.toString().endsWith(".yaml")) {
if (kind == StandardWatchEventKinds.ENTRY_CREATE || kind == StandardWatchEventKinds.ENTRY_MODIFY) {
Path filePath = ((Path) key.watchable()).resolve(entry);
if (Files.isDirectory(filePath)) {
loadFlowsFromFolder(filePath);
} else {
try {
String content = Files.readString(filePath, Charset.defaultCharset());
Optional<FlowWithSource> flow = parseFlow(content, entry);
if (flow.isPresent()) {
if (kind == StandardWatchEventKinds.ENTRY_MODIFY) {
// Check if we already have a file with the given path
if (flows.stream().anyMatch(flowWithPath -> flowWithPath.getPath().equals(filePath.toString()))) {
Optional<FlowWithPath> previous = flows.stream().filter(flowWithPath -> flowWithPath.getPath().equals(filePath.toString())).findFirst();
// Check if Flow from file has id/namespace updated
if (previous.isPresent() && !previous.get().uidWithoutRevision().equals(flow.get().uidWithoutRevision())) {
flows.removeIf(flowWithPath -> flowWithPath.getPath().equals(filePath.toString()));
flowFilesManager.deleteFlow(previous.get().getTenantId(), previous.get().getNamespace(), previous.get().getId());
flows.add(FlowWithPath.of(flow.get(), filePath.toString()));
}
} else {
flows.add(FlowWithPath.of(flow.get(), filePath.toString()));
}
} else {
flows.add(FlowWithPath.of(flow.get(), filePath.toString()));
}
flowFilesManager.createOrUpdateFlow(GenericFlow.fromYaml(tenantId, content));
log.info("Flow {} from file {} has been created or modified", flow.get().getId(), entry);
}
} catch (NoSuchFileException e) {
log.error("File not found: {}", entry, e);
} catch (IOException e) {
log.error("Error reading file: {}", entry, e);
}
}
} else {
Path filePath = ((Path) key.watchable()).resolve(entry);
flows.stream()
.filter(flow -> flow.getPath().equals(filePath.toString()))
.findFirst()
.ifPresent(flowWithPath -> {
flowFilesManager.deleteFlow(flowWithPath.getTenantId(), flowWithPath.getNamespace(), flowWithPath.getId());
this.flows.removeIf(fwp -> fwp.uidWithoutRevision().equals(flowWithPath.uidWithoutRevision()));
});
}
}
} catch (Exception e) {
log.error("Unexpected error while watching flows", e);
}
}
key.reset();
}
}
private void setup(List<Path> folders) {
for (Path folder : folders) {
this.loadFlowsFromFolder(folder);
}
}
private void loadFlowsFromFolder(Path folder) {
try {
Files.walkFileTree(folder, new SimpleFileVisitor<Path>() {
@Override
public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException {
dir.register(watchService, StandardWatchEventKinds.ENTRY_CREATE, StandardWatchEventKinds.ENTRY_DELETE, StandardWatchEventKinds.ENTRY_MODIFY);
if (!dir.equals(folder)) {
loadFlowsFromFolder(dir);
}
return FileVisitResult.CONTINUE;
}
@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
if (file.toString().endsWith(".yml") || file.toString().endsWith(".yaml")) {
String content = Files.readString(file, Charset.defaultCharset());
Optional<FlowWithSource> flow = parseFlow(content, file);
if (flow.isPresent() && flows.stream().noneMatch(flowWithPath -> flowWithPath.uidWithoutRevision().equals(flow.get().uidWithoutRevision()))) {
flows.add(FlowWithPath.of(flow.get(), file.toString()));
flowFilesManager.createOrUpdateFlow(GenericFlow.fromYaml(tenantId, content));
}
}
return FileVisitResult.CONTINUE;
}
});
log.info("Loaded files from the folder {}", folder);
} catch (IOException e) {
log.error(e.getMessage());
}
}
private void flowToFile(FlowInterface flow, Path path) {
Path defaultPath = path != null ? path : this.buildPath(flow);
try {
Files.writeString(defaultPath, flow.source());
log.info("Flow {} has been written to file {}", flow.getId(), defaultPath);
} catch (IOException e) {
log.error("Error writing file: {}", defaultPath, e);
}
}
private Optional<FlowWithSource> parseFlow(String content, Path entry) {
try {
FlowWithSource flow = pluginDefaultService.parseFlowWithAllDefaults(tenantId, content, false);
modelValidator.validate(flow);
return Optional.of(flow);
} catch (ConstraintViolationException | FlowProcessingException e) {
log.warn("Error while parsing flow: {}", entry, e);
}
return Optional.empty();
}
private void deleteFile(Path file) {
try {
if (Files.deleteIfExists(file)) {
log.info("File {} has been deleted successfully.", file);
} else {
log.warn("File {} does not exist.", file);
}
} catch (IOException e) {
log.error("Error deleting file: {}", file, e);
}
}
private Path buildPath(FlowInterface flow) {
return fileWatchConfiguration.getPaths().getFirst().resolve(flow.uidWithoutRevision() + ".yml");
}
}
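The listener above is built on the standard java.nio WatchService: directories are registered for create/modify/delete events and only .yml/.yaml entries are processed. A standalone sketch of that underlying pattern, with a placeholder directory:
// Minimal sketch of the java.nio WatchService pattern used above:
// register a directory, then react to created/modified/deleted YAML files.
import java.nio.file.*;

public class WatchSketch {
    public static void main(String[] args) throws Exception {
        Path dir = Paths.get("/tmp/flows"); // placeholder directory
        WatchService watchService = FileSystems.getDefault().newWatchService();
        dir.register(watchService,
            StandardWatchEventKinds.ENTRY_CREATE,
            StandardWatchEventKinds.ENTRY_MODIFY,
            StandardWatchEventKinds.ENTRY_DELETE);

        WatchKey key;
        while ((key = watchService.take()) != null) {
            for (WatchEvent<?> event : key.pollEvents()) {
                Path entry = (Path) event.context();
                if (entry.toString().endsWith(".yml") || entry.toString().endsWith(".yaml")) {
                    System.out.println(event.kind() + ": " + dir.resolve(entry));
                }
            }
            key.reset();
        }
    }
}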

View File

@@ -1,13 +0,0 @@
package io.kestra.cli.services;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.GenericFlow;
public interface FlowFilesManager {
FlowWithSource createOrUpdateFlow(GenericFlow flow);
void deleteFlow(FlowWithSource toDelete);
void deleteFlow(String tenantId, String namespace, String id);
}

View File

@@ -1,34 +0,0 @@
package io.kestra.cli.services;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.repositories.FlowRepositoryInterface;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class LocalFlowFileWatcher implements FlowFilesManager {
private final FlowRepositoryInterface flowRepository;
public LocalFlowFileWatcher(FlowRepositoryInterface flowRepository) {
this.flowRepository = flowRepository;
}
@Override
public FlowWithSource createOrUpdateFlow(final GenericFlow flow) {
return flowRepository.findById(flow.getTenantId(), flow.getNamespace(), flow.getId())
.map(previous -> flowRepository.update(flow, previous))
.orElseGet(() -> flowRepository.create(flow));
}
@Override
public void deleteFlow(FlowWithSource toDelete) {
flowRepository.findByIdWithSource(toDelete.getTenantId(), toDelete.getNamespace(), toDelete.getId()).ifPresent(flowRepository::delete);
log.info("Flow {} has been deleted", toDelete.getId());
}
@Override
public void deleteFlow(String tenantId, String namespace, String id) {
flowRepository.findByIdWithSource(tenantId, namespace, id).ifPresent(flowRepository::delete);
log.info("Flow {} has been deleted", id);
}
}

View File

@@ -1,9 +1,6 @@
micronaut:
application:
name: kestra
# Disable Micronaut Open Telemetry
otel:
enabled: false
router:
static-resources:
swagger:
@@ -26,11 +23,11 @@ micronaut:
netty:
max-chunk-size: 10MB
max-header-size: 32768 # increased from the default of 8k
responses:
file:
cache-seconds: 86400
cache-control:
public: true
responses:
file:
cache-seconds: 86400
cache-control:
public: true
# Access log configuration, see https://docs.micronaut.io/latest/guide/index.html#accessLogger
access-logger:
@@ -40,7 +37,6 @@ micronaut:
exclusions:
- /ui/.+
- /health
- /health/.+
- /prometheus
http-version: HTTP_1_1
caches:
@@ -74,13 +70,6 @@ micronaut:
type: scheduled
core-pool-size: 1
# Disable OpenTelemetry metrics by default, users that need it must enable it and configure the collector URL.
metrics:
export:
otlp:
enabled: false
# url: http://localhost:4318/v1/metrics
jackson:
serialization:
writeDatesAsTimestamps: false
@@ -135,21 +124,15 @@ kestra:
delay: 1s
maxDelay: ""
jdbc:
queues:
min-poll-interval: 25ms
max-poll-interval: 500ms
poll-switch-interval: 60s
max-poll-interval: 1000ms
poll-switch-interval: 5s
cleaner:
initial-delay: 1h
fixed-delay: 1h
retention: 7d
types:
- type: io.kestra.core.models.executions.LogEntry
retention: 1h
- type: io.kestra.core.models.executions.MetricEntry
retention: 1h
plugins:
repositories:
@@ -168,16 +151,13 @@ kestra:
values:
recoverMissedSchedules: ALL
variables:
env-vars-prefix: ENV_
env-vars-prefix: KESTRA_
cache-enabled: true
cache-size: 1000
metrics:
prefix: kestra
traces:
root: DISABLED
server:
basic-auth:
enabled: false
@@ -195,34 +175,15 @@ kestra:
liveness:
enabled: true
# The expected time between liveness probe.
interval: 10s
interval: 5s
# The timeout used to detect service failures.
timeout: 1m
timeout: 45s
# The time to wait before executing a liveness probe.
initialDelay: 1m
initialDelay: 45s
# The expected time between service heartbeats.
heartbeatInterval: 3s
service:
purge:
initial-delay: 1h
fixed-delay: 1d
retention: 30d
anonymous-usage-report:
enabled: true
uri: https://api.kestra.io/v1/reports/usages
initial-delay: 5m
fixed-delay: 1h
hidden-labels:
prefixes:
- system.
- internal.
otel:
exclusions:
- /ping
- /metrics
- /health
- /env
- /prometheus
propagators: tracecontext, baggage

View File

@@ -13,7 +13,8 @@ import picocli.CommandLine;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.*;
import static org.junit.jupiter.api.Assertions.assertTrue;
class AppTest {
@@ -25,7 +26,7 @@ class AppTest {
try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, Environment.TEST)) {
PicocliRunner.call(App.class, ctx, "--help");
assertThat(out.toString()).contains("kestra");
assertThat(out.toString(), containsString("kestra"));
}
}
@@ -41,23 +42,7 @@ class AppTest {
new CommandLine(App.class, new MicronautFactory(ctx)).execute(args);
assertTrue(ctx.getProperty("kestra.server-type", ServerType.class).isEmpty());
assertThat(out.toString()).startsWith("Usage: kestra server " + serverType);
}
}
@Test
void missingRequiredParamsPrintHelpInsteadOfException() {
final ByteArrayOutputStream out = new ByteArrayOutputStream();
System.setErr(new PrintStream(out));
final String[] argsWithMissingParams = new String[]{"flow", "namespace", "update"};
try (ApplicationContext ctx = App.applicationContext(App.class, argsWithMissingParams)) {
new CommandLine(App.class, new MicronautFactory(ctx)).execute(argsWithMissingParams);
assertThat(out.toString()).startsWith("Missing required parameters: ");
assertThat(out.toString()).contains("Usage: kestra flow namespace update ");
assertThat(out.toString()).doesNotContain("MissingParameterException: ");
assertThat(out.toString(), startsWith("Usage: kestra server " + serverType));
}
}
}

Some files were not shown because too many files have changed in this diff.