Mirror of https://github.com/kestra-io/kestra.git (synced 2025-12-29 09:00:26 -05:00)

Compare commits: chore/micr... ... v0.19.26 (161 commits)
Commits (abbreviated SHA1):

ac7a9c61d9 1bfdae7506 6a232bf7c6 f0acc68a48 18391f535f c6c2c974ee c008920845 ae2a4db153
fde37e4b30 705e834c8a 8f123b1da5 259a9036b9 49fd7e411f 3b5527b872 c14d9573d3 2dff7be851
9ceb633e97 434f0caf9a d00a8f549f c8a829081c 86117b63b6 f3bb05a6d5 317cc02d77 b0ae3b643c
338cc8c38c 9231c28bb5 5a722cdb6c da47b05d61 97d1267852 afe41c1600 a12fa5b82b b3376d2183
dd6d8a7c22 cb3e73c651 f95f47ae0c e042a0a4b3 f92bfe9e98 7ef33e35f1 bc27733149 355543d4f7
12474e118a 7c31e0306c 3aace06d31 dcd697bcac 5fc3f769da 0d52d9a6a9 85f11feace 523294ce8c
b15bc9bacd 74f28be32e 2a714791a1 f9a547ed63 a817970c1c c9e8b7ea06 8f4a9e2dc8 911ae32113
928214a22b d5eafa69aa ae48352300 a59f758d28 befbefbdd9 57c7389f9e 427da64744 430dc8ecee
2d9c98b921 a49b406f03 2a578fe651 277bf77fb4 e6ec59443a b68b281ac0 37bf6ea8f3 71a296a814
6d8bc07f5b 07974aa145 c7288bd325 96f553c1ba 8389102706 16a0096c45 6327dcd51b e91beaa15f
833bdb38ee 0d64c74a67 4740fa3628 b29965c239 05d1eeadef acd2ce9041 a3829c3d7e 17c18f94dd
14daa96295 aa9aa80f0a 705d17340d cf70c99e59 c5e0cddca5 4d14464191 ed12797b46 ec85a748ce
3e8a63888a 8d0bcc1da3 0b53f1cf25 3621aad6a1 dbb1cc5007 0d6e655b22 7a1a180fdb ce2daf52ff
f086da3a2a 1886a443c7 5a4e2b791d a595cecb3d 472b699ca7 f55f52b43a c796308839 37a880164d
5f1408c560 4186900fdb 4338437a6f 68ee5e4df0 2def5cf7f8 d184858abf dfa5875fa1 ac4f7f261d
ae55685d2e dd34317e4f f95e3073dd 9f20988997 5da3ab4f71 243eaab826 6d362d688d 39a01e0e7d
a44b2ef7cb 6bcad13444 02acf01ea5 55193361b8 8d509a3ba5 500680bcf7 412c27cb12 8d7d9a356f
d2ab2e97b4 6a0f360fc6 0484fd389a e92aac3b39 39b8ac8804 f928ed5876 54856af0a8 8bd79e82ab
104a491b92 5f46a0dd16 24c3703418 e5af245855 d58e8f98a2 ce2f1bfdb3 b619f88eff 1f1775752b
b2475e53a2 7e8956a0b7 6537ee984b 573aa48237 66ddeaa219 02c5e8a1a2 733c7897b9 c051287688
1af8de6bce
82 .devcontainer/Dockerfile
@@ -1,82 +0,0 @@
FROM ubuntu:24.04

ARG BUILDPLATFORM
ARG DEBIAN_FRONTEND=noninteractive

USER root
WORKDIR /root

RUN apt update && apt install -y \
    apt-transport-https ca-certificates gnupg curl wget git zip unzip less zsh net-tools iputils-ping jq lsof

ENV HOME="/root"

# --------------------------------------
# Git
# --------------------------------------
# Add the devcontainer workspace folder as a safe directory so that git
# version control works on the container's file system.
RUN git config --global --add safe.directory "/workspaces/kestra"
# --------------------------------------

# --------------------------------------
# Oh my zsh
# --------------------------------------
RUN sh -c "$(curl -fsSL https://raw.githubusercontent.com/ohmyzsh/ohmyzsh/master/tools/install.sh)" -- \
    -t robbyrussell \
    -p git -p node -p npm

ENV SHELL=/bin/zsh
# --------------------------------------

# --------------------------------------
# Java
# --------------------------------------
ARG OS_ARCHITECTURE

RUN mkdir -p /usr/java
RUN echo "Building on platform: $BUILDPLATFORM"
RUN case "$BUILDPLATFORM" in \
        "linux/amd64") OS_ARCHITECTURE="x64_linux" ;; \
        "linux/arm64") OS_ARCHITECTURE="aarch64_linux" ;; \
        "darwin/amd64") OS_ARCHITECTURE="x64_mac" ;; \
        "darwin/arm64") OS_ARCHITECTURE="aarch64_mac" ;; \
        *) echo "Unsupported BUILDPLATFORM: $BUILDPLATFORM" && exit 1 ;; \
    esac && \
    wget "https://github.com/adoptium/temurin21-binaries/releases/download/jdk-21.0.7%2B6/OpenJDK21U-jdk_${OS_ARCHITECTURE}_hotspot_21.0.7_6.tar.gz" && \
    mv OpenJDK21U-jdk_${OS_ARCHITECTURE}_hotspot_21.0.7_6.tar.gz openjdk-21.0.7.tar.gz
RUN tar -xzvf openjdk-21.0.7.tar.gz && \
    mv jdk-21.0.7+6 jdk-21 && \
    mv jdk-21 /usr/java/
ENV JAVA_HOME=/usr/java/jdk-21
ENV PATH="$PATH:$JAVA_HOME/bin"
# Will load a custom configuration file for Micronaut
ENV MICRONAUT_ENVIRONMENTS=local,override
# Sets the path where plugin JARs are saved; they are loaded during the startup process
ENV KESTRA_PLUGINS_PATH="/workspaces/kestra/local/plugins"
# --------------------------------------

# --------------------------------------
# Node.js
# --------------------------------------
RUN curl -fsSL https://deb.nodesource.com/setup_22.x -o nodesource_setup.sh \
    && bash nodesource_setup.sh && apt install -y nodejs
# Increases the JavaScript heap to 4GB to prevent out-of-memory errors during startup
ENV NODE_OPTIONS=--max-old-space-size=4096
# --------------------------------------

# --------------------------------------
# Python
# --------------------------------------
RUN apt install -y python3 pip python3-venv
# --------------------------------------

# --------------------------------------
# SSH
# --------------------------------------
RUN mkdir -p ~/.ssh
RUN touch ~/.ssh/config
RUN echo "Host github.com" >> ~/.ssh/config \
    && echo "  IdentityFile ~/.ssh/id_ed25519" >> ~/.ssh/config
RUN touch ~/.ssh/id_ed25519
# --------------------------------------
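For context on the `BUILDPLATFORM` argument used in the Java section above: with BuildKit it is filled in automatically with the platform of the machine running the build, so no `--build-arg` is needed. A hedged sketch of building this image directly (the image tag is an arbitrary example, not something defined by the repository):

```bash
# BuildKit populates BUILDPLATFORM automatically (e.g. linux/amd64 on a typical
# x86 host, linux/arm64 on Apple silicon); the case statement in the Dockerfile
# then picks the matching Temurin JDK archive.
docker buildx build \
  -f .devcontainer/Dockerfile \
  -t kestra-devcontainer:local \
  .
```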
149 .devcontainer/README.md
@@ -1,149 +0,0 @@
# Kestra Devcontainer

This devcontainer provides a quick and easy setup for anyone using VSCode to get up and running with this project and start development on either the frontend or the backend. It bootstraps a Docker container for you to develop inside of, without the need to manually set up the environment.

---

## INSTRUCTIONS

### Setup:

Take a look at this guide to get an idea of what the setup is like, as this devcontainer follows the same approach: https://kestra.io/docs/getting-started/contributing

Once you have this repo cloned to your local system, you will need to install the VSCode extension [Remote Development](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.vscode-remote-extensionpack).

Then run the following command from the command palette:
`Dev Containers: Open Folder in Container...` and select your Kestra root folder.

This will then put you inside a docker container ready for development.

NOTE: you'll need to wait for the Gradle build to finish and compile the Java files, but this process should happen automatically within VSCode.

In the meantime, you can move on to the next step...

---

### Development:

- Create a `.env.development.local` file in the `ui` folder and paste the following:

```bash
# This lets the frontend know what the backend URL is, but you are free to change this to your actual server URL, e.g. a hosted version of Kestra.
VITE_APP_API_URL=http://localhost:8080
```

- Navigate into the `ui` folder and run `npm install` to install the dependencies for the frontend project.

- Now go to the `cli/src/main/resources` folder and create an `application-override.yml` file.

Now you have two choices:

`Local mode`:

Runs the Kestra server in local mode, which uses an H2 database, so this is the only config you'd need:

```yaml
micronaut:
  server:
    cors:
      enabled: true
      configurations:
        all:
          allowedOrigins:
            - http://localhost:5173
```

You can then open a new terminal and run the following command to start the backend server: `./gradlew runLocal`

`Standalone mode`:

Runs in standalone mode, which uses Postgres. Make sure to have a local Postgres instance already running on localhost:

```yaml
kestra:
  repository:
    type: postgres
  storage:
    type: local
    local:
      base-path: "/app/storage"
  queue:
    type: postgres
  tasks:
    tmp-dir:
      path: /tmp/kestra-wd/tmp
  anonymous-usage-report:
    enabled: false
  server:
    basic-auth:
      enabled: false

datasources:
  postgres:
    # It is important to note that you must use the "host.docker.internal" host when connecting to a docker container outside of your devcontainer, as attempting to use localhost will only point back to this devcontainer.
    url: jdbc:postgresql://host.docker.internal:5432/kestra
    driverClassName: org.postgresql.Driver
    username: kestra
    password: k3str4

flyway:
  datasources:
    postgres:
      enabled: true
      locations:
        - classpath:migrations/postgres
      # We must ignore missing migrations, as we may delete the wrong ones or delete those that are not used anymore.
      ignore-migration-patterns: "*:missing,*:future"
      out-of-order: true

micronaut:
  server:
    cors:
      enabled: true
      configurations:
        all:
          allowedOrigins:
            - http://localhost:5173
```

Then add the following settings to the `.vscode/launch.json` file:

```json
{
    "version": "0.2.0",
    "configurations": [
        {
            "type": "java",
            "name": "Kestra Standalone",
            "request": "launch",
            "mainClass": "io.kestra.cli.App",
            "projectName": "cli",
            "args": "server standalone"
        }
    ]
}
```

You can then use the VSCode `Run and Debug` extension to start the Kestra server.

Additionally, if you're doing frontend development, you can run `npm run dev` from the `ui` folder after having the above running (which will provide a backend) to access your application from `localhost:5173`. This has the benefit of watching your changes and hot-reloading them as you make frontend changes.

#### Plugins

If you want your plugins to be loaded inside your devcontainer, point the `source` field to a folder containing jars of the plugins you want to embed in the following snippet in `devcontainer.json`:

```
"mounts": [
    {
        "source": "/absolute/path/to/your/local/jar/plugins/folder",
        "target": "/workspaces/kestra/local/plugins",
        "type": "bind"
    }
],
```

---

### GIT

If you want to commit to GitHub, make sure to navigate to the `~/.ssh` folder and either create a new SSH key or override the existing `id_ed25519` file and paste an existing SSH key from your local machine into this file. You will then need to change the permissions of the file by running: `chmod 600 id_ed25519`. This will allow you to then push to GitHub.

---
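For reference, the key setup described in the GIT section above boils down to a short shell session inside the devcontainer; this is a sketch only, and the `ssh -T` check at the end is standard OpenSSH/GitHub usage rather than something the README prescribes:

```bash
# Inside the devcontainer: place your existing private key where ~/.ssh/config
# points (IdentityFile ~/.ssh/id_ed25519) and tighten its permissions.
cd ~/.ssh
# paste the private key copied from your local machine into id_ed25519, e.g.:
#   nano id_ed25519
chmod 600 id_ed25519
# quick check that GitHub accepts the key
ssh -T git@github.com
```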
46 .devcontainer/devcontainer.json
@@ -1,46 +0,0 @@
{
    "name": "kestra",
    "build": {
        "context": ".",
        "dockerfile": "Dockerfile"
    },
    "workspaceFolder": "/workspaces/kestra",
    "forwardPorts": [5173, 8080],
    "customizations": {
        "vscode": {
            "settings": {
                "terminal.integrated.profiles.linux": {
                    "zsh": {
                        "path": "/bin/zsh"
                    }
                },
                "workbench.iconTheme": "vscode-icons",
                "editor.tabSize": 4,
                "editor.formatOnSave": true,
                "files.insertFinalNewline": true,
                "editor.defaultFormatter": "esbenp.prettier-vscode",
                "telemetry.telemetryLevel": "off",
                "editor.bracketPairColorization.enabled": true,
                "editor.guides.bracketPairs": "active"
            },
            "extensions": [
                "redhat.vscode-yaml",
                "dbaeumer.vscode-eslint",
                "vscode-icons-team.vscode-icons",
                "eamodio.gitlens",
                "esbenp.prettier-vscode",
                "aaron-bond.better-comments",
                "codeandstuff.package-json-upgrade",
                "andys8.jest-snippets",
                "oderwat.indent-rainbow",
                "evondev.indent-rainbow-palettes",
                "formulahendry.auto-rename-tag",
                "IronGeek.vscode-env",
                "yoavbls.pretty-ts-errors",
                "github.vscode-github-actions",
                "vscjava.vscode-java-pack",
                "docker.docker"
            ]
        }
    }
}
26 .github/CONTRIBUTING.md vendored
@@ -31,16 +31,12 @@ Watch out for duplicates! If you are creating a new issue, please check existing

#### Requirements
The following dependencies are required to build Kestra locally:
- Java 21+
- Node 18+ and npm
- Java 17+, Kestra runs on Java 11 but we hit a Java compiler bug fixed in Java 17
- Node 14+ and npm
- Python 3, pip and python venv
- Docker & Docker Compose
- an IDE (Intellij IDEA, Eclipse or VS Code)

Thanks to the Kestra community, if using VSCode, you can also start development on either the frontend or backend with a bootstrapped docker container without the need to manually set up the environment.

Check out the [README](../.devcontainer/README.md) for set-up instructions and the associated [Dockerfile](../.devcontainer/Dockerfile) in the repository to get started.

To start contributing:
- [Fork](https://docs.github.com/en/github/getting-started-with-github/fork-a-repo) the repository
- Clone the fork on your workstation:
@@ -50,7 +46,7 @@ git clone git@github.com:{YOUR_USERNAME}/kestra.git
cd kestra
```

#### Develop on the backend
#### Develop backend
The backend is made with [Micronaut](https://micronaut.io).

Open the cloned repository in your favorite IDE. In most decent IDEs, the Gradle build will be detected and all dependencies will be downloaded.
@@ -63,7 +59,7 @@ You can also build it from a terminal using `./gradlew build`, the Gradle wrappe
- Configure the following environment variables:
  - `MICRONAUT_ENVIRONMENTS`: can be set to any string and will load a custom configuration file in `cli/src/main/resources/application-{env}.yml`.
  - `KESTRA_PLUGINS_PATH`: is the path where you will save plugins as Jar and will be loaded on startup.
- See the screenshot below for an example: 
- See the screenshot bellow for an example: 
- If you encounter a **JavaScript heap out of memory** error during startup, configure the `NODE_OPTIONS` environment variable with some large value.
  - Example `NODE_OPTIONS: --max-old-space-size=4096` or `NODE_OPTIONS: --max-old-space-size=8192` 
- The server starts by default on port 8080 and is reachable on `http://localhost:8080`
@@ -76,17 +72,17 @@ python3 -m pip install virtualenv
```

#### Develop on the frontend
#### Develop frontend
The frontend is made with [Vue.js](https://vuejs.org/) and located in the `/ui` folder.

- `npm install`
- create a file `ui/.env.development.local` with content `VITE_APP_API_URL=http://localhost:8080` (or your actual server url)
- `npm install --force` (force is need because of some conflicting package)
- create a files `ui/.env.development.local` with content `VITE_APP_API_URL=http://localhost:8080` (or your actual server url)
- `npm run dev` will start the development server with hot reload.
- The server start by default on port 5173 and is reachable on `http://localhost:5173`
- The server start by default on port 8090 and is reachable on `http://localhost:5173`
- You can run `npm run build` in order to build the front-end that will be delivered from the backend (without running the `npm run dev` above).

Now, you need to start a backend server, you could:
- start a [local server](https://kestra.io/docs/administrator-guide/server-cli#kestra-local-development-server-with-no-dependencies) without a database using this docker-compose file already configured with CORS enabled:
- start a [local server](https://kestra.io/docs/administrator-guide/server-cli#kestra-local-development-server-with-no-dependencies) without database using this docker-compose file already configured with CORS enabled:
```yaml
services:
  kestra:
@@ -106,7 +102,7 @@ services:
    ports:
      - "8080:8080"
```
- start the [Develop backend](#develop-backend) from your IDE, you need to configure CORS restrictions when using the local development npm server, changing the backend configuration allowing the http://localhost:5173 origin in `cli/src/main/resources/application-override.yml`
- start the [Develop backend](#develop-backend) from your IDE and you need to configure CORS restrictions when using the local development npm server, changing the backend configuration allowing the http://localhost:5173 origin in `cli/src/main/resources/application-override.yml`

```yaml
micronaut:
@@ -140,4 +136,4 @@ A complete documentation for developing plugin can be found [here](https://kestr

### Improving The Documentation
The main documentation is located in a separate [repository](https://github.com/kestra-io/kestra.io).
For tasks documentation, they are located directly in the Java source, using [Swagger annotations](https://github.com/swagger-api/swagger-core/wiki/Swagger-2.X---Annotations) (Example: [for Bash tasks](https://github.com/kestra-io/kestra/blob/develop/core/src/main/java/io/kestra/core/tasks/scripts/AbstractBash.java))
For tasks documentation, they are located directly on Java source using [Swagger annotations](https://github.com/swagger-api/swagger-core/wiki/Swagger-2.X---Annotations) (Example: [for Bash tasks](https://github.com/kestra-io/kestra/blob/develop/core/src/main/java/io/kestra/core/tasks/scripts/AbstractBash.java))
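Taken together, the backend bootstrap described in the contributing guide above is just a few commands; a sketch (the `{YOUR_USERNAME}` placeholder comes from the guide itself):

```bash
# Clone your fork and build the backend with the Gradle wrapper.
git clone git@github.com:{YOUR_USERNAME}/kestra.git
cd kestra
./gradlew build   # the wrapper downloads all dependencies on first run
```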
54 .github/ISSUE_TEMPLATE/blueprint.yml vendored (new file)
@@ -0,0 +1,54 @@
name: Blueprint
description: Add a new blueprint

body:
  - type: markdown
    attributes:
      value: |
        Please fill out all the fields listed below. This will help us review and add your blueprint faster.

  - type: textarea
    attributes:
      label: Blueprint title
      description: A title briefly describing what the blueprint does, ideally in a verb phrase + noun format.
      placeholder: E.g. "Upload a file to service X, then run Y and Z"
    validations:
      required: true

  - type: textarea
    attributes:
      label: Source code
      description: Flow code that will appear on the Blueprint page.
      placeholder: |
        ```yaml
        id: yourFlowId
        namespace: blueprint
        tasks:
          - id: taskName
            type: task_type
        ```
    validations:
      required: true

  - type: textarea
    attributes:
      label: About this blueprint
      description: "A concise markdown documentation about the blueprint's configuration and usage."
      placeholder: |
        E.g. "This flow downloads a file and uploads it to an S3 bucket. This flow assumes AWS credentials stored as environment variables `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY`."
    validations:
      required: false

  - type: textarea
    attributes:
      label: Tags (optional)
      description: Blueprint categories such as Ingest, Transform, Analyze, Python, Docker, AWS, GCP, Azure, etc.
      placeholder: |
        - Ingest
        - Transform
        - AWS
    validations:
      required: false

labels:
  - blueprint
12 .github/ISSUE_TEMPLATE/bug.yml vendored
@@ -4,7 +4,9 @@ body:
  - type: markdown
    attributes:
      value: |
        Thanks for reporting an issue! Please provide a [Minimal Reproducible Example](https://stackoverflow.com/help/minimal-reproducible-example) and share any additional information that may help reproduce, troubleshoot, and hopefully fix the issue, including screenshots, error traceback, and your Kestra server logs. For quick questions, you can contact us directly on [Slack](https://kestra.io/slack).
        Thanks for reporting an issue! Please provide a [Minima Reproducible Example](https://stackoverflow.com/help/minimal-reproducible-example)
        and share any additional information that may help reproduce, troubleshoot, and hopefully fix the issue, including screenshots, error traceback, and your Kestra server logs.
        NOTE: If your issue is more of a question, please ping us directly on [Slack](https://kestra.io/slack).
  - type: textarea
    attributes:
      label: Describe the issue
@@ -17,10 +19,10 @@ body:
      label: Environment
      description: Environment information where the problem occurs.
      value: |
        - Kestra Version: develop
        - Kestra Version:
        - Operating System (OS/Docker/Kubernetes):
        - Java Version (if you don't run kestra in Docker):
    validations:
      required: false
labels:
  - bug
  - area/backend
  - area/frontend
  - bug
8 .github/ISSUE_TEMPLATE/feature.yml vendored
@@ -1,13 +1,15 @@
name: Feature request
description: Create a new feature request
body:
  - type: markdown
    attributes:
      value: |
        Please describe the feature you want Kestra to implement; before that, check whether there is already an existing issue for it.
  - type: textarea
    attributes:
      label: Feature description
      placeholder: Tell us more about your feature request
      placeholder: Tell us what feature you would like for Kestra to have and what problem is it going to solve
    validations:
      required: true
labels:
  - enhancement
  - area/backend
  - area/frontend
8 .github/ISSUE_TEMPLATE/other.yml vendored (new file)
@@ -0,0 +1,8 @@
name: Other
description: Something different
body:
  - type: textarea
    attributes:
      label: Issue description
    validations:
      required: true
7 .github/actions/plugins-list/action.yml vendored
@@ -13,9 +13,6 @@ outputs:
  plugins:
    description: "List of all Kestra plugins"
    value: ${{ steps.plugins.outputs.plugins }}
  repositories:
    description: "List of all Kestra repositories of plugins"
    value: ${{ steps.plugins.outputs.repositories }}
runs:
  using: composite
  steps:
@@ -23,7 +20,5 @@ runs:
      id: plugins
      shell: bash
      run: |
        PLUGINS=$([ -f ${{ inputs.plugin-file }} ] && cat ${{ inputs.plugin-file }} | grep "io\\.kestra\\." | sed -e '/#/s/^.//' | sed -e "s/LATEST/${{ inputs.plugin-version }}/g" | cut -d':' -f2- | xargs || echo '');
        REPOSITORIES=$([ -f ${{ inputs.plugin-file }} ] && cat ${{ inputs.plugin-file }} | grep "io\\.kestra\\." | sed -e '/#/s/^.//' | cut -d':' -f1 | uniq | sort | xargs || echo '')
        PLUGINS=$([ -f ${{ inputs.plugin-file }} ] && cat ${{ inputs.plugin-file }} | grep "io\\.kestra\\." | sed -e '/#/s/^.//' | sed -e "s/LATEST/${{ inputs.plugin-version }}/g" | xargs || echo '');
        echo "plugins=$PLUGINS" >> $GITHUB_OUTPUT
        echo "repositories=$REPOSITORIES" >> $GITHUB_OUTPUT
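The pipeline in the `plugins` step above filters a plugin list file down to `io.kestra.*` entries, uncomments them, substitutes `LATEST` with the requested version, and splits the repository name from the Maven coordinate. A rough local illustration of the same shell pipeline (the sample `.plugins` lines and the `0.19.0` version are hypothetical, not taken from the repository):

```bash
# Hypothetical plugin list: "<repository>:<group>:<artifact>:<version>" entries,
# optionally commented out with a leading '#'.
cat > .plugins <<'EOF'
#plugin-aws:io.kestra.plugin:plugin-aws:LATEST
plugin-gcp:io.kestra.plugin:plugin-gcp:LATEST
EOF

# Same shape as the action's PLUGINS pipeline, with LATEST pinned to 0.19.0:
cat .plugins | grep "io\.kestra\." | sed -e '/#/s/^.//' | sed -e "s/LATEST/0.19.0/g" | cut -d':' -f2- | xargs
# -> io.kestra.plugin:plugin-aws:0.19.0 io.kestra.plugin:plugin-gcp:0.19.0

# And the REPOSITORIES pipeline keeps only the first field:
cat .plugins | grep "io\.kestra\." | sed -e '/#/s/^.//' | cut -d':' -f1 | uniq | sort | xargs
# -> plugin-aws plugin-gcp
```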
30 .github/dependabot.yml vendored
@@ -1,31 +1,26 @@
# See GitHub's docs for more information on this file:
# https://docs.github.com/en/free-pro-team@latest/github/administering-a-repository/configuration-options-for-dependency-updates

version: 2
updates:
  # Maintain dependencies for GitHub Actions
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      # Check for updates to GitHub Actions every week
      interval: "weekly"
      day: "wednesday"
      time: "08:00"
      timezone: "Europe/Paris"
    open-pull-requests-limit: 50
    labels:
      - "dependency-upgrade"
    open-pull-requests-limit: 50

  # Maintain dependencies for Gradle modules
  - package-ecosystem: "gradle"
    directory: "/"
    schedule:
      # Check for updates to Gradle modules every week
      interval: "weekly"
      day: "wednesday"
      time: "08:00"
      timezone: "Europe/Paris"
    open-pull-requests-limit: 50
    labels:
      - "dependency-upgrade"
    open-pull-requests-limit: 50

  # Maintain dependencies for NPM modules
  - package-ecosystem: "npm"
@@ -36,15 +31,12 @@ updates:
      time: "08:00"
      timezone: "Europe/Paris"
    open-pull-requests-limit: 50
    labels:
      - "dependency-upgrade"
    labels: ["dependency-upgrade"]
    reviewers: ["MilosPaunovic"]
    ignore:
      # Ignore updates of version 1.x, as we're using the beta of 2.x (still in beta)
      # Ignore updates of version 1.x, as we're using beta of 2.x
      - dependency-name: "vue-virtual-scroller"
        versions:
          - "1.x"

      # Ignore updates to monaco-yaml, version is pinned to 5.3.1 due to patch-package script additions
      - dependency-name: "monaco-yaml"
        versions:
          - ">=5.3.2"
        versions: ["1.x"]
      # Ignore major versions greater than 8, as it's still known to be flaky
      - dependency-name: "eslint"
        versions: [">8"]
BIN .github/run-app.png vendored — binary file not shown (before: 210 KiB)
67 .github/workflows/auto-translate-ui-keys.yml vendored
@@ -1,67 +0,0 @@
name: Auto-Translate UI keys and create PR

on:
  schedule:
    - cron: "0 9-21 * * *" # Every hour from 9 AM to 9 PM
  workflow_dispatch:
    inputs:
      retranslate_modified_keys:
        description: "Whether to re-translate modified keys even if they already have translations."
        type: choice
        options:
          - "false"
          - "true"
        default: "false"
        required: false

jobs:
  translations:
    name: Translations
    runs-on: ubuntu-latest
    timeout-minutes: 10
    steps:
      - uses: actions/checkout@v4
        name: Checkout
        with:
          fetch-depth: 0

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.x"

      - name: Install Python dependencies
        run: pip install gitpython openai

      - name: Generate translations
        run: python ui/src/translations/generate_translations.py ${{ github.event.inputs.retranslate_modified_keys }}
        env:
          OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}

      - name: Set up Node
        uses: actions/setup-node@v4
        with:
          node-version: "20.x"

      - name: Set up Git
        run: |
          git config --global user.name "GitHub Action"
          git config --global user.email "actions@github.com"

      - name: Commit and create PR
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          BRANCH_NAME="chore/update-translations-$(date +%s)"
          git checkout -b $BRANCH_NAME
          git add ui/src/translations/*.json
          if git diff --cached --quiet; then
            echo "No changes to commit. Exiting with success."
            exit 0
          fi
          git commit -m "chore(core): localize to languages other than english" -m "Extended localization support by adding translations for multiple languages using English as the base. This enhances accessibility and usability for non-English-speaking users while keeping English as the source reference."
          git push -u origin $BRANCH_NAME || (git push origin --delete $BRANCH_NAME && git push -u origin $BRANCH_NAME)
          gh pr create --title "Translations from en.json" --body "This PR was created automatically by a GitHub Action." --base develop --head $BRANCH_NAME --assignee anna-geller --reviewer anna-geller

      - name: Check keys matching
        run: node ui/src/translations/check.js
117 .github/workflows/check.yml vendored (new file)
@@ -0,0 +1,117 @@
name: Daily Core check

on:
  schedule:
    - cron: '0 5 * * *'

jobs:
  check:
    env:
      SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
      GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
    name: Check & Publish
    runs-on: ubuntu-latest
    timeout-minutes: 60
    steps:
      # Checkout
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      # Checkout GitHub Actions
      - uses: actions/checkout@v4
        with:
          repository: kestra-io/actions
          path: actions
          ref: main

      # Setup build
      - uses: ./actions/.github/actions/setup-build
        id: build
        with:
          java-enabled: true
          node-enabled: true
          python-enabled: true
          caches-enabled: true

      # Services
      - name: Build the docker-compose stack
        run: docker compose -f docker-compose-ci.yml up -d
        if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}

      # Gradle check
      - name: Build with Gradle
        if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}
        env:
          GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
        run: |
          echo $GOOGLE_SERVICE_ACCOUNT | base64 -d > ~/.gcp-service-account.json
          export GOOGLE_APPLICATION_CREDENTIALS=$HOME/.gcp-service-account.json
          ./gradlew check javadoc --parallel --refresh-dependencies

      # Sonar
      - name: Analyze with Sonar
        if: ${{ env.SONAR_TOKEN != 0 && (github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '') }}
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
        run: ./gradlew sonar --info

      # Allure check
      - name: Auth to Google Cloud
        id: auth
        if: ${{ always() && env.GOOGLE_SERVICE_ACCOUNT != 0 }}
        uses: 'google-github-actions/auth@v2'
        with:
          credentials_json: '${{ secrets.GOOGLE_SERVICE_ACCOUNT }}'

      - uses: rlespinasse/github-slug-action@v4

      - name: Publish allure report
        uses: andrcuns/allure-publish-action@v2.7.1
        if: ${{ always() && env.GOOGLE_SERVICE_ACCOUNT != 0 && (github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '') }}
        env:
          GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          JAVA_HOME: /usr/lib/jvm/default-jvm/
        with:
          storageType: gcs
          resultsGlob: "**/build/allure-results"
          bucket: internal-kestra-host
          baseUrl: "https://internal.kestra.io"
          prefix: ${{ format('{0}/{1}/{2}', github.repository, env.GITHUB_HEAD_REF_SLUG != '' && env.GITHUB_HEAD_REF_SLUG || github.ref_name, 'allure/java') }}
          copyLatest: true
          ignoreMissingResults: true

      # Jacoco
      - name: 'Set up Cloud SDK'
        if: ${{ env.GOOGLE_SERVICE_ACCOUNT != 0 && (github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '') }}
        uses: 'google-github-actions/setup-gcloud@v2'

      - name: 'Copy jacoco files'
        if: ${{ env.GOOGLE_SERVICE_ACCOUNT != 0 && (github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '') }}
        run: |
          mv build/reports/jacoco/testCodeCoverageReport build/reports/jacoco/test/
          mv build/reports/jacoco/test/testCodeCoverageReport.xml build/reports/jacoco/jacocoTestReport.xml
          gsutil -m rsync -d -r build/reports/jacoco/test/ gs://internal-kestra-host/${{ format('{0}/{1}/{2}', github.repository, env.GITHUB_HEAD_REF_SLUG != '' && env.GITHUB_HEAD_REF_SLUG || github.ref_name, 'jacoco') }}

      # report test
      - name: Test Report
        uses: mikepenz/action-junit-report@v4
        if: success() || failure()
        with:
          report_paths: '**/build/test-results/**/TEST-*.xml'

      # Codecov
      - uses: codecov/codecov-action@v4
        if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}
        with:
          token: ${{ secrets.CODECOV_TOKEN }}

      - name: Notify failed CI
        id: send-ci-failed
        if: always() && job.status != 'success'
        uses: kestra-io/actions/.github/actions/send-ci-failed@main
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
@@ -6,11 +6,11 @@
name: "CodeQL"

on:
  push:
    branches: [develop]
  schedule:
    - cron: '0 5 * * 1'

  workflow_dispatch: {}

jobs:
  analyze:
    name: Analyze
@@ -51,23 +51,13 @@ jobs:
      # Set up JDK
      - name: Set up JDK
        uses: actions/setup-java@v4
        if: ${{ matrix.language == 'java' }}
        with:
          distribution: 'temurin'
          java-version: 21

      - name: Setup gradle
        if: ${{ matrix.language == 'java' }}
        uses: gradle/actions/setup-gradle@v4

      - name: Build with Gradle
        if: ${{ matrix.language == 'java' }}
        run: ./gradlew testClasses -x :ui:assembleFrontend

      # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
      # If this step fails, then you should remove it and run the build manually (see below)
      - name: Autobuild
        if: ${{ matrix.language != 'java' }}
        uses: github/codeql-action/autobuild@v3

      # ℹ️ Command-line programs to run using the OS shell.
28 .github/workflows/docker.yml vendored
@@ -1,4 +1,4 @@
name: Create Docker images on Release
name: Create Docker images on tag

on:
  workflow_dispatch:
@@ -11,10 +11,6 @@ on:
        options:
          - "true"
          - "false"
      release-tag:
        description: 'Kestra Release Tag'
        required: false
        type: string
      plugin-version:
        description: 'Plugin version'
        required: false
@@ -42,16 +38,17 @@ jobs:
    name: Publish Docker
    needs: [ plugins ]
    runs-on: ubuntu-latest
    if: startsWith(github.ref, 'refs/tags/v')
    strategy:
      matrix:
        image:
          - name: "-no-plugins"
            plugins: ""
            packages: jattach
            packages: ""
            python-libs: ""
          - name: ""
            plugins: ${{needs.plugins.outputs.plugins}}
            packages: python3 python-is-python3 python3-pip curl jattach
            packages: python3 python3-venv python-is-python3 python3-pip nodejs npm curl zip unzip
            python-libs: kestra
    steps:
      - uses: actions/checkout@v4
@@ -60,22 +57,13 @@ jobs:
      - name: Set image name
        id: vars
        run: |
          if [[ "${{ inputs.release-tag }}" == "" ]]; then
            TAG=${GITHUB_REF#refs/*/}
            echo "tag=${TAG}" >> $GITHUB_OUTPUT
          else
            TAG="${{ inputs.release-tag }}"
            echo "tag=${TAG}" >> $GITHUB_OUTPUT
          fi
          TAG=${GITHUB_REF#refs/*/}
          echo "tag=${TAG}" >> $GITHUB_OUTPUT
          echo "plugins=${{ matrix.image.plugins }}" >> $GITHUB_OUTPUT

          if [[ "${{ env.PLUGIN_VERSION }}" == *"-SNAPSHOT" ]]; then
            echo "plugins=--repositories=https://s01.oss.sonatype.org/content/repositories/snapshots ${{ matrix.image.plugins }}" >> $GITHUB_OUTPUT;
          else
            echo "plugins=${{ matrix.image.plugins }}" >> $GITHUB_OUTPUT
          fi
      # Download release
      - name: Download release
        uses: robinraju/release-downloader@v1.12
        uses: robinraju/release-downloader@v1.11
        with:
          tag: ${{steps.vars.outputs.tag}}
          fileName: 'kestra-*'
197 .github/workflows/e2e.yml vendored
@@ -1,77 +1,164 @@
name: 'E2E tests revival'
description: 'New E2E tests implementation started by Roman. Based on playwright in npm UI project, tests Kestra OSS develop docker image. These tests are written from zero, lets make them unflaky from the start!.'
name: 'Reusable Workflow for Running End-to-End Tests'
on:
  schedule:
    - cron: "0 * * * *" # Every hour
  workflow_call:
    inputs:
      noInputYet:
        description: 'not input yet.'
        required: false
      tags:
        description: "Tags used for filtering tests to include for QA."
        type: string
        default: "no input"
  workflow_dispatch:
    inputs:
      noInputYet:
        description: 'not input yet.'
        required: false
        required: true
      docker-artifact-name:
        description: "The GitHub artifact containing the Kestra docker image."
        type: string
        default: "no input"
        required: false
      docker-image-tag:
        description: "The Docker image Tag for Kestra"
        default: 'kestra/kestra:develop'
        type: string
        required: true
      backend:
        description: "The Kestra backend type to be used for E2E tests."
        type: string
        required: true
        default: "postgres"
    secrets:
      GITHUB_AUTH_TOKEN:
        description: "The GitHub Token."
        required: true
      GOOGLE_SERVICE_ACCOUNT:
        description: "The Google Service Account."
        required: false
jobs:
  check:
    timeout-minutes: 10
    timeout-minutes: 60
    runs-on: ubuntu-latest
    env:
      GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
      E2E_TEST_DOCKER_DIR: ./kestra/e2e-tests/docker
      KESTRA_BASE_URL: http://127.27.27.27:8080/ui/
    steps:
      - name: Login to DockerHub
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ github.token }}

      # Checkout kestra
      - name: Checkout kestra
        uses: actions/checkout@v4
        with:
          path: kestra

      - name: Install Npm dependencies
      # Checkout GitHub Actions
      - uses: actions/checkout@v4
        with:
          repository: kestra-io/actions
          path: actions
          ref: main

      # Setup build
      - uses: ./actions/.github/actions/setup-build
        id: build
        with:
          java-enabled: true
          caches-enabled: true

      # Get Docker Image
      - name: Download Kestra Image
        if: inputs.docker-artifact-name != ''
        uses: actions/download-artifact@v4
        with:
          name: ${{ inputs.docker-artifact-name }}
          path: /tmp

      - name: Load Kestra Image
        if: inputs.docker-artifact-name != ''
        run: |
          cd kestra/ui
          npm i
          npx playwright install --with-deps chromium
          docker load --input /tmp/${{ inputs.docker-artifact-name }}.tar

      # Docker Compose
      - name: Login to DockerHub
        uses: docker/login-action@v3
        if: inputs.docker-artifact-name == ''
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ github.token }}

      # Build configuration
      - name: Create additional application configuration
        run: |
          touch ${{ env.E2E_TEST_DOCKER_DIR }}/data/application-secrets.yml

      - name: Setup additional application configuration
        if: env.APPLICATION_SECRETS != null
        env:
          APPLICATION_SECRETS: ${{ secrets.APPLICATION_SECRETS }}
        run: |
          echo $APPLICATION_SECRETS | base64 -d > ${{ env.E2E_TEST_DOCKER_DIR }}/data/application-secrets.yml

      # Deploy Docker Compose Stack
      - name: Run Kestra (${{ inputs.backend }})
        env:
          KESTRA_DOCKER_IMAGE: ${{ inputs.docker-image-tag }}
        run: |
          cd ${{ env.E2E_TEST_DOCKER_DIR }}
          echo "KESTRA_DOCKER_IMAGE=$KESTRA_DOCKER_IMAGE" >> .env
          docker compose -f docker-compose-${{ inputs.backend }}.yml up -d

      - name: Install Playwright Deps
        run: |
          cd kestra
          ./gradlew playwright --args="install-deps"

      # Run E2E Tests
      - name: Wait For Kestra UI
        run: |
          # Start time
          START_TIME=$(date +%s)
          # Timeout duration in seconds (5 minutes)
          TIMEOUT_DURATION=$((5 * 60))
          while [ $(curl -s -L -o /dev/null -w %{http_code} $KESTRA_BASE_URL) != 200 ]; do
            echo -e $(date) "\tKestra server HTTP state: " $(curl -k -L -s -o /dev/null -w %{http_code} $KESTRA_BASE_URL) " (waiting for 200)";
            # Check the elapsed time
            CURRENT_TIME=$(date +%s)
            ELAPSED_TIME=$((CURRENT_TIME - START_TIME))
            # Break the loop if the elapsed time exceeds the timeout duration
            if [ $ELAPSED_TIME -ge $TIMEOUT_DURATION ]; then
              echo "Timeout reached: Exiting after 5 minutes."
              exit 1;
            fi
            sleep 2;
          done;
          echo "Kestra is running: $KESTRA_BASE_URL 🚀";
        continue-on-error: true

      - name: Run E2E Tests (${{ inputs.tags }})
        if: inputs.tags != ''
        run: |
          cd kestra
          ./gradlew e2eTestsCheck -P tags=${{ inputs.tags }}

      - name: Run E2E Tests
        if: inputs.tags == ''
        run: |
          cd kestra/ui
          npm run test:e2e
          cd kestra
          ./gradlew e2eTestsCheck

      - name: Upload Playwright Report as Github artifact
        # 'With this report, you can analyze locally the results of the tests. see https://playwright.dev/docs/ci-intro#html-report'
        uses: actions/upload-artifact@v4
        if: ${{ !cancelled() }}
      # Allure check
      - name: Auth to Google Cloud
        id: auth
        if: ${{ !cancelled() && env.GOOGLE_SERVICE_ACCOUNT != 0 }}
        uses: 'google-github-actions/auth@v2'
        with:
          name: playwright-report
          path: kestra/playwright-report/
          retention-days: 7
      # Allure check
      # TODO I don't know what it should do
      # - uses: rlespinasse/github-slug-action@v5
      #   name: Allure - Generate slug variables
      #
      # - name: Allure - Publish report
      #   uses: andrcuns/allure-publish-action@v2.9.0
      #   if: always() && env.GOOGLE_SERVICE_ACCOUNT != ''
      #   continue-on-error: true
      #   env:
      #     GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_AUTH_TOKEN }}
      #     JAVA_HOME: /usr/lib/jvm/default-jvm/
      #   with:
      #     storageType: gcs
      #     resultsGlob: "**/build/allure-results"
      #     bucket: internal-kestra-host
      #     baseUrl: "https://internal.dev.kestra.io"
      #     prefix: ${{ format('{0}/{1}', github.repository, 'allure/java') }}
      #     copyLatest: true
      #     ignoreMissingResults: true
          credentials_json: '${{ secrets.GOOGLE_SERVICE_ACCOUNT }}'

      - uses: rlespinasse/github-slug-action@v4

      - name: Publish allure report
        uses: andrcuns/allure-publish-action@v2.7.1
        if: ${{ !cancelled() && env.GOOGLE_SERVICE_ACCOUNT != 0 }}
        env:
          GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_AUTH_TOKEN }}
          JAVA_HOME: /usr/lib/jvm/default-jvm/
        with:
          storageType: gcs
          resultsGlob: build/allure-results
          bucket: internal-kestra-host
          baseUrl: "https://internal.kestra.io"
          prefix: ${{ format('{0}/{1}/{2}', github.repository, env.GITHUB_HEAD_REF_SLUG != '' && env.GITHUB_HEAD_REF_SLUG || github.ref_name, 'allure/playwright') }}
          copyLatest: true
          ignoreMissingResults: true
111 .github/workflows/generate_translations.yml vendored (new file)
@@ -0,0 +1,111 @@
name: Generate Translations

on:
  pull_request:
    types: [opened, synchronize]
    paths:
      - "ui/src/translations/en.json"

  push:
    branches:
      - develop

env:
  OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}

jobs:
  commit:
    name: Commit directly to PR
    runs-on: ubuntu-latest
    if: ${{ github.event.pull_request.head.repo.fork == false }}
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 10
          ref: ${{ github.head_ref }}

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.x"

      - name: Install Python dependencies
        run: pip install gitpython openai

      - name: Generate translations
        run: python ui/src/translations/generate_translations.py

      - name: Set up Node
        uses: actions/setup-node@v4
        with:
          node-version: "20.x"

      - name: Check keys matching
        run: node ui/src/translations/check.js

      - name: Set up Git
        run: |
          git config --global user.name "GitHub Action"
          git config --global user.email "actions@github.com"

      - name: Check for changes and commit
        env:
          GH_TOKEN: ${{ github.token }}
        run: |
          git add ui/src/translations/*.json
          if git diff --cached --quiet; then
            echo "No changes to commit. Exiting with success."
            exit 0
          fi
          git commit -m "chore(translations): auto generate values for languages other than english"
          git push origin ${{ github.head_ref }}

  pull_request:
    name: Open PR for a forked repository
    runs-on: ubuntu-latest
    if: ${{ github.event.pull_request.head.repo.fork == true }}
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 10

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.x"

      - name: Install Python dependencies
        run: pip install gitpython openai

      - name: Generate translations
        run: python ui/src/translations/generate_translations.py

      - name: Set up Node
        uses: actions/setup-node@v4
        with:
          node-version: "20.x"

      - name: Check keys matching
        run: node ui/src/translations/check.js

      - name: Set up Git
        run: |
          git config --global user.name "GitHub Action"
          git config --global user.email "actions@github.com"

      - name: Create and push a new branch
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          BRANCH_NAME="generated-translations-${{ github.event.pull_request.head.repo.name }}"

          git checkout -b $BRANCH_NAME
          git add ui/src/translations/*.json
          if git diff --cached --quiet; then
            echo "No changes to commit. Exiting with success."
            exit 0
          fi
          git commit -m "chore(translations): auto generate values for languages other than english"
          git push origin $BRANCH_NAME
82 .github/workflows/gradle-release-plugins.yml vendored
@@ -1,82 +0,0 @@
name: Run Gradle Release for Kestra Plugins

on:
  workflow_dispatch:
    inputs:
      releaseVersion:
        description: 'The release version (e.g., 0.21.0-rc1)'
        required: true
        type: string
      nextVersion:
        description: 'The next version (e.g., 0.22.0-SNAPSHOT)'
        required: true
        type: string
      dryRun:
        description: 'Use DRY_RUN mode'
        required: false
        default: 'false'
jobs:
  release:
    name: Release plugins
    runs-on: ubuntu-latest
    steps:
      # Checkout
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      # Checkout GitHub Actions
      - uses: actions/checkout@v4
        with:
          repository: kestra-io/actions
          path: actions
          ref: main

      # Setup build
      - uses: ./actions/.github/actions/setup-build
        id: build
        with:
          java-enabled: true
          node-enabled: true
          python-enabled: true
          caches-enabled: true

      # Get Plugins List
      - name: Get Plugins List
        uses: ./.github/actions/plugins-list
        id: plugins-list
        with:
          plugin-version: 'LATEST'

      - name: 'Configure Git'
        run: |
          git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
          git config --global user.name "github-actions[bot]"

      # Execute
      - name: Run Gradle Release
        if: ${{ github.event.inputs.dryRun == 'false' }}
        env:
          GITHUB_PAT: ${{ secrets.GH_PERSONAL_TOKEN }}
        run: |
          chmod +x ./dev-tools/release-plugins.sh;

          ./dev-tools/release-plugins.sh \
            --release-version=${{github.event.inputs.releaseVersion}} \
            --next-version=${{github.event.inputs.nextVersion}} \
            --yes \
            ${{ steps.plugins-list.outputs.repositories }}

      - name: Run Gradle Release (DRY_RUN)
        if: ${{ github.event.inputs.dryRun == 'true' }}
        env:
          GITHUB_PAT: ${{ secrets.GH_PERSONAL_TOKEN }}
        run: |
          chmod +x ./dev-tools/release-plugins.sh;

          ./dev-tools/release-plugins.sh \
            --release-version=${{github.event.inputs.releaseVersion}} \
            --next-version=${{github.event.inputs.nextVersion}} \
            --dry-run \
            --yes \
            ${{ steps.plugins-list.outputs.repositories }}
92 .github/workflows/gradle-release.yml vendored
@@ -1,92 +0,0 @@
name: Run Gradle Release
run-name: "Releasing Kestra ${{ github.event.inputs.releaseVersion }} 🚀"
on:
  workflow_dispatch:
    inputs:
      releaseVersion:
        description: 'The release version (e.g., 0.21.0-rc1)'
        required: true
        type: string
      nextVersion:
        description: 'The next version (e.g., 0.22.0-SNAPSHOT)'
        required: true
        type: string
env:
  RELEASE_VERSION: "${{ github.event.inputs.releaseVersion }}"
  NEXT_VERSION: "${{ github.event.inputs.nextVersion }}"
jobs:
  release:
    name: Release Kestra
    runs-on: ubuntu-latest
    if: github.ref == 'refs/heads/develop'
    steps:
      # Checks
      - name: Check Inputs
        run: |
          if ! [[ "$RELEASE_VERSION" =~ ^[0-9]+(\.[0-9]+)\.0-rc[01](-SNAPSHOT)?$ ]]; then
            echo "Invalid release version. Must match regex: ^[0-9]+(\.[0-9]+)\.0-rc[01](-SNAPSHOT)?$"
            exit 1
          fi

          if ! [[ "$NEXT_VERSION" =~ ^[0-9]+(\.[0-9]+)\.0-SNAPSHOT$ ]]; then
            echo "Invalid next version. Must match regex: ^[0-9]+(\.[0-9]+)\.0-SNAPSHOT$"
            exit 1;
          fi
      # Checkout
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
          path: kestra

      # Checkout GitHub Actions
      - uses: actions/checkout@v4
        with:
          repository: kestra-io/actions
          path: actions
          ref: main

      # Setup build
      - uses: ./actions/.github/actions/setup-build
        id: build
        with:
          java-enabled: true
          node-enabled: true
          python-enabled: true
          caches-enabled: true

      - name: Configure Git
        run: |
          git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
          git config --global user.name "github-actions[bot]"

      # Execute
      - name: Run Gradle Release
        env:
          GITHUB_PAT: ${{ secrets.GH_PERSONAL_TOKEN }}
        run: |
          # Extract the major and minor versions
          BASE_VERSION=$(echo "$RELEASE_VERSION" | sed -E 's/^([0-9]+\.[0-9]+)\..*/\1/')
          PUSH_RELEASE_BRANCH="releases/v${BASE_VERSION}.x"

          cd kestra

          # Create and push release branch
          git checkout -b "$PUSH_RELEASE_BRANCH";
          git push -u origin "$PUSH_RELEASE_BRANCH";

          # Run gradle release
          git checkout develop;

          if [[ "$RELEASE_VERSION" == *"-SNAPSHOT" ]]; then
            # the -SNAPSHOT qualifier may be used to test release candidates
            ./gradlew release -Prelease.useAutomaticVersion=true \
              -Prelease.releaseVersion="${RELEASE_VERSION}" \
              -Prelease.newVersion="${NEXT_VERSION}" \
              -Prelease.pushReleaseVersionBranch="${PUSH_RELEASE_BRANCH}" \
              -Prelease.failOnSnapshotDependencies=false
          else
            ./gradlew release -Prelease.useAutomaticVersion=true \
              -Prelease.releaseVersion="${RELEASE_VERSION}" \
              -Prelease.newVersion="${NEXT_VERSION}" \
              -Prelease.pushReleaseVersionBranch="${PUSH_RELEASE_BRANCH}"
          fi
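The sed expression in the release step above strips the patch number and qualifier from the release version to derive the release branch name. A quick illustration, runnable in any shell (the sample version comes from the workflow's own input description):

```bash
# Derive the release branch the same way the workflow does.
RELEASE_VERSION="0.21.0-rc1"
BASE_VERSION=$(echo "$RELEASE_VERSION" | sed -E 's/^([0-9]+\.[0-9]+)\..*/\1/')
PUSH_RELEASE_BRANCH="releases/v${BASE_VERSION}.x"
echo "$PUSH_RELEASE_BRANCH"   # prints: releases/v0.21.x
```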
475 .github/workflows/main.yml vendored
@@ -1,73 +1,466 @@
name: Main Workflow
name: Main

on:
  workflow_dispatch:
    inputs:
      plugin-version:
        description: "Kestra version"
        default: 'LATEST'
        required: true
        type: string
  push:
    branches:
      - master
      - main
      - releases/*
      - develop
    tags:
      - v*
  pull_request:
    branches:
      - develop
  repository_dispatch:
    types: [ rebuild ]
  workflow_dispatch:
    inputs:
      skip-test:
        description: 'Skip test'
        type: choice
        required: true
        default: 'false'
        options:
          - "true"
          - "false"
      plugin-version:
        description: 'Plugin version'
        required: false
        type: string
        default: "LATEST"

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-main
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

env:
  JAVA_VERSION: '21'
  DOCKER_APT_PACKAGES: python3 python3-venv python-is-python3 python3-pip nodejs npm curl zip unzip
  DOCKER_PYTHON_LIBRARIES: kestra
  PLUGIN_VERSION: ${{ github.event.inputs.plugin-version != null && github.event.inputs.plugin-version || 'LATEST' }}
jobs:
  tests:
    name: Execute tests
    uses: ./.github/workflows/workflow-test.yml
  build-artifacts:
    name: Build Artifacts
    runs-on: ubuntu-latest
    timeout-minutes: 60
    outputs:
      docker-tag: ${{ steps.vars.outputs.tag }}
      docker-artifact-name: ${{ steps.vars.outputs.artifact }}
      plugins: ${{ steps.plugins-list.outputs.plugins }}
    steps:
      # Checkout
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      # Checkout GitHub Actions
      - uses: actions/checkout@v4
        with:
          repository: kestra-io/actions
          path: actions
          ref: main

      # Setup build
      - uses: ./actions/.github/actions/setup-build
        id: build
        with:
          java-enabled: true
          node-enabled: true
          caches-enabled: true

      # Get Plugins List
      - name: Get Plugins List
        uses: ./.github/actions/plugins-list
        if: "!startsWith(github.ref, 'refs/tags/v')"
        id: plugins-list
        with:
          plugin-version: ${{ env.PLUGIN_VERSION }}

      # Set Plugins List
      - name: Set Plugin List
        id: plugins
        if: "!startsWith(github.ref, 'refs/tags/v')"
        run: |
          PLUGINS="${{ steps.plugins-list.outputs.plugins }}"
          TAG=${GITHUB_REF#refs/*/}
          if [[ $TAG = "master" || $TAG == v* ]]; then
            echo "plugins=$PLUGINS" >> $GITHUB_OUTPUT
          else
            echo "plugins=--repositories=https://s01.oss.sonatype.org/content/repositories/snapshots $PLUGINS" >> $GITHUB_OUTPUT
          fi
      # Build
      - name: Build with Gradle
        run: |
          ./gradlew executableJar

      - name: Copy exe to image
        run: |
          cp build/executable/* docker/app/kestra && chmod +x docker/app/kestra

      # Docker Tag
      - name: Set up Vars
        id: vars
        run: |
          TAG=${GITHUB_REF#refs/*/}
          if [[ $TAG = "master" ]]
          then
            TAG="latest";
          elif [[ $TAG = "develop" ]]
          then
            TAG="develop";
          elif [[ $TAG = v* ]]
          then
            TAG="${TAG}";
          else
            TAG="build-${{ github.run_id }}";
          fi
          echo "tag=${TAG}" >> $GITHUB_OUTPUT
          echo "artifact=docker-kestra-${TAG}" >> $GITHUB_OUTPUT

      # Docker setup
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      # Docker Build
      - name: Build & Export Docker Image
        uses: docker/build-push-action@v6
        if: "!startsWith(github.ref, 'refs/tags/v')"
        with:
          context: .
          push: false
          file: Dockerfile
          tags: |
            kestra/kestra:${{ steps.vars.outputs.tag }}
          build-args: |
            KESTRA_PLUGINS=${{ steps.plugins.outputs.plugins }}
            APT_PACKAGES=${{ env.DOCKER_APT_PACKAGES }}
            PYTHON_LIBRARIES=${{ env.DOCKER_PYTHON_LIBRARIES }}
          outputs: type=docker,dest=/tmp/${{ steps.vars.outputs.artifact }}.tar

      # Upload artifacts
      - name: Upload JAR
        uses: actions/upload-artifact@v4
        with:
          name: jar
          path: build/libs/

      - name: Upload Executable
        uses: actions/upload-artifact@v4
        with:
          name: exe
          path: build/executable/

      - name: Upload Docker
        uses: actions/upload-artifact@v4
        if: "!startsWith(github.ref, 'refs/tags/v')"
        with:
          name: ${{ steps.vars.outputs.artifact }}
          path: /tmp/${{ steps.vars.outputs.artifact }}.tar
  # Run Reusable Workflow from QA repository
  check-e2e:
    name: Check E2E Tests
    needs: build-artifacts
    if: ${{ (github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '') && !startsWith(github.ref, 'refs/tags/v') }}
    uses: ./.github/workflows/e2e.yml
    strategy:
      fail-fast: false
      matrix:
        backends: ["postgres"]
    with:
      report-status: false
      tags: oss
      docker-artifact-name: ${{ needs.build-artifacts.outputs.docker-artifact-name }}
      docker-image-tag: kestra/kestra:${{ needs.build-artifacts.outputs.docker-tag }}
      backend: ${{ matrix.backends }}
    secrets:
      GITHUB_AUTH_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}
      GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
|
||||
check:
|
||||
env:
|
||||
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
|
||||
GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
|
||||
name: Check & Publish
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 60
|
||||
steps:
|
||||
# Checkout
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
# Checkout GitHub Actions
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
repository: kestra-io/actions
|
||||
path: actions
|
||||
ref: main
|
||||
|
||||
# Setup build
|
||||
- uses: ./actions/.github/actions/setup-build
|
||||
id: build
|
||||
with:
|
||||
java-enabled: true
|
||||
node-enabled: true
|
||||
python-enabled: true
|
||||
caches-enabled: true
|
||||
|
||||
# Services
|
||||
- name: Build the docker-compose stack
|
||||
run: docker compose -f docker-compose-ci.yml up -d
|
||||
if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}
|
||||
|
||||
# Gradle check
|
||||
- name: Build with Gradle
|
||||
if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}
|
||||
env:
|
||||
GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
|
||||
run: |
|
||||
echo $GOOGLE_SERVICE_ACCOUNT | base64 -d > ~/.gcp-service-account.json
|
||||
export GOOGLE_APPLICATION_CREDENTIALS=$HOME/.gcp-service-account.json
|
||||
./gradlew check javadoc --parallel
|
||||
|
||||
# # Sonar
|
||||
# - name: Analyze with Sonar
|
||||
# if: ${{ env.SONAR_TOKEN != 0 && (github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '') }}
|
||||
# env:
|
||||
# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
# SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
|
||||
# run: ./gradlew sonar --info
|
||||
|
||||
# Allure check
|
||||
- name: Auth to Google Cloud
|
||||
id: auth
|
||||
if: ${{ always() && env.GOOGLE_SERVICE_ACCOUNT != 0 }}
|
||||
uses: 'google-github-actions/auth@v2'
|
||||
with:
|
||||
credentials_json: '${{ secrets.GOOGLE_SERVICE_ACCOUNT }}'
|
||||
|
||||
- uses: rlespinasse/github-slug-action@v4
|
||||
|
||||
- name: Publish allure report
|
||||
uses: andrcuns/allure-publish-action@v2.7.1
|
||||
if: ${{ always() && env.GOOGLE_SERVICE_ACCOUNT != 0 && (github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '') }}
|
||||
env:
|
||||
GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
JAVA_HOME: /usr/lib/jvm/default-jvm/
|
||||
with:
|
||||
storageType: gcs
|
||||
resultsGlob: "**/build/allure-results"
|
||||
bucket: internal-kestra-host
|
||||
baseUrl: "https://internal.kestra.io"
|
||||
prefix: ${{ format('{0}/{1}/{2}', github.repository, env.GITHUB_HEAD_REF_SLUG != '' && env.GITHUB_HEAD_REF_SLUG || github.ref_name, 'allure/java') }}
|
||||
copyLatest: true
|
||||
ignoreMissingResults: true
|
||||
|
||||
# Jacoco
|
||||
- name: 'Set up Cloud SDK'
|
||||
if: ${{ env.GOOGLE_SERVICE_ACCOUNT != 0 && (github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '') }}
|
||||
uses: 'google-github-actions/setup-gcloud@v2'
|
||||
|
||||
- name: 'Copy jacoco files'
|
||||
if: ${{ env.GOOGLE_SERVICE_ACCOUNT != 0 && (github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '') }}
|
||||
run: |
|
||||
mv build/reports/jacoco/testCodeCoverageReport build/reports/jacoco/test/
|
||||
mv build/reports/jacoco/test/testCodeCoverageReport.xml build/reports/jacoco/jacocoTestReport.xml
|
||||
gsutil -m rsync -d -r build/reports/jacoco/test/ gs://internal-kestra-host/${{ format('{0}/{1}/{2}', github.repository, env.GITHUB_HEAD_REF_SLUG != '' && env.GITHUB_HEAD_REF_SLUG || github.ref_name, 'jacoco') }}
|
||||
|
||||
# report test
|
||||
- name: Test Report
|
||||
uses: mikepenz/action-junit-report@v4
|
||||
if: success() || failure()
|
||||
with:
|
||||
report_paths: '**/build/test-results/**/TEST-*.xml'
|
||||
|
||||
# Codecov
|
||||
- uses: codecov/codecov-action@v4
|
||||
if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}
|
||||
with:
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
|
||||
release:
|
||||
name: Release
|
||||
needs: [tests]
|
||||
if: "!startsWith(github.ref, 'refs/heads/releases')"
|
||||
uses: ./.github/workflows/workflow-release.yml
|
||||
with:
|
||||
plugin-version: ${{ github.event.inputs.plugin-version != null && github.event.inputs.plugin-version || 'LATEST' }}
|
||||
secrets:
|
||||
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }}
|
||||
SONATYPE_USER: ${{ secrets.SONATYPE_USER }}
|
||||
SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }}
|
||||
SONATYPE_GPG_KEYID: ${{ secrets.SONATYPE_GPG_KEYID }}
|
||||
SONATYPE_GPG_PASSWORD: ${{ secrets.SONATYPE_GPG_PASSWORD }}
|
||||
SONATYPE_GPG_FILE: ${{ secrets.SONATYPE_GPG_FILE }}
|
||||
name: Github Release
|
||||
runs-on: ubuntu-latest
|
||||
needs: [ check, check-e2e ]
|
||||
if: |
|
||||
always() &&
|
||||
startsWith(github.ref, 'refs/tags/v') &&
|
||||
needs.check.result == 'success' &&
|
||||
(needs.check-e2e.result == 'skipped' || needs.check-e2e.result == 'success')
|
||||
steps:
|
||||
# Download Exec
|
||||
- name: Download executable
|
||||
uses: actions/download-artifact@v4
|
||||
if: startsWith(github.ref, 'refs/tags/v')
|
||||
with:
|
||||
name: exe
|
||||
path: build/executable
|
||||
|
||||
# GitHub Release
|
||||
- name: Create GitHub release
|
||||
uses: "marvinpinto/action-automatic-releases@latest"
|
||||
if: startsWith(github.ref, 'refs/tags/v')
|
||||
continue-on-error: true
|
||||
with:
|
||||
repo_token: "${{ secrets.GITHUB_TOKEN }}"
|
||||
prerelease: false
|
||||
files: |
|
||||
build/executable/*
|
||||
|
||||
docker:
|
||||
name: Publish Docker
|
||||
runs-on: ubuntu-latest
|
||||
needs: [build-artifacts, check, check-e2e]
|
||||
if: github.ref == 'refs/heads/develop'
|
||||
strategy:
|
||||
matrix:
|
||||
image:
|
||||
- tag: ${{needs.build-artifacts.outputs.docker-tag}}-no-plugins
|
||||
packages: ""
|
||||
python-libraries: ""
|
||||
|
||||
- tag: ${{needs.build-artifacts.outputs.docker-tag}}
|
||||
plugins: ${{needs.build-artifacts.outputs.plugins}}
|
||||
packages: python3 python3-venv python-is-python3 python3-pip nodejs npm curl zip unzip
|
||||
python-libraries: kestra
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
# Docker setup
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
# Docker Login
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_PASSWORD }}
|
||||
|
||||
# Vars
|
||||
- name: Set image name
|
||||
id: vars
|
||||
run: |
|
||||
TAG=${GITHUB_REF#refs/*/}
|
||||
if [[ $TAG = "master" || $TAG == v* ]]; then
|
||||
echo "plugins=${{ matrix.image.plugins }}" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "plugins=--repositories=https://s01.oss.sonatype.org/content/repositories/snapshots ${{ matrix.image.plugins }}" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
# Build Docker Image
|
||||
- name: Download executable
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: exe
|
||||
path: build/executable
|
||||
|
||||
- name: Copy exe to image
|
||||
run: |
|
||||
cp build/executable/* docker/app/kestra && chmod +x docker/app/kestra
|
||||
|
||||
# Docker Build and push
|
||||
- name: Build Docker Image
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
tags: kestra/kestra:${{ matrix.image.tag }}
|
||||
platforms: linux/amd64,linux/arm64
|
||||
build-args: |
|
||||
KESTRA_PLUGINS=${{ steps.vars.outputs.plugins }}
|
||||
APT_PACKAGES=${{matrix.image.packages}}
|
||||
PYTHON_LIBRARIES=${{matrix.image.python-libraries}}
|
||||
|
||||
maven:
|
||||
name: Publish to Maven
|
||||
runs-on: ubuntu-latest
|
||||
needs: [check, check-e2e]
|
||||
if: |
|
||||
always() &&
|
||||
github.ref == 'refs/heads/develop' || startsWith(github.ref, 'refs/tags/v') &&
|
||||
needs.check.result == 'success' &&
|
||||
(needs.check-e2e.result == 'skipped' || needs.check-e2e.result == 'success')
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
# Checkout GitHub Actions
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
repository: kestra-io/actions
|
||||
path: actions
|
||||
ref: main
|
||||
|
||||
# Setup build
|
||||
- uses: ./actions/.github/actions/setup-build
|
||||
id: build
|
||||
with:
|
||||
java-enabled: true
|
||||
node-enabled: true
|
||||
caches-enabled: true
|
||||
|
||||
# Publish
|
||||
- name: Publish package to Sonatype
|
||||
if: github.ref == 'refs/heads/develop'
|
||||
env:
|
||||
ORG_GRADLE_PROJECT_sonatypeUsername: ${{ secrets.SONATYPE_USER }}
|
||||
ORG_GRADLE_PROJECT_sonatypePassword: ${{ secrets.SONATYPE_PASSWORD }}
|
||||
SONATYPE_GPG_KEYID: ${{ secrets.SONATYPE_GPG_KEYID }}
|
||||
SONATYPE_GPG_PASSWORD: ${{ secrets.SONATYPE_GPG_PASSWORD }}
|
||||
SONATYPE_GPG_FILE: ${{ secrets.SONATYPE_GPG_FILE }}
|
||||
run: |
|
||||
mkdir -p ~/.gradle/
|
||||
echo "signing.keyId=${SONATYPE_GPG_KEYID}" > ~/.gradle/gradle.properties
|
||||
echo "signing.password=${SONATYPE_GPG_PASSWORD}" >> ~/.gradle/gradle.properties
|
||||
echo "signing.secretKeyRingFile=${HOME}/.gradle/secring.gpg" >> ~/.gradle/gradle.properties
|
||||
echo ${SONATYPE_GPG_FILE} | base64 -d > ~/.gradle/secring.gpg
|
||||
./gradlew publishToSonatype
|
||||
|
||||
# Release
|
||||
- name: Release package to Maven Central
|
||||
if: startsWith(github.ref, 'refs/tags/v')
|
||||
env:
|
||||
ORG_GRADLE_PROJECT_sonatypeUsername: ${{ secrets.SONATYPE_USER }}
|
||||
ORG_GRADLE_PROJECT_sonatypePassword: ${{ secrets.SONATYPE_PASSWORD }}
|
||||
SONATYPE_GPG_KEYID: ${{ secrets.SONATYPE_GPG_KEYID }}
|
||||
SONATYPE_GPG_PASSWORD: ${{ secrets.SONATYPE_GPG_PASSWORD }}
|
||||
SONATYPE_GPG_FILE: ${{ secrets.SONATYPE_GPG_FILE }}
|
||||
run: |
|
||||
echo "signing.keyId=${SONATYPE_GPG_KEYID}" > ~/.gradle/gradle.properties
|
||||
echo "signing.password=${SONATYPE_GPG_PASSWORD}" >> ~/.gradle/gradle.properties
|
||||
echo "signing.secretKeyRingFile=${HOME}/.gradle/secring.gpg" >> ~/.gradle/gradle.properties
|
||||
echo ${SONATYPE_GPG_FILE} | base64 -d > ~/.gradle/secring.gpg
|
||||
./gradlew publishToSonatype closeAndReleaseSonatypeStagingRepository
|
||||
end:
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- check-e2e
|
||||
- check
|
||||
- maven
|
||||
- docker
|
||||
- release
|
||||
if: always()
|
||||
env:
|
||||
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
|
||||
steps:
|
||||
- name: Trigger EE Workflow
|
||||
uses: peter-evans/repository-dispatch@v3
|
||||
if: github.ref == 'refs/heads/develop' && needs.release.result == 'success'
|
||||
with:
|
||||
token: ${{ secrets.GH_PERSONAL_TOKEN }}
|
||||
repository: kestra-io/kestra-ee
|
||||
event-type: "oss-updated"
|
||||
|
||||
# Update
|
||||
- name: Update internal
|
||||
uses: benc-uk/workflow-dispatch@v1
|
||||
if: github.ref == 'refs/heads/develop' && needs.docker.result == 'success'
|
||||
with:
|
||||
workflow: oss-build.yml
|
||||
repo: kestra-io/infra
|
||||
ref: master
|
||||
token: ${{ secrets.GH_PERSONAL_TOKEN }}
|
||||
|
||||
# Slack
|
||||
- name: Slack - Notification
|
||||
- name: Slack notification
|
||||
uses: Gamesight/slack-workflow-status@master
|
||||
if: ${{ always() && env.SLACK_WEBHOOK_URL != 0 }}
|
||||
with:
|
||||
repo_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
slack_webhook_url: ${{ secrets.SLACK_WEBHOOK_URL }}
|
||||
name: GitHub Actions
|
||||
icon_emoji: ":github-actions:"
|
||||
channel: "C02DQ1A7JLR" # _int_git channel
|
||||
icon_emoji: ':github-actions:'
|
||||
channel: 'C02DQ1A7JLR' # _int_git channel
|
||||
|
||||
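The "Set up Vars" step in the build-artifacts job above maps the Git ref to a Docker tag; a minimal sketch of that mapping (the run id 12345 and the sample refs are assumed example values):

# Reproduce the tag selection from the "Set up Vars" step
ref_to_tag() {
  TAG=${1#refs/*/}
  if [[ $TAG = "master" ]]; then TAG="latest";
  elif [[ $TAG = "develop" ]]; then TAG="develop";
  elif [[ $TAG = v* ]]; then TAG="${TAG}";
  else TAG="build-12345"; fi   # stands in for build-${{ github.run_id }}
  echo "$TAG"
}
ref_to_tag refs/heads/master     # -> latest
ref_to_tag refs/tags/v0.19.26    # -> v0.19.26
ref_to_tag refs/heads/my-branch  # -> build-12345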
.github/workflows/pull-request.yml
@@ -1,74 +0,0 @@
|
||||
name: Pull Request Workflow
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches:
|
||||
- develop
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref_name }}-pr
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
# ********************************************************************************************************************
|
||||
# File changes detection
|
||||
# ********************************************************************************************************************
|
||||
file-changes:
|
||||
if: ${{ github.event.pull_request.draft == false }}
|
||||
name: File changes detection
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 60
|
||||
outputs:
|
||||
ui: ${{ steps.changes.outputs.ui }}
|
||||
translations: ${{ steps.changes.outputs.translations }}
|
||||
backend: ${{ steps.changes.outputs.backend }}
|
||||
steps:
|
||||
- uses: dorny/paths-filter@v3
|
||||
id: changes
|
||||
with:
|
||||
filters: |
|
||||
ui:
|
||||
- 'ui/**'
|
||||
backend:
|
||||
- '!{ui,.github}/**'
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
# ********************************************************************************************************************
|
||||
# Tests
|
||||
# ********************************************************************************************************************
|
||||
frontend:
|
||||
name: Frontend - Tests
|
||||
needs: [file-changes]
|
||||
if: "needs.file-changes.outputs.ui == 'true'"
|
||||
uses: ./.github/workflows/workflow-frontend-test.yml
|
||||
secrets:
|
||||
GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
|
||||
backend:
|
||||
name: Backend - Tests
|
||||
needs: file-changes
|
||||
if: "needs.file-changes.outputs.backend == 'true'"
|
||||
uses: ./.github/workflows/workflow-backend-test.yml
|
||||
secrets:
|
||||
GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
|
||||
GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
|
||||
|
||||
end:
|
||||
name: End
|
||||
runs-on: ubuntu-latest
|
||||
if: always()
|
||||
needs: [frontend, backend]
|
||||
steps:
|
||||
# Slack
|
||||
- name: Slack notification
|
||||
uses: Gamesight/slack-workflow-status@master
|
||||
if: ${{ always() && env.SLACK_WEBHOOK_URL != 0 }}
|
||||
with:
|
||||
repo_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
slack_webhook_url: ${{ secrets.SLACK_WEBHOOK_URL }}
|
||||
name: GitHub Actions
|
||||
icon_emoji: ":github-actions:"
|
||||
channel: "C02DQ1A7JLR"
|
||||
.github/workflows/setversion-tag-plugins.yml
@@ -1,60 +0,0 @@
name: Set Version and Tag Plugins

on:
workflow_dispatch:
inputs:
releaseVersion:
description: 'The release version (e.g., 0.21.0)'
required: true
type: string
dryRun:
description: 'Use DRY_RUN mode'
required: false
default: 'false'
jobs:
tag:
name: Release plugins
runs-on: ubuntu-latest
steps:
# Checkout
- uses: actions/checkout@v4
with:
fetch-depth: 0

# Get Plugins List
- name: Get Plugins List
uses: ./.github/actions/plugins-list
id: plugins-list
with:
plugin-version: 'LATEST'

- name: 'Configure Git'
run: |
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
git config --global user.name "github-actions[bot]"

# Execute
- name: Set Version and Tag Plugins
if: ${{ github.event.inputs.dryRun == 'false' }}
env:
GITHUB_PAT: ${{ secrets.GH_PERSONAL_TOKEN }}
run: |
chmod +x ./dev-tools/setversion-tag-plugins.sh;

./dev-tools/setversion-tag-plugins.sh \
--release-version=${{github.event.inputs.releaseVersion}} \
--yes \
${{ steps.plugins-list.outputs.repositories }}

- name: Set Version and Tag Plugins (DRY_RUN)
if: ${{ github.event.inputs.dryRun == 'true' }}
env:
GITHUB_PAT: ${{ secrets.GH_PERSONAL_TOKEN }}
run: |
chmod +x ./dev-tools/setversion-tag-plugins.sh;

./dev-tools/setversion-tag-plugins.sh \
--release-version=${{github.event.inputs.releaseVersion}} \
--dry-run \
--yes \
${{ steps.plugins-list.outputs.repositories }}
.github/workflows/setversion-tag.yml
@@ -1,57 +0,0 @@
name: Set Version and Tag
run-name: "Set version and Tag Kestra to ${{ github.event.inputs.releaseVersion }} 🚀"
on:
workflow_dispatch:
inputs:
releaseVersion:
description: 'The release version (e.g., 0.21.1)'
required: true
type: string
env:
RELEASE_VERSION: "${{ github.event.inputs.releaseVersion }}"
jobs:
release:
name: Release Kestra
runs-on: ubuntu-latest
if: startsWith(github.ref, 'refs/heads/releases/v')
steps:
# Checks
- name: Check Inputs
run: |
if ! [[ "$RELEASE_VERSION" =~ ^[0-9]+(\.[0-9]+)(\.[0-9]+)(-rc[0-9])?(-SNAPSHOT)?$ ]]; then
echo "Invalid release version. Must match regex: ^[0-9]+(\.[0-9]+)(\.[0-9]+)(-rc[0-9])?(-SNAPSHOT)?$"
exit 1
fi

# Extract the major and minor versions
BASE_VERSION=$(echo "$RELEASE_VERSION" | sed -E 's/^([0-9]+\.[0-9]+)\..*/\1/')
RELEASE_BRANCH="refs/heads/releases/v${BASE_VERSION}.x"

CURRENT_BRANCH="$GITHUB_REF"
if ! [[ "$CURRENT_BRANCH" == "$RELEASE_BRANCH" ]]; then
echo "Invalid release branch. Expected $RELEASE_BRANCH, was $CURRENT_BRANCH"
exit 1
fi

# Checkout
- uses: actions/checkout@v4
with:
fetch-depth: 0

- name: Configure Git
run: |
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
git config --global user.name "github-actions[bot]"

# Execute
- name: Run Gradle Release
env:
GITHUB_PAT: ${{ secrets.GH_PERSONAL_TOKEN }}
run: |
# Update version
sed -i "s/^version=.*/version=$RELEASE_VERSION/" ./gradle.properties
git add ./gradle.properties
git commit -m"chore(version): update to version '$RELEASE_VERSION'"
git push
git tag -a "v$RELEASE_VERSION" -m"v$RELEASE_VERSION"
git push --tags
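For illustration (not part of the workflow), the "Check Inputs" step above accepts plain, release-candidate, and snapshot versions; the sample values below are assumed:

# Sample inputs against the regex used by the "Check Inputs" step
REGEX='^[0-9]+(\.[0-9]+)(\.[0-9]+)(-rc[0-9])?(-SNAPSHOT)?$'
for v in 0.21.1 0.21.0-rc1 0.21.0-rc1-SNAPSHOT 0.21 v0.21.1; do
  if [[ "$v" =~ $REGEX ]]; then echo "$v: valid"; else echo "$v: invalid"; fi
done
# 0.21.1, 0.21.0-rc1 and 0.21.0-rc1-SNAPSHOT are valid; 0.21 and v0.21.1 are not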
.github/workflows/vulnerabilities-check.yml
@@ -8,9 +8,6 @@ on:
|
||||
env:
|
||||
JAVA_VERSION: '21'
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
dependency-check:
|
||||
name: Dependency Check
|
||||
@@ -36,16 +33,8 @@ jobs:
|
||||
node-enabled: true
|
||||
caches-enabled: true
|
||||
|
||||
# Npm
|
||||
- name: Npm - Install
|
||||
shell: bash
|
||||
working-directory: ui
|
||||
run: npm ci
|
||||
|
||||
# Run OWASP dependency check plugin
|
||||
- name: Gradle Dependency Check
|
||||
env:
|
||||
NVD_API_KEY: ${{ secrets.NIST_APIKEY }}
|
||||
run: |
|
||||
./gradlew dependencyCheckAggregate
|
||||
|
||||
@@ -60,10 +49,6 @@ jobs:
|
||||
develop-image-check:
|
||||
name: Image Check (develop)
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
actions: read
|
||||
steps:
|
||||
# Checkout
|
||||
- uses: actions/checkout@v4
|
||||
@@ -87,28 +72,16 @@ jobs:
|
||||
|
||||
# Run Trivy image scan for Docker vulnerabilities, see https://github.com/aquasecurity/trivy-action
|
||||
- name: Docker Vulnerabilities Check
|
||||
uses: aquasecurity/trivy-action@0.31.0
|
||||
uses: aquasecurity/trivy-action@0.24.0
|
||||
with:
|
||||
image-ref: kestra/kestra:develop
|
||||
format: 'template'
|
||||
template: '@/contrib/sarif.tpl'
|
||||
severity: 'CRITICAL,HIGH'
|
||||
output: 'trivy-results.sarif'
|
||||
format: table
|
||||
skip-dirs: /app/plugins
|
||||
|
||||
- name: Upload Trivy scan results to GitHub Security tab
|
||||
uses: github/codeql-action/upload-sarif@v3
|
||||
with:
|
||||
sarif_file: 'trivy-results.sarif'
|
||||
category: docker-
|
||||
scanners: vuln
|
||||
|
||||
latest-image-check:
|
||||
name: Image Check (latest)
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
actions: read
|
||||
steps:
|
||||
# Checkout
|
||||
- uses: actions/checkout@v4
|
||||
@@ -132,16 +105,9 @@ jobs:
|
||||
|
||||
# Run Trivy image scan for Docker vulnerabilities, see https://github.com/aquasecurity/trivy-action
|
||||
- name: Docker Vulnerabilities Check
|
||||
uses: aquasecurity/trivy-action@0.31.0
|
||||
uses: aquasecurity/trivy-action@0.24.0
|
||||
with:
|
||||
image-ref: kestra/kestra:latest
|
||||
format: table
|
||||
skip-dirs: /app/plugins
|
||||
scanners: vuln
|
||||
severity: 'CRITICAL,HIGH'
|
||||
output: 'trivy-results.sarif'
|
||||
|
||||
- name: Upload Trivy scan results to GitHub Security tab
|
||||
uses: github/codeql-action/upload-sarif@v3
|
||||
with:
|
||||
sarif_file: 'trivy-results.sarif'
|
||||
scanners: vuln
|
||||
.github/workflows/workflow-backend-test.yml
@@ -1,142 +0,0 @@
|
||||
name: Backend - Tests
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
secrets:
|
||||
GITHUB_AUTH_TOKEN:
|
||||
description: "The GitHub Token."
|
||||
required: true
|
||||
CODECOV_TOKEN:
|
||||
description: 'Codecov Token'
|
||||
required: true
|
||||
SONAR_TOKEN:
|
||||
description: 'Sonar Token'
|
||||
required: true
|
||||
GOOGLE_SERVICE_ACCOUNT:
|
||||
description: 'Google Service Account'
|
||||
required: true
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
checks: write
|
||||
actions: read
|
||||
|
||||
jobs:
|
||||
test:
|
||||
name: Backend - Tests
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
|
||||
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
name: Checkout - Current ref
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
# Setup build
|
||||
- uses: kestra-io/actions/.github/actions/setup-build@main
|
||||
name: Setup - Build
|
||||
id: build
|
||||
with:
|
||||
java-enabled: true
|
||||
node-enabled: true
|
||||
python-enabled: true
|
||||
|
||||
# Services
|
||||
- name: Setup - Start docker compose
|
||||
shell: bash
|
||||
run: docker compose -f docker-compose-ci.yml up -d
|
||||
|
||||
# Gradle check
|
||||
- name: Gradle - Build
|
||||
if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}
|
||||
env:
|
||||
GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
|
||||
shell: bash
|
||||
run: |
|
||||
echo $GOOGLE_SERVICE_ACCOUNT | base64 -d > ~/.gcp-service-account.json
|
||||
export GOOGLE_APPLICATION_CREDENTIALS=$HOME/.gcp-service-account.json
|
||||
./gradlew check javadoc --parallel
|
||||
|
||||
# report test
|
||||
- name: Test - Publish Test Results
|
||||
uses: dorny/test-reporter@v2
|
||||
if: always()
|
||||
with:
|
||||
name: Java Tests Report
|
||||
reporter: java-junit
|
||||
path: '**/build/test-results/test/TEST-*.xml'
|
||||
list-suites: 'failed'
|
||||
list-tests: 'failed'
|
||||
fail-on-error: 'false'
|
||||
token: ${{ secrets.GITHUB_AUTH_TOKEN }}
|
||||
|
||||
# Sonar
|
||||
- name: Test - Analyze with Sonar
|
||||
if: env.SONAR_TOKEN != ''
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_AUTH_TOKEN }}
|
||||
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
|
||||
shell: bash
|
||||
run: ./gradlew sonar --info
|
||||
|
||||
# GCP
|
||||
- name: GCP - Auth with unit test account
|
||||
id: auth
|
||||
if: always() && env.GOOGLE_SERVICE_ACCOUNT != ''
|
||||
continue-on-error: true
|
||||
uses: "google-github-actions/auth@v2"
|
||||
with:
|
||||
credentials_json: "${{ secrets.GOOGLE_SERVICE_ACCOUNT }}"
|
||||
|
||||
- name: GCP - Setup Cloud SDK
|
||||
if: env.GOOGLE_SERVICE_ACCOUNT != ''
|
||||
uses: "google-github-actions/setup-gcloud@v2"
|
||||
|
||||
# Allure check
|
||||
- uses: rlespinasse/github-slug-action@v5
|
||||
name: Allure - Generate slug variables
|
||||
|
||||
- name: Allure - Publish report
|
||||
uses: andrcuns/allure-publish-action@v2.9.0
|
||||
if: always() && env.GOOGLE_SERVICE_ACCOUNT != ''
|
||||
continue-on-error: true
|
||||
env:
|
||||
GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_AUTH_TOKEN }}
|
||||
JAVA_HOME: /usr/lib/jvm/default-jvm/
|
||||
with:
|
||||
storageType: gcs
|
||||
resultsGlob: "**/build/allure-results"
|
||||
bucket: internal-kestra-host
|
||||
baseUrl: "https://internal.dev.kestra.io"
|
||||
prefix: ${{ format('{0}/{1}', github.repository, 'allure/java') }}
|
||||
copyLatest: true
|
||||
ignoreMissingResults: true
|
||||
|
||||
# Jacoco
|
||||
- name: Jacoco - Copy reports
|
||||
if: env.GOOGLE_SERVICE_ACCOUNT != ''
|
||||
continue-on-error: true
|
||||
shell: bash
|
||||
run: |
|
||||
mv build/reports/jacoco/testCodeCoverageReport build/reports/jacoco/test/
|
||||
mv build/reports/jacoco/test/testCodeCoverageReport.xml build/reports/jacoco/test/jacocoTestReport.xml
|
||||
gsutil -m rsync -d -r build/reports/jacoco/test/ gs://internal-kestra-host/${{ format('{0}/{1}', github.repository, 'jacoco') }}
|
||||
|
||||
# Codecov
|
||||
- name: Codecov - Upload coverage reports
|
||||
uses: codecov/codecov-action@v5
|
||||
if: ${{ !cancelled() }}
|
||||
continue-on-error: true
|
||||
with:
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
flags: backend
|
||||
|
||||
- name: Codecov - Upload test results
|
||||
uses: codecov/test-results-action@v1
|
||||
if: ${{ !cancelled() }}
|
||||
continue-on-error: true
|
||||
with:
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
flags: backend
|
||||
.github/workflows/workflow-build-artifacts.yml
@@ -1,152 +0,0 @@
|
||||
name: Build Artifacts
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
plugin-version:
|
||||
description: "Kestra version"
|
||||
default: 'LATEST'
|
||||
required: true
|
||||
type: string
|
||||
outputs:
|
||||
docker-tag:
|
||||
value: ${{ jobs.build.outputs.docker-tag }}
|
||||
description: "The Docker image Tag for Kestra"
|
||||
docker-artifact-name:
|
||||
value: ${{ jobs.build.outputs.docker-artifact-name }}
|
||||
description: "The GitHub artifact containing the Kestra docker image name."
|
||||
plugins:
|
||||
value: ${{ jobs.build.outputs.plugins }}
|
||||
description: "The Kestra plugins list used for the build."
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: Build - Artifacts
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
docker-tag: ${{ steps.vars.outputs.tag }}
|
||||
docker-artifact-name: ${{ steps.vars.outputs.artifact }}
|
||||
plugins: ${{ steps.plugins.outputs.plugins }}
|
||||
env:
|
||||
PLUGIN_VERSION: ${{ github.event.inputs.plugin-version != null && github.event.inputs.plugin-version || 'LATEST' }}
|
||||
steps:
|
||||
- name: Checkout - Current ref
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
# Npm
|
||||
- name: Setup - Npm install
|
||||
shell: bash
|
||||
working-directory: ui
|
||||
run: npm ci
|
||||
|
||||
# Setup build
|
||||
- uses: kestra-io/actions/.github/actions/setup-build@main
|
||||
name: Setup - Build
|
||||
id: build
|
||||
with:
|
||||
java-enabled: true
|
||||
node-enabled: true
|
||||
|
||||
# Get Plugins List
|
||||
- name: Plugins - Get List
|
||||
uses: ./.github/actions/plugins-list
|
||||
if: "!startsWith(github.ref, 'refs/tags/v')"
|
||||
id: plugins-list
|
||||
with:
|
||||
plugin-version: ${{ env.PLUGIN_VERSION }}
|
||||
|
||||
# Set Plugins List
|
||||
- name: Plugins - Set List
|
||||
id: plugins
|
||||
if: "!startsWith(github.ref, 'refs/tags/v')"
|
||||
shell: bash
|
||||
run: |
|
||||
PLUGINS="${{ steps.plugins-list.outputs.plugins }}"
|
||||
TAG=${GITHUB_REF#refs/*/}
|
||||
if [[ $TAG = "master" || $TAG == v* ]]; then
|
||||
echo "plugins=$PLUGINS" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "plugins=--repositories=https://s01.oss.sonatype.org/content/repositories/snapshots $PLUGINS" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
# Build
|
||||
- name: Gradle - Build
|
||||
shell: bash
|
||||
run: |
|
||||
./gradlew executableJar
|
||||
|
||||
- name: Artifacts - Copy exe to image
|
||||
shell: bash
|
||||
run: |
|
||||
cp build/executable/* docker/app/kestra && chmod +x docker/app/kestra
|
||||
|
||||
# Docker Tag
|
||||
- name: Setup - Docker vars
|
||||
id: vars
|
||||
shell: bash
|
||||
run: |
|
||||
TAG=${GITHUB_REF#refs/*/}
|
||||
if [[ $TAG = "master" ]]
|
||||
then
|
||||
TAG="latest";
|
||||
elif [[ $TAG = "develop" ]]
|
||||
then
|
||||
TAG="develop";
|
||||
elif [[ $TAG = v* ]]
|
||||
then
|
||||
TAG="${TAG}";
|
||||
else
|
||||
TAG="build-${{ github.run_id }}";
|
||||
fi
|
||||
echo "tag=${TAG}" >> $GITHUB_OUTPUT
|
||||
echo "artifact=docker-kestra-${TAG}" >> $GITHUB_OUTPUT
|
||||
|
||||
# Docker setup
|
||||
- name: Docker - Setup QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
- name: Docker - Fix Qemu
|
||||
shell: bash
|
||||
run: |
|
||||
docker run --rm --privileged multiarch/qemu-user-static --reset -p yes -c yes
|
||||
|
||||
- name: Docker - Setup Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
# Docker Build
|
||||
- name: Docker - Build & export image
|
||||
uses: docker/build-push-action@v6
|
||||
if: "!startsWith(github.ref, 'refs/tags/v')"
|
||||
with:
|
||||
context: .
|
||||
push: false
|
||||
file: Dockerfile
|
||||
tags: |
|
||||
kestra/kestra:${{ steps.vars.outputs.tag }}
|
||||
build-args: |
|
||||
KESTRA_PLUGINS=${{ steps.plugins.outputs.plugins }}
|
||||
APT_PACKAGES=${{ env.DOCKER_APT_PACKAGES }}
|
||||
PYTHON_LIBRARIES=${{ env.DOCKER_PYTHON_LIBRARIES }}
|
||||
outputs: type=docker,dest=/tmp/${{ steps.vars.outputs.artifact }}.tar
|
||||
|
||||
# Upload artifacts
|
||||
- name: Artifacts - Upload JAR
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: jar
|
||||
path: build/libs/
|
||||
|
||||
- name: Artifacts - Upload Executable
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: exe
|
||||
path: build/executable/
|
||||
|
||||
- name: Artifacts - Upload Docker
|
||||
uses: actions/upload-artifact@v4
|
||||
if: "!startsWith(github.ref, 'refs/tags/v')"
|
||||
with:
|
||||
name: ${{ steps.vars.outputs.artifact }}
|
||||
path: /tmp/${{ steps.vars.outputs.artifact }}.tar
|
||||
.github/workflows/workflow-frontend-test.yml
@@ -1,86 +0,0 @@
|
||||
name: Frontend - Tests
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
secrets:
|
||||
GITHUB_AUTH_TOKEN:
|
||||
description: "The GitHub Token."
|
||||
required: true
|
||||
CODECOV_TOKEN:
|
||||
description: 'Codecov Token'
|
||||
required: true
|
||||
|
||||
env:
|
||||
# to save corepack from itself
|
||||
COREPACK_INTEGRITY_KEYS: 0
|
||||
|
||||
jobs:
|
||||
test:
|
||||
name: Frontend - Tests
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Cache Node Modules
|
||||
id: cache-node-modules
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: |
|
||||
ui/node_modules
|
||||
key: modules-${{ hashFiles('ui/package-lock.json') }}
|
||||
|
||||
- name: Cache Playwright Binaries
|
||||
id: cache-playwright
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: |
|
||||
~/.cache/ms-playwright
|
||||
key: playwright-${{ hashFiles('ui/package-lock.json') }}
|
||||
|
||||
- name: Npm - install
|
||||
if: steps.cache-node-modules.outputs.cache-hit != 'true'
|
||||
working-directory: ui
|
||||
run: npm ci
|
||||
|
||||
- name: Npm - lint
|
||||
uses: reviewdog/action-eslint@v1
|
||||
with:
|
||||
github_token: ${{ secrets.GITHUB_AUTH_TOKEN }}
|
||||
reporter: github-pr-review
|
||||
workdir: ui
|
||||
|
||||
- name: Npm - Run build
|
||||
working-directory: ui
|
||||
env:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
run: npm run build
|
||||
|
||||
- name: Run front-end unit tests
|
||||
working-directory: ui
|
||||
run: npm run test:unit -- --coverage
|
||||
|
||||
- name: Storybook - Install Playwright
|
||||
working-directory: ui
|
||||
if: steps.cache-playwright.outputs.cache-hit != 'true'
|
||||
run: npx playwright install --with-deps
|
||||
|
||||
- name: Run storybook component tests
|
||||
working-directory: ui
|
||||
run: npm run test:storybook -- --coverage
|
||||
|
||||
- name: Codecov - Upload coverage reports
|
||||
uses: codecov/codecov-action@v5
|
||||
if: ${{ !cancelled() && github.event.pull_request.head.repo.full_name == github.repository }}
|
||||
continue-on-error: true
|
||||
with:
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
flags: frontend
|
||||
|
||||
- name: Codecov - Upload test results
|
||||
uses: codecov/test-results-action@v1
|
||||
if: ${{ !cancelled() }}
|
||||
continue-on-error: true
|
||||
with:
|
||||
token: ${{ secrets.CODECOV_TOKEN && github.event.pull_request.head.repo.full_name == github.repository }}
|
||||
flags: frontend
|
||||
.github/workflows/workflow-github-release.yml
@@ -1,65 +0,0 @@
|
||||
name: Github - Release
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
secrets:
|
||||
GH_PERSONAL_TOKEN:
|
||||
description: "The Github personal token."
|
||||
required: true
|
||||
push:
|
||||
tags:
|
||||
- '*'
|
||||
|
||||
jobs:
|
||||
publish:
|
||||
name: Github - Release
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
# Check out
|
||||
- name: Checkout - Repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
submodules: true
|
||||
|
||||
# Checkout GitHub Actions
|
||||
- name: Checkout - Actions
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
repository: kestra-io/actions
|
||||
sparse-checkout-cone-mode: true
|
||||
path: actions
|
||||
sparse-checkout: |
|
||||
.github/actions
|
||||
|
||||
# Download Exec
|
||||
# Must be done after checkout actions
|
||||
- name: Artifacts - Download executable
|
||||
uses: actions/download-artifact@v4
|
||||
if: startsWith(github.ref, 'refs/tags/v')
|
||||
with:
|
||||
name: exe
|
||||
path: build/executable
|
||||
|
||||
|
||||
# GitHub Release
|
||||
- name: Create GitHub release
|
||||
uses: ./actions/.github/actions/github-release
|
||||
if: ${{ startsWith(github.ref, 'refs/tags/v') }}
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}
|
||||
SLACK_RELEASES_WEBHOOK_URL: ${{ secrets.SLACK_RELEASES_WEBHOOK_URL }}
|
||||
|
||||
# Trigger gha workflow to bump helm chart version
|
||||
- name: GitHub - Trigger the Helm chart version bump
|
||||
uses: peter-evans/repository-dispatch@v3
|
||||
with:
|
||||
token: ${{ secrets.GH_PERSONAL_TOKEN }}
|
||||
repository: kestra-io/helm-charts
|
||||
event-type: update-helm-chart-version
|
||||
client-payload: |-
|
||||
{
|
||||
"new_version": "${{ github.ref_name }}",
|
||||
"github_repository": "${{ github.repository }}",
|
||||
"github_actor": "${{ github.actor }}"
|
||||
}
|
||||
.github/workflows/workflow-publish-docker.yml
@@ -1,146 +0,0 @@
|
||||
name: Publish - Docker
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
plugin-version:
|
||||
description: "Kestra version"
|
||||
default: 'LATEST'
|
||||
required: false
|
||||
type: string
|
||||
force-download-artifact:
|
||||
description: 'Force download artifact'
|
||||
required: false
|
||||
type: string
|
||||
default: "true"
|
||||
workflow_call:
|
||||
inputs:
|
||||
plugin-version:
|
||||
description: "Kestra version"
|
||||
default: 'LATEST'
|
||||
required: false
|
||||
type: string
|
||||
force-download-artifact:
|
||||
description: 'Force download artifact'
|
||||
required: false
|
||||
type: string
|
||||
default: "true"
|
||||
secrets:
|
||||
DOCKERHUB_USERNAME:
|
||||
description: "The Dockerhub username."
|
||||
required: true
|
||||
DOCKERHUB_PASSWORD:
|
||||
description: "The Dockerhub password."
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
# ********************************************************************************************************************
|
||||
# Build
|
||||
# ********************************************************************************************************************
|
||||
build-artifacts:
|
||||
name: Build Artifacts
|
||||
if: ${{ github.event.inputs.force-download-artifact == 'true' }}
|
||||
uses: ./.github/workflows/workflow-build-artifacts.yml
|
||||
with:
|
||||
plugin-version: ${{ github.event.inputs.plugin-version != null && github.event.inputs.plugin-version || 'LATEST' }}
|
||||
# ********************************************************************************************************************
|
||||
# Docker
|
||||
# ********************************************************************************************************************
|
||||
publish:
|
||||
name: Publish - Docker
|
||||
runs-on: ubuntu-latest
|
||||
needs: build-artifacts
|
||||
if: |
|
||||
always() &&
|
||||
(needs.build-artifacts.result == 'success' ||
|
||||
github.event.inputs.force-download-artifact != 'true')
|
||||
env:
|
||||
PLUGIN_VERSION: ${{ github.event.inputs.plugin-version != null && github.event.inputs.plugin-version || 'LATEST' }}
|
||||
strategy:
|
||||
matrix:
|
||||
image:
|
||||
- tag: -no-plugins
|
||||
packages: jattach
|
||||
plugins: false
|
||||
python-libraries: ""
|
||||
|
||||
- tag: ""
|
||||
plugins: true
|
||||
packages: python3 python3-venv python-is-python3 python3-pip nodejs npm curl zip unzip jattach
|
||||
python-libraries: kestra
|
||||
steps:
|
||||
- name: Checkout - Current ref
|
||||
uses: actions/checkout@v4
|
||||
|
||||
# Docker setup
|
||||
- name: Docker - Setup QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
- name: Docker - Fix Qemu
|
||||
shell: bash
|
||||
run: |
|
||||
docker run --rm --privileged multiarch/qemu-user-static --reset -p yes -c yes
|
||||
|
||||
- name: Docker - Setup Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
# Docker Login
|
||||
- name: Docker - Login to DockerHub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_PASSWORD }}
|
||||
|
||||
|
||||
# # Get Plugins List
|
||||
- name: Plugins - Get List
|
||||
uses: ./.github/actions/plugins-list
|
||||
id: plugins-list
|
||||
if: ${{ matrix.image.plugins}}
|
||||
with:
|
||||
plugin-version: ${{ env.PLUGIN_VERSION }}
|
||||
|
||||
# Vars
|
||||
- name: Docker - Set variables
|
||||
shell: bash
|
||||
id: vars
|
||||
run: |
|
||||
TAG=${GITHUB_REF#refs/*/}
|
||||
PLUGINS="${{ matrix.image.plugins == true && steps.plugins-list.outputs.plugins || '' }}"
|
||||
if [[ $TAG == v* ]]; then
|
||||
TAG="${TAG}";
|
||||
echo "plugins=${{ matrix.image.plugins }}" >> $GITHUB_OUTPUT
|
||||
elif [[ $TAG = "develop" ]]; then
|
||||
TAG="develop";
|
||||
echo "plugins=--repositories=https://s01.oss.sonatype.org/content/repositories/snapshots $PLUGINS" >> $GITHUB_OUTPUT
|
||||
else
|
||||
TAG="build-${{ github.run_id }}";
|
||||
echo "plugins=--repositories=https://s01.oss.sonatype.org/content/repositories/snapshots $PLUGINS" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
echo "tag=${TAG}${{ matrix.image.tag }}" >> $GITHUB_OUTPUT
|
||||
|
||||
# Build Docker Image
|
||||
- name: Artifacts - Download executable
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: exe
|
||||
path: build/executable
|
||||
|
||||
- name: Docker - Copy exe to image
|
||||
shell: bash
|
||||
run: |
|
||||
cp build/executable/* docker/app/kestra && chmod +x docker/app/kestra
|
||||
|
||||
# Docker Build and push
|
||||
- name: Docker - Build image
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
tags: kestra/kestra:${{ steps.vars.outputs.tag }}
|
||||
platforms: linux/amd64,linux/arm64
|
||||
build-args: |
|
||||
KESTRA_PLUGINS=${{ steps.vars.outputs.plugins }}
|
||||
APT_PACKAGES=${{ matrix.image.packages }}
|
||||
PYTHON_LIBRARIES=${{ matrix.image.python-libraries }}
|
||||
.github/workflows/workflow-publish-maven.yml
@@ -1,57 +0,0 @@
|
||||
name: Publish - Maven
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
secrets:
|
||||
SONATYPE_USER:
|
||||
description: "The Sonatype username."
|
||||
required: true
|
||||
SONATYPE_PASSWORD:
|
||||
description: "The Sonatype password."
|
||||
required: true
|
||||
SONATYPE_GPG_KEYID:
|
||||
description: "The Sonatype GPG key id."
|
||||
required: true
|
||||
SONATYPE_GPG_PASSWORD:
|
||||
description: "The Sonatype GPG password."
|
||||
required: true
|
||||
SONATYPE_GPG_FILE:
|
||||
description: "The Sonatype GPG file."
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
publish:
|
||||
name: Publish - Maven
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout - Current ref
|
||||
uses: actions/checkout@v4
|
||||
|
||||
# Setup build
|
||||
- name: Setup - Build
|
||||
uses: kestra-io/actions/.github/actions/setup-build@main
|
||||
id: build
|
||||
with:
|
||||
java-enabled: true
|
||||
node-enabled: true
|
||||
|
||||
# Publish
|
||||
- name: Publish - Release package to Maven Central
|
||||
shell: bash
|
||||
env:
|
||||
ORG_GRADLE_PROJECT_mavenCentralUsername: ${{ secrets.SONATYPE_USER }}
|
||||
ORG_GRADLE_PROJECT_mavenCentralPassword: ${{ secrets.SONATYPE_PASSWORD }}
|
||||
SONATYPE_GPG_KEYID: ${{ secrets.SONATYPE_GPG_KEYID }}
|
||||
SONATYPE_GPG_PASSWORD: ${{ secrets.SONATYPE_GPG_PASSWORD }}
|
||||
SONATYPE_GPG_FILE: ${{ secrets.SONATYPE_GPG_FILE}}
|
||||
run: |
|
||||
mkdir -p ~/.gradle/
|
||||
echo "signing.keyId=${SONATYPE_GPG_KEYID}" > ~/.gradle/gradle.properties
|
||||
echo "signing.password=${SONATYPE_GPG_PASSWORD}" >> ~/.gradle/gradle.properties
|
||||
echo "signing.secretKeyRingFile=${HOME}/.gradle/secring.gpg" >> ~/.gradle/gradle.properties
|
||||
echo ${SONATYPE_GPG_FILE} | base64 -d > ~/.gradle/secring.gpg
|
||||
./gradlew publishToMavenCentral
|
||||
|
||||
# Gradle dependency
|
||||
- name: Java - Gradle dependency graph
|
||||
uses: gradle/actions/dependency-submission@v4
|
||||
.github/workflows/workflow-release.yml
@@ -1,80 +0,0 @@
|
||||
name: Release
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
plugin-version:
|
||||
description: "Kestra version"
|
||||
default: 'LATEST'
|
||||
required: false
|
||||
type: string
|
||||
publish-docker:
|
||||
description: "Publish Docker image"
|
||||
default: 'false'
|
||||
required: false
|
||||
type: string
|
||||
workflow_call:
|
||||
inputs:
|
||||
plugin-version:
|
||||
description: "Kestra version"
|
||||
default: 'LATEST'
|
||||
required: false
|
||||
type: string
|
||||
secrets:
|
||||
DOCKERHUB_USERNAME:
|
||||
description: "The Dockerhub username."
|
||||
required: true
|
||||
DOCKERHUB_PASSWORD:
|
||||
description: "The Dockerhub password."
|
||||
required: true
|
||||
SONATYPE_USER:
|
||||
description: "The Sonatype username."
|
||||
required: true
|
||||
SONATYPE_PASSWORD:
|
||||
description: "The Sonatype password."
|
||||
required: true
|
||||
SONATYPE_GPG_KEYID:
|
||||
description: "The Sonatype GPG key id."
|
||||
required: true
|
||||
SONATYPE_GPG_PASSWORD:
|
||||
description: "The Sonatype GPG password."
|
||||
required: true
|
||||
SONATYPE_GPG_FILE:
|
||||
description: "The Sonatype GPG file."
|
||||
required: true
|
||||
jobs:
|
||||
build-artifacts:
|
||||
name: Build - Artifacts
|
||||
uses: ./.github/workflows/workflow-build-artifacts.yml
|
||||
with:
|
||||
plugin-version: ${{ github.event.inputs.plugin-version != null && github.event.inputs.plugin-version || 'LATEST' }}
|
||||
|
||||
Docker:
|
||||
name: Publish Docker
|
||||
needs: build-artifacts
|
||||
uses: ./.github/workflows/workflow-publish-docker.yml
|
||||
if: startsWith(github.ref, 'refs/heads/develop') || github.event.inputs.publish-docker == 'true'
|
||||
with:
|
||||
force-download-artifact: 'false'
|
||||
plugin-version: ${{ github.event.inputs.plugin-version != null && github.event.inputs.plugin-version || 'LATEST' }}
|
||||
secrets:
|
||||
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }}
|
||||
|
||||
Maven:
|
||||
name: Publish Maven
|
||||
uses: ./.github/workflows/workflow-publish-maven.yml
|
||||
secrets:
|
||||
SONATYPE_USER: ${{ secrets.SONATYPE_USER }}
|
||||
SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }}
|
||||
SONATYPE_GPG_KEYID: ${{ secrets.SONATYPE_GPG_KEYID }}
|
||||
SONATYPE_GPG_PASSWORD: ${{ secrets.SONATYPE_GPG_PASSWORD }}
|
||||
SONATYPE_GPG_FILE: ${{ secrets.SONATYPE_GPG_FILE }}
|
||||
|
||||
Github:
|
||||
name: Github Release
|
||||
needs: build-artifacts
|
||||
if: startsWith(github.ref, 'refs/tags/v')
|
||||
uses: ./.github/workflows/workflow-github-release.yml
|
||||
secrets:
|
||||
GH_PERSONAL_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}
|
||||
.github/workflows/workflow-test.yml
@@ -1,97 +0,0 @@
|
||||
name: Tests
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '0 4 * * 1,2,3,4,5'
|
||||
workflow_call:
|
||||
inputs:
|
||||
report-status:
|
||||
description: "Report status of the jobs in outputs"
|
||||
type: string
|
||||
required: false
|
||||
default: false
|
||||
outputs:
|
||||
frontend_status:
|
||||
description: "Status of the frontend job"
|
||||
value: ${{ jobs.set-frontend-status.outputs.frontend_status }}
|
||||
backend_status:
|
||||
description: "Status of the backend job"
|
||||
value: ${{ jobs.set-backend-status.outputs.backend_status }}
|
||||
|
||||
jobs:
|
||||
file-changes:
|
||||
name: File changes detection
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 60
|
||||
outputs:
|
||||
ui: ${{ steps.changes.outputs.ui }}
|
||||
backend: ${{ steps.changes.outputs.backend }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
if: "!startsWith(github.ref, 'refs/tags/v')"
|
||||
- uses: dorny/paths-filter@v3
|
||||
if: "!startsWith(github.ref, 'refs/tags/v')"
|
||||
id: changes
|
||||
with:
|
||||
filters: |
|
||||
ui:
|
||||
- 'ui/**'
|
||||
backend:
|
||||
- '!{ui,.github}/**'
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
frontend:
|
||||
name: Frontend - Tests
|
||||
needs: file-changes
|
||||
if: "needs.file-changes.outputs.ui == 'true' || startsWith(github.ref, 'refs/tags/v')"
|
||||
uses: ./.github/workflows/workflow-frontend-test.yml
|
||||
secrets:
|
||||
GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
|
||||
|
||||
backend:
|
||||
name: Backend - Tests
|
||||
needs: file-changes
|
||||
if: "needs.file-changes.outputs.backend == 'true' || startsWith(github.ref, 'refs/tags/v')"
|
||||
uses: ./.github/workflows/workflow-backend-test.yml
|
||||
secrets:
|
||||
GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
|
||||
GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
|
||||
|
||||
# Output every job status
|
||||
# To be used in other workflows
|
||||
report-status:
|
||||
name: Report Status
|
||||
runs-on: ubuntu-latest
|
||||
needs: [ frontend, backend ]
|
||||
if: always() && (inputs.report-status == 'true')
|
||||
outputs:
|
||||
frontend_status: ${{ steps.set-frontend-status.outputs.frontend_status }}
|
||||
backend_status: ${{ steps.set-backend-status.outputs.backend_status }}
|
||||
steps:
|
||||
- id: set-frontend-status
|
||||
name: Set frontend job status
|
||||
run: echo "::set-output name=frontend_status::${{ needs.frontend.result }}"
|
||||
|
||||
- id: set-backend-status
|
||||
name: Set backend job status
|
||||
run: echo "::set-output name=backend_status::${{ needs.backend.result }}"
|
||||
|
||||
notify:
|
||||
name: Notify - Slack
|
||||
runs-on: ubuntu-latest
|
||||
needs: [ frontend, backend ]
|
||||
if: github.event_name == 'schedule'
|
||||
steps:
|
||||
- name: Notify failed CI
|
||||
id: send-ci-failed
|
||||
if: |
|
||||
always() && (needs.frontend.result != 'success' ||
|
||||
needs.backend.result != 'success')
|
||||
uses: kestra-io/actions/.github/actions/send-ci-failed@main
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
|
||||
.gitignore
@@ -36,8 +36,6 @@ yarn.lock
ui/coverage
ui/stats.html
ui/.frontend-gradle-plugin
ui/utils/CHANGELOG.md
ui/test-report.junit.xml

### Docker
/.env
@@ -56,7 +54,3 @@ core/src/main/resources/gradle.properties

# Allure Reports
**/allure-results/*

*storybook.log
storybook-static
/jmh-benchmarks/src/main/resources/gradle.properties
.plugins
@@ -1,116 +1,102 @@
|
||||
#
|
||||
# List of plugins to install locally with: $ make install-plugins
|
||||
# Format: <RepositoryName>:<GroupId>:<ArtifactId>:<Version>
|
||||
#
|
||||
# Uncomment the lines corresponding to the plugins to be installed:
|
||||
#plugin-airbyte:io.kestra.plugin:plugin-airbyte:LATEST
|
||||
#plugin-airflow:io.kestra.plugin:plugin-airflow:LATEST
|
||||
#plugin-amqp:io.kestra.plugin:plugin-amqp:LATEST
|
||||
#plugin-ansible:io.kestra.plugin:plugin-ansible:LATEST
|
||||
#plugin-aws:io.kestra.plugin:plugin-aws:LATEST
|
||||
#plugin-azure:io.kestra.plugin:plugin-azure:LATEST
|
||||
#plugin-cassandra:io.kestra.plugin:plugin-cassandra:LATEST
#plugin-cloudquery:io.kestra.plugin:plugin-cloudquery:LATEST
#plugin-compress:io.kestra.plugin:plugin-compress:LATEST
#plugin-couchbase:io.kestra.plugin:plugin-couchbase:LATEST
#plugin-crypto:io.kestra.plugin:plugin-crypto:LATEST
#plugin-databricks:io.kestra.plugin:plugin-databricks:LATEST
#plugin-datahub:io.kestra.plugin:plugin-datahub:LATEST
#plugin-dataform:io.kestra.plugin:plugin-dataform:LATEST
#plugin-dbt:io.kestra.plugin:plugin-dbt:LATEST
#plugin-debezium:io.kestra.plugin:plugin-debezium-db2:LATEST
#plugin-debezium:io.kestra.plugin:plugin-debezium-mongodb:LATEST
#plugin-debezium:io.kestra.plugin:plugin-debezium-mysql:LATEST
#plugin-debezium:io.kestra.plugin:plugin-debezium-oracle:LATEST
#plugin-debezium:io.kestra.plugin:plugin-debezium-postgres:LATEST
#plugin-debezium:io.kestra.plugin:plugin-debezium-sqlserver:LATEST
#plugin-docker:io.kestra.plugin:plugin-docker:LATEST
#plugin-elasticsearch:io.kestra.plugin:plugin-elasticsearch:LATEST
#plugin-fivetran:io.kestra.plugin:plugin-fivetran:LATEST
#plugin-fs:io.kestra.plugin:plugin-fs:LATEST
#plugin-gcp:io.kestra.plugin:plugin-gcp:LATEST
#plugin-gemini:io.kestra.plugin:plugin-gemini:LATEST
#plugin-git:io.kestra.plugin:plugin-git:LATEST
#plugin-github:io.kestra.plugin:plugin-github:LATEST
#plugin-googleworkspace:io.kestra.plugin:plugin-googleworkspace:LATEST
#plugin-graalvm:io.kestra.plugin:plugin-graalvm:LATEST
#plugin-graphql:io.kestra.plugin:plugin-graphql:LATEST
#plugin-hightouch:io.kestra.plugin:plugin-hightouch:LATEST
#plugin-hubspot:io.kestra.plugin:plugin-hubspot:LATEST
#plugin-huggingface:io.kestra.plugin:plugin-huggingface:LATEST
#plugin-influxdb:io.kestra.plugin:plugin-influxdb:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-as400:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-clickhouse:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-db2:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-duckdb:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-druid:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-mariadb:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-mysql:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-oracle:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-pinot:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-postgres:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-redshift:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-snowflake:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-sqlserver:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-trino:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-vectorwise:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-vertica:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-dremio:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-arrow-flight:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-sqlite:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-sybase:LATEST
#plugin-jenkins:io.kestra.plugin:plugin-jenkins:LATEST
#plugin-jira:io.kestra.plugin:plugin-jira:LATEST
#plugin-kafka:io.kestra.plugin:plugin-kafka:LATEST
#plugin-kestra:io.kestra.plugin:plugin-kestra:LATEST
#plugin-kubernetes:io.kestra.plugin:plugin-kubernetes:LATEST
#plugin-langchain4j:io.kestra.plugin:plugin-langchain4j:LATEST
#plugin-ldap:io.kestra.plugin:plugin-ldap:LATEST
#plugin-linear:io.kestra.plugin:plugin-linear:LATEST
#plugin-malloy:io.kestra.plugin:plugin-malloy:LATEST
#plugin-meilisearch:io.kestra.plugin:plugin-meilisearch:LATEST
#plugin-minio:io.kestra.plugin:plugin-minio:LATEST
#plugin-modal:io.kestra.plugin:plugin-modal:LATEST
#plugin-mongodb:io.kestra.plugin:plugin-mongodb:LATEST
#plugin-mqtt:io.kestra.plugin:plugin-mqtt:LATEST
#plugin-nats:io.kestra.plugin:plugin-nats:LATEST
#plugin-neo4j:io.kestra.plugin:plugin-neo4j:LATEST
#plugin-notifications:io.kestra.plugin:plugin-notifications:LATEST
#plugin-ollama:io.kestra.plugin:plugin-ollama:LATEST
#plugin-openai:io.kestra.plugin:plugin-openai:LATEST
#plugin-opensearch:io.kestra.plugin:plugin-opensearch:LATEST
#plugin-powerbi:io.kestra.plugin:plugin-powerbi:LATEST
#plugin-pulsar:io.kestra.plugin:plugin-pulsar:LATEST
#plugin-redis:io.kestra.plugin:plugin-redis:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-go:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-groovy:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-jbang:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-julia:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-jython:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-nashorn:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-node:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-powershell:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-python:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-r:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-ruby:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-shell:LATEST
#plugin-serdes:io.kestra.plugin:plugin-serdes:LATEST
#plugin-servicenow:io.kestra.plugin:plugin-servicenow:LATEST
#plugin-sifflet:io.kestra.plugin:plugin-sifflet:LATEST
#plugin-singer:io.kestra.plugin:plugin-singer:LATEST
#plugin-soda:io.kestra.plugin:plugin-soda:LATEST
#plugin-solace:io.kestra.plugin:plugin-solace:LATEST
#plugin-spark:io.kestra.plugin:plugin-spark:LATEST
#plugin-sqlmesh:io.kestra.plugin:plugin-sqlmesh:LATEST
#plugin-surrealdb:io.kestra.plugin:plugin-surrealdb:LATEST
#plugin-terraform:io.kestra.plugin:plugin-terraform:LATEST
#plugin-transform:io.kestra.plugin:plugin-transform-grok:LATEST
#plugin-transform:io.kestra.plugin:plugin-transform-json:LATEST
#plugin-tika:io.kestra.plugin:plugin-tika:LATEST
#plugin-weaviate:io.kestra.plugin:plugin-weaviate:LATEST
#plugin-zendesk:io.kestra.plugin:plugin-zendesk:LATEST
#plugin-typesense:io.kestra.plugin:plugin-typesense:LATEST
#storage-azure:io.kestra.storage:storage-azure:LATEST
#storage-gcs:io.kestra.storage:storage-gcs:LATEST
#storage-minio:io.kestra.storage:storage-minio:LATEST
#storage-s3:io.kestra.storage:storage-s3:LATEST
#
#io.kestra.plugin:plugin-airbyte:LATEST
#io.kestra.plugin:plugin-airflow:LATEST
#io.kestra.plugin:plugin-amqp:LATEST
#io.kestra.plugin:plugin-ansible:LATEST
#io.kestra.plugin:plugin-aws:LATEST
#io.kestra.plugin:plugin-azure:LATEST
#io.kestra.plugin:plugin-cassandra:LATEST
#io.kestra.plugin:plugin-cloudquery:LATEST
#io.kestra.plugin:plugin-compress:LATEST
#io.kestra.plugin:plugin-couchbase:LATEST
#io.kestra.plugin:plugin-crypto:LATEST
#io.kestra.plugin:plugin-databricks:LATEST
#io.kestra.plugin:plugin-datahub:LATEST
#io.kestra.plugin:plugin-dataform:LATEST
#io.kestra.plugin:plugin-dbt:LATEST
#io.kestra.plugin:plugin-debezium-db2:LATEST
#io.kestra.plugin:plugin-debezium-mongodb:LATEST
#io.kestra.plugin:plugin-debezium-mysql:LATEST
#io.kestra.plugin:plugin-debezium-oracle:LATEST
#io.kestra.plugin:plugin-debezium-postgres:LATEST
#io.kestra.plugin:plugin-debezium-sqlserver:LATEST
#io.kestra.plugin:plugin-docker:LATEST
#io.kestra.plugin:plugin-elasticsearch:LATEST
#io.kestra.plugin:plugin-fivetran:LATEST
#io.kestra.plugin:plugin-fs:LATEST
#io.kestra.plugin:plugin-gcp:LATEST
#io.kestra.plugin:plugin-git:LATEST
#io.kestra.plugin:plugin-github:LATEST
#io.kestra.plugin:plugin-googleworkspace:LATEST
#io.kestra.plugin:plugin-hightouch:LATEST
#io.kestra.plugin:plugin-hubspot:LATEST
#io.kestra.plugin:plugin-jdbc-as400:LATEST
#io.kestra.plugin:plugin-jdbc-clickhouse:LATEST
#io.kestra.plugin:plugin-jdbc-db2:LATEST
#io.kestra.plugin:plugin-jdbc-duckdb:LATEST
#io.kestra.plugin:plugin-jdbc-druid:LATEST
#io.kestra.plugin:plugin-jdbc-mysql:LATEST
#io.kestra.plugin:plugin-jdbc-oracle:LATEST
#io.kestra.plugin:plugin-jdbc-pinot:LATEST
#io.kestra.plugin:plugin-jdbc-postgres:LATEST
#io.kestra.plugin:plugin-jdbc-redshift:LATEST
#io.kestra.plugin:plugin-jdbc-snowflake:LATEST
#io.kestra.plugin:plugin-jdbc-sqlserver:LATEST
#io.kestra.plugin:plugin-jdbc-trino:LATEST
#io.kestra.plugin:plugin-jdbc-vectorwise:LATEST
#io.kestra.plugin:plugin-jdbc-vertica:LATEST
#io.kestra.plugin:plugin-jdbc-dremio:LATEST
#io.kestra.plugin:plugin-jdbc-arrow-flight:LATEST
#io.kestra.plugin:plugin-jdbc-sqlite:LATEST
#io.kestra.plugin:plugin-jdbc-sybase:LATEST
#io.kestra.plugin:plugin-jira:LATEST
#io.kestra.plugin:plugin-kafka:LATEST
#io.kestra.plugin:plugin-kubernetes:LATEST
#io.kestra.plugin:plugin-ldap:LATEST
#io.kestra.plugin:plugin-linear:LATEST
#io.kestra.plugin:plugin-malloy:LATEST
#io.kestra.plugin:plugin-meilisearch:LATEST
#io.kestra.plugin:plugin-minio:LATEST
#io.kestra.plugin:plugin-modal:LATEST
#io.kestra.plugin:plugin-mongodb:LATEST
#io.kestra.plugin:plugin-mqtt:LATEST
#io.kestra.plugin:plugin-nats:LATEST
#io.kestra.plugin:plugin-neo4j:LATEST
#io.kestra.plugin:plugin-notifications:LATEST
#io.kestra.plugin:plugin-openai:LATEST
#io.kestra.plugin:plugin-powerbi:LATEST
#io.kestra.plugin:plugin-pulsar:LATEST
#io.kestra.plugin:plugin-redis:LATEST
#io.kestra.plugin:plugin-script-groovy:LATEST
#io.kestra.plugin:plugin-script-jbang:LATEST
#io.kestra.plugin:plugin-script-julia:LATEST
#io.kestra.plugin:plugin-script-jython:LATEST
#io.kestra.plugin:plugin-script-nashorn:LATEST
#io.kestra.plugin:plugin-script-node:LATEST
#io.kestra.plugin:plugin-script-powershell:LATEST
#io.kestra.plugin:plugin-script-python:LATEST
#io.kestra.plugin:plugin-script-r:LATEST
#io.kestra.plugin:plugin-script-ruby:LATEST
#io.kestra.plugin:plugin-script-shell:LATEST
#io.kestra.plugin:plugin-serdes:LATEST
#io.kestra.plugin:plugin-servicenow:LATEST
#io.kestra.plugin:plugin-singer:LATEST
#io.kestra.plugin:plugin-soda:LATEST
#io.kestra.plugin:plugin-solace:LATEST
#io.kestra.plugin:plugin-spark:LATEST
#io.kestra.plugin:plugin-sqlmesh:LATEST
#io.kestra.plugin:plugin-surrealdb:LATEST
#io.kestra.plugin:plugin-terraform:LATEST
#io.kestra.plugin:plugin-transform-grok:LATEST
#io.kestra.plugin:plugin-transform-json:LATEST
#io.kestra.plugin:plugin-tika:LATEST
#io.kestra.plugin:plugin-weaviate:LATEST
#io.kestra.plugin:plugin-zendesk:LATEST
#io.kestra.storage:storage-azure:LATEST
#io.kestra.storage:storage-gcs:LATEST
#io.kestra.storage:storage-minio:LATEST
#io.kestra.storage:storage-s3:LATEST
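Each entry above is a Maven-style coordinate, groupId:artifactId:version (the first list additionally prefixes the plugin repository name); commented-out entries are simply not installed. As a hedged, minimal sketch, a single coordinate from this list can be installed with the same CLI command the Docker image uses, assuming a locally built ./kestra executable and that LATEST resolves to the current build version:

```bash
# Hedged sketch: install one plugin from the list above with the Kestra CLI.
# This mirrors the `kestra plugins install ${KESTRA_PLUGINS}` step in the Dockerfile diff below;
# the coordinate and local binary path are illustrative.
./kestra plugins install io.kestra.plugin:plugin-kafka:LATEST
```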
@@ -1 +0,0 @@
**/*.*
@@ -16,9 +16,8 @@ RUN apt-get update -y && \
if [ -n "${APT_PACKAGES}" ]; then apt-get install -y --no-install-recommends ${APT_PACKAGES}; fi && \
apt-get clean && \
rm -rf /var/lib/apt/lists/* /var/tmp/* /tmp/* && \
curl -LsSf https://astral.sh/uv/0.6.17/install.sh | sh && mv /root/.local/bin/uv /bin && mv /root/.local/bin/uvx /bin && \
if [ -n "${KESTRA_PLUGINS}" ]; then /app/kestra plugins install ${KESTRA_PLUGINS} && rm -rf /tmp/*; fi && \
if [ -n "${PYTHON_LIBRARIES}" ]; then uv pip install --system ${PYTHON_LIBRARIES}; fi && \
if [ -n "${PYTHON_LIBRARIES}" ]; then pip install ${PYTHON_LIBRARIES}; fi && \
chown -R kestra:kestra /app

USER kestra
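The RUN step above is driven entirely by build arguments. A hedged sketch of building a customized image with those arguments follows; it assumes the Dockerfile declares APT_PACKAGES, PYTHON_LIBRARIES, and KESTRA_PLUGINS as ARGs (the declarations are outside this hunk), and the values shown are illustrative. The Makefile's build-docker target below passes arguments of the same shape.

```bash
# Hedged example: bake extra OS packages, Python libraries, and Kestra plugins into the image.
docker build \
  --build-arg="APT_PACKAGES=python3 python3-venv python-is-python3 python3-pip" \
  --build-arg="PYTHON_LIBRARIES=kestra" \
  --build-arg="KESTRA_PLUGINS=io.kestra.plugin:plugin-script-python:LATEST" \
  -t my-kestra:latest .
```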
Makefile (90 changed lines)
@@ -17,8 +17,6 @@ VERSION := $(shell ./gradlew properties -q | awk '/^version:/ {print $$2}')
GIT_COMMIT := $(shell git rev-parse --short HEAD)
GIT_BRANCH := $(shell git rev-parse --abbrev-ref HEAD)
DATE := $(shell date --rfc-3339=seconds)
PLUGIN_GIT_DIR ?= $(pwd)/..
PLUGIN_JARS_DIR ?= $(pwd)/locals/plugins

DOCKER_IMAGE = kestra/kestra
DOCKER_PATH = ./
@@ -69,7 +67,6 @@ install-plugins:
[[ $$plugin =~ ^#.* ]] && continue; \
PLUGINS_PATH="${KESTRA_INSTALL_DIR}/plugins"; \
CURRENT_PLUGIN=$${plugin/LATEST/"${VERSION}"}; \
CURRENT_PLUGIN=$$(echo $$CURRENT_PLUGIN | cut -d':' -f2-); \
PLUGIN_FILE="$$PLUGINS_PATH/$$(echo $$CURRENT_PLUGIN | awk -F':' '{print $$2"-"$$3}').jar"; \
echo "Installing Kestra plugin $$CURRENT_PLUGIN > ${KESTRA_INSTALL_DIR}/plugins"; \
if [ -f "$$PLUGIN_FILE" ]; then \
@@ -89,7 +86,7 @@ build-docker: build-exec
--compress \
--rm \
-f ./Dockerfile \
--build-arg="APT_PACKAGES=python3 python3-venv python-is-python3 python3-pip nodejs npm curl zip unzip jattach" \
--build-arg="APT_PACKAGES=python3 python3-venv python-is-python3 python3-pip nodejs npm curl zip unzip" \
--build-arg="PYTHON_LIBRARIES=kestra" \
-t ${DOCKER_IMAGE}:${VERSION} ${DOCKER_PATH} || exit 1 ;

@@ -176,88 +173,3 @@ start-standalone-postgres: kill --private-start-standalone-postgres health

start-standalone-local: kill --private-start-standalone-local health

#checkout all plugins
clone-plugins:
@echo "Using PLUGIN_GIT_DIR: $(PLUGIN_GIT_DIR)"
@mkdir -p "$(PLUGIN_GIT_DIR)"
@echo "Fetching repository list from GitHub..."
@REPOS=$$(gh repo list kestra-io -L 1000 --json name | jq -r .[].name | sort | grep "^plugin-"); \
for repo in $$REPOS; do \
if [[ $$repo == plugin-* ]]; then \
if [ -d "$(PLUGIN_GIT_DIR)/$$repo" ]; then \
echo "Skipping: $$repo (Already cloned)"; \
else \
echo "Cloning: $$repo using SSH..."; \
git clone "git@github.com:kestra-io/$$repo.git" "$(PLUGIN_GIT_DIR)/$$repo"; \
fi; \
fi; \
done
@echo "Done!"

# Pull every plugins in main or master branch
pull-plugins:
@echo "🔍 Pulling repositories in '$(PLUGIN_GIT_DIR)'..."
@for repo in "$(PLUGIN_GIT_DIR)"/*; do \
if [ -d "$$repo/.git" ]; then \
branch=$$(git -C "$$repo" rev-parse --abbrev-ref HEAD); \
if [[ "$$branch" == "master" || "$$branch" == "main" ]]; then \
echo "🔄 Pulling: $$(basename "$$repo") (branch: $$branch)"; \
git -C "$$repo" pull; \
else \
echo "❌ Skipping: $$(basename "$$repo") (Not on master or main branch, currently on $$branch)"; \
fi; \
fi; \
done
@echo "✅ Done pulling!"

# Update all plugins jar
build-plugins:
@echo "🔍 Scanning repositories in '$(PLUGIN_GIT_DIR)'..."
@MASTER_REPOS=(); \
for repo in "$(PLUGIN_GIT_DIR)"/*; do \
if [ -d "$$repo/.git" ]; then \
branch=$$(git -C "$$repo" rev-parse --abbrev-ref HEAD); \
if [[ "$$branch" == "master" || "$$branch" == "main" ]]; then \
MASTER_REPOS+=("$$repo"); \
else \
echo "❌ Skipping: $$(basename "$$repo") (Not on master or main branch)"; \
fi; \
fi; \
done; \
\
# === STEP 2: Update Repos on Master or Main Branch === \
echo "⬇️ Updating repositories on master or main branch..."; \
for repo in "$${MASTER_REPOS[@]}"; do \
echo "🔄 Updating: $$(basename "$$repo")"; \
git -C "$$repo" pull --rebase; \
done; \
\
# === STEP 3: Build with Gradle === \
echo "⚙️ Building repositories with Gradle..."; \
for repo in "$${MASTER_REPOS[@]}"; do \
echo "🔨 Building: $$(basename "$$repo")"; \
gradle clean build -x test shadowJar -p "$$repo"; \
done; \
\
# === STEP 4: Copy Latest JARs (Ignoring javadoc & sources) === \
echo "📦 Organizing built JARs..."; \
mkdir -p "$(PLUGIN_JARS_DIR)"; \
for repo in "$${MASTER_REPOS[@]}"; do \
REPO_NAME=$$(basename "$$repo"); \
\
JARS=($$(find "$$repo" -type f -name "plugin-*.jar" ! -name "*-javadoc.jar" ! -name "*-sources.jar")); \
if [ $${#JARS[@]} -eq 0 ]; then \
echo "⚠️ Warning: No valid plugin JARs found for $$REPO_NAME"; \
continue; \
fi; \
\
for jar in "$${JARS[@]}"; do \
JAR_NAME=$$(basename "$$jar"); \
BASE_NAME=$$(echo "$$JAR_NAME" | sed -E 's/(-[0-9]+.*)?\.jar$$//'); \
rm -f "$(PLUGIN_JARS_DIR)/$$BASE_NAME"-[0-9]*.jar; \
cp "$$jar" "$(PLUGIN_JARS_DIR)/"; \
echo "✅ Copied JAR: $$JAR_NAME"; \
done; \
done; \
\
echo "🎉 Done! All master and main branch repos updated, built, and organized."
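The install-plugins recipe above resolves each coordinate from the plugin list to a jar it expects on disk: it swaps LATEST for the Gradle version, drops the leading repository name with cut, and joins the artifactId and version with awk. A small standalone bash illustration of that string handling, with a sample version and coordinate (the real recipe reads both from Gradle and the .plugins file):

```bash
# Illustrative values; the Makefile derives VERSION from `./gradlew properties`.
VERSION="0.19.26"
plugin="plugin-jdbc:io.kestra.plugin:plugin-jdbc-postgres:LATEST"

CURRENT_PLUGIN=${plugin/LATEST/"$VERSION"}                  # substitute the concrete version
CURRENT_PLUGIN=$(echo "$CURRENT_PLUGIN" | cut -d':' -f2-)   # drop the repository prefix
JAR_NAME=$(echo "$CURRENT_PLUGIN" | awk -F':' '{print $2"-"$3}').jar

echo "$JAR_NAME"   # -> plugin-jdbc-postgres-0.19.26.jar
```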
README.md (43 changed lines)
@@ -19,18 +19,11 @@
<br />

<p align="center">
<a href="https://x.com/kestra_io"><img height="25" src="https://kestra.io/twitter.svg" alt="X(formerly Twitter)" /></a>
<a href="https://twitter.com/kestra_io"><img height="25" src="https://kestra.io/twitter.svg" alt="twitter" /></a>
<a href="https://www.linkedin.com/company/kestra/"><img height="25" src="https://kestra.io/linkedin.svg" alt="linkedin" /></a>
<a href="https://www.youtube.com/@kestra-io"><img height="25" src="https://kestra.io/youtube.svg" alt="youtube" /></a>
</p>

<p align="center">
<a href="https://trendshift.io/repositories/2714" target="_blank">
<img src="https://trendshift.io/api/badge/repositories/2714" alt="kestra-io%2Fkestra | Trendshift" width="250" height="55"/>
</a>
<a href="https://www.producthunt.com/posts/kestra?embed=true&utm_source=badge-top-post-badge&utm_medium=badge&utm_souce=badge-kestra" target="_blank"><img src="https://api.producthunt.com/widgets/embed-image/v1/top-post-badge.svg?post_id=612077&theme=light&period=daily&t=1740737506162" alt="Kestra - All-in-one automation & orchestration platform | Product Hunt" style="width: 250px; height: 54px;" width="250" height="54" /></a>
</p>

<p align="center">
<a href="https://go.kestra.io/video/product-overview" target="_blank">
<img src="https://kestra.io/startvideo.png" alt="Get started in 4 minutes with Kestra" width="640px" />
@@ -54,7 +47,7 @@ Kestra is an open-source, event-driven orchestration platform that makes both **
- **Structure & Resilience**: tame chaos and bring resilience to your workflows with **namespaces**, **labels**, **subflows**, **retries**, **timeout**, **error handling**, **inputs**, **outputs** that generate artifacts in the UI, **variables**, **conditional branching**, **advanced scheduling**, **event triggers**, **backfills**, **dynamic tasks**, **sequential and parallel tasks**, and skip tasks or triggers when needed by setting the flag `disabled` to `true`.

🧑‍💻 The YAML definition gets automatically adjusted any time you make changes to a workflow from the UI or via an API call. Therefore, the orchestration logic is **always managed declaratively in code**, even if you modify your workflows in other ways (UI, CI/CD, Terraform, API calls).
🧑‍💻 The YAML definition gets automatically adjusted any time you make changes to a workflow from the UI or via an API call. Therefore, the orchestration logic is **always managed declaratively in code**, even if you modify your workflows in other ways (UI, CI/CD, Terraform, API calls).

<p align="center">
@@ -81,27 +74,6 @@ docker run --pull=always --rm -it -p 8080:8080 --user=root \
-v /tmp:/tmp kestra/kestra:latest server local
```

If you're on Windows and use PowerShell:
```powershell
docker run --pull=always --rm -it -p 8080:8080 --user=root `
-v "/var/run/docker.sock:/var/run/docker.sock" `
-v "C:/Temp:/tmp" kestra/kestra:latest server local
```

If you're on Windows and use Command Prompt (CMD):
```cmd
docker run --pull=always --rm -it -p 8080:8080 --user=root ^
-v "/var/run/docker.sock:/var/run/docker.sock" ^
-v "C:/Temp:/tmp" kestra/kestra:latest server local
```

If you're on Windows and use WSL (Linux-based environment in Windows):
```bash
docker run --pull=always --rm -it -p 8080:8080 --user=root \
-v "/var/run/docker.sock:/var/run/docker.sock" \
-v "C:/Temp:/tmp" kestra/kestra:latest server local
```

Check our [Installation Guide](https://kestra.io/docs/installation) for other deployment options (Docker Compose, Podman, Kubernetes, AWS, GCP, Azure, and more).

Access the Kestra UI at [http://localhost:8080](http://localhost:8080) and start building your first flow!
@@ -170,7 +142,7 @@ Kestra provides an intuitive UI that allows you to interactively build and visua

- **Drag-and-Drop Interface:** add and rearrange tasks from the Topology Editor.
- **Real-Time Validation:** instant feedback on your workflow's syntax and structure to catch errors early.
- **Auto-Completion:** smart suggestions as you type to write flow code quickly and without syntax errors.
- **Auto-Completion:** smart suggestions as you type.
- **Live Topology View:** see your workflow as a Directed Acyclic Graph (DAG) that updates in real-time.

---
@@ -195,9 +167,9 @@ Create custom plugins to extend Kestra's capabilities. Check out our [Plugin Dev
Stay connected and get support:

- **Slack:** Join our [Slack community](https://kestra.io/slack) to ask questions and share ideas.
- **LinkedIn:** Follow us on [LinkedIn](https://www.linkedin.com/company/kestra/) — next to Slack and GitHub, this is our main channel to share updates and product announcements.
- **YouTube:** Subscribe to our [YouTube channel](https://www.youtube.com/@kestra-io) for educational video content. We publish new videos every week!
- **X:** Follow us on [X](https://x.com/kestra_io) if you're still active there.
- **Twitter:** Follow us on [Twitter](https://twitter.com/kestra_io) for the latest updates.
- **YouTube:** Subscribe to our [YouTube channel](https://www.youtube.com/@kestra-io) for tutorials and webinars.
- **LinkedIn:** Connect with us on [LinkedIn](https://www.linkedin.com/company/kestra/).

---

@@ -206,9 +178,8 @@ Stay connected and get support:

We welcome contributions of all kinds!

- **Report Issues:** Found a bug or have a feature request? Open an [issue on GitHub](https://github.com/kestra-io/kestra/issues).
- **Contribute Code:** Check out our [Contributor Guide](https://kestra.io/docs/getting-started/contributing) for initial guidelines, and explore our [good first issues](https://go.kestra.io/contributing) for beginner-friendly tasks to tackle first.
- **Contribute Code:** Check out our [Contributor Guide](https://kestra.io/docs/getting-started/contributing) to start contributing.
- **Develop Plugins:** Build and share plugins using our [Plugin Developer Guide](https://kestra.io/docs/plugin-developer-guide/).
- **Contribute to our Docs:** Contribute edits or updates to keep our [documentation](https://github.com/kestra-io/docs) top-notch.

---
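After starting the server with one of the `docker run` commands shown in this README diff, a quick hedged check that the server answers before opening the UI (8080 is the published port; the URL is the one the README points to):

```bash
# Prints the HTTP status code returned by the local Kestra UI/API; expect 200 once the server is up.
curl -fsS -o /dev/null -w "%{http_code}\n" http://localhost:8080/
```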
SECURITY.md (33 changed lines)
@@ -1,33 +0,0 @@
# Security Policy

## Supported Versions

We provide security updates for the following versions of Kestra:

- The `latest` release
- Up to two previous minor versions released as a backport upon customer request.

If you are using an unsupported version, we recommend upgrading to the `latest` version to receive security fixes.

## Reporting a Vulnerability

If you discover a security vulnerability in Kestra, please report it to us privately to ensure a responsible disclosure process. You can contact our security team at:

**security@kestra.io**

### Guidelines for Reporting
- Provide a detailed description of the issue, including steps to reproduce it if possible.
- Do not disclose the vulnerability publicly until we have confirmed and patched the issue.
- If you believe the issue has critical severity, please indicate so in your report to help us prioritize.

## Our Commitment

- We will acknowledge your report within **2 business days**.
- We will work to verify and address the issue as quickly as possible.
- Once the issue is resolved, we will notify you of the fix.

## Acknowledgments

We are happy to credit those who report vulnerabilities responsibly in our release notes, unless you prefer to remain anonymous. If you would like to be acknowledged, please include this in your report.

Thank you for helping to make Kestra more secure!
build.gradle (354 changed lines)
@@ -1,5 +1,4 @@
|
||||
import net.e175.klaus.zip.ZipPrefixer
|
||||
import org.owasp.dependencycheck.gradle.extension.AnalyzerExtension
|
||||
|
||||
buildscript {
|
||||
repositories {
|
||||
@@ -16,28 +15,30 @@ plugins {
|
||||
id "java"
|
||||
id 'java-library'
|
||||
id "idea"
|
||||
id "com.gradleup.shadow" version "8.3.8"
|
||||
id "com.github.johnrengelman.shadow" version "8.1.1"
|
||||
id "application"
|
||||
|
||||
// test
|
||||
id "com.adarshr.test-logger" version "4.0.0"
|
||||
id "org.sonarqube" version "6.2.0.5505"
|
||||
id "org.sonarqube" version "5.1.0.4882"
|
||||
id 'jacoco-report-aggregation'
|
||||
|
||||
// helper
|
||||
id "com.github.ben-manes.versions" version "0.52.0"
|
||||
id "com.github.ben-manes.versions" version "0.51.0"
|
||||
|
||||
// front
|
||||
id 'com.github.node-gradle.node' version '7.1.0'
|
||||
id 'org.siouan.frontend-jdk17' version '8.1.0' apply false
|
||||
|
||||
// release
|
||||
id 'net.researchgate.release' version '3.1.0'
|
||||
id "com.gorylenko.gradle-git-properties" version "2.5.0"
|
||||
id "io.github.gradle-nexus.publish-plugin" version "2.0.0"
|
||||
id 'net.researchgate.release' version '3.0.2'
|
||||
id "com.gorylenko.gradle-git-properties" version "2.4.2"
|
||||
id 'signing'
|
||||
id "com.vanniktech.maven.publish" version "0.33.0"
|
||||
id 'ru.vyarus.pom' version '3.0.0' apply false
|
||||
id 'ru.vyarus.github-info' version '2.0.0' apply false
|
||||
|
||||
// OWASP dependency check
|
||||
id "org.owasp.dependencycheck" version "12.1.3" apply false
|
||||
id "org.owasp.dependencycheck" version "10.0.4" apply false
|
||||
}
|
||||
|
||||
idea {
|
||||
@@ -50,17 +51,9 @@ idea {
|
||||
/**********************************************************************************************************************\
|
||||
* Main
|
||||
**********************************************************************************************************************/
|
||||
final mainClassName = "io.kestra.cli.App"
|
||||
final targetJavaVersion = JavaVersion.VERSION_21
|
||||
|
||||
application {
|
||||
mainClass = mainClassName
|
||||
}
|
||||
|
||||
java {
|
||||
sourceCompatibility = targetJavaVersion
|
||||
targetCompatibility = targetJavaVersion
|
||||
}
|
||||
mainClassName = "io.kestra.cli.App"
|
||||
sourceCompatibility = 21
|
||||
targetCompatibility = 21
|
||||
|
||||
dependencies {
|
||||
implementation project(":cli")
|
||||
@@ -72,12 +65,10 @@ dependencies {
|
||||
**********************************************************************************************************************/
|
||||
allprojects {
|
||||
if (it.name != 'platform') {
|
||||
group = "io.kestra"
|
||||
group "io.kestra"
|
||||
|
||||
java {
|
||||
sourceCompatibility = targetJavaVersion
|
||||
targetCompatibility = targetJavaVersion
|
||||
}
|
||||
sourceCompatibility = 21
|
||||
targetCompatibility = 21
|
||||
|
||||
repositories {
|
||||
mavenCentral()
|
||||
@@ -121,7 +112,6 @@ allprojects {
|
||||
micronaut "io.micronaut.micrometer:micronaut-micrometer-registry-prometheus"
|
||||
micronaut "io.micronaut:micronaut-http-client"
|
||||
micronaut "io.micronaut.reactor:micronaut-reactor-http-client"
|
||||
micronaut "io.micronaut.tracing:micronaut-tracing-opentelemetry-http"
|
||||
|
||||
// logs
|
||||
implementation "org.slf4j:slf4j-api"
|
||||
@@ -132,9 +122,6 @@ allprojects {
|
||||
implementation group: 'org.slf4j', name: 'jcl-over-slf4j'
|
||||
implementation group: 'org.fusesource.jansi', name: 'jansi'
|
||||
|
||||
// OTEL
|
||||
implementation "io.opentelemetry:opentelemetry-exporter-otlp"
|
||||
|
||||
// jackson
|
||||
implementation group: 'com.fasterxml.jackson.core', name: 'jackson-core'
|
||||
implementation group: 'com.fasterxml.jackson.core', name: 'jackson-databind'
|
||||
@@ -163,13 +150,11 @@ allprojects {
|
||||
* Test
|
||||
**********************************************************************************************************************/
|
||||
subprojects {
|
||||
if (it.name != 'platform' && it.name != 'jmh-benchmarks') {
|
||||
if (it.name != 'platform') {
|
||||
apply plugin: "com.adarshr.test-logger"
|
||||
|
||||
java {
|
||||
sourceCompatibility = targetJavaVersion
|
||||
targetCompatibility = targetJavaVersion
|
||||
}
|
||||
sourceCompatibility = 21
|
||||
targetCompatibility = 21
|
||||
|
||||
dependencies {
|
||||
// Platform
|
||||
@@ -195,15 +180,13 @@ subprojects {
|
||||
testImplementation 'org.hamcrest:hamcrest-library'
|
||||
testImplementation 'org.exparity:hamcrest-date'
|
||||
|
||||
//assertj
|
||||
testImplementation 'org.assertj:assertj-core'
|
||||
}
|
||||
|
||||
test {
|
||||
useJUnitPlatform()
|
||||
|
||||
// set Xmx for test workers
|
||||
maxHeapSize = '4g'
|
||||
maxHeapSize = "4048m"
|
||||
|
||||
// configure en_US default locale for tests
|
||||
systemProperty 'user.language', 'en'
|
||||
@@ -214,8 +197,8 @@ subprojects {
|
||||
environment 'SECRET_WEBHOOK_KEY', "secretKey".bytes.encodeBase64().toString()
|
||||
environment 'SECRET_NON_B64_SECRET', "some secret value"
|
||||
environment 'SECRET_PASSWORD', "cGFzc3dvcmQ="
|
||||
environment 'ENV_TEST1', "true"
|
||||
environment 'ENV_TEST2', "Pass by env"
|
||||
environment 'KESTRA_TEST1', "true"
|
||||
environment 'KESTRA_TEST2', "Pass by env"
|
||||
}
|
||||
|
||||
testlogger {
|
||||
@@ -266,7 +249,7 @@ subprojects {
|
||||
* Allure Reports
|
||||
**********************************************************************************************************************/
|
||||
subprojects {
|
||||
if (it.name != 'platform' && it.name != 'jmh-benchmarks') {
|
||||
if (it.name != 'platform') {
|
||||
dependencies {
|
||||
testImplementation platform("io.qameta.allure:allure-bom")
|
||||
testImplementation "io.qameta.allure:allure-junit5"
|
||||
@@ -280,7 +263,7 @@ subprojects {
|
||||
}
|
||||
|
||||
dependencies {
|
||||
agent "org.aspectj:aspectjweaver:1.9.24"
|
||||
agent "org.aspectj:aspectjweaver:1.9.22.1"
|
||||
}
|
||||
|
||||
test {
|
||||
@@ -293,7 +276,7 @@ subprojects {
|
||||
* Jacoco
|
||||
**********************************************************************************************************************/
|
||||
subprojects {
|
||||
if (it.name != 'platform' && it.name != 'jmh-benchmarks') {
|
||||
if (it.name != 'platform') {
|
||||
apply plugin: 'jacoco'
|
||||
|
||||
test {
|
||||
@@ -344,17 +327,12 @@ dependencyCheck {
|
||||
failBuildOnCVSS = 7
|
||||
|
||||
// disable the .NET assembly analyzer as otherwise it wants to analyze EXE file
|
||||
analyzers(new Action<AnalyzerExtension>() {
|
||||
@Override
|
||||
void execute(AnalyzerExtension analyzerExtension) {
|
||||
analyzerExtension.assemblyEnabled = false
|
||||
}
|
||||
})
|
||||
analyzers {
|
||||
assemblyEnabled = false
|
||||
}
|
||||
|
||||
// configure a suppression file
|
||||
suppressionFile = "$projectDir/owasp-dependency-suppressions.xml"
|
||||
|
||||
nvd.apiKey = System.getenv("NVD_API_KEY")
|
||||
}
|
||||
|
||||
/**********************************************************************************************************************\
|
||||
@@ -362,7 +340,7 @@ dependencyCheck {
|
||||
**********************************************************************************************************************/
|
||||
allprojects {
|
||||
gradle.projectsEvaluated {
|
||||
tasks.withType(JavaCompile).configureEach {
|
||||
tasks.withType(JavaCompile) {
|
||||
options.encoding = "UTF-8"
|
||||
options.compilerArgs.add("-parameters")
|
||||
options.compilerArgs.add("-Xlint:all")
|
||||
@@ -371,7 +349,7 @@ allprojects {
|
||||
}
|
||||
}
|
||||
|
||||
tasks.withType(JavaCompile).configureEach {
|
||||
tasks.withType(JavaCompile) {
|
||||
options.encoding = "UTF-8"
|
||||
options.compilerArgs.add("-parameters")
|
||||
}
|
||||
@@ -412,30 +390,24 @@ distTar.dependsOn shadowJar
|
||||
startScripts.dependsOn shadowJar
|
||||
startShadowScripts.dependsOn jar
|
||||
shadowJar.dependsOn 'ui:assembleFrontend'
|
||||
shadowJar.dependsOn jar
|
||||
|
||||
/**********************************************************************************************************************\
|
||||
* Executable Jar
|
||||
**********************************************************************************************************************/
|
||||
def executableDir = layout.buildDirectory.dir("executable")
|
||||
def executable = layout.buildDirectory.file("executable/${project.name}-${project.version}").get().asFile
|
||||
def executableDir = file("${buildDir}/executable")
|
||||
def executable = file("${buildDir}/executable/${project.name}-${project.version}")
|
||||
|
||||
tasks.register('writeExecutableJar') {
|
||||
task writeExecutableJar() {
|
||||
group "build"
|
||||
description "Write an executable jar from shadow jar"
|
||||
dependsOn = [shadowJar]
|
||||
|
||||
final shadowJarFile = tasks.shadowJar.outputs.files.singleFile
|
||||
inputs.file shadowJarFile
|
||||
outputs.file executable
|
||||
outputs.cacheIf { true }
|
||||
|
||||
doFirst {
|
||||
executableDir.get().asFile.mkdirs()
|
||||
executableDir.mkdirs()
|
||||
}
|
||||
|
||||
doLast {
|
||||
executable.setBytes(shadowJarFile.readBytes())
|
||||
executable.setBytes(file("${buildDir}/libs/${project.name}-${project.version}.jar").readBytes())
|
||||
ByteArrayOutputStream executableBytes = new ByteArrayOutputStream()
|
||||
executableBytes.write("\n: <<END_OF_KESTRA_SELFRUN\r\n".getBytes())
|
||||
executableBytes.write(file("gradle/jar/selfrun.bat").readBytes())
|
||||
@@ -447,13 +419,13 @@ tasks.register('writeExecutableJar') {
|
||||
}
|
||||
}
|
||||
|
||||
tasks.register('executableJar', Zip) {
|
||||
task executableJar(type: Zip) {
|
||||
group "build"
|
||||
description "Zip the executable jar"
|
||||
dependsOn = [writeExecutableJar]
|
||||
|
||||
archiveFileName = "${project.name}-${project.version}.zip"
|
||||
destinationDirectory = layout.buildDirectory.dir('archives')
|
||||
destinationDirectory = file("${buildDir}/archives")
|
||||
|
||||
from executableDir
|
||||
archiveClassifier.set(null)
|
||||
@@ -462,166 +434,118 @@ tasks.register('executableJar', Zip) {
|
||||
/**********************************************************************************************************************\
|
||||
* Standalone
|
||||
**********************************************************************************************************************/
|
||||
tasks.register('runLocal', JavaExec) {
|
||||
task runLocal(type: JavaExec) {
|
||||
group = "application"
|
||||
description = "Run Kestra as server local"
|
||||
classpath = project(":cli").sourceSets.main.runtimeClasspath
|
||||
mainClass = mainClassName
|
||||
environment 'MICRONAUT_ENVIRONMENTS', 'override'
|
||||
args 'server', 'local', '--plugins', 'local/plugins'
|
||||
}
|
||||
|
||||
tasks.register('runStandalone', JavaExec) {
|
||||
group = "application"
|
||||
description = "Run Kestra as server local"
|
||||
classpath = project(":cli").sourceSets.main.runtimeClasspath
|
||||
mainClass = mainClassName
|
||||
environment 'MICRONAUT_ENVIRONMENTS', 'override'
|
||||
args 'server', 'standalone', '--plugins', 'local/plugins'
|
||||
}
|
||||
|
||||
/**********************************************************************************************************************\
|
||||
* Publish
|
||||
**********************************************************************************************************************/
|
||||
subprojects {subProject ->
|
||||
|
||||
if (subProject.name != 'jmh-benchmarks' && subProject.name != rootProject.name) {
|
||||
apply plugin: 'signing'
|
||||
apply plugin: "com.vanniktech.maven.publish"
|
||||
|
||||
javadoc {
|
||||
options {
|
||||
locale = 'en_US'
|
||||
encoding = 'UTF-8'
|
||||
addStringOption("Xdoclint:none", "-quiet")
|
||||
}
|
||||
}
|
||||
|
||||
tasks.register('sourcesJar', Jar) {
|
||||
dependsOn = [':core:copyGradleProperties']
|
||||
dependsOn = [':ui:assembleFrontend']
|
||||
archiveClassifier.set('sources')
|
||||
from sourceSets.main.allSource
|
||||
}
|
||||
sourcesJar.dependsOn ':core:copyGradleProperties'
|
||||
sourcesJar.dependsOn ':ui:assembleFrontend'
|
||||
|
||||
tasks.register('javadocJar', Jar) {
|
||||
archiveClassifier.set('javadoc')
|
||||
from javadoc
|
||||
}
|
||||
|
||||
tasks.register('testsJar', Jar) {
|
||||
group = 'build'
|
||||
description = 'Build the tests jar'
|
||||
|
||||
archiveClassifier.set('tests')
|
||||
if (sourceSets.matching { it.name == 'test'}) {
|
||||
from sourceSets.named('test').get().output
|
||||
}
|
||||
}
|
||||
|
||||
//These modules should not be published
|
||||
def unpublishedModules = ["jdbc-mysql", "jdbc-postgres", "webserver"]
|
||||
if (subProject.name in unpublishedModules){
|
||||
return
|
||||
}
|
||||
|
||||
mavenPublishing {
|
||||
publishToMavenCentral(true)
|
||||
signAllPublications()
|
||||
|
||||
coordinates(
|
||||
"${rootProject.group}",
|
||||
subProject.name == "cli" ? rootProject.name : subProject.name,
|
||||
"${rootProject.version}"
|
||||
)
|
||||
|
||||
pom {
|
||||
name = project.name
|
||||
description = "${project.group}:${project.name}:${rootProject.version}"
|
||||
url = "https://github.com/kestra-io/${rootProject.name}"
|
||||
|
||||
licenses {
|
||||
license {
|
||||
name = "The Apache License, Version 2.0"
|
||||
url = "http://www.apache.org/licenses/LICENSE-2.0.txt"
|
||||
}
|
||||
}
|
||||
developers {
|
||||
developer {
|
||||
id = "tchiotludo"
|
||||
name = "Ludovic Dehon"
|
||||
email = "ldehon@kestra.io"
|
||||
}
|
||||
}
|
||||
scm {
|
||||
connection = 'scm:git:'
|
||||
url = "https://github.com/kestra-io/${rootProject.name}"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
afterEvaluate {
|
||||
publishing {
|
||||
publications {
|
||||
withType(MavenPublication).configureEach { publication ->
|
||||
|
||||
if (subProject.name == "platform") {
|
||||
// Clear all artifacts except the BOM
|
||||
publication.artifacts.clear()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (subProject.name == 'cli') {
|
||||
|
||||
/* Make sure the special publication is wired *after* every plugin */
|
||||
subProject.afterEvaluate {
|
||||
/* 1. Remove the default java component so Gradle stops expecting
|
||||
the standard cli-*.jar, sources, javadoc, etc. */
|
||||
components.removeAll { it.name == "java" }
|
||||
|
||||
/* 2. Replace the publication’s artifacts with shadow + exec */
|
||||
publishing.publications.withType(MavenPublication).configureEach { pub ->
|
||||
pub.artifacts.clear()
|
||||
|
||||
// main shadow JAR built at root
|
||||
pub.artifact(rootProject.tasks.named("shadowJar").get()) {
|
||||
extension = "jar"
|
||||
}
|
||||
|
||||
// executable ZIP built at root
|
||||
pub.artifact(rootProject.tasks.named("executableJar").get().archiveFile) {
|
||||
classifier = "exec"
|
||||
extension = "zip"
|
||||
}
|
||||
pub.artifact(tasks.named("sourcesJar").get())
|
||||
pub.artifact(tasks.named("javadocJar").get())
|
||||
|
||||
}
|
||||
|
||||
/* 3. Disable Gradle-module metadata for this publication to
|
||||
avoid the “artifact removed from java component” error. */
|
||||
tasks.withType(GenerateModuleMetadata).configureEach { it.enabled = false }
|
||||
|
||||
/* 4. Make every publish task in :cli wait for the two artifacts */
|
||||
tasks.matching { it.name.startsWith("publish") }.configureEach {
|
||||
dependsOn rootProject.tasks.named("shadowJar")
|
||||
dependsOn rootProject.tasks.named("executableJar")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
tasks.withType(GenerateModuleMetadata).configureEach {
|
||||
// Suppression this validation error as we want to enforce the Kestra platform
|
||||
suppressedValidationErrors.add('enforced-platform')
|
||||
nexusPublishing {
|
||||
repositoryDescription = "${project.group}:${rootProject.name}:${project.version}"
|
||||
useStaging = !project.version.endsWith("-SNAPSHOT")
|
||||
repositories {
|
||||
sonatype {
|
||||
nexusUrl.set(uri("https://s01.oss.sonatype.org/service/local/"))
|
||||
snapshotRepositoryUrl.set(uri("https://s01.oss.sonatype.org/content/repositories/snapshots/"))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
subprojects {
|
||||
apply plugin: "maven-publish"
|
||||
apply plugin: 'signing'
|
||||
apply plugin: 'ru.vyarus.pom'
|
||||
apply plugin: 'ru.vyarus.github-info'
|
||||
|
||||
javadoc {
|
||||
options {
|
||||
locale = 'en_US'
|
||||
encoding = 'UTF-8'
|
||||
addStringOption("Xdoclint:none", "-quiet")
|
||||
}
|
||||
}
|
||||
|
||||
task sourcesJar(type: Jar) {
|
||||
dependsOn = [':core:copyGradleProperties']
|
||||
dependsOn = [':ui:assembleFrontend']
|
||||
archiveClassifier.set('sources')
|
||||
from sourceSets.main.allSource
|
||||
}
|
||||
sourcesJar.dependsOn ':core:copyGradleProperties'
|
||||
sourcesJar.dependsOn ':ui:assembleFrontend'
|
||||
|
||||
task javadocJar(type: Jar) {
|
||||
archiveClassifier.set('javadoc')
|
||||
from javadoc
|
||||
}
|
||||
|
||||
task testsJar(type: Jar) {
|
||||
archiveClassifier.set('tests')
|
||||
from sourceSets.test.output
|
||||
}
|
||||
|
||||
github {
|
||||
user 'kestra-io'
|
||||
license 'Apache'
|
||||
repository 'kestra'
|
||||
site 'https://kestra.io'
|
||||
}
|
||||
|
||||
maven.pom {
|
||||
description = 'The modern, scalable orchestrator & scheduler open source platform'
|
||||
|
||||
developers {
|
||||
developer {
|
||||
id = "tchiotludo"
|
||||
name = "Ludovic Dehon"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
publishing {
|
||||
publications {
|
||||
sonatypePublication(MavenPublication) {
|
||||
version project.version
|
||||
|
||||
if (project.name.contains('cli')) {
|
||||
groupId "io.kestra"
|
||||
artifactId "kestra"
|
||||
|
||||
artifact shadowJar
|
||||
artifact executableJar
|
||||
} else if (project.name.contains('platform')){
|
||||
groupId project.group
|
||||
artifactId project.name
|
||||
} else {
|
||||
from components.java
|
||||
|
||||
groupId project.group
|
||||
artifactId project.name
|
||||
|
||||
artifact sourcesJar
|
||||
artifact javadocJar
|
||||
artifact testsJar
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
signing {
|
||||
// only sign JARs that we publish to Sonatype
|
||||
required { gradle.taskGraph.hasTask("publishSonatypePublicationPublicationToSonatypeRepository") }
|
||||
sign publishing.publications.sonatypePublication
|
||||
}
|
||||
|
||||
tasks.withType(GenerateModuleMetadata).configureEach {
|
||||
// Suppression this validation error as we want to enforce the Kestra platform
|
||||
suppressedValidationErrors.add('enforced-platform')
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**********************************************************************************************************************\
|
||||
@@ -638,12 +562,4 @@ release {
|
||||
git {
|
||||
requireBranch.set('develop')
|
||||
}
|
||||
|
||||
// Dynamically set properties with default values
|
||||
failOnSnapshotDependencies = providers.gradleProperty("release.failOnSnapshotDependencies")
|
||||
.map(val -> Boolean.parseBoolean(val))
|
||||
.getOrElse(true)
|
||||
|
||||
pushReleaseVersionBranch = providers.gradleProperty("release.pushReleaseVersionBranch")
|
||||
.getOrElse(null)
|
||||
}
|
||||
|
||||
@@ -12,9 +12,18 @@ dependencies {
|
||||
implementation 'ch.qos.logback.contrib:logback-json-classic'
|
||||
implementation 'ch.qos.logback.contrib:logback-jackson'
|
||||
|
||||
// OTLP metrics
|
||||
implementation "io.micronaut.micrometer:micronaut-micrometer-registry-otlp"
|
||||
|
||||
// plugins
|
||||
implementation 'org.eclipse.aether:aether-api'
|
||||
implementation 'org.eclipse.aether:aether-spi'
|
||||
implementation 'org.eclipse.aether:aether-util'
|
||||
implementation 'org.eclipse.aether:aether-impl'
|
||||
implementation 'org.eclipse.aether:aether-connector-basic'
|
||||
implementation 'org.eclipse.aether:aether-transport-file'
|
||||
implementation 'org.eclipse.aether:aether-transport-http'
|
||||
implementation('org.apache.maven:maven-aether-provider') {
|
||||
// sisu dependency injector is not used
|
||||
exclude group: 'org.eclipse.sisu'
|
||||
}
|
||||
// aether still use javax.inject
|
||||
compileOnly 'javax.inject:javax.inject:1'
|
||||
|
||||
@@ -34,7 +43,4 @@ dependencies {
|
||||
implementation project(":storage-local")
|
||||
|
||||
implementation project(":webserver")
|
||||
|
||||
//test
|
||||
testImplementation "org.wiremock:wiremock-jetty12"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
package io.kestra.cli;
|
||||
|
||||
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
|
||||
|
||||
import io.micronaut.core.annotation.Nullable;
|
||||
import io.micronaut.http.HttpHeaders;
|
||||
import io.micronaut.http.HttpRequest;
|
||||
@@ -16,9 +14,6 @@ import io.micronaut.http.netty.body.NettyJsonHandler;
|
||||
import io.micronaut.json.JsonMapper;
|
||||
import jakarta.inject.Inject;
|
||||
import jakarta.inject.Named;
|
||||
import lombok.Builder;
|
||||
import lombok.Value;
|
||||
import lombok.extern.jackson.Jacksonized;
|
||||
import picocli.CommandLine;
|
||||
|
||||
import java.net.URISyntaxException;
|
||||
@@ -48,18 +43,8 @@ public abstract class AbstractApiCommand extends AbstractCommand {
|
||||
@Nullable
|
||||
private HttpClientConfiguration httpClientConfiguration;
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*/
|
||||
protected boolean loadExternalPlugins() {
|
||||
return false;
|
||||
}
|
||||
|
||||
protected DefaultHttpClient client() throws URISyntaxException {
|
||||
DefaultHttpClient defaultHttpClient = DefaultHttpClient.builder()
|
||||
.uri(server.toURI())
|
||||
.configuration(httpClientConfiguration != null ? httpClientConfiguration : new DefaultHttpClientConfiguration())
|
||||
.build();
|
||||
DefaultHttpClient defaultHttpClient = new DefaultHttpClient(server.toURI(), httpClientConfiguration != null ? httpClientConfiguration : new DefaultHttpClientConfiguration());
|
||||
MessageBodyHandlerRegistry defaultHandlerRegistry = defaultHttpClient.getHandlerRegistry();
|
||||
if (defaultHandlerRegistry instanceof ContextlessMessageBodyHandlerRegistry modifiableRegistry) {
|
||||
modifiableRegistry.add(MediaType.TEXT_JSON_TYPE, new NettyJsonHandler<>(JsonMapper.createDefault()));
|
||||
@@ -92,14 +77,6 @@ public abstract class AbstractApiCommand extends AbstractCommand {
|
||||
throw new IllegalArgumentException("'path' must be non-null and start with '/'");
|
||||
}
|
||||
|
||||
return tenantId == null ? "/api/v1/" + MAIN_TENANT + path : "/api/v1/" + tenantId + path;
|
||||
}
|
||||
|
||||
@Builder
|
||||
@Value
|
||||
@Jacksonized
|
||||
public static class UpdateResult {
|
||||
String id;
|
||||
String namespace;
|
||||
return tenantId == null ? "/api/v1" + path : "/api/v1/" + tenantId + path;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,17 +4,16 @@ import ch.qos.logback.classic.LoggerContext;
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import io.kestra.cli.commands.servers.ServerCommandInterface;
|
||||
import io.kestra.cli.services.StartupHookInterface;
|
||||
import io.kestra.core.plugins.PluginManager;
|
||||
import io.kestra.core.contexts.KestraContext;
|
||||
import io.kestra.core.plugins.PluginRegistry;
|
||||
import io.kestra.webserver.services.FlowAutoLoaderService;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import io.micronaut.context.env.yaml.YamlPropertySourceLoader;
|
||||
import io.micronaut.core.annotation.Introspected;
|
||||
import io.micronaut.http.uri.UriBuilder;
|
||||
import io.micronaut.management.endpoint.EndpointDefaultConfiguration;
|
||||
import io.micronaut.runtime.server.EmbeddedServer;
|
||||
import jakarta.inject.Provider;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.apache.http.client.utils.URIBuilder;
|
||||
import io.kestra.core.utils.Rethrow;
|
||||
import picocli.CommandLine;
|
||||
|
||||
@@ -27,13 +26,10 @@ import java.nio.file.Paths;
|
||||
import java.text.MessageFormat;
|
||||
import java.time.temporal.ChronoUnit;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.concurrent.Callable;
|
||||
import jakarta.inject.Inject;
|
||||
import picocli.CommandLine.Command;
|
||||
import picocli.CommandLine.Option;
|
||||
|
||||
@Command(
|
||||
@CommandLine.Command(
|
||||
versionProvider = VersionProvider.class,
|
||||
mixinStandardHelpOptions = true,
|
||||
showDefaultValues = true
|
||||
@@ -53,28 +49,22 @@ abstract public class AbstractCommand implements Callable<Integer> {
|
||||
@Inject
|
||||
private io.kestra.core.utils.VersionProvider versionProvider;
|
||||
|
||||
@Inject
|
||||
protected Provider<PluginRegistry> pluginRegistryProvider;
|
||||
|
||||
@Inject
|
||||
protected Provider<PluginManager> pluginManagerProvider;
|
||||
|
||||
private PluginRegistry pluginRegistry;
|
||||
|
||||
@Option(names = {"-v", "--verbose"}, description = "Change log level. Multiple -v options increase the verbosity.", showDefaultValue = CommandLine.Help.Visibility.NEVER)
|
||||
@CommandLine.Option(names = {"-v", "--verbose"}, description = "Change log level. Multiple -v options increase the verbosity.", showDefaultValue = CommandLine.Help.Visibility.NEVER)
|
||||
private boolean[] verbose = new boolean[0];
|
||||
|
||||
@Option(names = {"-l", "--log-level"}, description = "Change log level (values: ${COMPLETION-CANDIDATES})")
|
||||
@CommandLine.Option(names = {"-l", "--log-level"}, description = "Change log level (values: ${COMPLETION-CANDIDATES})")
|
||||
private LogLevel logLevel = LogLevel.INFO;
|
||||
|
||||
@Option(names = {"--internal-log"}, description = "Change also log level for internal log")
|
||||
@CommandLine.Option(names = {"--internal-log"}, description = "Change also log level for internal log")
|
||||
private boolean internalLog = false;
|
||||
|
||||
@Option(names = {"-c", "--config"}, description = "Path to a configuration file")
|
||||
@CommandLine.Option(names = {"-c", "--config"}, description = "Path to a configuration file")
|
||||
private Path config = Paths.get(System.getProperty("user.home"), ".kestra/config.yml");
|
||||
|
||||
@Option(names = {"-p", "--plugins"}, description = "Path to plugins directory")
|
||||
protected Path pluginsPath = Optional.ofNullable(System.getenv("KESTRA_PLUGINS_PATH")).map(Paths::get).orElse(null);
|
||||
@CommandLine.Option(names = {"-p", "--plugins"}, description = "Path to plugins directory")
|
||||
protected Path pluginsPath = System.getenv("KESTRA_PLUGINS_PATH") != null ? Paths.get(System.getenv("KESTRA_PLUGINS_PATH")) : null;
|
||||
|
||||
public enum LogLevel {
|
||||
TRACE,
|
||||
@@ -86,7 +76,7 @@ abstract public class AbstractCommand implements Callable<Integer> {
|
||||
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
Thread.currentThread().setName(this.getClass().getDeclaredAnnotation(Command.class).name());
|
||||
Thread.currentThread().setName(this.getClass().getDeclaredAnnotation(CommandLine.Command.class).name());
|
||||
startLogger();
|
||||
sendServerLog();
|
||||
if (this.startupHook != null) {
|
||||
@@ -94,14 +84,8 @@ abstract public class AbstractCommand implements Callable<Integer> {
|
||||
}
|
||||
|
||||
if (this.pluginsPath != null && loadExternalPlugins()) {
|
||||
pluginRegistry = pluginRegistryProvider.get();
|
||||
pluginRegistry = pluginRegistry();
|
||||
pluginRegistry.registerIfAbsent(pluginsPath);
|
||||
|
||||
// PluginManager must only be initialized if a registry is also instantiated
|
||||
if (isPluginManagerEnabled()) {
|
||||
PluginManager manager = pluginManagerProvider.get();
|
||||
manager.start();
|
||||
}
|
||||
}
|
||||
|
||||
startWebserver();
|
||||
@@ -118,15 +102,8 @@ abstract public class AbstractCommand implements Callable<Integer> {
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Specifies whether the {@link PluginManager} service must be initialized.
|
||||
* <p>
|
||||
* This method can be overridden by concrete commands.
|
||||
*
|
||||
* @return {@code true} if the {@link PluginManager} service must be initialized.
|
||||
*/
|
||||
protected boolean isPluginManagerEnabled() {
|
||||
return true;
|
||||
protected PluginRegistry pluginRegistry() {
|
||||
return KestraContext.getContext().getPluginRegistry(); // Lazy init
|
||||
}
|
||||
|
||||
private static String message(String message, Object... format) {
|
||||
@@ -180,6 +157,7 @@ abstract public class AbstractCommand implements Callable<Integer> {
|
||||
logger.getName().startsWith("io.kestra") &&
|
||||
!logger.getName().startsWith("io.kestra.ee.runner.kafka.services"))
|
||||
)
|
||||
|| logger.getName().startsWith("flow")
|
||||
)
|
||||
.forEach(
|
||||
logger -> logger.setLevel(ch.qos.logback.classic.Level.valueOf(this.logLevel.name()))
|
||||
@@ -205,9 +183,9 @@ abstract public class AbstractCommand implements Callable<Integer> {
|
||||
if (this.endpointConfiguration.getPort().isPresent()) {
|
||||
URI endpoint = null;
|
||||
try {
|
||||
endpoint = UriBuilder.of(server.getURL().toURI())
|
||||
.port(this.endpointConfiguration.getPort().get())
|
||||
.path("/health")
|
||||
endpoint = new URIBuilder(server.getURL().toURI())
|
||||
.setPort(this.endpointConfiguration.getPort().get())
|
||||
.setPath("/health")
|
||||
.build();
|
||||
} catch (URISyntaxException e) {
|
||||
e.printStackTrace();
|
||||
@@ -229,12 +207,10 @@ abstract public class AbstractCommand implements Callable<Integer> {
|
||||
return false;
|
||||
}
|
||||
|
||||
protected void shutdownHook(boolean logShutdown, Rethrow.RunnableChecked<Exception> run) {
|
||||
protected void shutdownHook(Rethrow.RunnableChecked<Exception> run) {
|
||||
Runtime.getRuntime().addShutdownHook(new Thread(
|
||||
() -> {
|
||||
if (logShutdown) {
|
||||
log.warn("Receiving shutdown ! Try to graceful exit");
|
||||
}
|
||||
log.warn("Receiving shutdown ! Try to graceful exit");
|
||||
try {
|
||||
run.run();
|
||||
} catch (Exception e) {
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
package io.kestra.cli;
|
||||
|
||||
import io.kestra.cli.commands.flows.FlowValidateCommand;
|
||||
import io.kestra.core.models.validations.ModelValidator;
|
||||
import io.kestra.core.models.validations.ValidateConstraintViolation;
|
||||
import io.kestra.core.serializers.YamlParser;
|
||||
import io.kestra.core.serializers.YamlFlowParser;
|
||||
import io.micronaut.core.type.Argument;
|
||||
import io.micronaut.http.HttpRequest;
|
||||
import io.micronaut.http.MediaType;
|
||||
@@ -31,12 +32,6 @@ public abstract class AbstractValidateCommand extends AbstractApiCommand {
|
||||
@CommandLine.Parameters(index = "0", description = "the directory containing files to check")
|
||||
protected Path directory;
|
||||
|
||||
/** {@inheritDoc} **/
|
||||
@Override
|
||||
protected boolean loadExternalPlugins() {
|
||||
return local;
|
||||
}
|
||||
|
||||
public static void handleException(ConstraintViolationException e, String resource) {
|
||||
stdErr("\t@|fg(red) Unable to parse {0} due to the following error(s):|@", resource);
|
||||
e.getConstraintViolations()
|
||||
@@ -68,19 +63,19 @@ public abstract class AbstractValidateCommand extends AbstractApiCommand {
|
||||
public static String buildYamlBody(Path directory) throws IOException {
|
||||
try(var files = Files.walk(directory)) {
|
||||
return files.filter(Files::isRegularFile)
|
||||
.filter(YamlParser::isValidExtension)
|
||||
.filter(YamlFlowParser::isValidExtension)
|
||||
.map(throwFunction(path -> Files.readString(path, Charset.defaultCharset())))
|
||||
.collect(Collectors.joining("\n---\n"));
|
||||
}
|
||||
}
// bug in micronaut, we can't inject ModelValidator, so we inject from implementation
// bug in micronaut, we can't inject YamlFlowParser & ModelValidator, so we inject from implementation
public Integer call(
Class<?> cls,
YamlFlowParser yamlFlowParser,
ModelValidator modelValidator,
Function<Object, String> identity,
Function<Object, List<String>> warningsFunction,
Function<Object, List<String>> infosFunction
Function<Object, List<String>> warningsFunction
) throws Exception {
super.call();

@@ -90,16 +85,14 @@ public abstract class AbstractValidateCommand extends AbstractApiCommand {
if(this.local) {
try(var files = Files.walk(directory)) {
files.filter(Files::isRegularFile)
.filter(YamlParser::isValidExtension)
.filter(YamlFlowParser::isValidExtension)
.forEach(path -> {
try {
Object parse = YamlParser.parse(path.toFile(), cls);
Object parse = yamlFlowParser.parse(path.toFile(), cls);
modelValidator.validate(parse);
stdOut("@|green \u2713|@ - " + identity.apply(parse));
List<String> warnings = warningsFunction.apply(parse);
warnings.forEach(warning -> stdOut("@|bold,yellow \u26A0|@ - " + warning));
List<String> infos = infosFunction.apply(parse);
infos.forEach(info -> stdOut("@|bold,blue \u2139|@ - " + info));
} catch (ConstraintViolationException e) {
stdErr("@|red \u2718|@ - " + path);
AbstractValidateCommand.handleException(e, clsName);

@@ -2,7 +2,6 @@ package io.kestra.cli;

import io.kestra.cli.commands.configs.sys.ConfigCommand;
import io.kestra.cli.commands.flows.FlowCommand;
import io.kestra.cli.commands.migrations.MigrationCommand;
import io.kestra.cli.commands.namespaces.NamespaceCommand;
import io.kestra.cli.commands.plugins.PluginCommand;
import io.kestra.cli.commands.servers.ServerCommand;
@@ -43,7 +42,6 @@ import java.util.concurrent.Callable;
SysCommand.class,
ConfigCommand.class,
NamespaceCommand.class,
MigrationCommand.class,
}
)
@Introspected
@@ -90,12 +88,11 @@ public class App implements Callable<Integer> {
.environments(Environment.CLI);

CommandLine cmd = new CommandLine(mainClass, CommandLine.defaultFactory());
continueOnParsingErrors(cmd);

CommandLine.ParseResult parseResult = cmd.parseArgs(args);
List<CommandLine> parsedCommands = parseResult.asCommandLineList();

CommandLine commandLine = parsedCommands.getLast();
CommandLine commandLine = parsedCommands.get(parsedCommands.size() - 1);
Class<?> cls = commandLine.getCommandSpec().userObject().getClass();

if (AbstractCommand.class.isAssignableFrom(cls)) {
@@ -117,17 +114,15 @@ public class App implements Callable<Integer> {
.stream()
.filter(argSpec -> ((Field) argSpec.userObject()).getName().equals("serverPort"))
.findFirst()
.ifPresent(argSpec -> properties.put("micronaut.server.port", argSpec.getValue()));
.ifPresent(argSpec -> {
properties.put("micronaut.server.port", argSpec.getValue());
});

builder.properties(properties);
}
return builder.build();
}

private static void continueOnParsingErrors(CommandLine cmd) {
cmd.getCommandSpec().parser().collectErrors(true);
}

@SuppressWarnings("unchecked")
private static <T> T getPropertiesFromMethod(Class<?> cls, String methodName, Object instance) {
try {
@@ -1,18 +1,28 @@
package io.kestra.cli.commands;

import io.kestra.cli.AbstractApiCommand;
import lombok.Builder;
import lombok.Value;
import lombok.extern.jackson.Jacksonized;
import picocli.CommandLine;

import java.nio.file.Path;

public abstract class AbstractServiceNamespaceUpdateCommand extends AbstractApiCommand {
@CommandLine.Parameters(index = "0", description = "The namespace to update")
@CommandLine.Parameters(index = "0", description = "the namespace to update")
public String namespace;

@CommandLine.Parameters(index = "1", description = "The directory containing flow files for current namespace")
@CommandLine.Parameters(index = "1", description = "the directory containing files for current namespace")
public Path directory;

@CommandLine.Option(names = {"--delete"}, negatable = true, description = "Whether missing should be deleted")
@CommandLine.Option(names = {"--delete"}, negatable = true, description = "if missing should be deleted")
public boolean delete = false;

@Builder
@Value
@Jacksonized
public static class UpdateResult {
String id;
String namespace;
}
}

@@ -8,7 +8,7 @@ import picocli.CommandLine;

@CommandLine.Command(
name = "configs",
description = "Manage configuration",
description = "handle configs",
mixinStandardHelpOptions = true,
subcommands = {
ConfigPropertiesCommand.class,

@@ -10,7 +10,7 @@ import picocli.CommandLine;

@CommandLine.Command(
name = "properties",
description = {"Display current configuration properties."}
description = {"Display actual configurations properties."}
)
@Slf4j
public class ConfigPropertiesCommand extends AbstractCommand {

@@ -10,7 +10,7 @@ import picocli.CommandLine;

@CommandLine.Command(
name = "flow",
description = "Manage flows",
description = "handle flows",
mixinStandardHelpOptions = true,
subcommands = {
FlowValidateCommand.class,
@@ -18,8 +18,6 @@ import picocli.CommandLine;
FlowNamespaceCommand.class,
FlowDotCommand.class,
FlowExportCommand.class,
FlowUpdateCommand.class,
FlowUpdatesCommand.class
}
)
@Slf4j

@@ -1,59 +0,0 @@
package io.kestra.cli.commands.flows;

import io.kestra.cli.AbstractApiCommand;
import io.kestra.cli.AbstractValidateCommand;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.MediaType;
import io.micronaut.http.MutableHttpRequest;
import io.micronaut.http.client.exceptions.HttpClientResponseException;
import io.micronaut.http.client.netty.DefaultHttpClient;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;

import java.nio.file.Files;
import java.nio.file.Path;

@CommandLine.Command(
name = "create",
description = "Create a single flow",
mixinStandardHelpOptions = true
)
@Slf4j
public class FlowCreateCommand extends AbstractApiCommand {
@CommandLine.Parameters(index = "0", description = "The file containing the flow")
public Path flowFile;

@SuppressWarnings("deprecation")
@Override
public Integer call() throws Exception {
super.call();

checkFile();

String body = Files.readString(flowFile);

try(DefaultHttpClient client = client()) {
MutableHttpRequest<String> request = HttpRequest
.POST(apiUri("/flows"), body).contentType(MediaType.APPLICATION_YAML);

client.toBlocking().retrieve(
this.requestOptions(request),
String.class
);

stdOut("Flow successfully created !");
} catch (HttpClientResponseException e){
AbstractValidateCommand.handleHttpException(e, "flow");
return 1;
}


return 0;
}

protected void checkFile() {
if (!Files.isRegularFile(flowFile)) {
throw new IllegalArgumentException("The file '" + flowFile.toFile().getAbsolutePath() + "' is not a file");
}
}
}
@@ -1,47 +0,0 @@
package io.kestra.cli.commands.flows;

import io.kestra.cli.AbstractApiCommand;
import io.kestra.cli.AbstractValidateCommand;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.MutableHttpRequest;
import io.micronaut.http.client.exceptions.HttpClientResponseException;
import io.micronaut.http.client.netty.DefaultHttpClient;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;

@CommandLine.Command(
name = "delete",
description = "Delete a single flow",
mixinStandardHelpOptions = true
)
@Slf4j
public class FlowDeleteCommand extends AbstractApiCommand {

@CommandLine.Parameters(index = "0", description = "The namespace of the flow")
public String namespace;

@CommandLine.Parameters(index = "1", description = "The ID of the flow")
public String id;

@SuppressWarnings("deprecation")
@Override
public Integer call() throws Exception {
super.call();

try(DefaultHttpClient client = client()) {
MutableHttpRequest<String> request = HttpRequest
.DELETE(apiUri("/flows/" + namespace + "/" + id ));

client.toBlocking().exchange(
this.requestOptions(request)
);

stdOut("Flow successfully deleted !");
} catch (HttpClientResponseException e){
AbstractValidateCommand.handleHttpException(e, "flow");
return 1;
}

return 0;
}
}
@@ -3,7 +3,7 @@ package io.kestra.cli.commands.flows;
import io.kestra.cli.AbstractCommand;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.hierarchies.GraphCluster;
import io.kestra.core.serializers.YamlParser;
import io.kestra.core.serializers.YamlFlowParser;
import io.kestra.core.services.Graph2DotService;
import io.kestra.core.utils.GraphUtils;
import io.micronaut.context.ApplicationContext;
@@ -15,21 +15,22 @@ import java.nio.file.Path;

@CommandLine.Command(
name = "dot",
description = "Generate a DOT graph from a file"
description = "generate a dot graph from a file"
)
@Slf4j
public class FlowDotCommand extends AbstractCommand {
@Inject
private ApplicationContext applicationContext;

@CommandLine.Parameters(index = "0", description = "The flow file to display")
@CommandLine.Parameters(index = "0", description = "the flow file to display")
private Path file;

@Override
public Integer call() throws Exception {
super.call();

Flow flow = YamlParser.parse(file.toFile(), Flow.class);
YamlFlowParser parser = applicationContext.getBean(YamlFlowParser.class);
Flow flow = parser.parse(file.toFile(), Flow.class);

GraphCluster graph = GraphUtils.of(flow, null);


@@ -3,7 +3,7 @@ package io.kestra.cli.commands.flows;
import io.kestra.cli.AbstractCommand;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.validations.ModelValidator;
import io.kestra.core.serializers.YamlParser;
import io.kestra.core.serializers.YamlFlowParser;
import jakarta.inject.Inject;
import picocli.CommandLine;

@@ -12,14 +12,17 @@ import java.nio.file.Path;

@CommandLine.Command(
name = "expand",
description = "Deprecated - expand a flow"
description = "deprecated - expand a flow"
)
@Deprecated
public class FlowExpandCommand extends AbstractCommand {

@CommandLine.Parameters(index = "0", description = "The flow file to expand")
@CommandLine.Parameters(index = "0", description = "the flow file to expand")
private Path file;

@Inject
private YamlFlowParser yamlFlowParser;

@Inject
private ModelValidator modelValidator;

@@ -28,7 +31,7 @@ public class FlowExpandCommand extends AbstractCommand {
super.call();
stdErr("Warning, this functionality is deprecated and will be removed at some point.");
String content = IncludeHelperExpander.expand(Files.readString(file), file.getParent());
Flow flow = YamlParser.parse(content, Flow.class);
Flow flow = yamlFlowParser.parse(content, Flow.class);
modelValidator.validate(flow);
stdOut(content);
return 0;

@@ -18,7 +18,7 @@ import java.nio.file.Path;

@CommandLine.Command(
name = "export",
description = "Export flows to a ZIP file",
description = "export flows to a zip file",
mixinStandardHelpOptions = true
)
@Slf4j
@@ -29,10 +29,10 @@ public class FlowExportCommand extends AbstractApiCommand {
@Inject
private ApplicationContext applicationContext;

@CommandLine.Option(names = {"--namespace"}, description = "The namespace of flows to export")
@CommandLine.Option(names = {"--namespace"}, description = "the namespace of flows to export")
public String namespace;

@CommandLine.Parameters(index = "0", description = "The directory to export the ZIP file to")
@CommandLine.Parameters(index = "0", description = "the directory to export the file to")
public Path directory;

@Override

@@ -27,19 +27,19 @@ import java.util.concurrent.TimeoutException;

@CommandLine.Command(
name = "test",
description = "Test a flow"
description = "test a flow"
)
@Slf4j
public class FlowTestCommand extends AbstractCommand {
@Inject
private ApplicationContext applicationContext;

@CommandLine.Parameters(index = "0", description = "The flow file to test")
@CommandLine.Parameters(index = "0", description = "the flow file to test")
private Path file;

@CommandLine.Parameters(
index = "1..*",
description = "The inputs to pass as key pair value separated by space, " +
description = "the inputs to pass as key pair value separated by space, " +
"for input type file, you need to pass an absolute path."
)
private List<String> inputs = new ArrayList<>();

@@ -1,64 +0,0 @@
package io.kestra.cli.commands.flows;

import io.kestra.cli.AbstractApiCommand;
import io.kestra.cli.AbstractValidateCommand;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.MediaType;
import io.micronaut.http.MutableHttpRequest;
import io.micronaut.http.client.exceptions.HttpClientResponseException;
import io.micronaut.http.client.netty.DefaultHttpClient;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;

import java.nio.file.Files;
import java.nio.file.Path;

@CommandLine.Command(
name = "update",
description = "Update a single flow",
mixinStandardHelpOptions = true
)
@Slf4j
public class FlowUpdateCommand extends AbstractApiCommand {
@CommandLine.Parameters(index = "0", description = "The file containing the flow")
public Path flowFile;

@CommandLine.Parameters(index = "1", description = "The namespace of the flow")
public String namespace;

@CommandLine.Parameters(index = "2", description = "The ID of the flow")
public String id;

@SuppressWarnings("deprecation")
@Override
public Integer call() throws Exception {
super.call();

checkFile();

String body = Files.readString(flowFile);

try(DefaultHttpClient client = client()) {
MutableHttpRequest<String> request = HttpRequest
.PUT(apiUri("/flows/" + namespace + "/" + id ), body).contentType(MediaType.APPLICATION_YAML);

client.toBlocking().retrieve(
this.requestOptions(request),
String.class
);

stdOut("Flow successfully updated !");
} catch (HttpClientResponseException e){
AbstractValidateCommand.handleHttpException(e, "flow");
return 1;
}

return 0;
}

protected void checkFile() {
if (!Files.isRegularFile(flowFile)) {
throw new IllegalArgumentException("The file '" + flowFile.toFile().getAbsolutePath() + "' is not a file");
}
}
}
@@ -1,95 +0,0 @@
package io.kestra.cli.commands.flows;

import io.kestra.cli.AbstractApiCommand;
import io.kestra.cli.AbstractValidateCommand;
import io.kestra.core.serializers.YamlParser;
import io.micronaut.core.type.Argument;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.MediaType;
import io.micronaut.http.MutableHttpRequest;
import io.micronaut.http.client.exceptions.HttpClientResponseException;
import io.micronaut.http.client.netty.DefaultHttpClient;
import jakarta.validation.ConstraintViolationException;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;

import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;

@CommandLine.Command(
name = "updates",
description = "Create or update flows from a folder, and optionally delete the ones not present",
mixinStandardHelpOptions = true
)
@Slf4j
public class FlowUpdatesCommand extends AbstractApiCommand {

@CommandLine.Parameters(index = "0", description = "The directory containing files")
public Path directory;

@CommandLine.Option(names = {"--delete"}, negatable = true, description = "Whether missing should be deleted")
public boolean delete = false;

@CommandLine.Option(names = {"--namespace"}, description = "The parent namespace of the flows, if not set, every namespace are allowed.")
public String namespace;

@SuppressWarnings("deprecation")
@Override
public Integer call() throws Exception {
super.call();

try (var files = Files.walk(directory)) {
List<String> flows = files
.filter(Files::isRegularFile)
.filter(YamlParser::isValidExtension)
.map(path -> {
try {
return IncludeHelperExpander.expand(Files.readString(path, Charset.defaultCharset()), path.getParent());
} catch (IOException e) {
throw new RuntimeException(e);
}
})
.toList();

String body = "";
if (flows.isEmpty()) {
stdOut("No flow found on '{}'", directory.toFile().getAbsolutePath());
} else {
body = String.join("\n---\n", flows);
}
try(DefaultHttpClient client = client()) {
String namespaceQuery = "";
if (namespace != null) {
namespaceQuery = "&namespace=" + namespace;
}
MutableHttpRequest<String> request = HttpRequest
.POST(apiUri("/flows/bulk") + "?allowNamespaceChild=true&delete=" + delete + namespaceQuery, body).contentType(MediaType.APPLICATION_YAML);

List<UpdateResult> updated = client.toBlocking().retrieve(
this.requestOptions(request),
Argument.listOf(UpdateResult.class)
);

stdOut(updated.size() + " flow(s) successfully updated !");
updated.forEach(flow -> stdOut("- " + flow.getNamespace() + "." + flow.getId()));
} catch (HttpClientResponseException e){
AbstractValidateCommand.handleHttpException(e, "flow");
return 1;
}
} catch (ConstraintViolationException e) {
AbstractValidateCommand.handleException(e, "flow");

return 1;
}

return 0;
}

@Override
protected boolean loadExternalPlugins() {
return false;
}
}
@@ -1,8 +1,9 @@
package io.kestra.cli.commands.flows;

import io.kestra.cli.AbstractValidateCommand;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.validations.ModelValidator;
import io.kestra.core.serializers.YamlFlowParser;
import io.kestra.core.services.FlowService;
import jakarta.inject.Inject;
import picocli.CommandLine;
@@ -12,9 +13,11 @@ import java.util.List;

@CommandLine.Command(
name = "validate",
description = "Validate a flow"
description = "validate a flow"
)
public class FlowValidateCommand extends AbstractValidateCommand {
@Inject
private YamlFlowParser yamlFlowParser;

@Inject
private ModelValidator modelValidator;
@@ -25,22 +28,20 @@ public class FlowValidateCommand extends AbstractValidateCommand {
@Override
public Integer call() throws Exception {
return this.call(
FlowWithSource.class,
Flow.class,
yamlFlowParser,
modelValidator,
(Object object) -> {
FlowWithSource flow = (FlowWithSource) object;
Flow flow = (Flow) object;
return flow.getNamespace() + " / " + flow.getId();
},
(Object object) -> {
FlowWithSource flow = (FlowWithSource) object;
Flow flow = (Flow) object;
List<String> warnings = new ArrayList<>();
warnings.addAll(flowService.deprecationPaths(flow).stream().map(deprecation -> deprecation + " is deprecated").toList());
warnings.addAll(flowService.warnings(flow, this.tenantId));
warnings.addAll(flowService.relocations(flow.generateSource()).stream().map(relocation -> relocation.from() + " is replaced by " + relocation.to()).toList());
warnings.addAll(flowService.warnings(flow));
return warnings;
},
(Object object) -> {
FlowWithSource flow = (FlowWithSource) object;
return flowService.relocations(flow.sourceOrGenerateIfNull()).stream().map(relocation -> relocation.from() + " is replaced by " + relocation.to()).toList();
}
);
}

@@ -9,7 +9,7 @@ import picocli.CommandLine;

@CommandLine.Command(
name = "namespace",
description = "Manage namespace flows",
description = "handle namespace flows",
mixinStandardHelpOptions = true,
subcommands = {
FlowNamespaceUpdateCommand.class,

@@ -2,18 +2,20 @@ package io.kestra.cli.commands.flows.namespaces;

import io.kestra.cli.AbstractValidateCommand;
import io.kestra.cli.commands.AbstractServiceNamespaceUpdateCommand;
import io.kestra.cli.commands.flows.FlowValidateCommand;
import io.kestra.cli.commands.flows.IncludeHelperExpander;
import io.kestra.core.serializers.YamlParser;
import io.kestra.core.serializers.YamlFlowParser;
import io.micronaut.core.type.Argument;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.MediaType;
import io.micronaut.http.MutableHttpRequest;
import io.micronaut.http.client.exceptions.HttpClientResponseException;
import io.micronaut.http.client.netty.DefaultHttpClient;
import jakarta.validation.ConstraintViolationException;
import jakarta.inject.Inject;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;

import jakarta.validation.ConstraintViolationException;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.file.Files;
@@ -21,14 +23,13 @@ import java.util.List;

@CommandLine.Command(
name = "update",
description = "Update flows in namespace",
description = "handle namespace flows",
mixinStandardHelpOptions = true
)
@Slf4j
public class FlowNamespaceUpdateCommand extends AbstractServiceNamespaceUpdateCommand {

@CommandLine.Option(names = {"--override-namespaces"}, negatable = true, description = "Replace namespace of all flows by the one provided")
public boolean override = false;
@Inject
public YamlFlowParser yamlFlowParser;

@SuppressWarnings("deprecation")
@Override
@@ -38,7 +39,7 @@ public class FlowNamespaceUpdateCommand extends AbstractServiceNamespaceUpdateCo
try (var files = Files.walk(directory)) {
List<String> flows = files
.filter(Files::isRegularFile)
.filter(YamlParser::isValidExtension)
.filter(YamlFlowParser::isValidExtension)
.map(path -> {
try {
return IncludeHelperExpander.expand(Files.readString(path, Charset.defaultCharset()), path.getParent());
@@ -54,9 +55,6 @@ public class FlowNamespaceUpdateCommand extends AbstractServiceNamespaceUpdateCo
} else {
body = String.join("\n---\n", flows);
}
if (override) {
body = body.replaceAll("(?m)^namespace:.+", "namespace: " + namespace);
}
try(DefaultHttpClient client = client()) {
MutableHttpRequest<String> request = HttpRequest
.POST(apiUri("/flows/") + namespace + "?delete=" + delete, body).contentType(MediaType.APPLICATION_YAML);

@@ -1,29 +0,0 @@
package io.kestra.cli.commands.migrations;

import io.kestra.cli.AbstractCommand;
import io.kestra.cli.App;
import io.micronaut.configuration.picocli.PicocliRunner;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;

@CommandLine.Command(
name = "migrate",
description = "handle migrations",
mixinStandardHelpOptions = true,
subcommands = {
TenantMigrationCommand.class,
}
)
@Slf4j
public class MigrationCommand extends AbstractCommand {
@SneakyThrows
@Override
public Integer call() throws Exception {
super.call();

PicocliRunner.call(App.class, "migrate", "--help");

return 0;
}
}
@@ -1,49 +0,0 @@
package io.kestra.cli.commands.migrations;

import io.kestra.cli.AbstractCommand;
import io.kestra.core.repositories.TenantMigrationInterface;
import io.micronaut.context.ApplicationContext;
import jakarta.inject.Inject;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
import picocli.CommandLine.Option;

@CommandLine.Command(
name = "default-tenant",
description = "migrate every elements from no tenant to the main tenant"
)
@Slf4j
public class TenantMigrationCommand extends AbstractCommand {
@Inject
private ApplicationContext applicationContext;

@Option(names = "--tenant-id", description = "tenant identifier")
String tenantId;

@Option(names = "--tenant-name", description = "tenant name")
String tenantName;

@Option(names = "--dry-run", description = "Preview only, do not update")
boolean dryRun;

@Override
public Integer call() throws Exception {
super.call();

if (dryRun) {
System.out.println("🧪 Dry-run mode enabled. No changes will be applied.");
}

TenantMigrationService migrationService = this.applicationContext.getBean(TenantMigrationService.class);
try {
migrationService.migrateTenant(tenantId, tenantName, dryRun);
System.out.println("✅ Tenant migration complete.");
} catch (Exception e) {
System.err.println("❌ Tenant migration failed: " + e.getMessage());
e.printStackTrace();
return 1;
}
return 0;
}

}
@@ -1,56 +0,0 @@
package io.kestra.cli.commands.migrations;

import static io.kestra.core.tenant.TenantService.MAIN_TENANT;

import com.github.javaparser.utils.Log;
import io.kestra.core.exceptions.KestraRuntimeException;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.queues.QueueException;
import io.kestra.core.queues.QueueFactoryInterface;
import io.kestra.core.queues.QueueInterface;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.repositories.TenantMigrationInterface;
import jakarta.inject.Inject;
import jakarta.inject.Named;
import jakarta.inject.Singleton;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;

@Singleton
@Slf4j
public class TenantMigrationService {

@Inject
private TenantMigrationInterface tenantMigrationInterface;

@Inject
private FlowRepositoryInterface flowRepository;

@Inject
@Named(QueueFactoryInterface.FLOW_NAMED)
private QueueInterface<FlowInterface> flowQueue;

public void migrateTenant(String tenantId, String tenantName, boolean dryRun) {
if (StringUtils.isNotBlank(tenantId) && !MAIN_TENANT.equals(tenantId)){
throw new KestraRuntimeException("Tenant configuration is an enterprise feature. It can only be main in OSS");
}

Log.info("🔁 Starting tenant migration...");
tenantMigrationInterface.migrateTenant(MAIN_TENANT, dryRun);
migrateQueue(dryRun);
}

protected void migrateQueue(boolean dryRun) {
if (!dryRun){
log.info("🔁 Starting restoring queue...");
flowRepository.findAllWithSourceForAllTenants().forEach(flow -> {
try {
flowQueue.emit(flow);
} catch (QueueException e) {
log.warn("Unable to send the flow {} to the queue", flow.uid(), e);
}
});
}
}

}
@@ -11,7 +11,7 @@ import picocli.CommandLine;

@CommandLine.Command(
name = "namespace",
description = "Manage namespaces",
description = "handle namespaces",
mixinStandardHelpOptions = true,
subcommands = {
NamespaceFilesCommand.class,

@@ -9,7 +9,7 @@ import picocli.CommandLine;

@CommandLine.Command(
name = "files",
description = "Manage namespace files",
description = "handle namespace files",
mixinStandardHelpOptions = true,
subcommands = {
NamespaceFilesUpdateCommand.class,

@@ -17,21 +17,21 @@ import java.util.List;

@CommandLine.Command(
name = "update",
description = "Update namespace files",
description = "update namespace files",
mixinStandardHelpOptions = true
)
@Slf4j
public class NamespaceFilesUpdateCommand extends AbstractApiCommand {
@CommandLine.Parameters(index = "0", description = "The namespace to update")
@CommandLine.Parameters(index = "0", description = "the namespace to update")
public String namespace;

@CommandLine.Parameters(index = "1", description = "The local directory containing files for current namespace")
@CommandLine.Parameters(index = "1", description = "the local directory containing files for current namespace")
public Path from;

@CommandLine.Parameters(index = "2", description = "The remote namespace path to upload files to", defaultValue = "/")
@CommandLine.Parameters(index = "2", description = "the remote namespace path to upload files to", defaultValue = "/")
public String to;

@CommandLine.Option(names = {"--delete"}, negatable = true, description = "Whether missing should be deleted")
@CommandLine.Option(names = {"--delete"}, negatable = true, description = "if missing should be deleted")
public boolean delete = false;

private static final String KESTRA_IGNORE_FILE = ".kestraignore";

@@ -9,7 +9,7 @@ import picocli.CommandLine;

@CommandLine.Command(
name = "kv",
description = "Manage KV Store",
description = "handle KV Store",
mixinStandardHelpOptions = true,
subcommands = {
KvUpdateCommand.class,

@@ -18,28 +18,28 @@ import java.time.Duration;

@CommandLine.Command(
name = "update",
description = "Update value for a KV Store key",
description = "update value for a KV Store key",
mixinStandardHelpOptions = true
)
@Slf4j
public class KvUpdateCommand extends AbstractApiCommand {

@CommandLine.Parameters(index = "0", description = "The namespace to update")
@CommandLine.Parameters(index = "0", description = "the namespace to update")
public String namespace;

@CommandLine.Parameters(index = "1", description = "The key to update")
@CommandLine.Parameters(index = "1", description = "the key to update")
public String key;

@CommandLine.Parameters(index = "2", description = "The value to assign to the key. If the value is an object, it must be in JSON format. If the value must be read from file, use -f parameter.")
@CommandLine.Parameters(index = "2", description = "the value to assign to the key. If the value is an object, it must be in JSON format. If the value must be read from file, use -f parameter.")
public String value;

@Option(names = {"-e", "--expiration"}, description = "The duration after which the key should expire.")
@Option(names = {"-e", "--expiration"}, description = "the duration after which the key should expire.")
public String expiration;

@Option(names = {"-t", "--type"}, description = "The type of the value. Optional and useful to override the deduced type (eg. numbers, booleans or JSON as full string). Valid values: ${COMPLETION-CANDIDATES}.")
@Option(names = {"-t", "--type"}, description = "the type of the value. Optional and useful to override the deduced type (eg. numbers, booleans or JSON as full string). Valid values: ${COMPLETION-CANDIDATES}.")
public Type type;

@Option(names = {"-f", "--file-value"}, description = "The file from which to read the value to set. If this is provided, it will take precedence over any specified value.")
@Option(names = {"-f", "--file-value"}, description = "the file from which to read the value to set. If this is provided, it will take precedence over any specified value.")
public Path fileValue;

@Override
@@ -1,37 +1,31 @@
|
||||
package io.kestra.cli.commands.plugins;
|
||||
|
||||
import io.kestra.cli.AbstractCommand;
|
||||
import io.kestra.cli.App;
|
||||
import io.micronaut.configuration.picocli.PicocliRunner;
|
||||
import lombok.SneakyThrows;
|
||||
import picocli.CommandLine.Command;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import io.kestra.cli.AbstractCommand;
|
||||
import io.kestra.cli.App;
|
||||
import picocli.CommandLine;
|
||||
|
||||
@Command(
|
||||
@CommandLine.Command(
|
||||
name = "plugins",
|
||||
description = "Manage plugins",
|
||||
description = "handle plugins",
|
||||
mixinStandardHelpOptions = true,
|
||||
subcommands = {
|
||||
PluginInstallCommand.class,
|
||||
PluginUninstallCommand.class,
|
||||
PluginListCommand.class,
|
||||
PluginDocCommand.class,
|
||||
PluginSearchCommand.class
|
||||
PluginDocCommand.class
|
||||
}
|
||||
)
|
||||
@Slf4j
|
||||
public class PluginCommand extends AbstractCommand {
|
||||
|
||||
@SneakyThrows
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
|
||||
PicocliRunner.call(App.class, "plugins", "--help");
|
||||
PicocliRunner.call(App.class, "plugins", "--help");
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean loadExternalPlugins() {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,11 +1,10 @@
|
||||
package io.kestra.cli.commands.plugins;
|
||||
|
||||
import com.google.common.io.Files;
|
||||
import com.google.common.base.Charsets;
|
||||
import io.kestra.cli.AbstractCommand;
|
||||
import io.kestra.core.docs.DocumentationGenerator;
|
||||
import io.kestra.core.plugins.PluginRegistry;
|
||||
import io.kestra.core.plugins.RegisteredPlugin;
|
||||
import io.kestra.core.serializers.JacksonMapper;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import jakarta.inject.Inject;
|
||||
import picocli.CommandLine;
|
||||
@@ -20,13 +19,13 @@ import java.util.List;
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "doc",
|
||||
description = "Generate documentation for all plugins currently installed"
|
||||
description = "write documentation for all plugins currently installed"
|
||||
)
|
||||
public class PluginDocCommand extends AbstractCommand {
|
||||
@Inject
|
||||
private ApplicationContext applicationContext;
|
||||
|
||||
@CommandLine.Parameters(index = "0", description = "Path to write documentation files")
|
||||
@CommandLine.Parameters(index = "0", description = "Path to write documentations files")
|
||||
private Path output = Paths.get(System.getProperty("user.dir"), "docs");
|
||||
|
||||
@CommandLine.Option(names = {"--core"}, description = "Also write core tasks docs files")
|
||||
@@ -35,78 +34,51 @@ public class PluginDocCommand extends AbstractCommand {
|
||||
@CommandLine.Option(names = {"--icons"}, description = "Also write icon for each task")
|
||||
private boolean icons = false;
|
||||
|
||||
@CommandLine.Option(names = {"--schema"}, description = "Also write JSON Schema for each task")
|
||||
private boolean schema = false;
|
||||
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
DocumentationGenerator documentationGenerator = applicationContext.getBean(DocumentationGenerator.class);
|
||||
|
||||
PluginRegistry registry = pluginRegistryProvider.get();
|
||||
List<RegisteredPlugin> plugins = core ? registry.plugins() : registry.externalPlugins();
|
||||
boolean hasFailures = false;
|
||||
|
||||
List<RegisteredPlugin> plugins = core ? pluginRegistry().plugins() : pluginRegistry().externalPlugins();
|
||||
for (RegisteredPlugin registeredPlugin : plugins) {
|
||||
try {
|
||||
documentationGenerator
|
||||
.generate(registeredPlugin)
|
||||
.forEach(s -> {
|
||||
File file = Paths.get(output.toAbsolutePath().toString(), s.getPath()).toFile();
|
||||
documentationGenerator
|
||||
.generate(registeredPlugin)
|
||||
.forEach(s -> {
|
||||
File file = Paths.get(output.toAbsolutePath().toString(), s.getPath()).toFile();
|
||||
|
||||
if (!file.getParentFile().exists()) {
|
||||
//noinspection ResultOfMethodCallIgnored
|
||||
file.getParentFile().mkdirs();
|
||||
}
|
||||
|
||||
try {
|
||||
Files
|
||||
.asCharSink(
|
||||
file,
|
||||
StandardCharsets.UTF_8
|
||||
).write(s.getBody());
|
||||
stdOut("Generate doc in: {0}", file);
|
||||
|
||||
if (s.getIcon() != null && this.icons) {
|
||||
File iconFile = new File(
|
||||
file.getParent(),
|
||||
file.getName().substring(0, file.getName().lastIndexOf(".")) + ".svg"
|
||||
);
|
||||
|
||||
Files
|
||||
.asByteSink(iconFile)
|
||||
.write(Base64.getDecoder().decode(s.getIcon().getBytes(StandardCharsets.UTF_8)));
|
||||
stdOut("Generate icon in: {0}", iconFile);
|
||||
}
|
||||
|
||||
if (this.schema && s.getSchema() != null) {
|
||||
File jsonSchemaFile = new File(
|
||||
file.getParent(),
|
||||
file.getName().substring(0, file.getName().lastIndexOf(".")) + ".json"
|
||||
);
|
||||
|
||||
Files
|
||||
.asByteSink(jsonSchemaFile)
|
||||
.write(JacksonMapper.ofJson().writeValueAsBytes(s.getSchema()));
|
||||
stdOut("Generate json schema in: {0}", jsonSchemaFile);
|
||||
}
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
if (!file.getParentFile().exists()) {
|
||||
//noinspection ResultOfMethodCallIgnored
|
||||
file.getParentFile().mkdirs();
|
||||
}
|
||||
);
|
||||
} catch (Error e) {
|
||||
stdErr("Failure to generate documentation for plugin {0}: {1}", registeredPlugin.name(), e);
|
||||
hasFailures = true;
|
||||
}
|
||||
|
||||
try {
|
||||
com.google.common.io.Files
|
||||
.asCharSink(
|
||||
file,
|
||||
Charsets.UTF_8
|
||||
).write(s.getBody());
|
||||
stdOut("Generate doc in: {0}", file);
|
||||
|
||||
if (s.getIcon() != null && this.icons) {
|
||||
File iconFile = new File(
|
||||
file.getParent(),
|
||||
file.getName().substring(0, file.getName().lastIndexOf(".")) + ".svg"
|
||||
);
|
||||
|
||||
com.google.common.io.Files
|
||||
.asByteSink(iconFile)
|
||||
.write(Base64.getDecoder().decode(s.getIcon().getBytes(StandardCharsets.UTF_8)));
|
||||
stdOut("Generate icon in: {0}", iconFile);
|
||||
}
|
||||
|
||||
stdOut("Generate doc in: {0}", file);
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
return hasFailures ? 1 : 0;
|
||||
}
|
||||
|
||||
/** {@inheritDoc} **/
|
||||
@Override
|
||||
protected boolean isPluginManagerEnabled() {
|
||||
return false;
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,133 +1,100 @@
|
||||
package io.kestra.cli.commands.plugins;
|
||||
|
||||
import io.kestra.core.contexts.MavenPluginRepositoryConfig;
|
||||
import io.kestra.core.exceptions.KestraRuntimeException;
|
||||
import io.kestra.core.plugins.LocalPluginManager;
|
||||
import io.kestra.core.plugins.MavenPluginDownloader;
|
||||
import io.kestra.core.plugins.PluginArtifact;
|
||||
import io.kestra.core.plugins.PluginCatalogService;
|
||||
import io.kestra.core.plugins.PluginManager;
|
||||
import io.micronaut.http.client.HttpClient;
|
||||
import io.micronaut.http.client.annotation.Client;
|
||||
import io.micronaut.http.uri.UriBuilder;
|
||||
import org.apache.commons.io.FilenameUtils;
|
||||
import io.kestra.cli.AbstractCommand;
|
||||
import io.kestra.cli.plugins.PluginDownloader;
|
||||
import io.kestra.cli.plugins.RepositoryConfig;
|
||||
import io.kestra.core.utils.IdUtils;
|
||||
import jakarta.inject.Provider;
|
||||
import org.apache.http.client.utils.URIBuilder;
|
||||
import picocli.CommandLine;
|
||||
|
||||
import java.net.URI;
|
||||
import java.net.URL;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Paths;
|
||||
import java.nio.file.StandardCopyOption;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
||||
import jakarta.inject.Inject;
|
||||
import picocli.CommandLine.Command;
|
||||
import picocli.CommandLine.Parameters;
|
||||
import picocli.CommandLine.Option;
|
||||
import picocli.CommandLine.Spec;
|
||||
|
||||
@Command(
|
||||
import static io.kestra.core.utils.Rethrow.throwConsumer;
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "install",
|
||||
description = "Install plugins"
|
||||
description = "install a plugin"
|
||||
)
|
||||
public class PluginInstallCommand extends AbstractCommand {
|
||||
|
||||
@Option(names = {"--locally"}, description = "Specifies if plugins must be installed locally. If set to false the installation depends on your Kestra configuration.")
|
||||
boolean locally = true;
|
||||
|
||||
@Option(names = {"--all"}, description = "Install all available plugins")
|
||||
boolean all = false;
|
||||
|
||||
@Parameters(index = "0..*", description = "Plugins to install. Represented as Maven artifact coordinates (i.e., <groupId>:<artifactId>:(<version>|LATEST)")
|
||||
@CommandLine.Parameters(index = "0..*", description = "the plugins to install")
|
||||
List<String> dependencies = new ArrayList<>();
|
||||
|
||||
@Option(names = {"--repositories"}, description = "URL to additional Maven repositories")
|
||||
@CommandLine.Option(names = {"--repositories"}, description = "url to additional maven repositories")
|
||||
private URI[] repositories;
|
||||
|
||||
@Spec
|
||||
@CommandLine.Spec
|
||||
CommandLine.Model.CommandSpec spec;
|
||||
|
||||
@Inject
|
||||
Provider<MavenPluginDownloader> mavenPluginRepositoryProvider;
|
||||
|
||||
@Inject
|
||||
Provider<PluginCatalogService> pluginCatalogService;
|
||||
private PluginDownloader pluginDownloader;
|
||||
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
|
||||
if (this.locally && this.pluginsPath == null) {
|
||||
if (this.pluginsPath == null) {
|
||||
throw new CommandLine.ParameterException(this.spec.commandLine(), "Missing required options '--plugins' " +
|
||||
"or environment variable 'KESTRA_PLUGINS_PATH"
|
||||
);
|
||||
}
|
||||
|
||||
List<MavenPluginRepositoryConfig> repositoryConfigs = List.of();
|
||||
if (!pluginsPath.toFile().exists()) {
|
||||
if (!pluginsPath.toFile().mkdir()) {
|
||||
throw new RuntimeException("Cannot create directory: " + pluginsPath.toFile().getAbsolutePath());
|
||||
}
|
||||
}
|
||||
|
||||
if (repositories != null) {
|
||||
repositoryConfigs = Arrays.stream(repositories)
|
||||
.map(uri -> {
|
||||
MavenPluginRepositoryConfig.MavenPluginRepositoryConfigBuilder builder = MavenPluginRepositoryConfig
|
||||
.builder()
|
||||
Arrays.stream(repositories)
|
||||
.forEach(throwConsumer(s -> {
|
||||
URIBuilder uriBuilder = new URIBuilder(s);
|
||||
|
||||
RepositoryConfig.RepositoryConfigBuilder builder = RepositoryConfig.builder()
|
||||
.id(IdUtils.create());
|
||||
|
||||
String userInfo = uri.getUserInfo();
|
||||
if (userInfo != null) {
|
||||
String[] userInfoParts = userInfo.split(":");
|
||||
builder = builder.basicAuth(new MavenPluginRepositoryConfig.BasicAuth(
|
||||
userInfoParts[0],
|
||||
userInfoParts[1]
|
||||
if (uriBuilder.getUserInfo() != null) {
|
||||
int index = uriBuilder.getUserInfo().indexOf(":");
|
||||
|
||||
builder.basicAuth(new RepositoryConfig.BasicAuth(
|
||||
uriBuilder.getUserInfo().substring(0, index),
|
||||
uriBuilder.getUserInfo().substring(index + 1)
|
||||
));
|
||||
|
||||
uriBuilder.setUserInfo(null);
|
||||
}
|
||||
builder.url(UriBuilder.of(uri).userInfo(null).build().toString());
|
||||
return builder.build();
|
||||
}).toList();
|
||||
|
||||
builder.url(uriBuilder.build().toString());
|
||||
|
||||
pluginDownloader.addRepository(builder.build());
|
||||
}));
|
||||
}
|
||||
|
||||
if (all) {
|
||||
PluginCatalogService service = pluginCatalogService.get();
|
||||
dependencies = service.get().stream().map(Objects::toString).toList();
|
||||
List<URL> resolveUrl = pluginDownloader.resolve(dependencies);
|
||||
stdOut("Resolved Plugin(s) with {0}", resolveUrl);
|
||||
|
||||
for (URL url: resolveUrl) {
|
||||
Files.copy(
|
||||
Paths.get(url.toURI()),
|
||||
Paths.get(pluginsPath.toString(), FilenameUtils.getName(url.toString())),
|
||||
StandardCopyOption.REPLACE_EXISTING
|
||||
);
|
||||
}
|
||||
|
||||
if (dependencies.isEmpty()) {
|
||||
stdErr("Error: No plugin to install.");
|
||||
return CommandLine.ExitCode.OK;
|
||||
}
|
||||
stdOut("Successfully installed plugins {0} into {1}", dependencies, pluginsPath);
|
||||
|
||||
final List<PluginArtifact> pluginArtifacts;
|
||||
try {
|
||||
pluginArtifacts = dependencies.stream().map(PluginArtifact::fromCoordinates).toList();
|
||||
} catch (IllegalArgumentException e) {
|
||||
stdErr(e.getMessage());
|
||||
return CommandLine.ExitCode.USAGE;
|
||||
}
|
||||
|
||||
try (final PluginManager pluginManager = getPluginManager()) {
|
||||
|
||||
List<PluginArtifact> installed;
|
||||
if (all) {
|
||||
installed = new ArrayList<>(pluginArtifacts.size());
|
||||
for (PluginArtifact pluginArtifact : pluginArtifacts) {
|
||||
try {
|
||||
installed.add(pluginManager.install(pluginArtifact, repositoryConfigs, false, pluginsPath));
|
||||
} catch (KestraRuntimeException e) {
|
||||
String cause = e.getCause() != null ? e.getCause().getMessage() : e.getMessage();
|
||||
stdErr("Failed to install plugin {0}. Cause: {1}", pluginArtifact, cause);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
installed = pluginManager.install(pluginArtifacts, repositoryConfigs, false, pluginsPath);
|
||||
}
|
||||
|
||||
List<URI> uris = installed.stream().map(PluginArtifact::uri).toList();
|
||||
stdOut("Successfully installed plugins {0} into {1}", dependencies, uris);
|
||||
return CommandLine.ExitCode.OK;
|
||||
}
|
||||
}
|
||||
|
||||
private PluginManager getPluginManager() {
|
||||
return locally ? new LocalPluginManager(mavenPluginRepositoryProvider.get()) : this.pluginManagerProvider.get();
|
||||
return 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
||||
@@ -1,31 +1,22 @@
|
||||
package io.kestra.cli.commands.plugins;
|
||||
|
||||
import io.kestra.cli.AbstractCommand;
|
||||
import io.kestra.core.plugins.PluginRegistry;
|
||||
import io.kestra.core.plugins.RegisteredPlugin;
|
||||
import jakarta.inject.Inject;
|
||||
import jakarta.inject.Provider;
|
||||
import picocli.CommandLine;
|
||||
import picocli.CommandLine.Command;
|
||||
import picocli.CommandLine.Option;
|
||||
import picocli.CommandLine.Spec;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
@Command(
|
||||
@CommandLine.Command(
|
||||
name = "list",
|
||||
description = "List all plugins already installed"
|
||||
description = "list all plugins already installed"
|
||||
)
|
||||
public class PluginListCommand extends AbstractCommand {
|
||||
@Spec
|
||||
@CommandLine.Spec
|
||||
CommandLine.Model.CommandSpec spec;
|
||||
|
||||
@Option(names = {"--core"}, description = "Also write core tasks plugins")
|
||||
@CommandLine.Option(names = {"--core"}, description = "Also write core tasks plugins")
|
||||
private boolean core = false;
|
||||
|
||||
@Inject
|
||||
private PluginRegistry registry;
|
||||
|
||||
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
@@ -35,9 +26,8 @@ public class PluginListCommand extends AbstractCommand {
|
||||
"or environment variable 'KESTRA_PLUGINS_PATH"
|
||||
);
|
||||
}
|
||||
|
||||
List<RegisteredPlugin> plugins = core ? registry.plugins() : registry.externalPlugins();
|
||||
|
||||
|
||||
List<RegisteredPlugin> plugins = core ? pluginRegistry().plugins() : pluginRegistry().externalPlugins();
|
||||
plugins.forEach(registeredPlugin -> stdOut(registeredPlugin.toString()));
|
||||
|
||||
return 0;
|
||||
|
||||
@@ -1,149 +0,0 @@
|
||||
package io.kestra.cli.commands.plugins;
|
||||
|
||||
import io.kestra.cli.AbstractCommand;
|
||||
import io.micronaut.core.type.Argument;
|
||||
import io.micronaut.http.HttpRequest;
|
||||
import io.micronaut.http.client.HttpClient;
|
||||
import io.micronaut.http.client.annotation.Client;
|
||||
import jakarta.inject.Inject;
|
||||
import picocli.CommandLine.Command;
|
||||
import picocli.CommandLine.Parameters;
|
||||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
@Command(
|
||||
name = "search",
|
||||
description = "Search for available Kestra plugins"
|
||||
)
|
||||
public class PluginSearchCommand extends AbstractCommand {
|
||||
@Inject
|
||||
@Client("api")
|
||||
private HttpClient httpClient;
|
||||
|
||||
private static final ObjectMapper MAPPER = new ObjectMapper();
|
||||
private static final char SPACE = ' ';
|
||||
|
||||
@Parameters(index = "0", description = "Search term (optional)", defaultValue = "")
|
||||
private String searchTerm;
|
||||
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
|
||||
try {
|
||||
JsonNode root = fetchPlugins();
|
||||
List<PluginInfo> plugins = findPlugins(root);
|
||||
printResults(plugins);
|
||||
return 0;
|
||||
} catch (Exception e) {
|
||||
stdOut("Error processing plugins: {0}", e.getMessage());
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
|
||||
private JsonNode fetchPlugins() throws Exception {
|
||||
String response = httpClient.toBlocking()
|
||||
.retrieve(
|
||||
HttpRequest.GET("/v1/plugins")
|
||||
.header("Accept", "application/json")
|
||||
);
|
||||
return MAPPER.readTree(response);
|
||||
}
|
||||
|
||||
private List<PluginInfo> findPlugins(JsonNode root) {
|
||||
String searchTermLower = searchTerm.toLowerCase();
|
||||
List<PluginInfo> plugins = new ArrayList<>();
|
||||
|
||||
for (JsonNode plugin : root) {
|
||||
if (matchesSearch(plugin, searchTermLower)) {
|
||||
plugins.add(new PluginInfo(
|
||||
plugin.path("name").asText(),
|
||||
plugin.path("title").asText(),
|
||||
plugin.path("group").asText(),
|
||||
plugin.path("version").asText("")
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
plugins.sort((p1, p2) -> p1.name.compareToIgnoreCase(p2.name));
|
||||
return plugins;
|
||||
}
|
||||
|
||||
private boolean matchesSearch(JsonNode plugin, String term) {
|
||||
if (term.isEmpty()) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return plugin.path("name").asText().toLowerCase().contains(term) ||
|
||||
plugin.path("title").asText().toLowerCase().contains(term) ||
|
||||
plugin.path("group").asText().toLowerCase().contains(term);
|
||||
}
|
||||
|
||||
private void printResults(List<PluginInfo> plugins) {
|
||||
if (plugins.isEmpty()) {
|
||||
stdOut("No plugins found{0}",
|
||||
searchTerm.isEmpty() ? "" : " matching '" + searchTerm + "'");
|
||||
return;
|
||||
}
|
||||
|
||||
stdOut("\nFound {0} plugins{1}",
|
||||
plugins.size(),
|
||||
searchTerm.isEmpty() ? "" : " matching '" + searchTerm + "'"
|
||||
);
|
||||
|
||||
printPluginsTable(plugins);
|
||||
}
|
||||
|
||||
private void printPluginsTable(List<PluginInfo> plugins) {
|
||||
int maxName = 4, maxTitle = 5, maxGroup = 5;
|
||||
for (PluginInfo plugin : plugins) {
|
||||
maxName = Math.max(maxName, plugin.name.length());
|
||||
maxTitle = Math.max(maxTitle, plugin.title.length());
|
||||
maxGroup = Math.max(maxGroup, plugin.group.length());
|
||||
}
|
||||
|
||||
StringBuilder namePad = new StringBuilder(maxName);
|
||||
StringBuilder titlePad = new StringBuilder(maxTitle);
|
||||
StringBuilder groupPad = new StringBuilder(maxGroup);
|
||||
|
||||
stdOut("");
|
||||
printRow(namePad, titlePad, groupPad, "NAME", "TITLE", "GROUP", "VERSION",
|
||||
maxName, maxTitle, maxGroup);
|
||||
|
||||
for (PluginInfo plugin : plugins) {
|
||||
printRow(namePad, titlePad, groupPad, plugin.name, plugin.title, plugin.group, plugin.version,
|
||||
maxName, maxTitle, maxGroup);
|
||||
}
|
||||
stdOut("");
|
||||
}
|
||||
|
||||
private void printRow(StringBuilder namePad, StringBuilder titlePad, StringBuilder groupPad,
|
||||
String name, String title, String group, String version,
|
||||
int maxName, int maxTitle, int maxGroup) {
|
||||
stdOut("{0} {1} {2} {3}",
|
||||
pad(namePad, name, maxName),
|
||||
pad(titlePad, title, maxTitle),
|
||||
pad(groupPad, group, maxGroup),
|
||||
version
|
||||
);
|
||||
}
|
||||
|
||||
private String pad(StringBuilder sb, String str, int length) {
|
||||
sb.setLength(0);
|
||||
sb.append(str);
|
||||
while (sb.length() < length) {
|
||||
sb.append(SPACE);
|
||||
}
|
||||
return sb.toString();
|
||||
}
|
||||
|
||||
private record PluginInfo(String name, String title, String group, String version) {}
|
||||
|
||||
@Override
|
||||
protected boolean loadExternalPlugins() {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
@@ -1,69 +0,0 @@
|
||||
package io.kestra.cli.commands.plugins;
|
||||
|
||||
import io.kestra.cli.AbstractCommand;
|
||||
import io.kestra.core.plugins.LocalPluginManager;
|
||||
import io.kestra.core.plugins.MavenPluginDownloader;
|
||||
import io.kestra.core.plugins.PluginArtifact;
|
||||
import io.kestra.core.plugins.PluginManager;
|
||||
import jakarta.inject.Inject;
|
||||
import jakarta.inject.Provider;
|
||||
import picocli.CommandLine;
|
||||
import picocli.CommandLine.Parameters;
|
||||
import picocli.CommandLine.Spec;
|
||||
|
||||
import java.net.URI;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "uninstall",
|
||||
description = "Uninstall plugins"
|
||||
)
|
||||
public class PluginUninstallCommand extends AbstractCommand {
|
||||
@Parameters(index = "0..*", description = "The plugins to uninstall. Represented as Maven artifact coordinates (i.e., <groupId>:<artifactId>:(<version>|LATEST)")
|
||||
List<String> dependencies = new ArrayList<>();
|
||||
|
||||
@Spec
|
||||
CommandLine.Model.CommandSpec spec;
|
||||
|
||||
@Inject
|
||||
Provider<MavenPluginDownloader> mavenPluginRepositoryProvider;
|
||||
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
|
||||
List<PluginArtifact> pluginArtifacts;
|
||||
try {
|
||||
pluginArtifacts = dependencies.stream().map(PluginArtifact::fromCoordinates).toList();
|
||||
} catch (IllegalArgumentException e) {
|
||||
stdErr(e.getMessage());
|
||||
return CommandLine.ExitCode.USAGE;
|
||||
}
|
||||
|
||||
final PluginManager pluginManager;
|
||||
|
||||
// If a PLUGIN_PATH is provided, then use the LocalPluginManager
|
||||
if (pluginsPath != null) {
|
||||
pluginManager = new LocalPluginManager(mavenPluginRepositoryProvider.get());
|
||||
} else {
|
||||
// Otherwise, we delegate to the configured plugin-manager.
|
||||
pluginManager = this.pluginManagerProvider.get();
|
||||
}
|
||||
|
||||
List<PluginArtifact> uninstalled = pluginManager.uninstall(
|
||||
pluginArtifacts,
|
||||
false,
|
||||
pluginsPath
|
||||
);
|
||||
|
||||
List<URI> uris = uninstalled.stream().map(PluginArtifact::uri).toList();
|
||||
stdOut("Successfully uninstalled plugins {0} from {1}", dependencies, uris);
|
||||
return CommandLine.ExitCode.OK;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean loadExternalPlugins() {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
@@ -1,20 +1,12 @@
|
||||
package io.kestra.cli.commands.servers;
|
||||
|
||||
import io.kestra.cli.AbstractCommand;
|
||||
import io.kestra.core.contexts.KestraContext;
|
||||
import jakarta.annotation.PostConstruct;
|
||||
import picocli.CommandLine;
|
||||
|
||||
abstract public class AbstractServerCommand extends AbstractCommand implements ServerCommandInterface {
|
||||
@CommandLine.Option(names = {"--port"}, description = "The port to bind")
|
||||
@CommandLine.Option(names = {"--port"}, description = "the port to bind")
|
||||
Integer serverPort;
|
||||
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
this.shutdownHook(true, () -> KestraContext.getContext().shutdown());
|
||||
return super.call();
|
||||
}
|
||||
|
||||
protected static int defaultWorkerThread() {
|
||||
return Runtime.getRuntime().availableProcessors() * 4;
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
package io.kestra.cli.commands.servers;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import io.kestra.core.contexts.KestraContext;
|
||||
import io.kestra.core.models.ServerType;
|
||||
import io.kestra.core.runners.ExecutorInterface;
|
||||
import io.kestra.core.services.SkipExecutionService;
|
||||
@@ -8,6 +9,7 @@ import io.kestra.core.services.StartExecutorService;
|
||||
import io.kestra.core.utils.Await;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import jakarta.inject.Inject;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import picocli.CommandLine;
|
||||
|
||||
import java.util.Collections;
|
||||
@@ -16,8 +18,9 @@ import java.util.Map;
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "executor",
|
||||
description = "Start the Kestra executor"
|
||||
description = "start an executor"
|
||||
)
|
||||
@Slf4j
|
||||
public class ExecutorCommand extends AbstractServerCommand {
|
||||
@Inject
|
||||
private ApplicationContext applicationContext;
|
||||
@@ -28,22 +31,22 @@ public class ExecutorCommand extends AbstractServerCommand {
|
||||
@Inject
|
||||
private StartExecutorService startExecutorService;
|
||||
|
||||
@CommandLine.Option(names = {"--skip-executions"}, split=",", description = "The list of execution identifiers to skip, separated by a coma; for troubleshooting purpose only")
|
||||
@CommandLine.Option(names = {"--skip-executions"}, split=",", description = "a list of execution identifiers to skip, separated by a coma; for troubleshooting purpose only")
|
||||
private List<String> skipExecutions = Collections.emptyList();
|
||||
|
||||
@CommandLine.Option(names = {"--skip-flows"}, split=",", description = "The list of flow identifiers (tenant|namespace|flowId) to skip, separated by a coma; for troubleshooting purpose only")
|
||||
@CommandLine.Option(names = {"--skip-flows"}, split=",", description = "a list of flow identifiers (tenant|namespace|flowId) to skip, separated by a coma; for troubleshooting purpose only")
|
||||
private List<String> skipFlows = Collections.emptyList();
|
||||
|
||||
@CommandLine.Option(names = {"--skip-namespaces"}, split=",", description = "The list of namespace identifiers (tenant|namespace) to skip, separated by a coma; for troubleshooting purpose only")
|
||||
@CommandLine.Option(names = {"--skip-namespaces"}, split=",", description = "a list of namespace identifiers (tenant|namespace) to skip, separated by a coma; for troubleshooting purpose only")
|
||||
private List<String> skipNamespaces = Collections.emptyList();
|
||||
|
||||
@CommandLine.Option(names = {"--skip-tenants"}, split=",", description = "The list of tenants to skip, separated by a coma; for troubleshooting purpose only")
|
||||
@CommandLine.Option(names = {"--skip-tenants"}, split=",", description = "a list of tenants to skip, separated by a coma; for troubleshooting purpose only")
|
||||
private List<String> skipTenants = Collections.emptyList();
|
||||
|
||||
@CommandLine.Option(names = {"--start-executors"}, split=",", description = "The list of Kafka Stream executors to start, separated by a command. Use it only with the Kafka queue, for debugging purpose.")
|
||||
@CommandLine.Option(names = {"--start-executors"}, split=",", description = "a list of Kafka Stream executors to start, separated by a command. Use it only with the Kafka queue, for debugging purpose.")
|
||||
private List<String> startExecutors = Collections.emptyList();
|
||||
|
||||
@CommandLine.Option(names = {"--not-start-executors"}, split=",", description = "The list of Kafka Stream executors to not start, separated by a command. Use it only with the Kafka queue, for debugging purpose.")
|
||||
@CommandLine.Option(names = {"--not-start-executors"}, split=",", description = "a list of Kafka Stream executors to not start, separated by a command. Use it only with the Kafka queue, for debugging purpose.")
|
||||
private List<String> notStartExecutors = Collections.emptyList();
|
||||
|
||||
@SuppressWarnings("unused")
|
||||
@@ -63,10 +66,13 @@ public class ExecutorCommand extends AbstractServerCommand {
|
||||
this.startExecutorService.applyOptions(startExecutors, notStartExecutors);
|
||||
|
||||
super.call();
|
||||
this.shutdownHook(() -> KestraContext.getContext().shutdown());
|
||||
|
||||
ExecutorInterface executorService = applicationContext.getBean(ExecutorInterface.class);
|
||||
executorService.run();
|
||||
|
||||
log.info("Executor started");
|
||||
|
||||
Await.until(() -> !this.applicationContext.isRunning());
|
||||
|
||||
return 0;
|
||||
|
||||
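The --skip-* options above rely on picocli's split="," to expand a single comma-separated argument into a List<String>. A self-contained sketch of that mechanism; the command name and sample values are made up and are not the Kestra command:

import picocli.CommandLine;
import picocli.CommandLine.Command;
import picocli.CommandLine.Option;

import java.util.Collections;
import java.util.List;
import java.util.concurrent.Callable;

@Command(name = "demo")
class SkipOptionsDemo implements Callable<Integer> {
    // split="," turns "exec1,exec2" into ["exec1", "exec2"]
    @Option(names = "--skip-executions", split = ",", description = "execution ids to skip")
    List<String> skipExecutions = Collections.emptyList();

    @Override
    public Integer call() {
        System.out.println("Skipping executions: " + skipExecutions);
        return 0;
    }

    public static void main(String[] args) {
        new CommandLine(new SkipOptionsDemo()).execute("--skip-executions", "exec1,exec2");
    }
}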
@@ -1,19 +1,22 @@
|
||||
package io.kestra.cli.commands.servers;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import io.kestra.core.contexts.KestraContext;
|
||||
import io.kestra.core.models.ServerType;
|
||||
import io.kestra.core.runners.IndexerInterface;
|
||||
import io.kestra.core.utils.Await;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import jakarta.inject.Inject;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import picocli.CommandLine;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "indexer",
|
||||
description = "Start the Kestra indexer"
|
||||
description = "start an indexer"
|
||||
)
|
||||
@Slf4j
|
||||
public class IndexerCommand extends AbstractServerCommand {
|
||||
@Inject
|
||||
private ApplicationContext applicationContext;
|
||||
@@ -28,10 +31,13 @@ public class IndexerCommand extends AbstractServerCommand {
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
this.shutdownHook(() -> KestraContext.getContext().shutdown());
|
||||
|
||||
IndexerInterface indexer = applicationContext.getBean(IndexerInterface.class);
|
||||
indexer.run();
|
||||
|
||||
log.info("Indexer started");
|
||||
|
||||
Await.until(() -> !this.applicationContext.isRunning());
|
||||
|
||||
return 0;
|
||||
|
||||
@@ -12,7 +12,7 @@ import java.util.Map;
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "local",
|
||||
description = "Start the local development server"
|
||||
description = "start a local server"
|
||||
)
|
||||
public class LocalCommand extends StandAloneCommand {
|
||||
// @FIXME: Keep it for bug in micronaut that need to have inject on top level command to inject on abstract classe
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
package io.kestra.cli.commands.servers;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import io.kestra.core.contexts.KestraContext;
|
||||
import io.kestra.core.models.ServerType;
|
||||
import io.kestra.core.schedulers.AbstractScheduler;
|
||||
import io.kestra.core.utils.Await;
|
||||
@@ -13,7 +14,7 @@ import java.util.Map;
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "scheduler",
|
||||
description = "Start the Kestra scheduler"
|
||||
description = "start an scheduler"
|
||||
)
|
||||
@Slf4j
|
||||
public class SchedulerCommand extends AbstractServerCommand {
|
||||
@@ -30,10 +31,12 @@ public class SchedulerCommand extends AbstractServerCommand {
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
this.shutdownHook(() -> KestraContext.getContext().shutdown());
|
||||
|
||||
AbstractScheduler scheduler = applicationContext.getBean(AbstractScheduler.class);
|
||||
scheduler.run();
|
||||
|
||||
log.info("Scheduler started");
|
||||
Await.until(() -> !this.applicationContext.isRunning());
|
||||
|
||||
return 0;
|
||||
|
||||
@@ -9,7 +9,7 @@ import picocli.CommandLine;
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "server",
|
||||
description = "Manage servers",
|
||||
description = "handle servers",
|
||||
mixinStandardHelpOptions = true,
|
||||
subcommands = {
|
||||
ExecutorCommand.class,
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
package io.kestra.cli.commands.servers;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import io.kestra.cli.services.FileChangedEventListener;
|
||||
import io.kestra.core.contexts.KestraContext;
|
||||
import io.kestra.core.models.ServerType;
|
||||
import io.kestra.core.repositories.LocalFlowRepositoryLoader;
|
||||
@@ -10,8 +9,8 @@ import io.kestra.core.services.SkipExecutionService;
|
||||
import io.kestra.core.services.StartExecutorService;
|
||||
import io.kestra.core.utils.Await;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import jakarta.annotation.Nullable;
|
||||
import jakarta.inject.Inject;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import picocli.CommandLine;
|
||||
|
||||
import java.io.File;
|
||||
@@ -22,8 +21,9 @@ import java.util.Map;
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "standalone",
|
||||
description = "Start the standalone all-in-one server"
|
||||
description = "start a standalone server"
|
||||
)
|
||||
@Slf4j
|
||||
public class StandAloneCommand extends AbstractServerCommand {
|
||||
@CommandLine.Spec
|
||||
CommandLine.Model.CommandSpec spec;
|
||||
@@ -37,14 +37,10 @@ public class StandAloneCommand extends AbstractServerCommand {
|
||||
@Inject
|
||||
private StartExecutorService startExecutorService;
|
||||
|
||||
@Inject
|
||||
@Nullable
|
||||
private FileChangedEventListener fileWatcher;
|
||||
|
||||
@CommandLine.Option(names = {"-f", "--flow-path"}, description = "the flow path containing flow to inject at startup (when running with a memory flow repository)")
|
||||
private File flowPath;
|
||||
|
||||
@CommandLine.Option(names = {"--worker-thread"}, description = "the number of worker threads, defaults to four times the number of available processors. Set it to 0 to avoid starting a worker.")
|
||||
@CommandLine.Option(names = {"--worker-thread"}, description = "the number of worker threads, defaults to two times the number of available processors. Set it to 0 to avoid starting a worker.")
|
||||
private int workerThread = defaultWorkerThread();
|
||||
|
||||
@CommandLine.Option(names = {"--skip-executions"}, split=",", description = "a list of execution identifiers to skip, separated by a coma; for troubleshooting purpose only")
|
||||
@@ -68,9 +64,6 @@ public class StandAloneCommand extends AbstractServerCommand {
|
||||
@CommandLine.Option(names = {"--not-start-executors"}, split=",", description = "a list of Kafka Stream executors to not start, separated by a command. Use it only with the Kafka queue, for debugging purpose.")
|
||||
private List<String> notStartExecutors = Collections.emptyList();
|
||||
|
||||
@CommandLine.Option(names = {"--no-indexer"}, description = "Flag to disable starting an embedded indexer.")
|
||||
boolean indexerDisabled = false;
|
||||
|
||||
@Override
|
||||
public boolean isFlowAutoLoadEnabled() {
|
||||
return !tutorialsDisabled;
|
||||
@@ -89,16 +82,16 @@ public class StandAloneCommand extends AbstractServerCommand {
|
||||
this.skipExecutionService.setSkipFlows(skipFlows);
|
||||
this.skipExecutionService.setSkipNamespaces(skipNamespaces);
|
||||
this.skipExecutionService.setSkipTenants(skipTenants);
|
||||
|
||||
this.startExecutorService.applyOptions(startExecutors, notStartExecutors);
|
||||
|
||||
KestraContext.getContext().injectWorkerConfigs(workerThread, null);
|
||||
|
||||
super.call();
|
||||
this.shutdownHook(() -> KestraContext.getContext().shutdown());
|
||||
|
||||
if (flowPath != null) {
|
||||
try {
|
||||
LocalFlowRepositoryLoader localFlowRepositoryLoader = applicationContext.getBean(LocalFlowRepositoryLoader.class);
|
||||
localFlowRepositoryLoader.load(null, this.flowPath);
|
||||
localFlowRepositoryLoader.load(this.flowPath);
|
||||
} catch (IOException e) {
|
||||
throw new CommandLine.ParameterException(this.spec.commandLine(), "Invalid flow path", e);
|
||||
}
|
||||
@@ -112,16 +105,8 @@ public class StandAloneCommand extends AbstractServerCommand {
|
||||
standAloneRunner.setWorkerThread(this.workerThread);
|
||||
}
|
||||
|
||||
if (this.indexerDisabled) {
|
||||
standAloneRunner.setIndexerEnabled(false);
|
||||
}
|
||||
|
||||
standAloneRunner.run();
|
||||
|
||||
if (fileWatcher != null) {
|
||||
fileWatcher.startListeningFromConfig();
|
||||
}
|
||||
|
||||
Await.until(() -> !this.applicationContext.isRunning());
|
||||
|
||||
return 0;
|
||||
|
||||
@@ -1,10 +1,9 @@
|
||||
package io.kestra.cli.commands.servers;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import io.kestra.core.contexts.KestraContext;
|
||||
import io.kestra.core.models.ServerType;
|
||||
import io.kestra.core.runners.IndexerInterface;
|
||||
import io.kestra.core.utils.Await;
|
||||
import io.kestra.core.utils.ExecutorsUtils;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import jakarta.inject.Inject;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
@@ -12,27 +11,19 @@ import picocli.CommandLine;
|
||||
import picocli.CommandLine.Option;
|
||||
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.ExecutorService;
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "webserver",
|
||||
description = "Start the Kestra webserver"
|
||||
description = "start the webserver"
|
||||
)
|
||||
@Slf4j
|
||||
public class WebServerCommand extends AbstractServerCommand {
|
||||
private ExecutorService poolExecutor;
|
||||
|
||||
@Inject
|
||||
private ApplicationContext applicationContext;
|
||||
|
||||
@Inject
|
||||
private ExecutorsUtils executorsUtils;
|
||||
|
||||
@Option(names = {"--no-tutorials"}, description = "Flag to disable auto-loading of tutorial flows.")
|
||||
boolean tutorialsDisabled = false;
|
||||
|
||||
@Option(names = {"--no-indexer"}, description = "Flag to disable starting an embedded indexer.")
|
||||
boolean indexerDisabled = false;
|
||||
|
||||
@Override
|
||||
public boolean isFlowAutoLoadEnabled() {
|
||||
@@ -49,16 +40,8 @@ public class WebServerCommand extends AbstractServerCommand {
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
|
||||
// start the indexer
|
||||
if (!indexerDisabled) {
|
||||
log.info("Starting an embedded indexer, this can be disabled by using `--no-indexer`.");
|
||||
poolExecutor = executorsUtils.cachedThreadPool("webserver-indexer");
|
||||
poolExecutor.execute(applicationContext.getBean(IndexerInterface.class));
|
||||
shutdownHook(false, () -> poolExecutor.shutdown());
|
||||
}
|
||||
|
||||
log.info("Webserver started");
|
||||
this.shutdownHook(() -> KestraContext.getContext().shutdown());
|
||||
Await.until(() -> !this.applicationContext.isRunning());
|
||||
return 0;
|
||||
}
|
||||
|
||||
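WebServerCommand above runs the embedded indexer on a cached thread pool and registers a shutdown hook for it. A simplified, standard-library sketch of that pattern, with a plain Runnable standing in for the IndexerInterface bean:

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class EmbeddedIndexerSketch {
    public static void main(String[] args) {
        ExecutorService poolExecutor = Executors.newCachedThreadPool();
        Runnable indexer = () -> System.out.println("indexer running"); // stand-in for the real indexer bean
        poolExecutor.execute(indexer);
        // The hook closes the pool when the JVM exits, mirroring shutdownHook(false, poolExecutor::shutdown) above.
        Runtime.getRuntime().addShutdownHook(new Thread(poolExecutor::shutdown));
    }
}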
@@ -7,6 +7,7 @@ import io.kestra.core.runners.Worker;
|
||||
import io.kestra.core.utils.Await;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import jakarta.inject.Inject;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import picocli.CommandLine;
|
||||
import picocli.CommandLine.Option;
|
||||
|
||||
@@ -15,17 +16,18 @@ import java.util.UUID;
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "worker",
|
||||
description = "Start the Kestra worker"
|
||||
description = "start a worker"
|
||||
)
|
||||
@Slf4j
|
||||
public class WorkerCommand extends AbstractServerCommand {
|
||||
|
||||
@Inject
|
||||
private ApplicationContext applicationContext;
|
||||
|
||||
@Option(names = {"-t", "--thread"}, description = "The max number of worker threads, defaults to four times the number of available processors")
|
||||
@Option(names = {"-t", "--thread"}, description = "the max number of worker threads, defaults to two times the number of available processors")
|
||||
private int thread = defaultWorkerThread();
|
||||
|
||||
@Option(names = {"-g", "--worker-group"}, description = "The worker group key, must match the regex [a-zA-Z0-9_-]+ (EE only)")
|
||||
@Option(names = {"-g", "--worker-group"}, description = "the worker group key, must match the regex [a-zA-Z0-9_-]+ (EE only)")
|
||||
private String workerGroupKey = null;
|
||||
|
||||
@SuppressWarnings("unused")
|
||||
@@ -37,11 +39,8 @@ public class WorkerCommand extends AbstractServerCommand {
|
||||
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
|
||||
KestraContext.getContext().injectWorkerConfigs(thread, workerGroupKey);
|
||||
|
||||
super.call();
|
||||
|
||||
this.shutdownHook(() -> KestraContext.getContext().shutdown());
|
||||
if (this.workerGroupKey != null && !this.workerGroupKey.matches("[a-zA-Z0-9_-]+")) {
|
||||
throw new IllegalArgumentException("The --worker-group option must match the [a-zA-Z0-9_-]+ pattern");
|
||||
}
|
||||
@@ -53,6 +52,13 @@ public class WorkerCommand extends AbstractServerCommand {
|
||||
|
||||
worker.run();
|
||||
|
||||
if (this.workerGroupKey != null) {
|
||||
log.info("Worker started with {} thread(s) in group '{}'", this.thread, this.workerGroupKey);
|
||||
}
|
||||
else {
|
||||
log.info("Worker started with {} thread(s)", this.thread);
|
||||
}
|
||||
|
||||
Await.until(() -> !this.applicationContext.isRunning());
|
||||
|
||||
return 0;
|
||||
|
||||
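WorkerCommand validates --worker-group against a simple pattern before starting. A quick, runnable check of which keys pass; the sample keys are made up:

public class WorkerGroupKeyCheck {
    public static void main(String[] args) {
        String pattern = "[a-zA-Z0-9_-]+";
        System.out.println("gpu-workers_1".matches(pattern)); // true
        System.out.println("gpu workers".matches(pattern));   // false: whitespace is rejected
    }
}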
@@ -2,7 +2,6 @@ package io.kestra.cli.commands.sys;
|
||||
|
||||
import io.kestra.cli.AbstractCommand;
|
||||
import io.kestra.core.models.flows.Flow;
|
||||
import io.kestra.core.models.flows.GenericFlow;
|
||||
import io.kestra.core.repositories.FlowRepositoryInterface;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import jakarta.inject.Inject;
|
||||
@@ -10,11 +9,10 @@ import lombok.extern.slf4j.Slf4j;
|
||||
import picocli.CommandLine;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "reindex",
|
||||
description = "Reindex all records of a type: read them from the database then update them",
|
||||
description = "reindex all records of a type: read them from the database then update them",
|
||||
mixinStandardHelpOptions = true
|
||||
)
|
||||
@Slf4j
|
||||
@@ -35,8 +33,8 @@ public class ReindexCommand extends AbstractCommand {
|
||||
List<Flow> allFlow = flowRepository.findAllForAllTenants();
|
||||
allFlow.stream()
|
||||
.map(flow -> flowRepository.findByIdWithSource(flow.getTenantId(), flow.getNamespace(), flow.getId()).orElse(null))
|
||||
.filter(Objects::nonNull)
|
||||
.forEach(flow -> flowRepository.update(GenericFlow.of(flow), flow));
|
||||
.filter(flow -> flow != null)
|
||||
.forEach(flow -> flowRepository.update(flow.toFlow(), flow.toFlow(), flow.getSource(), flow.toFlow()));
|
||||
|
||||
stdOut("Successfully reindex " + allFlow.size() + " flow(s).");
|
||||
}
|
||||
|
||||
@@ -10,7 +10,7 @@ import picocli.CommandLine;
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "sys",
|
||||
description = "Manage system maintenance mode",
|
||||
description = "handle systems maintenance",
|
||||
mixinStandardHelpOptions = true,
|
||||
subcommands = {
|
||||
ReindexCommand.class,
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
package io.kestra.cli.commands.sys.database;
|
||||
|
||||
import ch.qos.logback.classic.Level;
|
||||
import io.kestra.cli.AbstractCommand;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import picocli.CommandLine;
|
||||
|
||||
@@ -44,7 +44,7 @@ public class StateStoreMigrateCommand extends AbstractCommand {
|
||||
URI.create("/" + flow.getNamespace().replace(".", "/") + "/states")
|
||||
))).map(potentialStateStoreUrisForAFlow -> Map.entry(potentialStateStoreUrisForAFlow.getKey(), potentialStateStoreUrisForAFlow.getValue().stream().flatMap(uri -> {
|
||||
try {
|
||||
return storageInterface.allByPrefix(potentialStateStoreUrisForAFlow.getKey().getTenantId(), potentialStateStoreUrisForAFlow.getKey().getNamespace(), uri, false).stream();
|
||||
return storageInterface.allByPrefix(potentialStateStoreUrisForAFlow.getKey().getTenantId(), uri, false).stream();
|
||||
} catch (IOException e) {
|
||||
return Stream.empty();
|
||||
}
|
||||
@@ -59,9 +59,9 @@ public class StateStoreMigrateCommand extends AbstractCommand {
|
||||
boolean flowScoped = flowQualifierWithStateQualifiers[0].endsWith("/" + flow.getId());
|
||||
StateStore stateStore = new StateStore(runContext(runContextFactory, flow), false);
|
||||
|
||||
try (InputStream is = storageInterface.get(flow.getTenantId(), flow.getNamespace(), stateStoreFileUri)) {
|
||||
try (InputStream is = storageInterface.get(flow.getTenantId(), stateStoreFileUri)) {
|
||||
stateStore.putState(flowScoped, stateName, stateSubName, taskRunValue, is.readAllBytes());
|
||||
storageInterface.delete(flow.getTenantId(), flow.getNamespace(), stateStoreFileUri);
|
||||
storageInterface.delete(flow.getTenantId(), stateStoreFileUri);
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
|
||||
@@ -11,7 +11,7 @@ import picocli.CommandLine;
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "template",
|
||||
description = "Manage templates",
|
||||
description = "handle templates",
|
||||
mixinStandardHelpOptions = true,
|
||||
subcommands = {
|
||||
TemplateNamespaceCommand.class,
|
||||
|
||||
@@ -19,7 +19,7 @@ import java.nio.file.Path;
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "export",
|
||||
description = "Export templates to a ZIP file",
|
||||
description = "export templates to a zip file",
|
||||
mixinStandardHelpOptions = true
|
||||
)
|
||||
@Slf4j
|
||||
@@ -31,10 +31,10 @@ public class TemplateExportCommand extends AbstractApiCommand {
|
||||
@Inject
|
||||
private ApplicationContext applicationContext;
|
||||
|
||||
@CommandLine.Option(names = {"--namespace"}, description = "The namespace of templates to export")
|
||||
@CommandLine.Option(names = {"--namespace"}, description = "the namespace of templates to export")
|
||||
public String namespace;
|
||||
|
||||
@CommandLine.Parameters(index = "0", description = "The directory to export the file to")
|
||||
@CommandLine.Parameters(index = "0", description = "the directory to export the file to")
|
||||
public Path directory;
|
||||
|
||||
@Override
|
||||
|
||||
@@ -4,6 +4,7 @@ import io.kestra.cli.AbstractValidateCommand;
|
||||
import io.kestra.core.models.templates.Template;
|
||||
import io.kestra.core.models.templates.TemplateEnabled;
|
||||
import io.kestra.core.models.validations.ModelValidator;
|
||||
import io.kestra.core.serializers.YamlFlowParser;
|
||||
import jakarta.inject.Inject;
|
||||
import picocli.CommandLine;
|
||||
|
||||
@@ -11,10 +12,12 @@ import java.util.Collections;
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "validate",
|
||||
description = "Validate a template"
|
||||
description = "validate a template"
|
||||
)
|
||||
@TemplateEnabled
|
||||
public class TemplateValidateCommand extends AbstractValidateCommand {
|
||||
@Inject
|
||||
private YamlFlowParser yamlFlowParser;
|
||||
|
||||
@Inject
|
||||
private ModelValidator modelValidator;
|
||||
@@ -23,12 +26,12 @@ public class TemplateValidateCommand extends AbstractValidateCommand {
|
||||
public Integer call() throws Exception {
|
||||
return this.call(
|
||||
Template.class,
|
||||
yamlFlowParser,
|
||||
modelValidator,
|
||||
(Object object) -> {
|
||||
Template template = (Template) object;
|
||||
return template.getNamespace() + " / " + template.getId();
|
||||
},
|
||||
(Object object) -> Collections.emptyList(),
|
||||
(Object object) -> Collections.emptyList()
|
||||
);
|
||||
}
|
||||
|
||||
@@ -10,7 +10,7 @@ import picocli.CommandLine;
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "namespace",
|
||||
description = "Manage namespace templates",
|
||||
description = "handle namespace templates",
|
||||
mixinStandardHelpOptions = true,
|
||||
subcommands = {
|
||||
TemplateNamespaceUpdateCommand.class,
|
||||
|
||||
@@ -2,30 +2,34 @@ package io.kestra.cli.commands.templates.namespaces;
|
||||
|
||||
import io.kestra.cli.AbstractValidateCommand;
|
||||
import io.kestra.cli.commands.AbstractServiceNamespaceUpdateCommand;
|
||||
import io.kestra.cli.commands.templates.TemplateValidateCommand;
|
||||
import io.kestra.core.models.templates.Template;
|
||||
import io.kestra.core.models.templates.TemplateEnabled;
|
||||
import io.kestra.core.serializers.YamlParser;
|
||||
import io.kestra.core.serializers.YamlFlowParser;
|
||||
import io.micronaut.core.type.Argument;
|
||||
import io.micronaut.http.HttpRequest;
|
||||
import io.micronaut.http.MutableHttpRequest;
|
||||
import io.micronaut.http.client.exceptions.HttpClientResponseException;
|
||||
import io.micronaut.http.client.netty.DefaultHttpClient;
|
||||
import jakarta.inject.Inject;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import picocli.CommandLine;
|
||||
|
||||
import java.nio.file.Files;
|
||||
import java.util.List;
|
||||
|
||||
import java.util.stream.Collectors;
|
||||
import jakarta.validation.ConstraintViolationException;
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "update",
|
||||
description = "Update namespace templates",
|
||||
description = "handle namespace templates",
|
||||
mixinStandardHelpOptions = true
|
||||
)
|
||||
@Slf4j
|
||||
@TemplateEnabled
|
||||
public class TemplateNamespaceUpdateCommand extends AbstractServiceNamespaceUpdateCommand {
|
||||
@Inject
|
||||
public YamlFlowParser yamlFlowParser;
|
||||
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
@@ -34,8 +38,8 @@ public class TemplateNamespaceUpdateCommand extends AbstractServiceNamespaceUpda
|
||||
try (var files = Files.walk(directory)) {
|
||||
List<Template> templates = files
|
||||
.filter(Files::isRegularFile)
|
||||
.filter(YamlParser::isValidExtension)
|
||||
.map(path -> YamlParser.parse(path.toFile(), Template.class))
|
||||
.filter(YamlFlowParser::isValidExtension)
|
||||
.map(path -> yamlFlowParser.parse(path.toFile(), Template.class))
|
||||
.toList();
|
||||
|
||||
if (templates.isEmpty()) {
|
||||
|
||||
cli/src/main/java/io/kestra/cli/plugins/PluginDownloader.java (new file, 153 lines)
@@ -0,0 +1,153 @@
|
||||
package io.kestra.cli.plugins;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import io.micronaut.context.annotation.Value;
|
||||
import io.micronaut.core.annotation.Nullable;
|
||||
import jakarta.inject.Inject;
|
||||
import jakarta.inject.Singleton;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.maven.repository.internal.MavenRepositorySystemUtils;
|
||||
import org.eclipse.aether.DefaultRepositorySystemSession;
|
||||
import org.eclipse.aether.RepositorySystem;
|
||||
import org.eclipse.aether.RepositorySystemSession;
|
||||
import org.eclipse.aether.artifact.Artifact;
|
||||
import org.eclipse.aether.artifact.DefaultArtifact;
|
||||
import org.eclipse.aether.connector.basic.BasicRepositoryConnectorFactory;
|
||||
import org.eclipse.aether.impl.DefaultServiceLocator;
|
||||
import org.eclipse.aether.repository.LocalRepository;
|
||||
import org.eclipse.aether.repository.RemoteRepository;
|
||||
import org.eclipse.aether.resolution.*;
|
||||
import org.eclipse.aether.spi.connector.RepositoryConnectorFactory;
|
||||
import org.eclipse.aether.spi.connector.transport.TransporterFactory;
|
||||
import org.eclipse.aether.transport.file.FileTransporterFactory;
|
||||
import org.eclipse.aether.transport.http.HttpTransporterFactory;
|
||||
import org.eclipse.aether.util.repository.AuthenticationBuilder;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import java.nio.file.Files;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
@Singleton
|
||||
@Slf4j
|
||||
public class PluginDownloader {
|
||||
private final List<RepositoryConfig> repositoryConfigs;
|
||||
private final RepositorySystem system;
|
||||
private final RepositorySystemSession session;
|
||||
|
||||
@Inject
|
||||
public PluginDownloader(
|
||||
List<RepositoryConfig> repositoryConfigs,
|
||||
@Nullable @Value("${kestra.plugins.local-repository-path}") String localRepositoryPath
|
||||
) {
|
||||
this.repositoryConfigs = repositoryConfigs;
|
||||
this.system = repositorySystem();
|
||||
this.session = repositorySystemSession(system, localRepositoryPath);
|
||||
}
|
||||
|
||||
public void addRepository(RepositoryConfig repositoryConfig) {
|
||||
this.repositoryConfigs.add(repositoryConfig);
|
||||
}
|
||||
|
||||
public List<URL> resolve(List<String> dependencies) throws MalformedURLException, ArtifactResolutionException, VersionRangeResolutionException {
|
||||
List<RemoteRepository> repositories = remoteRepositories();
|
||||
|
||||
List<ArtifactResult> artifactResults = resolveArtifacts(repositories, dependencies);
|
||||
List<URL> localUrls = resolveUrls(artifactResults);
|
||||
log.debug("Resolved Plugin {} with {}", dependencies, localUrls);
|
||||
|
||||
return localUrls;
|
||||
}
|
||||
|
||||
private List<RemoteRepository> remoteRepositories() {
|
||||
return repositoryConfigs
|
||||
.stream()
|
||||
.map(repositoryConfig -> {
|
||||
var build = new RemoteRepository.Builder(
|
||||
repositoryConfig.getId(),
|
||||
"default",
|
||||
repositoryConfig.getUrl()
|
||||
);
|
||||
|
||||
if (repositoryConfig.getBasicAuth() != null) {
|
||||
var authenticationBuilder = new AuthenticationBuilder();
|
||||
authenticationBuilder.addUsername(repositoryConfig.getBasicAuth().getUsername());
|
||||
authenticationBuilder.addPassword(repositoryConfig.getBasicAuth().getPassword());
|
||||
|
||||
build.setAuthentication(authenticationBuilder.build());
|
||||
}
|
||||
|
||||
return build.build();
|
||||
})
|
||||
.toList();
|
||||
}
|
||||
|
||||
private static RepositorySystem repositorySystem() {
|
||||
DefaultServiceLocator locator = MavenRepositorySystemUtils.newServiceLocator();
|
||||
locator.addService(RepositoryConnectorFactory.class, BasicRepositoryConnectorFactory.class);
|
||||
locator.addService(TransporterFactory.class, FileTransporterFactory.class);
|
||||
locator.addService(TransporterFactory.class, HttpTransporterFactory.class);
|
||||
|
||||
return locator.getService(RepositorySystem.class);
|
||||
}
|
||||
|
||||
private RepositorySystemSession repositorySystemSession(RepositorySystem system, String localRepositoryPath) {
|
||||
DefaultRepositorySystemSession session = MavenRepositorySystemUtils.newSession();
|
||||
|
||||
if (localRepositoryPath == null) {
|
||||
try {
|
||||
final String tempDirectory = Files.createTempDirectory(this.getClass().getSimpleName().toLowerCase())
|
||||
.toAbsolutePath()
|
||||
.toString();
|
||||
|
||||
localRepositoryPath = tempDirectory;
|
||||
|
||||
Runtime.getRuntime().addShutdownHook(new Thread(() -> {
|
||||
try {
|
||||
FileUtils.deleteDirectory(new File(tempDirectory));
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}));
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
LocalRepository localRepo = new LocalRepository(localRepositoryPath);
|
||||
session.setLocalRepositoryManager(system.newLocalRepositoryManager(session, localRepo));
|
||||
|
||||
return session;
|
||||
}
|
||||
|
||||
private List<ArtifactResult> resolveArtifacts(List<RemoteRepository> repositories, List<String> dependencies) throws ArtifactResolutionException, VersionRangeResolutionException {
|
||||
List<ArtifactResult> results = new ArrayList<>(dependencies.size());
|
||||
for (String dependency: dependencies) {
|
||||
var artifact = new DefaultArtifact(dependency);
|
||||
var version = system.resolveVersionRange(session, new VersionRangeRequest(artifact, repositories, null));
|
||||
var artifactRequest = new ArtifactRequest(
|
||||
new DefaultArtifact(artifact.getGroupId(), artifact.getArtifactId(), "jar", version.getHighestVersion().toString()),
|
||||
repositories,
|
||||
null
|
||||
);
|
||||
var artifactResult = system.resolveArtifact(session, artifactRequest);
|
||||
results.add(artifactResult);
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
private List<URL> resolveUrls(List<ArtifactResult> artifactResults) throws MalformedURLException {
|
||||
ImmutableList.Builder<URL> urls = ImmutableList.builder();
|
||||
for (ArtifactResult artifactResult : artifactResults) {
|
||||
URL url;
|
||||
url = artifactResult.getArtifact().getFile().toPath().toUri().toURL();
|
||||
urls.add(url);
|
||||
}
|
||||
return urls.build();
|
||||
}
|
||||
}
|
||||
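A usage sketch for the new PluginDownloader, assuming Maven Central as the only repository; the repository id, URL, and plugin coordinate below are illustrative, and RepositoryConfig/PluginDownloader are the classes introduced in this diff (same package assumed, so no Kestra imports are shown):

import java.net.URL;
import java.util.ArrayList;
import java.util.List;

public class PluginDownloaderSketch {
    public static void main(String[] args) throws Exception {
        RepositoryConfig central = RepositoryConfig.builder()
            .id("central")
            .url("https://repo.maven.apache.org/maven2/") // illustrative repository
            .build();

        // null local-repository-path: the downloader falls back to a temporary directory (cleaned on shutdown)
        PluginDownloader downloader = new PluginDownloader(new ArrayList<>(List.of(central)), null);

        List<URL> jars = downloader.resolve(List.of("io.kestra.plugin:plugin-notifications:0.19.0")); // illustrative coordinate
        jars.forEach(url -> System.out.println("Resolved plugin jar: " + url));
    }
}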
@@ -0,0 +1,30 @@
|
||||
package io.kestra.cli.plugins;
|
||||
|
||||
import io.micronaut.context.annotation.EachProperty;
|
||||
import io.micronaut.context.annotation.Parameter;
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Getter;
|
||||
|
||||
@EachProperty("kestra.plugins.repositories")
|
||||
@Getter
|
||||
@AllArgsConstructor
|
||||
@Builder
|
||||
public class RepositoryConfig {
|
||||
String id;
|
||||
|
||||
String url;
|
||||
|
||||
BasicAuth basicAuth;
|
||||
|
||||
@Getter
|
||||
@AllArgsConstructor
|
||||
public static class BasicAuth {
|
||||
private String username;
|
||||
private String password;
|
||||
}
|
||||
|
||||
public RepositoryConfig(@Parameter String id) {
|
||||
this.id = id;
|
||||
}
|
||||
}
|
||||
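RepositoryConfig entries are normally bound from kestra.plugins.repositories configuration through @EachProperty, but the Lombok-generated constructors make a programmatic equivalent easy to sketch; the repository URL and credentials below are made up:

// BasicAuth and the all-args constructor come from the Lombok annotations above.
RepositoryConfig.BasicAuth auth = new RepositoryConfig.BasicAuth("deploy-user", "s3cr3t"); // illustrative credentials
RepositoryConfig internal = new RepositoryConfig(
    "internal",
    "https://nexus.example.com/repository/maven-releases/", // illustrative URL
    auth
);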
@@ -1,268 +0,0 @@
|
||||
package io.kestra.cli.services;
|
||||
|
||||
import io.kestra.core.exceptions.FlowProcessingException;
|
||||
import io.kestra.core.models.flows.FlowInterface;
|
||||
import io.kestra.core.models.flows.FlowWithPath;
|
||||
import io.kestra.core.models.flows.FlowWithSource;
|
||||
import io.kestra.core.models.flows.GenericFlow;
|
||||
import io.kestra.core.models.validations.ModelValidator;
|
||||
import io.kestra.core.repositories.FlowRepositoryInterface;
|
||||
import io.kestra.core.services.FlowListenersInterface;
|
||||
import io.kestra.core.services.PluginDefaultService;
|
||||
import io.micronaut.context.annotation.Requires;
|
||||
import io.micronaut.context.annotation.Value;
|
||||
import io.micronaut.scheduling.io.watch.FileWatchConfiguration;
|
||||
import jakarta.annotation.Nullable;
|
||||
import jakarta.inject.Inject;
|
||||
import jakarta.inject.Singleton;
|
||||
import jakarta.validation.ConstraintViolationException;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.charset.Charset;
|
||||
import java.nio.file.*;
|
||||
import java.nio.file.attribute.BasicFileAttributes;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
|
||||
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
|
||||
|
||||
@Singleton
|
||||
@Slf4j
|
||||
@Requires(property = "micronaut.io.watch.enabled", value = "true")
|
||||
public class FileChangedEventListener {
|
||||
@Nullable
|
||||
private final FileWatchConfiguration fileWatchConfiguration;
|
||||
@Nullable
|
||||
private final WatchService watchService;
|
||||
|
||||
@Inject
|
||||
private FlowRepositoryInterface flowRepositoryInterface;
|
||||
|
||||
@Inject
|
||||
private PluginDefaultService pluginDefaultService;
|
||||
|
||||
@Inject
|
||||
private ModelValidator modelValidator;
|
||||
|
||||
@Inject
|
||||
protected FlowListenersInterface flowListeners;
|
||||
|
||||
@Nullable
|
||||
@Value("${micronaut.io.watch.tenantId}")
|
||||
private String tenantId;
|
||||
|
||||
FlowFilesManager flowFilesManager;
|
||||
|
||||
private List<FlowWithPath> flows = new ArrayList<>();
|
||||
|
||||
private boolean isStarted = false;
|
||||
|
||||
@Inject
|
||||
public FileChangedEventListener(@Nullable FileWatchConfiguration fileWatchConfiguration, @Nullable WatchService watchService) {
|
||||
this.fileWatchConfiguration = fileWatchConfiguration;
|
||||
this.watchService = watchService;
|
||||
}
|
||||
|
||||
public void startListeningFromConfig() throws IOException, InterruptedException {
|
||||
if (fileWatchConfiguration != null && fileWatchConfiguration.isEnabled()) {
|
||||
this.flowFilesManager = new LocalFlowFileWatcher(flowRepositoryInterface);
|
||||
List<Path> paths = fileWatchConfiguration.getPaths();
|
||||
this.setup(paths);
|
||||
|
||||
flowListeners.run();
|
||||
// Init existing flows not already in files
|
||||
flowListeners.listen(flows -> {
|
||||
if (!isStarted) {
|
||||
for (FlowInterface flow : flows) {
|
||||
if (this.flows.stream().noneMatch(flowWithPath -> flowWithPath.uidWithoutRevision().equals(flow.uidWithoutRevision()))) {
|
||||
flowToFile(flow, this.buildPath(flow));
|
||||
this.flows.add(FlowWithPath.of(flow, this.buildPath(flow).toString()));
|
||||
}
|
||||
}
|
||||
this.isStarted = true;
|
||||
}
|
||||
});
|
||||
|
||||
// Listen for new/updated/deleted flows
|
||||
flowListeners.listen((current, previous) -> {
|
||||
// If deleted
|
||||
if (current.isDeleted()) {
|
||||
this.flows.stream().filter(flowWithPath -> flowWithPath.uidWithoutRevision().equals(current.uidWithoutRevision())).findFirst()
|
||||
.ifPresent(flowWithPath -> {
|
||||
deleteFile(Paths.get(flowWithPath.getPath()));
|
||||
});
|
||||
this.flows.removeIf(flowWithPath -> flowWithPath.uidWithoutRevision().equals(current.uidWithoutRevision()));
|
||||
} else {
|
||||
// if updated/created
|
||||
Optional<FlowWithPath> flowWithPath = this.flows.stream().filter(fwp -> fwp.uidWithoutRevision().equals(current.uidWithoutRevision())).findFirst();
|
||||
if (flowWithPath.isPresent()) {
|
||||
flowToFile(current, Paths.get(flowWithPath.get().getPath()));
|
||||
} else {
|
||||
flows.add(FlowWithPath.of(current, this.buildPath(current).toString()));
|
||||
flowToFile(current, null);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
this.startListening(paths);
|
||||
} else {
|
||||
log.info("File watching is disabled.");
|
||||
}
|
||||
}
|
||||
|
||||
public void startListening(List<Path> paths) throws IOException, InterruptedException {
|
||||
String tenantId = this.tenantId != null ? this.tenantId : MAIN_TENANT;
|
||||
|
||||
for (Path path : paths) {
|
||||
path.register(watchService, StandardWatchEventKinds.ENTRY_CREATE, StandardWatchEventKinds.ENTRY_DELETE, StandardWatchEventKinds.ENTRY_MODIFY);
|
||||
}
|
||||
|
||||
WatchKey key;
|
||||
while ((key = watchService.take()) != null) {
|
||||
for (WatchEvent<?> watchEvent : key.pollEvents()) {
|
||||
try {
|
||||
WatchEvent.Kind<?> kind = watchEvent.kind();
|
||||
Path entry = (Path) watchEvent.context();
|
||||
|
||||
if (entry.toString().endsWith(".yml") || entry.toString().endsWith(".yaml")) {
|
||||
|
||||
if (kind == StandardWatchEventKinds.ENTRY_CREATE || kind == StandardWatchEventKinds.ENTRY_MODIFY) {
|
||||
|
||||
Path filePath = ((Path) key.watchable()).resolve(entry);
|
||||
if (Files.isDirectory(filePath)) {
|
||||
loadFlowsFromFolder(filePath);
|
||||
} else {
|
||||
|
||||
try {
|
||||
String content = Files.readString(filePath, Charset.defaultCharset());
|
||||
|
||||
Optional<FlowWithSource> flow = parseFlow(content, entry);
|
||||
if (flow.isPresent()) {
|
||||
if (kind == StandardWatchEventKinds.ENTRY_MODIFY) {
|
||||
// Check if we already have a file with the given path
|
||||
if (flows.stream().anyMatch(flowWithPath -> flowWithPath.getPath().equals(filePath.toString()))) {
|
||||
Optional<FlowWithPath> previous = flows.stream().filter(flowWithPath -> flowWithPath.getPath().equals(filePath.toString())).findFirst();
|
||||
// Check if Flow from file has id/namespace updated
|
||||
if (previous.isPresent() && !previous.get().uidWithoutRevision().equals(flow.get().uidWithoutRevision())) {
|
||||
flows.removeIf(flowWithPath -> flowWithPath.getPath().equals(filePath.toString()));
|
||||
flowFilesManager.deleteFlow(previous.get().getTenantId(), previous.get().getNamespace(), previous.get().getId());
|
||||
flows.add(FlowWithPath.of(flow.get(), filePath.toString()));
|
||||
}
|
||||
} else {
|
||||
flows.add(FlowWithPath.of(flow.get(), filePath.toString()));
|
||||
}
|
||||
} else {
|
||||
flows.add(FlowWithPath.of(flow.get(), filePath.toString()));
|
||||
}
|
||||
|
||||
flowFilesManager.createOrUpdateFlow(GenericFlow.fromYaml(tenantId, content));
|
||||
log.info("Flow {} from file {} has been created or modified", flow.get().getId(), entry);
|
||||
}
|
||||
|
||||
} catch (NoSuchFileException e) {
|
||||
log.error("File not found: {}", entry, e);
|
||||
} catch (IOException e) {
|
||||
log.error("Error reading file: {}", entry, e);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
Path filePath = ((Path) key.watchable()).resolve(entry);
|
||||
flows.stream()
|
||||
.filter(flow -> flow.getPath().equals(filePath.toString()))
|
||||
.findFirst()
|
||||
.ifPresent(flowWithPath -> {
|
||||
flowFilesManager.deleteFlow(flowWithPath.getTenantId(), flowWithPath.getNamespace(), flowWithPath.getId());
|
||||
this.flows.removeIf(fwp -> fwp.uidWithoutRevision().equals(flowWithPath.uidWithoutRevision()));
|
||||
});
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
log.error("Unexpected error while watching flows", e);
|
||||
}
|
||||
}
|
||||
key.reset();
|
||||
}
|
||||
}
|
||||
|
||||
private void setup(List<Path> folders) {
|
||||
for (Path folder : folders) {
|
||||
this.loadFlowsFromFolder(folder);
|
||||
}
|
||||
}
|
||||
|
||||
private void loadFlowsFromFolder(Path folder) {
|
||||
String tenantId = this.tenantId != null ? this.tenantId : MAIN_TENANT;
|
||||
|
||||
try {
|
||||
Files.walkFileTree(folder, new SimpleFileVisitor<Path>() {
|
||||
@Override
|
||||
public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException {
|
||||
dir.register(watchService, StandardWatchEventKinds.ENTRY_CREATE, StandardWatchEventKinds.ENTRY_DELETE, StandardWatchEventKinds.ENTRY_MODIFY);
|
||||
if (!dir.equals(folder)) {
|
||||
loadFlowsFromFolder(dir);
|
||||
}
|
||||
return FileVisitResult.CONTINUE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
|
||||
if (file.toString().endsWith(".yml") || file.toString().endsWith(".yaml")) {
|
||||
String content = Files.readString(file, Charset.defaultCharset());
|
||||
Optional<FlowWithSource> flow = parseFlow(content, file);
|
||||
|
||||
if (flow.isPresent() && flows.stream().noneMatch(flowWithPath -> flowWithPath.uidWithoutRevision().equals(flow.get().uidWithoutRevision()))) {
|
||||
flows.add(FlowWithPath.of(flow.get(), file.toString()));
|
||||
flowFilesManager.createOrUpdateFlow(GenericFlow.fromYaml(tenantId, content));
|
||||
}
|
||||
}
|
||||
return FileVisitResult.CONTINUE;
|
||||
}
|
||||
});
|
||||
log.info("Loaded files from the folder {}", folder);
|
||||
} catch (IOException e) {
|
||||
log.error(e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
private void flowToFile(FlowInterface flow, Path path) {
|
||||
Path defaultPath = path != null ? path : this.buildPath(flow);
|
||||
|
||||
try {
|
||||
Files.writeString(defaultPath, flow.source());
|
||||
log.info("Flow {} has been written to file {}", flow.getId(), defaultPath);
|
||||
} catch (IOException e) {
|
||||
log.error("Error writing file: {}", defaultPath, e);
|
||||
}
|
||||
}
|
||||
|
||||
private Optional<FlowWithSource> parseFlow(String content, Path entry) {
|
||||
String tenantId = this.tenantId != null ? this.tenantId : MAIN_TENANT;
|
||||
|
||||
try {
|
||||
FlowWithSource flow = pluginDefaultService.parseFlowWithAllDefaults(tenantId, content, false);
|
||||
modelValidator.validate(flow);
|
||||
return Optional.of(flow);
|
||||
} catch (ConstraintViolationException | FlowProcessingException e) {
|
||||
log.warn("Error while parsing flow: {}", entry, e);
|
||||
}
|
||||
return Optional.empty();
|
||||
}
|
||||
|
||||
private void deleteFile(Path file) {
|
||||
try {
|
||||
if (Files.deleteIfExists(file)) {
|
||||
log.info("File {} has been deleted successfully.", file);
|
||||
} else {
|
||||
log.warn("File {} does not exist.", file);
|
||||
}
|
||||
} catch (IOException e) {
|
||||
log.error("Error deleting file: {}", file, e);
|
||||
}
|
||||
}
|
||||
|
||||
private Path buildPath(FlowInterface flow) {
|
||||
return fileWatchConfiguration.getPaths().getFirst().resolve(flow.uidWithoutRevision() + ".yml");
|
||||
}
|
||||
}
|
||||
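The removed FileChangedEventListener is built around the JDK WatchService. A standalone, plain-JDK sketch of the same watch loop for YAML files; it only prints events and includes none of the Kestra flow handling, and the watched directory is whatever path you pass in:

import java.nio.file.*;

public class FlowWatchSketch {
    public static void main(String[] args) throws Exception {
        Path dir = Paths.get(args.length > 0 ? args[0] : "."); // directory to watch (illustrative default)
        WatchService watchService = FileSystems.getDefault().newWatchService();
        dir.register(watchService, StandardWatchEventKinds.ENTRY_CREATE,
            StandardWatchEventKinds.ENTRY_DELETE, StandardWatchEventKinds.ENTRY_MODIFY);

        WatchKey key;
        while ((key = watchService.take()) != null) {
            for (WatchEvent<?> event : key.pollEvents()) {
                Path entry = (Path) event.context();
                // Only react to YAML files, as the listener above does
                if (entry.toString().endsWith(".yml") || entry.toString().endsWith(".yaml")) {
                    System.out.println(event.kind() + ": " + dir.resolve(entry));
                }
            }
            key.reset();
        }
    }
}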
@@ -1,13 +0,0 @@
|
||||
package io.kestra.cli.services;
|
||||
|
||||
import io.kestra.core.models.flows.FlowWithSource;
|
||||
import io.kestra.core.models.flows.GenericFlow;
|
||||
|
||||
public interface FlowFilesManager {
|
||||
|
||||
FlowWithSource createOrUpdateFlow(GenericFlow flow);
|
||||
|
||||
void deleteFlow(FlowWithSource toDelete);
|
||||
|
||||
void deleteFlow(String tenantId, String namespace, String id);
|
||||
}
|
||||
@@ -1,34 +0,0 @@
|
||||
package io.kestra.cli.services;
|
||||
|
||||
import io.kestra.core.models.flows.FlowWithSource;
|
||||
import io.kestra.core.models.flows.GenericFlow;
|
||||
import io.kestra.core.repositories.FlowRepositoryInterface;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
@Slf4j
|
||||
public class LocalFlowFileWatcher implements FlowFilesManager {
|
||||
private final FlowRepositoryInterface flowRepository;
|
||||
|
||||
public LocalFlowFileWatcher(FlowRepositoryInterface flowRepository) {
|
||||
this.flowRepository = flowRepository;
|
||||
}
|
||||
|
||||
@Override
|
||||
public FlowWithSource createOrUpdateFlow(final GenericFlow flow) {
|
||||
return flowRepository.findById(flow.getTenantId(), flow.getNamespace(), flow.getId())
|
||||
.map(previous -> flowRepository.update(flow, previous))
|
||||
.orElseGet(() -> flowRepository.create(flow));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void deleteFlow(FlowWithSource toDelete) {
|
||||
flowRepository.findByIdWithSource(toDelete.getTenantId(), toDelete.getNamespace(), toDelete.getId()).ifPresent(flowRepository::delete);
|
||||
log.info("Flow {} has been deleted", toDelete.getId());
|
||||
}
|
||||
|
||||
@Override
|
||||
public void deleteFlow(String tenantId, String namespace, String id) {
|
||||
flowRepository.findByIdWithSource(tenantId, namespace, id).ifPresent(flowRepository::delete);
|
||||
log.info("Flow {} has been deleted", id);
|
||||
}
|
||||
}
|
||||
@@ -1,9 +1,6 @@
|
||||
micronaut:
|
||||
application:
|
||||
name: kestra
|
||||
# Disable Micronaut Open Telemetry
|
||||
otel:
|
||||
enabled: false
|
||||
router:
|
||||
static-resources:
|
||||
swagger:
|
||||
@@ -15,9 +12,6 @@ micronaut:
|
||||
static:
|
||||
paths: classpath:static
|
||||
mapping: /static/**
|
||||
root:
|
||||
paths: classpath:root
|
||||
mapping: /**
|
||||
server:
|
||||
max-request-size: 10GB
|
||||
multipart:
|
||||
@@ -27,14 +21,13 @@ micronaut:
|
||||
write-idle-timeout: 60m
|
||||
idle-timeout: 60m
|
||||
netty:
|
||||
max-zstd-encode-size: 67108864 # increased to 64MB from the default of 32MB
|
||||
max-chunk-size: 10MB
|
||||
max-header-size: 32768 # increased from the default of 8k
|
||||
responses:
|
||||
file:
|
||||
cache-seconds: 86400
|
||||
cache-control:
|
||||
public: true
|
||||
responses:
|
||||
file:
|
||||
cache-seconds: 86400
|
||||
cache-control:
|
||||
public: true
|
||||
|
||||
# Access log configuration, see https://docs.micronaut.io/latest/guide/index.html#accessLogger
|
||||
access-logger:
|
||||
@@ -44,7 +37,6 @@ micronaut:
|
||||
exclusions:
|
||||
- /ui/.+
|
||||
- /health
|
||||
- /health/.+
|
||||
- /prometheus
|
||||
http-version: HTTP_1_1
|
||||
caches:
|
||||
@@ -78,13 +70,6 @@ micronaut:
|
||||
type: scheduled
|
||||
core-pool-size: 1
|
||||
|
||||
# Disable OpenTelemetry metrics by default, users that need it must enable it and configure the collector URL.
|
||||
metrics:
|
||||
export:
|
||||
otlp:
|
||||
enabled: false
|
||||
# url: http://localhost:4318/v1/metrics
|
||||
|
||||
jackson:
|
||||
serialization:
|
||||
writeDatesAsTimestamps: false
|
||||
@@ -142,18 +127,13 @@ kestra:
|
||||
jdbc:
|
||||
queues:
|
||||
min-poll-interval: 25ms
|
||||
max-poll-interval: 500ms
|
||||
poll-switch-interval: 60s
|
||||
max-poll-interval: 1000ms
|
||||
poll-switch-interval: 5s
|
||||
|
||||
cleaner:
|
||||
initial-delay: 1h
|
||||
fixed-delay: 1h
|
||||
retention: 7d
|
||||
types:
|
||||
- type : io.kestra.core.models.executions.LogEntry
|
||||
retention: 1h
|
||||
- type: io.kestra.core.models.executions.MetricEntry
|
||||
retention: 1h
|
||||
|
||||
plugins:
|
||||
repositories:
|
||||
@@ -172,16 +152,13 @@ kestra:
|
||||
values:
|
||||
recoverMissedSchedules: ALL
|
||||
variables:
|
||||
env-vars-prefix: ENV_
|
||||
env-vars-prefix: KESTRA_
|
||||
cache-enabled: true
|
||||
cache-size: 1000
|
||||
|
||||
metrics:
|
||||
prefix: kestra
|
||||
|
||||
traces:
|
||||
root: DISABLED
|
||||
|
||||
server:
|
||||
basic-auth:
|
||||
enabled: false
|
||||
@@ -199,34 +176,15 @@ kestra:
|
||||
liveness:
|
||||
enabled: true
|
||||
# The expected time between liveness probe.
|
||||
interval: 10s
|
||||
interval: 5s
|
||||
# The timeout used to detect service failures.
|
||||
timeout: 1m
|
||||
timeout: 45s
|
||||
# The time to wait before executing a liveness probe.
|
||||
initialDelay: 1m
|
||||
initialDelay: 45s
|
||||
# The expected time between service heartbeats.
|
||||
heartbeatInterval: 3s
|
||||
service:
|
||||
purge:
|
||||
initial-delay: 1h
|
||||
fixed-delay: 1d
|
||||
retention: 30d
|
||||
anonymous-usage-report:
|
||||
enabled: true
|
||||
uri: https://api.kestra.io/v1/reports/usages
|
||||
initial-delay: 5m
|
||||
fixed-delay: 1h
|
||||
|
||||
hidden-labels:
|
||||
prefixes:
|
||||
- system.
|
||||
- internal.
|
||||
|
||||
otel:
|
||||
exclusions:
|
||||
- /ping
|
||||
- /metrics
|
||||
- /health
|
||||
- /env
|
||||
- /prometheus
|
||||
propagators: tracecontext, baggage
|
||||
|
||||
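One behavioural difference in the config block above is kestra.variables.env-vars-prefix (ENV_ on one side, KESTRA_ on the other): only environment variables starting with the prefix are exposed as variables. A plain-Java illustration of that filtering; the lower-cased remainder used as the variable name is an assumption here, not something this diff states:

import java.util.Map;

public class EnvVarsPrefixSketch {
    public static void main(String[] args) {
        String prefix = "KESTRA_"; // or "ENV_", depending on which side of the diff you run
        for (Map.Entry<String, String> entry : System.getenv().entrySet()) {
            if (entry.getKey().startsWith(prefix)) {
                String variableName = entry.getKey().substring(prefix.length()).toLowerCase();
                System.out.println(variableName + " = " + entry.getValue());
            }
        }
    }
}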
@@ -13,7 +13,8 @@ import picocli.CommandLine;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.*;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
|
||||
class AppTest {
|
||||
@@ -25,7 +26,7 @@ class AppTest {
|
||||
try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, Environment.TEST)) {
|
||||
PicocliRunner.call(App.class, ctx, "--help");
|
||||
|
||||
assertThat(out.toString()).contains("kestra");
|
||||
assertThat(out.toString(), containsString("kestra"));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -41,23 +42,7 @@ class AppTest {
|
||||
new CommandLine(App.class, new MicronautFactory(ctx)).execute(args);
|
||||
|
||||
assertTrue(ctx.getProperty("kestra.server-type", ServerType.class).isEmpty());
|
||||
assertThat(out.toString()).startsWith("Usage: kestra server " + serverType);
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void missingRequiredParamsPrintHelpInsteadOfException() {
|
||||
final ByteArrayOutputStream out = new ByteArrayOutputStream();
|
||||
System.setErr(new PrintStream(out));
|
||||
|
||||
final String[] argsWithMissingParams = new String[]{"flow", "namespace", "update"};
|
||||
|
||||
try (ApplicationContext ctx = App.applicationContext(App.class, argsWithMissingParams)) {
|
||||
new CommandLine(App.class, new MicronautFactory(ctx)).execute(argsWithMissingParams);
|
||||
|
||||
assertThat(out.toString()).startsWith("Missing required parameters: ");
|
||||
assertThat(out.toString()).contains("Usage: kestra flow namespace update ");
|
||||
assertThat(out.toString()).doesNotContain("MissingParameterException: ");
|
||||
assertThat(out.toString(), startsWith("Usage: kestra server " + serverType));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
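The AppTest changes above toggle between AssertJ and Hamcrest assertions. Both styles can coexist in one test class because the two assertThat methods have different signatures; a minimal side-by-side sketch with a sample string:

import org.junit.jupiter.api.Test;

import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;

class AssertionStylesSketch {
    @Test
    void bothStyles() {
        String out = "Usage: kestra server standalone"; // sample output string
        assertThat(out).contains("kestra");        // AssertJ: fluent assertion
        assertThat(out, containsString("kestra")); // Hamcrest: matcher-based assertion
    }
}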
@@ -8,7 +8,8 @@ import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
|
||||
class ServerCommandValidatorTest {
|
||||
|
||||
@@ -39,8 +40,8 @@ class ServerCommandValidatorTest {
|
||||
.start()
|
||||
);
|
||||
final Throwable rootException = getRootException(exception);
|
||||
assertThat(rootException.getClass()).isEqualTo(ServerCommandValidator.ServerCommandException.class);
|
||||
assertThat(rootException.getMessage()).isEqualTo("Incomplete server configuration - missing required properties");
|
||||
assertThat(rootException.getClass(), is(ServerCommandValidator.ServerCommandException.class));
|
||||
assertThat(rootException.getMessage(), is("Incomplete server configuration - missing required properties"));
|
||||
}
|
||||
|
||||
private Throwable getRootException(Throwable exception) {
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.