Mirror of https://github.com/kestra-io/kestra.git (synced 2025-12-29 09:00:26 -05:00)

Compare commits: v0.22.0-rc ... v0.21.3 (123 commits)
Commit SHA1s in this comparison:

23bde6b716, 0b2df61c2e, d30b331b3c, 1fa026f0ee, 3a39c65829, b174a81562, 077421d59c, fcf999ff61,
3e2f798ccf, 69faecf339, aa3a6854ae, bb6edfff98, f7b495d22f, eaf63f307c, 905f778204, 0b15711b23,
a51b193f4b, 42a7938d38, 5783a95db3, 785afe7884, 28fea2e5dc, dcc59fde35, 4e9ac8b3a2, 5d5b74613b,
44c149e8d5, c262525341, 7da24df76f, 2664307517, 8c0f0f86b6, b651f53e8a, 10fad29923, d9962a89a7,
60b189d101, 6b065815b7, 8c943b43f0, 8b813115a9, 4a6bb0ba87, a2daf0f493, 0e3218c7be, d98c5e19fc,
e086099d6c, df3bec4d6c, 4b946175bf, 0e891f64a2, 47cc38d89e, d2f9060b5c, c36cc504eb, 8d3b3a8493,
e7955ca7bf, 016cd09849, 23846d6100, 0b247b709e, bfee53a9b1, 70a3c98aca, a923124108, 92484c0333,
eb21452a83, 433fe963e2, 7a2390ddf7, 1c6a14d17a, 0ba64f7979, 38720e96a9, 0f7d9b2adc, 210fc246ac,
df0d037f66, 07ea309a47, 1f09f53a88, f356921daa, 3d50ef03f7, 7b309eb2d2, b22b0642ed, 1cbc9195c4,
b853dd0b6e, f7df60419c, 9f76cae55e, aca5a9ff4c, a6ce86d702, 4392c89ec7, d74a31ba7f, cb3195900f,
cf4b91f44d, 33ecf8d5f5, 39a2293a45, 88c93995df, 6afe5ff41f, a3a8863f46, fcfee5116b, 3f2d91014b,
41149a83b3, 1ed882e8f3, 0f6e0de29c, 238bc532c3, 6919848ab3, 86aec88de4, f609d57a0c, f3852a3c24,
804ff6a81c, 7869f90edd, 2b72306b3d, f0d5d4b93f, 4e4ab80b2f, c33d08afda, a246ac38f5, 7bdaa81dee,
6a1d831849, 95d2d1dfa3, d12dd179c2, ceda5eb8ee, 1301aaac76, 5f7468a9a4, aa24c888a3, c792d9b6ea,
a921b95404, e46df069a9, c08f4f24ca, 67b3937824, 17e1623342, d12fbf05b0, efa2d44e76, acdb46cea0,
c1807516f5, ab796dff93, 2d98f909de
@@ -1,67 +0,0 @@
FROM ubuntu:24.04

ARG DEBIAN_FRONTEND=noninteractive

USER root
WORKDIR /root

RUN apt update && apt install -y \
    apt-transport-https ca-certificates gnupg curl wget git zip unzip less zsh net-tools iputils-ping jq lsof

ENV HOME="/root"

# --------------------------------------
# Git
# --------------------------------------
# Add the devcontainer workspace folder as a safe directory so that git
# version control works inside the container's file system.
RUN git config --global --add safe.directory "/workspaces/kestra"
# --------------------------------------

# --------------------------------------
# Oh my zsh
# --------------------------------------
RUN sh -c "$(curl -fsSL https://raw.githubusercontent.com/ohmyzsh/ohmyzsh/master/tools/install.sh)" -- \
    -t robbyrussell \
    -p git -p node -p npm

ENV SHELL=/bin/zsh
# --------------------------------------

# --------------------------------------
# Java
# --------------------------------------
RUN wget https://download.oracle.com/java/21/latest/jdk-21_linux-x64_bin.deb
RUN dpkg -i ./jdk-21_linux-x64_bin.deb
ENV JAVA_HOME=/usr/java/jdk-21-oracle-x64
ENV PATH="$PATH:$JAVA_HOME/bin"
# Loads a custom configuration file for Micronaut
ENV MICRONAUT_ENVIRONMENTS=local,override
# Sets the path where plugin JARs are saved; they are loaded during the startup process
ENV KESTRA_PLUGINS_PATH="/workspaces/kestra/local/plugins"
# --------------------------------------

# --------------------------------------
# Node.js
# --------------------------------------
RUN curl -fsSL https://deb.nodesource.com/setup_22.x -o nodesource_setup.sh \
    && bash nodesource_setup.sh && apt install -y nodejs
# Increases the JavaScript heap to 4 GB to prevent heap out-of-memory errors during startup
ENV NODE_OPTIONS=--max-old-space-size=4096
# --------------------------------------

# --------------------------------------
# Python
# --------------------------------------
RUN apt install -y python3 pip python3-venv
# --------------------------------------

# --------------------------------------
# SSH
# --------------------------------------
RUN mkdir -p ~/.ssh
RUN touch ~/.ssh/config
RUN echo "Host github.com" >> ~/.ssh/config \
    && echo "  IdentityFile ~/.ssh/id_ed25519" >> ~/.ssh/config
RUN touch ~/.ssh/id_ed25519
# --------------------------------------
@@ -1,149 +0,0 @@
# Kestra Devcontainer

This devcontainer provides a quick and easy setup for anyone using VSCode to start developing on either the frontend or backend of this project. It bootstraps a Docker container for you to develop inside of, without the need to set up the environment manually.

---

## INSTRUCTIONS

### Setup:

Take a look at this guide to get an idea of what the setup is like, as this devcontainer follows the same approach: https://kestra.io/docs/getting-started/contributing

Once you have this repo cloned to your local system, install the VSCode extension [Remote Development](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.vscode-remote-extensionpack).

Then run the following command from the command palette:
`Dev Containers: Open Folder in Container...` and select your Kestra root folder.

This will put you inside a Docker container ready for development.

NOTE: you'll need to wait for the Gradle build to finish and compile the Java files, but this happens automatically within VSCode.

In the meantime, you can move on to the next step...

---

### Development:

- Create a `.env.development.local` file in the `ui` folder and paste the following:

```bash
# This lets the frontend know what the backend URL is; you are free to change it to your actual server URL, e.g. a hosted version of Kestra.
VITE_APP_API_URL=http://localhost:8080
```

- Navigate into the `ui` folder and run `npm install` to install the dependencies for the frontend project.

- Now go to the `cli/src/main/resources` folder and create an `application-override.yml` file.

Now you have two choices:

`Local mode`:

Runs the Kestra server in local mode, which uses an H2 database, so this is the only config you'd need:

```yaml
micronaut:
  server:
    cors:
      enabled: true
      configurations:
        all:
          allowedOrigins:
            - http://localhost:5173
```

You can then open a new terminal and run the following command to start the backend server: `./gradlew runLocal`
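
For example, running the backend and the frontend dev server from two terminals inside the devcontainer (a sketch; the workspace path is the `workspaceFolder` from `devcontainer.json`):

```bash
# Terminal 1: start the Kestra backend in local mode (H2 database)
cd /workspaces/kestra
./gradlew runLocal

# Terminal 2: start the frontend dev server with hot reload
cd /workspaces/kestra/ui
npm run dev   # served on http://localhost:5173
```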

`Standalone mode`:

Runs in standalone mode, which uses Postgres. Make sure you already have a local Postgres instance running on localhost:

```yaml
kestra:
  repository:
    type: postgres
  storage:
    type: local
    local:
      base-path: "/app/storage"
  queue:
    type: postgres
  tasks:
    tmp-dir:
      path: /tmp/kestra-wd/tmp
  anonymous-usage-report:
    enabled: false
  server:
    basic-auth:
      enabled: false

datasources:
  postgres:
    # Note that you must use the "host.docker.internal" host when connecting to a Docker container outside of your devcontainer; localhost would only point back to this devcontainer.
    url: jdbc:postgresql://host.docker.internal:5432/kestra
    driverClassName: org.postgresql.Driver
    username: kestra
    password: k3str4

flyway:
  datasources:
    postgres:
      enabled: true
      locations:
        - classpath:migrations/postgres
      # We must ignore missing migrations, as we may delete the wrong ones or ones that are no longer used.
      ignore-migration-patterns: "*:missing,*:future"
      out-of-order: true

micronaut:
  server:
    cors:
      enabled: true
      configurations:
        all:
          allowedOrigins:
            - http://localhost:5173
```
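
If you don't already have a Postgres instance running, one quick way to start one that matches the datasource settings above (a sketch, assuming Docker is available on the host machine; adjust the image tag as needed):

```bash
# Start a throwaway Postgres container with the database/credentials expected by the config above
docker run -d --name kestra-postgres \
  -p 5432:5432 \
  -e POSTGRES_DB=kestra \
  -e POSTGRES_USER=kestra \
  -e POSTGRES_PASSWORD=k3str4 \
  postgres:16
```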

Then add the following settings to the `.vscode/launch.json` file:

```json
{
    "version": "0.2.0",
    "configurations": [
        {
            "type": "java",
            "name": "Kestra Standalone",
            "request": "launch",
            "mainClass": "io.kestra.cli.App",
            "projectName": "cli",
            "args": "server standalone"
        }
    ]
}
```

You can then use the VSCode `Run and Debug` view to start the Kestra server.

Additionally, if you're doing frontend development, you can run `npm run dev` from the `ui` folder after having the above running (which provides a backend) to access your application at `localhost:5173`. This has the benefit of watching your changes and hot-reloading frontend changes.

#### Plugins

If you want your plugins to be loaded inside your devcontainer, point the `source` field in the following snippet from `devcontainer.json` to a folder containing the JARs of the plugins you want to embed:

```
"mounts": [
    {
        "source": "/absolute/path/to/your/local/jar/plugins/folder",
        "target": "/workspaces/kestra/local/plugins",
        "type": "bind"
    }
],
```

---

### GIT

If you want to commit to GitHub, navigate to the `~/.ssh` folder and either create a new SSH key or override the existing `id_ed25519` file by pasting an existing SSH key from your local machine into it. You will then need to change the permissions of the file by running `chmod 600 id_ed25519`. This will allow you to push to GitHub.
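
For example, to create a fresh key inside the container (a sketch; remember to add the generated public key to your GitHub account):

```bash
cd ~/.ssh
ssh-keygen -t ed25519 -f id_ed25519 -C "you@example.com"
chmod 600 id_ed25519
cat id_ed25519.pub   # add this public key to your GitHub account
```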

---
@@ -1,46 +0,0 @@
{
    "name": "kestra",
    "build": {
        "context": ".",
        "dockerfile": "Dockerfile"
    },
    "workspaceFolder": "/workspaces/kestra",
    "forwardPorts": [5173, 8080],
    "customizations": {
        "vscode": {
            "settings": {
                "terminal.integrated.profiles.linux": {
                    "zsh": {
                        "path": "/bin/zsh"
                    }
                },
                "workbench.iconTheme": "vscode-icons",
                "editor.tabSize": 4,
                "editor.formatOnSave": true,
                "files.insertFinalNewline": true,
                "editor.defaultFormatter": "esbenp.prettier-vscode",
                "telemetry.telemetryLevel": "off",
                "editor.bracketPairColorization.enabled": true,
                "editor.guides.bracketPairs": "active"
            },
            "extensions": [
                "redhat.vscode-yaml",
                "dbaeumer.vscode-eslint",
                "vscode-icons-team.vscode-icons",
                "eamodio.gitlens",
                "esbenp.prettier-vscode",
                "aaron-bond.better-comments",
                "codeandstuff.package-json-upgrade",
                "andys8.jest-snippets",
                "oderwat.indent-rainbow",
                "evondev.indent-rainbow-palettes",
                "formulahendry.auto-rename-tag",
                "IronGeek.vscode-env",
                "yoavbls.pretty-ts-errors",
                "github.vscode-github-actions",
                "vscjava.vscode-java-pack",
                "ms-azuretools.vscode-docker"
            ]
        }
    }
}
.github/workflows/check.yml (vendored, new file, 119 lines)
@@ -0,0 +1,119 @@
name: Daily Core check

on:
  schedule:
    - cron: '0 5 * * *'

jobs:
  check:
    env:
      SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
      GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
      # to save corepack from itself
      COREPACK_INTEGRITY_KEYS: 0
    name: Check & Publish
    runs-on: ubuntu-latest
    timeout-minutes: 60
    steps:
      # Checkout
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      # Checkout GitHub Actions
      - uses: actions/checkout@v4
        with:
          repository: kestra-io/actions
          path: actions
          ref: main

      # Setup build
      - uses: ./actions/.github/actions/setup-build
        id: build
        with:
          java-enabled: true
          node-enabled: true
          python-enabled: true
          caches-enabled: true

      # Services
      - name: Build the docker-compose stack
        run: docker compose -f docker-compose-ci.yml up -d
        if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}

      # Gradle check
      - name: Build with Gradle
        if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}
        env:
          GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
        run: |
          echo $GOOGLE_SERVICE_ACCOUNT | base64 -d > ~/.gcp-service-account.json
          export GOOGLE_APPLICATION_CREDENTIALS=$HOME/.gcp-service-account.json
          ./gradlew check javadoc --parallel --refresh-dependencies

      # Sonar
      - name: Analyze with Sonar
        if: ${{ env.SONAR_TOKEN != 0 && (github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '') }}
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
        run: ./gradlew sonar --info

      # Allure check
      - name: Auth to Google Cloud
        id: auth
        if: ${{ always() && env.GOOGLE_SERVICE_ACCOUNT != 0 }}
        uses: 'google-github-actions/auth@v2'
        with:
          credentials_json: '${{ secrets.GOOGLE_SERVICE_ACCOUNT }}'

      - uses: rlespinasse/github-slug-action@v5

      - name: Publish allure report
        uses: andrcuns/allure-publish-action@v2.9.0
        if: ${{ always() && env.GOOGLE_SERVICE_ACCOUNT != 0 && (github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '') }}
        env:
          GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          JAVA_HOME: /usr/lib/jvm/default-jvm/
        with:
          storageType: gcs
          resultsGlob: "**/build/allure-results"
          bucket: internal-kestra-host
          baseUrl: "https://internal.dev.kestra.io"
          prefix: ${{ format('{0}/{1}', github.repository, 'allure/java') }}
          copyLatest: true
          ignoreMissingResults: true

      # Jacoco
      - name: 'Set up Cloud SDK'
        if: ${{ github.ref == 'refs/heads/develop' && env.GOOGLE_SERVICE_ACCOUNT != 0 && (github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '') }}
        uses: 'google-github-actions/setup-gcloud@v2'

      - name: 'Copy jacoco files'
        if: ${{ github.ref == 'refs/heads/develop' && env.GOOGLE_SERVICE_ACCOUNT != 0 && (github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '') }}
        run: |
          mv build/reports/jacoco/testCodeCoverageReport build/reports/jacoco/test/
          mv build/reports/jacoco/test/testCodeCoverageReport.xml build/reports/jacoco/test/jacocoTestReport.xml
          gsutil -m rsync -d -r build/reports/jacoco/test/ gs://internal-kestra-host/${{ format('{0}/{1}', github.repository, 'jacoco') }}

      # report test
      - name: Test Report
        uses: mikepenz/action-junit-report@v5
        if: success() || failure()
        with:
          report_paths: '**/build/test-results/**/TEST-*.xml'

      # Codecov
      - uses: codecov/codecov-action@v5
        if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}
        with:
          token: ${{ secrets.CODECOV_TOKEN }}

      - name: Notify failed CI
        id: send-ci-failed
        if: always() && job.status != 'success'
        uses: kestra-io/actions/.github/actions/send-ci-failed@main
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
@@ -6,11 +6,11 @@
name: "CodeQL"

on:
  push:
    branches: [develop]
  schedule:
    - cron: '0 5 * * 1'

  workflow_dispatch: {}

jobs:
  analyze:
    name: Analyze
@@ -51,23 +51,13 @@ jobs:
      # Set up JDK
      - name: Set up JDK
        uses: actions/setup-java@v4
        if: ${{ matrix.language == 'java' }}
        with:
          distribution: 'temurin'
          java-version: 21

      - name: Setup gradle
        if: ${{ matrix.language == 'java' }}
        uses: gradle/actions/setup-gradle@v4

      - name: Build with Gradle
        if: ${{ matrix.language == 'java' }}
        run: ./gradlew testClasses -x :ui:installFrontend -x :ui:assembleFrontend

      # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
      # If this step fails, then you should remove it and run the build manually (see below)
      - name: Autobuild
        if: ${{ matrix.language != 'java' }}
        uses: github/codeql-action/autobuild@v3

      # ℹ️ Command-line programs to run using the OS shell.
.github/workflows/docker.yml (vendored, 16 lines changed)
@@ -47,11 +47,11 @@ jobs:
        image:
          - name: "-no-plugins"
            plugins: ""
            packages: jattach
            packages: ""
            python-libs: ""
          - name: ""
            plugins: ${{needs.plugins.outputs.plugins}}
            packages: python3 python3-venv python-is-python3 python3-pip nodejs npm curl zip unzip jattach
            packages: python3 python3-venv python-is-python3 python3-pip nodejs npm curl zip unzip
            python-libs: kestra
    steps:
      - uses: actions/checkout@v4
@@ -75,7 +75,7 @@ jobs:
          fi
      # Download release
      - name: Download release
        uses: robinraju/release-downloader@v1.12
        uses: robinraju/release-downloader@v1.11
        with:
          tag: ${{steps.vars.outputs.tag}}
          fileName: 'kestra-*'
@@ -89,11 +89,6 @@ jobs:
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3

      - name: Docker - Fix Qemu
        shell: bash
        run: |
          docker run --rm --privileged multiarch/qemu-user-static --reset -p yes -c yes

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

@@ -104,6 +99,11 @@ jobs:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_PASSWORD }}

      - name: Docker - Fix Qemu
        shell: bash
        run: |
          docker run --rm --privileged multiarch/qemu-user-static --reset -p yes -c yes

      # Docker Build and push
      - name: Push to Docker Hub
        uses: docker/build-push-action@v6

.github/workflows/e2e.yml (vendored, 12 lines changed)
@@ -42,13 +42,19 @@ jobs:
        with:
          path: kestra

      # Checkout GitHub Actions
      - uses: actions/checkout@v4
        with:
          repository: kestra-io/actions
          path: actions
          ref: main

      # Setup build
      - uses: kestra-io/actions/.github/actions/setup-build@main
      - uses: ./actions/.github/actions/setup-build
        id: build
        with:
          java-enabled: true
          node-enabled: true
          python-enabled: true
          caches-enabled: true

      # Get Docker Image
      - name: Download Kestra Image

.github/workflows/generate-translations.yml (vendored, 67 lines)
@@ -1,67 +0,0 @@
name: Auto-Translate UI keys and create PR

on:
  schedule:
    - cron: "0 9-21 * * *" # Every hour from 9 AM to 9 PM
  workflow_dispatch:
    inputs:
      retranslate_modified_keys:
        description: "Whether to re-translate modified keys even if they already have translations."
        type: choice
        options:
          - "false"
          - "true"
        default: "false"
        required: false

jobs:
  translations:
    name: Translations
    runs-on: ubuntu-latest
    timeout-minutes: 10
    steps:
      - uses: actions/checkout@v4
        name: Checkout
        with:
          fetch-depth: 0

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.x"

      - name: Install Python dependencies
        run: pip install gitpython openai

      - name: Generate translations
        run: python ui/src/translations/generate_translations.py ${{ github.event.inputs.retranslate_modified_keys }}
        env:
          OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}

      - name: Set up Node
        uses: actions/setup-node@v4
        with:
          node-version: "20.x"

      - name: Check keys matching
        run: node ui/src/translations/check.js

      - name: Set up Git
        run: |
          git config --global user.name "GitHub Action"
          git config --global user.email "actions@github.com"

      - name: Commit and create PR
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          BRANCH_NAME="chore/update-translations-$(date +%s)"
          git checkout -b $BRANCH_NAME
          git add ui/src/translations/*.json
          if git diff --cached --quiet; then
            echo "No changes to commit. Exiting with success."
            exit 0
          fi
          git commit -m "chore(translations): localize to languages other than English"
          git push -u origin $BRANCH_NAME || (git push origin --delete $BRANCH_NAME && git push -u origin $BRANCH_NAME)
          gh pr create --title "Translations from en.json" --body "This PR was created automatically by a GitHub Action." --base develop --head $BRANCH_NAME --assignee anna-geller --reviewer anna-geller
.github/workflows/generate_translations.yml (vendored, new file, 111 lines)
@@ -0,0 +1,111 @@
name: Generate Translations

on:
  pull_request:
    types: [opened, synchronize]
    paths:
      - "ui/src/translations/en.json"

  push:
    branches:
      - develop

env:
  OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}

jobs:
  commit:
    name: Commit directly to PR
    runs-on: ubuntu-latest
    if: ${{ github.event.pull_request.head.repo.fork == false }}
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 50
          ref: ${{ github.head_ref }}

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.x"

      - name: Install Python dependencies
        run: pip install gitpython openai

      - name: Generate translations
        run: python ui/src/translations/generate_translations.py

      - name: Set up Node
        uses: actions/setup-node@v4
        with:
          node-version: "20.x"

      - name: Check keys matching
        run: node ui/src/translations/check.js

      - name: Set up Git
        run: |
          git config --global user.name "GitHub Action"
          git config --global user.email "actions@github.com"

      - name: Check for changes and commit
        env:
          GH_TOKEN: ${{ github.token }}
        run: |
          git add ui/src/translations/*.json
          if git diff --cached --quiet; then
            echo "No changes to commit. Exiting with success."
            exit 0
          fi
          git commit -m "chore(translations): auto generate values for languages other than english"
          git push origin ${{ github.head_ref }}

  pull_request:
    name: Open PR for a forked repository
    runs-on: ubuntu-latest
    if: ${{ github.event.pull_request.head.repo.fork == true }}
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 10

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.x"

      - name: Install Python dependencies
        run: pip install gitpython openai

      - name: Generate translations
        run: python ui/src/translations/generate_translations.py

      - name: Set up Node
        uses: actions/setup-node@v4
        with:
          node-version: "20.x"

      - name: Check keys matching
        run: node ui/src/translations/check.js

      - name: Set up Git
        run: |
          git config --global user.name "GitHub Action"
          git config --global user.email "actions@github.com"

      - name: Create and push a new branch
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          BRANCH_NAME="generated-translations-${{ github.event.pull_request.head.repo.name }}"

          git checkout -b $BRANCH_NAME
          git add ui/src/translations/*.json
          if git diff --cached --quiet; then
            echo "No changes to commit. Exiting with success."
            exit 0
          fi
          git commit -m "chore(translations): auto generate values for languages other than english"
          git push origin $BRANCH_NAME
.github/workflows/gradle-release-plugins.yml (vendored, 1 line changed)
@@ -44,6 +44,7 @@ jobs:
      # Get Plugins List
      - name: Get Plugins List
        uses: ./.github/actions/plugins-list
        if: "!startsWith(github.ref, 'refs/tags/v')"
        id: plugins-list
        with:
          plugin-version: 'LATEST'

.github/workflows/main.yml (vendored, 538 lines changed)
@@ -1,58 +1,532 @@
name: Main Workflow
name: Main

on:
  workflow_dispatch:
    inputs:
      plugin-version:
        description: "Kestra version"
        default: 'LATEST'
        required: true
        type: string
  push:
    branches:
      - master
      - main
      - releases/*
      - develop
    tags:
      - v*
  pull_request:
    branches:
      - develop
  repository_dispatch:
    types: [rebuild]
  workflow_dispatch:
    inputs:
      skip-test:
        description: "Skip test"
        type: choice
        required: true
        default: "false"
        options:
          - "true"
          - "false"
      plugin-version:
        description: "Plugin version"
        required: false
        type: string
        default: "LATEST"

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-main
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

env:
  JAVA_VERSION: "21"
  DOCKER_APT_PACKAGES: python3 python3-venv python-is-python3 python3-pip nodejs npm curl zip unzip
  DOCKER_PYTHON_LIBRARIES: kestra
  PLUGIN_VERSION: ${{ github.event.inputs.plugin-version != null && github.event.inputs.plugin-version || 'LATEST' }}
  # to save corepack from itself
  COREPACK_INTEGRITY_KEYS: 0
jobs:
  tests:
    name: Execute tests
    uses: ./.github/workflows/workflow-test.yml
  build-artifacts:
    name: Build Artifacts
    runs-on: ubuntu-latest
    timeout-minutes: 60
    outputs:
      docker-tag: ${{ steps.vars.outputs.tag }}
      docker-artifact-name: ${{ steps.vars.outputs.artifact }}
      plugins: ${{ steps.plugins-list.outputs.plugins }}
    steps:

      - name: Checkout current ref
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Checkout GitHub Actions
        uses: actions/checkout@v4
        with:
          repository: kestra-io/actions
          path: actions
          ref: main

      # Npm
      - name: Npm install
        shell: bash
        working-directory: ui
        run: npm ci

      # Setup build
      - uses: ./actions/.github/actions/setup-build
        id: build
        with:
          java-enabled: true
          node-enabled: true
          caches-enabled: true

      # Get Plugins List
      - name: Get Plugins List
        uses: ./.github/actions/plugins-list
        if: "!startsWith(github.ref, 'refs/tags/v')"
        id: plugins-list
        with:
          plugin-version: ${{ env.PLUGIN_VERSION }}

      # Set Plugins List
      - name: Set Plugin List
        id: plugins
        if: "!startsWith(github.ref, 'refs/tags/v')"
        run: |
          PLUGINS="${{ steps.plugins-list.outputs.plugins }}"
          TAG=${GITHUB_REF#refs/*/}
          if [[ $TAG = "master" || $TAG == v* ]]; then
            echo "plugins=$PLUGINS" >> $GITHUB_OUTPUT
          else
            echo "plugins=--repositories=https://s01.oss.sonatype.org/content/repositories/snapshots $PLUGINS" >> $GITHUB_OUTPUT
          fi

      # Build
      - name: Build with Gradle
        run: |
          ./gradlew executableJar

      - name: Copy exe to image
        run: |
          cp build/executable/* docker/app/kestra && chmod +x docker/app/kestra

      # Docker Tag
      - name: Set up Vars
        id: vars
        run: |
          TAG=${GITHUB_REF#refs/*/}
          if [[ $TAG = "master" ]]
          then
            TAG="latest";
          elif [[ $TAG = "develop" ]]
          then
            TAG="develop";
          elif [[ $TAG = v* ]]
          then
            TAG="${TAG}";
          else
            TAG="build-${{ github.run_id }}";
          fi
          echo "tag=${TAG}" >> $GITHUB_OUTPUT
          echo "artifact=docker-kestra-${TAG}" >> $GITHUB_OUTPUT

      # Docker setup
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      # Docker Build
      - name: Build & Export Docker Image
        uses: docker/build-push-action@v6
        if: "!startsWith(github.ref, 'refs/tags/v')"
        with:
          context: .
          push: false
          file: Dockerfile
          tags: |
            kestra/kestra:${{ steps.vars.outputs.tag }}
          build-args: |
            KESTRA_PLUGINS=${{ steps.plugins.outputs.plugins }}
            APT_PACKAGES=${{ env.DOCKER_APT_PACKAGES }}
            PYTHON_LIBRARIES=${{ env.DOCKER_PYTHON_LIBRARIES }}
          outputs: type=docker,dest=/tmp/${{ steps.vars.outputs.artifact }}.tar

      # Upload artifacts
      - name: Upload JAR
        uses: actions/upload-artifact@v4
        with:
          name: jar
          path: build/libs/

      - name: Upload Executable
        uses: actions/upload-artifact@v4
        with:
          name: exe
          path: build/executable/

      - name: Upload Docker
        uses: actions/upload-artifact@v4
        if: "!startsWith(github.ref, 'refs/tags/v')"
        with:
          name: ${{ steps.vars.outputs.artifact }}
          path: /tmp/${{ steps.vars.outputs.artifact }}.tar


  check-frontend:
    name: Run frontend tests
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Npm install
        shell: bash
        working-directory: ui
        run: npm ci

      - name: Npm lint
        uses: reviewdog/action-eslint@v1
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          reporter: github-pr-review # Change reporter.
          workdir: "ui"

      - name: Run front-end unit tests
        shell: bash
        working-directory: ui
        run: npm run test:unit

      - name: Install Playwright
        shell: bash
        working-directory: ui
        run: npx playwright install --with-deps

      - name: Build Storybook
        shell: bash
        working-directory: ui
        run: npm run build-storybook --quiet

      - name: Serve Storybook and run tests
        shell: bash
        working-directory: ui
        run: |
          npx concurrently -k -s first -n "SB,TEST" -c "magenta,blue" \
            "npx http-server storybook-static --port 6006 --silent" \
            "npx wait-on tcp:127.0.0.1:6006 && npm run test-storybook"

  # Run Reusable Workflow from QA repository
  check-e2e:
    name: Check E2E Tests
    needs: build-artifacts
    if: ${{ (github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '') && !startsWith(github.ref, 'refs/tags/v') }}
    uses: ./.github/workflows/e2e.yml
    strategy:
      fail-fast: false
      matrix:
        backends: ["postgres"]
    with:
      report-status: false
      tags: oss
      docker-artifact-name: ${{ needs.build-artifacts.outputs.docker-artifact-name }}
      docker-image-tag: kestra/kestra:${{ needs.build-artifacts.outputs.docker-tag }}
      backend: ${{ matrix.backends }}
    secrets:
      GITHUB_AUTH_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}
      GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
  check:
    env:
      SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
      GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
    name: Check & Publish
    runs-on: ubuntu-latest
    timeout-minutes: 60
    steps:
      # Checkout
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      # Checkout GitHub Actions
      - uses: actions/checkout@v4
        with:
          repository: kestra-io/actions
          path: actions
          ref: main

      # Setup build
      - uses: ./actions/.github/actions/setup-build
        id: build
        with:
          java-enabled: true
          node-enabled: true
          python-enabled: true
          caches-enabled: true

      # Services
      - name: Build the docker-compose stack
        run: docker compose -f docker-compose-ci.yml up -d
        if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}

      # Gradle check
      - name: Build with Gradle
        if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}
        env:
          GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
        run: |
          echo $GOOGLE_SERVICE_ACCOUNT | base64 -d > ~/.gcp-service-account.json
          export GOOGLE_APPLICATION_CREDENTIALS=$HOME/.gcp-service-account.json
          ./gradlew check javadoc --parallel

      # Sonar
      - name: Analyze with Sonar
        if: ${{ env.SONAR_TOKEN != 0 && (github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '') }}
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
        run: ./gradlew sonar --info

      # Allure check
      - name: Auth to Google Cloud
        id: auth
        if: ${{ always() && env.GOOGLE_SERVICE_ACCOUNT != 0 }}
        uses: "google-github-actions/auth@v2"
        with:
          credentials_json: "${{ secrets.GOOGLE_SERVICE_ACCOUNT }}"

      - uses: rlespinasse/github-slug-action@v5

      - name: Publish allure report
        uses: andrcuns/allure-publish-action@v2.9.0
        if: ${{ always() && env.GOOGLE_SERVICE_ACCOUNT != 0 && (github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '') }}
        env:
          GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          JAVA_HOME: /usr/lib/jvm/default-jvm/
        with:
          storageType: gcs
          resultsGlob: "**/build/allure-results"
          bucket: internal-kestra-host
          baseUrl: "https://internal.dev.kestra.io"
          prefix: ${{ format('{0}/{1}', github.repository, 'allure/java') }}
          copyLatest: true
          ignoreMissingResults: true

      # Jacoco
      - name: "Set up Cloud SDK"
        if: ${{ env.GOOGLE_SERVICE_ACCOUNT != 0 && (github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '') }}
        uses: "google-github-actions/setup-gcloud@v2"

      - name: "Copy jacoco files"
        if: ${{ env.GOOGLE_SERVICE_ACCOUNT != 0 && (github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '') }}
        run: |
          mv build/reports/jacoco/testCodeCoverageReport build/reports/jacoco/test/
          mv build/reports/jacoco/test/testCodeCoverageReport.xml build/reports/jacoco/test/jacocoTestReport.xml
          gsutil -m rsync -d -r build/reports/jacoco/test/ gs://internal-kestra-host/${{ format('{0}/{1}', github.repository, 'jacoco') }}

      # report test
      - name: Test Report
        uses: mikepenz/action-junit-report@v5
        if: success() || failure()
        with:
          report_paths: "**/build/test-results/**/TEST-*.xml"

      # Codecov
      - uses: codecov/codecov-action@v5
        if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}
        with:
          token: ${{ secrets.CODECOV_TOKEN }}

  release:
    name: Release
    needs: [tests]
    uses: ./.github/workflows/workflow-release.yml
    with:
      plugin-version: ${{ github.event.inputs.plugin-version != null && github.event.inputs.plugin-version || 'LATEST' }}
    secrets:
      DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
      DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }}
      SONATYPE_USER: ${{ secrets.SONATYPE_USER }}
      SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }}
      SONATYPE_GPG_KEYID: ${{ secrets.SONATYPE_GPG_KEYID }}
      SONATYPE_GPG_PASSWORD: ${{ secrets.SONATYPE_GPG_PASSWORD }}
      SONATYPE_GPG_FILE: ${{ secrets.SONATYPE_GPG_FILE }}
    name: Github Release
    runs-on: ubuntu-latest
    needs: [check, check-e2e]
    if: |
      always() &&
      startsWith(github.ref, 'refs/tags/v') &&
      needs.check.result == 'success' &&
      (
        github.event.inputs.skip-test == 'true' ||
        (needs.check-e2e.result == 'skipped' || needs.check-e2e.result == 'success')
      )
    steps:
      # Download Exec
      - name: Download executable
        uses: actions/download-artifact@v4
        if: startsWith(github.ref, 'refs/tags/v')
        with:
          name: exe
          path: build/executable

      # GitHub Release
      - name: Create GitHub release
        id: create_github_release
        uses: "marvinpinto/action-automatic-releases@latest"
        if: startsWith(github.ref, 'refs/tags/v')
        continue-on-error: true
        with:
          repo_token: "${{ secrets.GITHUB_TOKEN }}"
          prerelease: false
          files: |
            build/executable/*

      # Trigger gha workflow to bump helm chart version
      - name: trigger the Helm chart version bump
        uses: peter-evans/repository-dispatch@v3
        if: steps.create_github_release.conclusion == 'success'
        with:
          token: ${{ secrets.GH_PERSONAL_TOKEN }}
          repository: kestra-io/helm-charts
          event-type: update-helm-chart-version
          client-payload: |-
            {
              "new_version": "${{ github.ref_name }}",
              "github_repository": "${{ github.repository }}",
              "github_actor": "${{ github.actor }}"
            }

  docker:
    name: Publish Docker
    runs-on: ubuntu-latest
    needs: [build-artifacts, check, check-e2e]
    if: |
      always() &&
      github.ref == 'refs/heads/develop' &&
      needs.check.result == 'success' &&
      (
        github.event.inputs.skip-test == 'true' ||
        (needs.check-e2e.result == 'skipped' || needs.check-e2e.result == 'success')
      )
    strategy:
      matrix:
        image:
          - tag: ${{needs.build-artifacts.outputs.docker-tag}}-no-plugins
            packages: ""
            python-libraries: ""

          - tag: ${{needs.build-artifacts.outputs.docker-tag}}
            plugins: ${{needs.build-artifacts.outputs.plugins}}
            packages: python3 python3-venv python-is-python3 python3-pip nodejs npm curl zip unzip
            python-libraries: kestra
    steps:
      - uses: actions/checkout@v4
      # Docker setup
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      # Docker Login
      - name: Login to DockerHub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_PASSWORD }}

      # Vars
      - name: Set image name
        id: vars
        run: |
          TAG=${GITHUB_REF#refs/*/}
          if [[ $TAG = "master" || $TAG == v* ]]; then
            echo "plugins=${{ matrix.image.plugins }}" >> $GITHUB_OUTPUT
          else
            echo "plugins=--repositories=https://s01.oss.sonatype.org/content/repositories/snapshots ${{ matrix.image.plugins }}" >> $GITHUB_OUTPUT
          fi

      # Build Docker Image
      - name: Download executable
        uses: actions/download-artifact@v4
        with:
          name: exe
          path: build/executable

      - name: Copy exe to image
        run: |
          cp build/executable/* docker/app/kestra && chmod +x docker/app/kestra

      # Docker Build and push
      - name: Build Docker Image
        uses: docker/build-push-action@v6
        with:
          context: .
          push: true
          tags: kestra/kestra:${{ matrix.image.tag }}
          platforms: linux/amd64,linux/arm64
          build-args: |
            KESTRA_PLUGINS=${{ steps.vars.outputs.plugins }}
            APT_PACKAGES=${{matrix.image.packages}}
            PYTHON_LIBRARIES=${{matrix.image.python-libraries}}

  maven:
    name: Publish to Maven
    runs-on: ubuntu-latest
    needs: [check, check-e2e]
    if: |
      always() &&
      (github.ref == 'refs/heads/develop' || startsWith(github.ref, 'refs/tags/v')) &&
      needs.check.result == 'success' &&
      (
        github.event.inputs.skip-test == 'true' ||
        (needs.check-e2e.result == 'skipped' || needs.check-e2e.result == 'success')
      )
    steps:
      - uses: actions/checkout@v4

      # Checkout GitHub Actions
      - uses: actions/checkout@v4
        with:
          repository: kestra-io/actions
          path: actions
          ref: main

      # Setup build
      - uses: ./actions/.github/actions/setup-build
        id: build
        with:
          java-enabled: true
          node-enabled: true
          caches-enabled: true

      # Publish
      - name: Publish package to Sonatype
        if: github.ref == 'refs/heads/develop'
        env:
          ORG_GRADLE_PROJECT_sonatypeUsername: ${{ secrets.SONATYPE_USER }}
          ORG_GRADLE_PROJECT_sonatypePassword: ${{ secrets.SONATYPE_PASSWORD }}
          SONATYPE_GPG_KEYID: ${{ secrets.SONATYPE_GPG_KEYID }}
          SONATYPE_GPG_PASSWORD: ${{ secrets.SONATYPE_GPG_PASSWORD }}
          SONATYPE_GPG_FILE: ${{ secrets.SONATYPE_GPG_FILE }}
        run: |
          mkdir -p ~/.gradle/
          echo "signing.keyId=${SONATYPE_GPG_KEYID}" > ~/.gradle/gradle.properties
          echo "signing.password=${SONATYPE_GPG_PASSWORD}" >> ~/.gradle/gradle.properties
          echo "signing.secretKeyRingFile=${HOME}/.gradle/secring.gpg" >> ~/.gradle/gradle.properties
          echo ${SONATYPE_GPG_FILE} | base64 -d > ~/.gradle/secring.gpg
          ./gradlew publishToSonatype

      # Release
      - name: Release package to Maven Central
        if: startsWith(github.ref, 'refs/tags/v')
        env:
          ORG_GRADLE_PROJECT_sonatypeUsername: ${{ secrets.SONATYPE_USER }}
          ORG_GRADLE_PROJECT_sonatypePassword: ${{ secrets.SONATYPE_PASSWORD }}
          SONATYPE_GPG_KEYID: ${{ secrets.SONATYPE_GPG_KEYID }}
          SONATYPE_GPG_PASSWORD: ${{ secrets.SONATYPE_GPG_PASSWORD }}
          SONATYPE_GPG_FILE: ${{ secrets.SONATYPE_GPG_FILE }}
        run: |
          echo "signing.keyId=${SONATYPE_GPG_KEYID}" > ~/.gradle/gradle.properties
          echo "signing.password=${SONATYPE_GPG_PASSWORD}" >> ~/.gradle/gradle.properties
          echo "signing.secretKeyRingFile=${HOME}/.gradle/secring.gpg" >> ~/.gradle/gradle.properties
          echo ${SONATYPE_GPG_FILE} | base64 -d > ~/.gradle/secring.gpg
          ./gradlew publishToSonatype closeAndReleaseSonatypeStagingRepository
  end:
    runs-on: ubuntu-latest
    needs:
      - check-e2e
      - check
      - maven
      - docker
      - release
    if: always()
    env:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
    steps:
      # Update
      - name: Github - Update internal
      - name: Update internal
        uses: benc-uk/workflow-dispatch@v1
        if: github.ref == 'refs/heads/develop' && needs.docker.result == 'success'
        with:
@@ -62,7 +536,7 @@ jobs:
          token: ${{ secrets.GH_PERSONAL_TOKEN }}

      # Slack
      - name: Slack - Notification
      - name: Slack notification
        uses: Gamesight/slack-workflow-status@master
        if: ${{ always() && env.SLACK_WEBHOOK_URL != 0 }}
        with:

.github/workflows/pull-request.yml (vendored, 67 lines)
@@ -1,67 +0,0 @@
name: Pull Request Workflow

on:
  pull_request:
    branches:
      - develop

concurrency:
  group: ${{ github.workflow }}-${{ github.ref_name }}-pr
  cancel-in-progress: true

jobs:
  file-changes:
    name: File changes detection
    runs-on: ubuntu-latest
    timeout-minutes: 60
    outputs:
      ui: ${{ steps.changes.outputs.ui }}
      translations: ${{ steps.changes.outputs.translations }}
      backend: ${{ steps.changes.outputs.backend }}
    steps:
      - uses: dorny/paths-filter@v3
        id: changes
        with:
          filters: |
            ui:
              - 'ui/**'
            backend:
              - '!{ui,.github}/**'
          token: ${{ secrets.GITHUB_TOKEN }}

  frontend:
    name: Frontend - Tests
    needs: [file-changes]
    if: "needs.file-changes.outputs.ui == 'true'"
    uses: ./.github/workflows/workflow-frontend-test.yml
    secrets:
      GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

  backend:
    name: Backend - Tests
    needs: file-changes
    if: "needs.file-changes.outputs.backend == 'true'"
    uses: ./.github/workflows/workflow-backend-test.yml
    secrets:
      GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
      SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
      GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}

  end:
    name: End
    runs-on: ubuntu-latest
    if: always()
    needs: [frontend, backend]
    steps:
      # Slack
      - name: Slack notification
        uses: Gamesight/slack-workflow-status@master
        if: ${{ always() && env.SLACK_WEBHOOK_URL != 0 }}
        with:
          repo_token: ${{ secrets.GITHUB_TOKEN }}
          slack_webhook_url: ${{ secrets.SLACK_WEBHOOK_URL }}
          name: GitHub Actions
          icon_emoji: ":github-actions:"
          channel: "C02DQ1A7JLR"
.github/workflows/setversion-tag-plugins.yml (vendored, 1 line changed)
@@ -24,6 +24,7 @@ jobs:
      # Get Plugins List
      - name: Get Plugins List
        uses: ./.github/actions/plugins-list
        if: "!startsWith(github.ref, 'refs/tags/v')"
        id: plugins-list
        with:
          plugin-version: 'LATEST'

.github/workflows/vulnerabilities-check.yml (vendored, 10 lines changed)
@@ -33,12 +33,6 @@ jobs:
          node-enabled: true
          caches-enabled: true

      # Npm
      - name: Npm - Install
        shell: bash
        working-directory: ui
        run: npm ci

      # Run OWASP dependency check plugin
      - name: Gradle Dependency Check
        env:
@@ -80,7 +74,7 @@ jobs:

      # Run Trivy image scan for Docker vulnerabilities, see https://github.com/aquasecurity/trivy-action
      - name: Docker Vulnerabilities Check
        uses: aquasecurity/trivy-action@0.30.0
        uses: aquasecurity/trivy-action@0.29.0
        with:
          image-ref: kestra/kestra:develop
          format: table
@@ -113,7 +107,7 @@ jobs:

      # Run Trivy image scan for Docker vulnerabilities, see https://github.com/aquasecurity/trivy-action
      - name: Docker Vulnerabilities Check
        uses: aquasecurity/trivy-action@0.30.0
        uses: aquasecurity/trivy-action@0.29.0
        with:
          image-ref: kestra/kestra:latest
          format: table

.github/workflows/workflow-backend-test.yml (vendored, 139 lines)
@@ -1,139 +0,0 @@
name: Backend - Tests

on:
  workflow_call:
    secrets:
      GITHUB_AUTH_TOKEN:
        description: "The GitHub Token."
        required: true
      CODECOV_TOKEN:
        description: 'Codecov Token'
        required: true
      SONAR_TOKEN:
        description: 'Sonar Token'
        required: true
      GOOGLE_SERVICE_ACCOUNT:
        description: 'Google Service Account'
        required: true

permissions:
  contents: write
  checks: write
  actions: read

jobs:
  test:
    name: Backend - Tests
    runs-on: ubuntu-latest
    env:
      GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
      SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
    steps:
      - uses: actions/checkout@v4
        name: Checkout - Current ref

      # Setup build
      - uses: kestra-io/actions/.github/actions/setup-build@main
        name: Setup - Build
        id: build
        with:
          java-enabled: true
          node-enabled: true
          python-enabled: true

      # Services
      - name: Setup - Start docker compose
        shell: bash
        run: docker compose -f docker-compose-ci.yml up -d

      # Gradle check
      - name: Gradle - Build
        if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}
        env:
          GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
        shell: bash
        run: |
          echo $GOOGLE_SERVICE_ACCOUNT | base64 -d > ~/.gcp-service-account.json
          export GOOGLE_APPLICATION_CREDENTIALS=$HOME/.gcp-service-account.json
          ./gradlew check javadoc --parallel

      # report test
      - name: Test - Publish Test Results
        uses: dorny/test-reporter@v2
        if: always()
        with:
          name: Java Tests Report
          reporter: java-junit
          path: '**/build/test-results/test/TEST-*.xml'
          list-suites: 'failed'
          list-tests: 'failed'
          fail-on-error: 'false'

      # Sonar
      - name: Test - Analyze with Sonar
        if: env.SONAR_TOKEN != ''
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_AUTH_TOKEN }}
          SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
        shell: bash
        run: ./gradlew sonar --info

      # GCP
      - name: GCP - Auth with unit test account
        id: auth
        if: always() && env.GOOGLE_SERVICE_ACCOUNT != ''
        continue-on-error: true
        uses: "google-github-actions/auth@v2"
        with:
          credentials_json: "${{ secrets.GOOGLE_SERVICE_ACCOUNT }}"

      - name: GCP - Setup Cloud SDK
        if: env.GOOGLE_SERVICE_ACCOUNT != ''
        uses: "google-github-actions/setup-gcloud@v2"

      # Allure check
      - uses: rlespinasse/github-slug-action@v5
        name: Allure - Generate slug variables

      - name: Allure - Publish report
        uses: andrcuns/allure-publish-action@v2.9.0
        if: always() && env.GOOGLE_SERVICE_ACCOUNT != ''
        continue-on-error: true
        env:
          GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_AUTH_TOKEN }}
          JAVA_HOME: /usr/lib/jvm/default-jvm/
        with:
          storageType: gcs
          resultsGlob: "**/build/allure-results"
          bucket: internal-kestra-host
          baseUrl: "https://internal.dev.kestra.io"
          prefix: ${{ format('{0}/{1}', github.repository, 'allure/java') }}
          copyLatest: true
          ignoreMissingResults: true

      # Jacoco
      - name: Jacoco - Copy reports
        if: env.GOOGLE_SERVICE_ACCOUNT != ''
        continue-on-error: true
        shell: bash
        run: |
          mv build/reports/jacoco/testCodeCoverageReport build/reports/jacoco/test/
          mv build/reports/jacoco/test/testCodeCoverageReport.xml build/reports/jacoco/test/jacocoTestReport.xml
          gsutil -m rsync -d -r build/reports/jacoco/test/ gs://internal-kestra-host/${{ format('{0}/{1}', github.repository, 'jacoco') }}

      # Codecov
      - name: Codecov - Upload coverage reports
        uses: codecov/codecov-action@v5
        if: ${{ !cancelled() }}
        continue-on-error: true
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          flags: backend

      - name: Codecov - Upload test results
        uses: codecov/test-results-action@v1
        if: ${{ !cancelled() }}
        continue-on-error: true
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          flags: backend
.github/workflows/workflow-build-artifacts.yml (vendored, 152 lines)
@@ -1,152 +0,0 @@
name: Build Artifacts

on:
  workflow_call:
    inputs:
      plugin-version:
        description: "Kestra version"
        default: 'LATEST'
        required: true
        type: string
    outputs:
      docker-tag:
        value: ${{ jobs.build.outputs.docker-tag }}
        description: "The Docker image Tag for Kestra"
      docker-artifact-name:
        value: ${{ jobs.build.outputs.docker-artifact-name }}
        description: "The GitHub artifact containing the Kestra docker image name."
      plugins:
        value: ${{ jobs.build.outputs.plugins }}
        description: "The Kestra plugins list used for the build."

jobs:
  build:
    name: Build - Artifacts
    runs-on: ubuntu-latest
    outputs:
      docker-tag: ${{ steps.vars.outputs.tag }}
      docker-artifact-name: ${{ steps.vars.outputs.artifact }}
      plugins: ${{ steps.plugins.outputs.plugins }}
    env:
      PLUGIN_VERSION: ${{ github.event.inputs.plugin-version != null && github.event.inputs.plugin-version || 'LATEST' }}
    steps:
      - name: Checkout - Current ref
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      # Npm
      - name: Setup - Npm install
        shell: bash
        working-directory: ui
        run: npm ci

      # Setup build
      - uses: kestra-io/actions/.github/actions/setup-build@main
        name: Setup - Build
        id: build
        with:
          java-enabled: true
          node-enabled: true

      # Get Plugins List
      - name: Plugins - Get List
        uses: ./.github/actions/plugins-list
        if: "!startsWith(github.ref, 'refs/tags/v')"
        id: plugins-list
        with:
          plugin-version: ${{ env.PLUGIN_VERSION }}

      # Set Plugins List
      - name: Plugins - Set List
        id: plugins
        if: "!startsWith(github.ref, 'refs/tags/v')"
        shell: bash
        run: |
          PLUGINS="${{ steps.plugins-list.outputs.plugins }}"
          TAG=${GITHUB_REF#refs/*/}
          if [[ $TAG = "master" || $TAG == v* ]]; then
            echo "plugins=$PLUGINS" >> $GITHUB_OUTPUT
          else
            echo "plugins=--repositories=https://s01.oss.sonatype.org/content/repositories/snapshots $PLUGINS" >> $GITHUB_OUTPUT
          fi

      # Build
      - name: Gradle - Build
        shell: bash
        run: |
          ./gradlew executableJar

      - name: Artifacts - Copy exe to image
        shell: bash
        run: |
          cp build/executable/* docker/app/kestra && chmod +x docker/app/kestra

      # Docker Tag
      - name: Setup - Docker vars
        id: vars
        shell: bash
        run: |
          TAG=${GITHUB_REF#refs/*/}
          if [[ $TAG = "master" ]]
          then
            TAG="latest";
          elif [[ $TAG = "develop" ]]
          then
            TAG="develop";
          elif [[ $TAG = v* ]]
          then
            TAG="${TAG}";
          else
            TAG="build-${{ github.run_id }}";
          fi
          echo "tag=${TAG}" >> $GITHUB_OUTPUT
          echo "artifact=docker-kestra-${TAG}" >> $GITHUB_OUTPUT

      # Docker setup
      - name: Docker - Setup QEMU
        uses: docker/setup-qemu-action@v3

      - name: Docker - Fix Qemu
        shell: bash
        run: |
          docker run --rm --privileged multiarch/qemu-user-static --reset -p yes -c yes

      - name: Docker - Setup Buildx
        uses: docker/setup-buildx-action@v3

      # Docker Build
      - name: Docker - Build & export image
        uses: docker/build-push-action@v6
        if: "!startsWith(github.ref, 'refs/tags/v')"
        with:
          context: .
          push: false
          file: Dockerfile
          tags: |
            kestra/kestra:${{ steps.vars.outputs.tag }}
          build-args: |
            KESTRA_PLUGINS=${{ steps.plugins.outputs.plugins }}
            APT_PACKAGES=${{ env.DOCKER_APT_PACKAGES }}
            PYTHON_LIBRARIES=${{ env.DOCKER_PYTHON_LIBRARIES }}
          outputs: type=docker,dest=/tmp/${{ steps.vars.outputs.artifact }}.tar

      # Upload artifacts
      - name: Artifacts - Upload JAR
        uses: actions/upload-artifact@v4
        with:
          name: jar
          path: build/libs/

      - name: Artifacts - Upload Executable
        uses: actions/upload-artifact@v4
        with:
          name: exe
          path: build/executable/

      - name: Artifacts - Upload Docker
        uses: actions/upload-artifact@v4
        if: "!startsWith(github.ref, 'refs/tags/v')"
        with:
          name: ${{ steps.vars.outputs.artifact }}
          path: /tmp/${{ steps.vars.outputs.artifact }}.tar

84  .github/workflows/workflow-frontend-test.yml  vendored
@@ -1,84 +0,0 @@
name: Frontend - Tests

on:
  workflow_call:
    secrets:
      GITHUB_AUTH_TOKEN:
        description: "The GitHub Token."
        required: true
      CODECOV_TOKEN:
        description: 'Codecov Token'
        required: true

env:
  # to save corepack from itself
  COREPACK_INTEGRITY_KEYS: 0

jobs:
  test:
    name: Frontend - Tests
    runs-on: ubuntu-latest
    steps:
      - id: checkout
        name: Checkout - Current ref
        uses: actions/checkout@v4
        with:
          ref: ${{ github.head_ref }}

      - name: Npm - install
        shell: bash
        working-directory: ui
        run: npm ci

      - name: Npm - lint
        uses: reviewdog/action-eslint@v1
        with:
          github_token: ${{ secrets.GITHUB_AUTH_TOKEN }}
          reporter: github-pr-review
          workdir: ui

      - name: Npm - Run build
        shell: bash
        working-directory: ui
        env:
          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
        run: npm run build

      - name: Run front-end unit tests
        shell: bash
        working-directory: ui
        run: npm run test:cicd

      - name: Storybook - Install Playwright
        shell: bash
        working-directory: ui
        run: npx playwright install --with-deps

      - name: Storybook - Build
        shell: bash
        working-directory: ui
        run: npm run build-storybook --quiet

      - name: Storybook - Run tests
        shell: bash
        working-directory: ui
        run: |
          npx concurrently -k -s first -n "SB,TEST" -c "magenta,blue" \
            "npx http-server storybook-static --port 6006 --silent" \
            "npx wait-on tcp:127.0.0.1:6006 && npm run test:storybook"

      - name: Codecov - Upload coverage reports
        uses: codecov/codecov-action@v5
        if: ${{ !cancelled() && github.event.pull_request.head.repo.full_name == github.repository }}
        continue-on-error: true
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          flags: frontend

      - name: Codecov - Upload test results
        uses: codecov/test-results-action@v1
        if: ${{ !cancelled() }}
        continue-on-error: true
        with:
          token: ${{ secrets.CODECOV_TOKEN && github.event.pull_request.head.repo.full_name == github.repository }}
          flags: frontend

48  .github/workflows/workflow-github-release.yml  vendored
@@ -1,48 +0,0 @@
name: Github - Release

on:
  workflow_call:
    secrets:
      GH_PERSONAL_TOKEN:
        description: "The Github personal token."
        required: true

jobs:
  publish:
    name: Github - Release
    runs-on: ubuntu-latest
    steps:
      # Download Exec
      - name: Artifacts - Download executable
        uses: actions/download-artifact@v4
        if: startsWith(github.ref, 'refs/tags/v')
        with:
          name: exe
          path: build/executable

      # GitHub Release
      - name: GitHub - Create release
        id: create_github_release
        uses: "marvinpinto/action-automatic-releases@latest"
        if: startsWith(github.ref, 'refs/tags/v')
        continue-on-error: true
        with:
          repo_token: "${{ secrets.GITHUB_TOKEN }}"
          prerelease: false
          files: |
            build/executable/*

      # Trigger gha workflow to bump helm chart version
      - name: GitHub - Trigger the Helm chart version bump
        uses: peter-evans/repository-dispatch@v3
        if: steps.create_github_release.conclusion == 'success'
        with:
          token: ${{ secrets.GH_PERSONAL_TOKEN }}
          repository: kestra-io/helm-charts
          event-type: update-helm-chart-version
          client-payload: |-
            {
              "new_version": "${{ github.ref_name }}",
              "github_repository": "${{ github.repository }}",
              "github_actor": "${{ github.actor }}"
            }

100  .github/workflows/workflow-publish-docker.yml  vendored
@@ -1,100 +0,0 @@
name: Publish - Docker

on:
  workflow_dispatch:
    inputs:
      plugin-version:
        description: "Kestra version"
        default: 'LATEST'
        required: false
        type: string
  workflow_call:
    inputs:
      plugin-version:
        description: "Kestra version"
        default: 'LATEST'
        required: false
        type: string
    secrets:
      DOCKERHUB_USERNAME:
        description: "The Dockerhub username."
        required: true
      DOCKERHUB_PASSWORD:
        description: "The Dockerhub password."
        required: true

jobs:

  publish:
    name: Publish - Docker
    runs-on: ubuntu-latest
    strategy:
      matrix:
        image:
          - tag: ${{ needs.build-artifacts.outputs.docker-tag }}-no-plugins
            packages: jattach
            python-libraries: ""

          - tag: ${{ needs.build-artifacts.outputs.docker-tag }}
            plugins: ${{ needs.build-artifacts.outputs.plugins }}
            packages: python3 python3-venv python-is-python3 python3-pip nodejs npm curl zip unzip jattach
            python-libraries: kestra
    steps:
      - name: Checkout - Current ref
        uses: actions/checkout@v4

      # Docker setup
      - name: Docker - Setup QEMU
        uses: docker/setup-qemu-action@v3

      - name: Docker - Fix Qemu
        shell: bash
        run: |
          docker run --rm --privileged multiarch/qemu-user-static --reset -p yes -c yes

      - name: Docker - Setup Docker Buildx
        uses: docker/setup-buildx-action@v3

      # Docker Login
      - name: Docker - Login to DockerHub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_PASSWORD }}

      # Vars
      - name: Docker - Set image name
        shell: bash
        id: vars
        run: |
          TAG=${GITHUB_REF#refs/*/}
          if [[ $TAG = "master" || $TAG == v* ]]; then
            echo "plugins=${{ matrix.image.plugins }}" >> $GITHUB_OUTPUT
          else
            echo "plugins=--repositories=https://s01.oss.sonatype.org/content/repositories/snapshots ${{ matrix.image.plugins }}" >> $GITHUB_OUTPUT
          fi

      # Build Docker Image
      - name: Artifacts - Download executable
        uses: actions/download-artifact@v4
        with:
          name: exe
          path: build/executable

      - name: Docker - Copy exe to image
        shell: bash
        run: |
          cp build/executable/* docker/app/kestra && chmod +x docker/app/kestra

      # Docker Build and push
      - name: Docker - Build image
        uses: docker/build-push-action@v6
        with:
          context: .
          push: true
          tags: kestra/kestra:${{ matrix.image.tag }}
          platforms: linux/amd64,linux/arm64
          build-args: |
            KESTRA_PLUGINS=${{ steps.vars.outputs.plugins }}
            APT_PACKAGES=${{ matrix.image.packages }}
            PYTHON_LIBRARIES=${{ matrix.image.python-libraries }}

57  .github/workflows/workflow-publish-maven.yml  vendored
@@ -1,57 +0,0 @@
name: Publish - Maven

on:
  workflow_call:
    secrets:
      SONATYPE_USER:
        description: "The Sonatype username."
        required: true
      SONATYPE_PASSWORD:
        description: "The Sonatype password."
        required: true
      SONATYPE_GPG_KEYID:
        description: "The Sonatype GPG key id."
        required: true
      SONATYPE_GPG_PASSWORD:
        description: "The Sonatype GPG password."
        required: true
      SONATYPE_GPG_FILE:
        description: "The Sonatype GPG file."
        required: true

jobs:
  publish:
    name: Publish - Maven
    runs-on: ubuntu-latest
    steps:
      - name: Checkout - Current ref
        uses: actions/checkout@v4

      # Setup build
      - name: Setup - Build
        uses: kestra-io/actions/.github/actions/setup-build@main
        id: build
        with:
          java-enabled: true
          node-enabled: true

      # Publish
      - name: Publish - Release package to Maven Central
        shell: bash
        env:
          ORG_GRADLE_PROJECT_sonatypeUsername: ${{ secrets.SONATYPE_USER }}
          ORG_GRADLE_PROJECT_sonatypePassword: ${{ secrets.SONATYPE_PASSWORD }}
          SONATYPE_GPG_KEYID: ${{ secrets.SONATYPE_GPG_KEYID }}
          SONATYPE_GPG_PASSWORD: ${{ secrets.SONATYPE_GPG_PASSWORD }}
          SONATYPE_GPG_FILE: ${{ secrets.SONATYPE_GPG_FILE}}
        run: |
          mkdir -p ~/.gradle/
          echo "signing.keyId=${SONATYPE_GPG_KEYID}" > ~/.gradle/gradle.properties
          echo "signing.password=${SONATYPE_GPG_PASSWORD}" >> ~/.gradle/gradle.properties
          echo "signing.secretKeyRingFile=${HOME}/.gradle/secring.gpg" >> ~/.gradle/gradle.properties
          echo ${SONATYPE_GPG_FILE} | base64 -d > ~/.gradle/secring.gpg
          ./gradlew publishToSonatype ${{ startsWith(github.ref, 'refs/tags/v') && 'closeAndReleaseSonatypeStagingRepository' || '' }}

      # Gradle dependency
      - name: Java - Gradle dependency graph
        uses: gradle/actions/dependency-submission@v4

73  .github/workflows/workflow-release.yml  vendored
@@ -1,73 +0,0 @@
name: Release

on:
  workflow_dispatch:
    inputs:
      plugin-version:
        description: "Kestra version"
        default: 'LATEST'
        required: false
        type: string
  workflow_call:
    inputs:
      plugin-version:
        description: "Kestra version"
        default: 'LATEST'
        required: false
        type: string
    secrets:
      DOCKERHUB_USERNAME:
        description: "The Dockerhub username."
        required: true
      DOCKERHUB_PASSWORD:
        description: "The Dockerhub password."
        required: true
      SONATYPE_USER:
        description: "The Sonatype username."
        required: true
      SONATYPE_PASSWORD:
        description: "The Sonatype password."
        required: true
      SONATYPE_GPG_KEYID:
        description: "The Sonatype GPG key id."
        required: true
      SONATYPE_GPG_PASSWORD:
        description: "The Sonatype GPG password."
        required: true
      SONATYPE_GPG_FILE:
        description: "The Sonatype GPG file."
        required: true
jobs:
  build-artifacts:
    name: Build - Artifacts
    uses: ./.github/workflows/workflow-build-artifacts.yml
    with:
      plugin-version: ${{ github.event.inputs.plugin-version != null && github.event.inputs.plugin-version || 'LATEST' }}

  Docker:
    name: Publish Docker
    needs: build-artifacts
    uses: ./.github/workflows/workflow-publish-docker.yml
    with:
      plugin-version: ${{ github.event.inputs.plugin-version != null && github.event.inputs.plugin-version || 'LATEST' }}
    secrets:
      DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
      DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }}

  Maven:
    name: Publish Maven
    uses: ./.github/workflows/workflow-publish-maven.yml
    secrets:
      SONATYPE_USER: ${{ secrets.SONATYPE_USER }}
      SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }}
      SONATYPE_GPG_KEYID: ${{ secrets.SONATYPE_GPG_KEYID }}
      SONATYPE_GPG_PASSWORD: ${{ secrets.SONATYPE_GPG_PASSWORD }}
      SONATYPE_GPG_FILE: ${{ secrets.SONATYPE_GPG_FILE }}

  Github:
    name: Github Release
    needs: build-artifacts
    if: startsWith(github.ref, 'refs/tags/v')
    uses: ./.github/workflows/workflow-github-release.yml
    secrets:
      GH_PERSONAL_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}

97  .github/workflows/workflow-test.yml  vendored
@@ -1,97 +0,0 @@
name: Tests

on:
  schedule:
    - cron: '0 4 * * 1,2,3,4,5'
  workflow_call:
    inputs:
      report-status:
        description: "Report status of the jobs in outputs"
        type: string
        required: false
        default: false
    outputs:
      frontend_status:
        description: "Status of the frontend job"
        value: ${{ jobs.set-frontend-status.outputs.frontend_status }}
      backend_status:
        description: "Status of the backend job"
        value: ${{ jobs.set-backend-status.outputs.backend_status }}

jobs:
  file-changes:
    name: File changes detection
    runs-on: ubuntu-latest
    timeout-minutes: 60
    outputs:
      ui: ${{ steps.changes.outputs.ui }}
      backend: ${{ steps.changes.outputs.backend }}
    steps:
      - uses: actions/checkout@v4
        if: "!startsWith(github.ref, 'refs/tags/v')"
      - uses: dorny/paths-filter@v3
        if: "!startsWith(github.ref, 'refs/tags/v')"
        id: changes
        with:
          filters: |
            ui:
              - 'ui/**'
            backend:
              - '!{ui,.github}/**'
          token: ${{ secrets.GITHUB_TOKEN }}

  frontend:
    name: Frontend - Tests
    needs: file-changes
    if: "needs.file-changes.outputs.ui == 'true' || startsWith(github.ref, 'refs/tags/v')"
    uses: ./.github/workflows/workflow-frontend-test.yml
    secrets:
      GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

  backend:
    name: Backend - Tests
    needs: file-changes
    if: "needs.file-changes.outputs.backend == 'true' || startsWith(github.ref, 'refs/tags/v')"
    uses: ./.github/workflows/workflow-backend-test.yml
    secrets:
      GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
      SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
      GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}

  # Output every job status
  # To be used in other workflows
  report-status:
    name: Report Status
    runs-on: ubuntu-latest
    needs: [ frontend, backend ]
    if: always() && (inputs.report-status == 'true')
    outputs:
      frontend_status: ${{ steps.set-frontend-status.outputs.frontend_status }}
      backend_status: ${{ steps.set-backend-status.outputs.backend_status }}
    steps:
      - id: set-frontend-status
        name: Set frontend job status
        run: echo "::set-output name=frontend_status::${{ needs.frontend.result }}"

      - id: set-backend-status
        name: Set backend job status
        run: echo "::set-output name=backend_status::${{ needs.backend.result }}"

  notify:
    name: Notify - Slack
    runs-on: ubuntu-latest
    needs: [ frontend, backend ]
    if: github.event_name == 'schedule'
    steps:
      - name: Notify failed CI
        id: send-ci-failed
        if: |
          always() && (needs.frontend.result != 'success' ||
          needs.backend.result != 'success')
        uses: kestra-io/actions/.github/actions/send-ci-failed@main
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
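
Not part of the diff itself, but for orientation: workflow-test.yml above is a reusable workflow (workflow_call) that accepts a report-status input and exposes the frontend and backend job results as outputs. A caller could consume it roughly as in the sketch below; the caller's workflow name, job names, and follow-up step are illustrative assumptions, not something defined in this repository.

# Hypothetical caller workflow (illustration only, assuming the reusable workflow above)
name: CI
on: push
jobs:
  tests:
    uses: ./.github/workflows/workflow-test.yml
    with:
      report-status: 'true'
    secrets: inherit

  after-tests:
    needs: tests
    runs-on: ubuntu-latest
    if: needs.tests.outputs.backend_status == 'success'
    steps:
      # Echo the statuses exposed by the reusable workflow's outputs
      - run: echo "frontend=${{ needs.tests.outputs.frontend_status }} backend=${{ needs.tests.outputs.backend_status }}"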

3  .gitignore  vendored
@@ -36,8 +36,6 @@ yarn.lock
ui/coverage
ui/stats.html
ui/.frontend-gradle-plugin
ui/utils/CHANGELOG.md
ui/test-report.junit.xml

### Docker
/.env
@@ -58,4 +56,3 @@ core/src/main/resources/gradle.properties
**/allure-results/*

*storybook.log
storybook-static

1  .plugins
@@ -32,7 +32,6 @@
#plugin-git:io.kestra.plugin:plugin-git:LATEST
#plugin-github:io.kestra.plugin:plugin-github:LATEST
#plugin-googleworkspace:io.kestra.plugin:plugin-googleworkspace:LATEST
#plugin-graalvm:io.kestra.plugin:plugin-graalvm:LATEST
#plugin-hightouch:io.kestra.plugin:plugin-hightouch:LATEST
#plugin-hubspot:io.kestra.plugin:plugin-hubspot:LATEST
#plugin-huggingface:io.kestra.plugin:plugin-huggingface:LATEST

73  Makefile
@@ -17,8 +17,6 @@ VERSION := $(shell ./gradlew properties -q | awk '/^version:/ {print $$2}')
GIT_COMMIT := $(shell git rev-parse --short HEAD)
GIT_BRANCH := $(shell git rev-parse --abbrev-ref HEAD)
DATE := $(shell date --rfc-3339=seconds)
PLUGIN_GIT_DIR ?= $(pwd)/..
PLUGIN_JARS_DIR ?= $(pwd)/locals/plugins

DOCKER_IMAGE = kestra/kestra
DOCKER_PATH = ./
@@ -89,7 +87,7 @@ build-docker: build-exec
	--compress \
	--rm \
	-f ./Dockerfile \
	--build-arg="APT_PACKAGES=python3 python3-venv python-is-python3 python3-pip nodejs npm curl zip unzip jattach" \
	--build-arg="APT_PACKAGES=python3 python3-venv python-is-python3 python3-pip nodejs npm curl zip unzip" \
	--build-arg="PYTHON_LIBRARIES=kestra" \
	-t ${DOCKER_IMAGE}:${VERSION} ${DOCKER_PATH} || exit 1 ;

@@ -176,72 +174,3 @@ start-standalone-postgres: kill --private-start-standalone-postgres health

start-standalone-local: kill --private-start-standalone-local health

#checkout all plugins
clone-plugins:
	@echo "Using PLUGIN_GIT_DIR: $(PLUGIN_GIT_DIR)"
	@mkdir -p "$(PLUGIN_GIT_DIR)"
	@echo "Fetching repository list from GitHub..."
	@REPOS=$(gh repo list kestra-io -L 1000 --json name | jq -r .[].name | sort | grep "^plugin-") \
	for repo in $$REPOS; do \
		if [[ $$repo == plugin-* ]]; then \
			if [ -d "$(PLUGIN_GIT_DIR)/$$repo" ]; then \
				echo "Skipping: $$repo (Already cloned)"; \
			else \
				echo "Cloning: $$repo using SSH..."; \
				git clone "git@github.com:kestra-io/$$repo.git" "$(PLUGIN_GIT_DIR)/$$repo"; \
			fi; \
		fi; \
	done
	@echo "Done!"

# Update all plugins jar
build-plugins:
	@echo "🔍 Scanning repositories in '$(PLUGIN_GIT_DIR)'..."
	@MASTER_REPOS=(); \
	for repo in "$(PLUGIN_GIT_DIR)"/*; do \
		if [ -d "$$repo/.git" ]; then \
			branch=$$(git -C "$$repo" rev-parse --abbrev-ref HEAD); \
			if [[ "$$branch" == "master" || "$$branch" == "main" ]]; then \
				MASTER_REPOS+=("$$repo"); \
			else \
				echo "❌ Skipping: $$(basename "$$repo") (Not on master or main branch)"; \
			fi; \
		fi; \
	done; \
	\
	# === STEP 2: Update Repos on Master or Main Branch === \
	echo "⬇️ Updating repositories on master or main branch..."; \
	for repo in "$${MASTER_REPOS[@]}"; do \
		echo "🔄 Updating: $$(basename "$$repo")"; \
		git -C "$$repo" pull --rebase; \
	done; \
	\
	# === STEP 3: Build with Gradle === \
	echo "⚙️ Building repositories with Gradle..."; \
	for repo in "$${MASTER_REPOS[@]}"; do \
		echo "🔨 Building: $$(basename "$$repo")"; \
		gradle clean build -x test shadowJar -p "$$repo"; \
	done; \
	\
	# === STEP 4: Copy Latest JARs (Ignoring javadoc & sources) === \
	echo "📦 Organizing built JARs..."; \
	mkdir -p "$(PLUGIN_JARS_DIR)"; \
	for repo in "$${MASTER_REPOS[@]}"; do \
		REPO_NAME=$$(basename "$$repo"); \
		\
		JARS=($$(find "$$repo" -type f -name "plugin-*.jar" ! -name "*-javadoc.jar" ! -name "*-sources.jar")); \
		if [ $${#JARS[@]} -eq 0 ]; then \
			echo "⚠️ Warning: No valid plugin JARs found for $$REPO_NAME"; \
			continue; \
		fi; \
		\
		for jar in "$${JARS[@]}"; do \
			JAR_NAME=$$(basename "$$jar"); \
			BASE_NAME=$$(echo "$$JAR_NAME" | sed -E 's/(-[0-9]+.*)?\.jar$$//'); \
			rm -f "$(PLUGIN_JARS_DIR)/$$BASE_NAME"-[0-9]*.jar; \
			cp "$$jar" "$(PLUGIN_JARS_DIR)/"; \
			echo "✅ Copied JAR: $$JAR_NAME"; \
		done; \
	done; \
	\
	echo "🎉 Done! All master and main branch repos updated, built, and organized."

@@ -24,13 +24,6 @@
  <a href="https://www.youtube.com/@kestra-io"><img height="25" src="https://kestra.io/youtube.svg" alt="youtube" /></a>
</p>

<p align="center">
  <a href="https://trendshift.io/repositories/2714" target="_blank">
    <img src="https://trendshift.io/api/badge/repositories/2714" alt="kestra-io%2Fkestra | Trendshift" width="250" height="55"/>
  </a>
  <a href="https://www.producthunt.com/posts/kestra?embed=true&utm_source=badge-top-post-badge&utm_medium=badge&utm_souce=badge-kestra" target="_blank"><img src="https://api.producthunt.com/widgets/embed-image/v1/top-post-badge.svg?post_id=612077&theme=light&period=daily&t=1740737506162" alt="Kestra - All-in-one automation & orchestration platform | Product Hunt" style="width: 250px; height: 54px;" width="250" height="54" /></a>
</p>

<p align="center">
  <a href="https://go.kestra.io/video/product-overview" target="_blank">
    <img src="https://kestra.io/startvideo.png" alt="Get started in 4 minutes with Kestra" width="640px" />
@@ -54,7 +47,7 @@ Kestra is an open-source, event-driven orchestration platform that makes both **
- **Structure & Resilience**: tame chaos and bring resilience to your workflows with **namespaces**, **labels**, **subflows**, **retries**, **timeout**, **error handling**, **inputs**, **outputs** that generate artifacts in the UI, **variables**, **conditional branching**, **advanced scheduling**, **event triggers**, **backfills**, **dynamic tasks**, **sequential and parallel tasks**, and skip tasks or triggers when needed by setting the flag `disabled` to `true`.

🧑‍💻 The YAML definition gets automatically adjusted any time you make changes to a workflow from the UI or via an API call. Therefore, the orchestration logic is **always managed declaratively in code**, even if you modify your workflows in other ways (UI, CI/CD, Terraform, API calls).

<p align="center">
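
The README lines above describe flows defined declaratively in YAML, with scheduling, event triggers, and a `disabled` flag to skip tasks or triggers. Purely as an illustration (not part of this compare), a minimal flow could look like the sketch below; the task and trigger type names are assumptions based on core plugin naming rather than anything shown in this diff.

# Illustrative Kestra flow - a sketch, not taken from this repository
id: hello_world
namespace: company.team

tasks:
  - id: say_hello
    type: io.kestra.plugin.core.log.Log   # assumed core log task type
    message: Hello from a declaratively defined flow

  - id: optional_step
    type: io.kestra.plugin.core.log.Log
    message: This task is skipped
    disabled: true                         # the `disabled` flag mentioned above

triggers:
  - id: every_weekday_morning
    type: io.kestra.plugin.core.trigger.Schedule   # assumed trigger type
    cron: "0 4 * * 1-5"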

13  build.gradle
@@ -16,7 +16,7 @@ plugins {
id "java"
id 'java-library'
id "idea"
id "com.gradleup.shadow" version "8.3.6"
id "com.gradleup.shadow" version "8.3.5"
id "application"

// test
@@ -28,18 +28,18 @@ plugins {
id "com.github.ben-manes.versions" version "0.52.0"

// front
id 'com.github.node-gradle.node' version '7.1.0'
id 'org.siouan.frontend-jdk21' version '10.0.0' apply false

// release
id "io.github.gradle-nexus.publish-plugin" version "2.0.0"
id 'net.researchgate.release' version '3.1.0'
id "com.gorylenko.gradle-git-properties" version "2.5.0"
id "com.gorylenko.gradle-git-properties" version "2.4.2"
id 'signing'
id 'ru.vyarus.pom' version '3.0.0' apply false
id 'ru.vyarus.github-info' version '2.0.0' apply false

// OWASP dependency check
id "org.owasp.dependencycheck" version "12.1.0" apply false
id "org.owasp.dependencycheck" version "12.0.1" apply false
}

idea {
@@ -74,7 +74,7 @@ dependencies {
**********************************************************************************************************************/
allprojects {
if (it.name != 'platform') {
group = "io.kestra"
group "io.kestra"

java {
sourceCompatibility = targetJavaVersion
@@ -121,6 +121,7 @@ allprojects {
micronaut "io.micronaut:micronaut-management"
micronaut "io.micrometer:micrometer-core"
micronaut "io.micronaut.micrometer:micronaut-micrometer-registry-prometheus"
micronaut "io.micronaut.micrometer:micronaut-micrometer-registry-otlp"
micronaut "io.micronaut:micronaut-http-client"
micronaut "io.micronaut.reactor:micronaut-reactor-http-client"
micronaut "io.micronaut.tracing:micronaut-tracing-opentelemetry-http"
@@ -279,7 +280,7 @@ subprojects {
}

dependencies {
agent "org.aspectj:aspectjweaver:1.9.23"
agent "org.aspectj:aspectjweaver:1.9.22.1"
}

test {

@@ -12,9 +12,18 @@ dependencies {
implementation 'ch.qos.logback.contrib:logback-json-classic'
implementation 'ch.qos.logback.contrib:logback-jackson'

// OTLP metrics
implementation "io.micronaut.micrometer:micronaut-micrometer-registry-otlp"

// plugins
implementation 'org.eclipse.aether:aether-api'
implementation 'org.eclipse.aether:aether-spi'
implementation 'org.eclipse.aether:aether-util'
implementation 'org.eclipse.aether:aether-impl'
implementation 'org.eclipse.aether:aether-connector-basic'
implementation 'org.eclipse.aether:aether-transport-file'
implementation 'org.eclipse.aether:aether-transport-http'
implementation('org.apache.maven:maven-aether-provider') {
// sisu dependency injector is not used
exclude group: 'org.eclipse.sisu'
}
// aether still use javax.inject
compileOnly 'javax.inject:javax.inject:1'

@@ -34,7 +43,4 @@ dependencies {
implementation project(":storage-local")

implementation project(":webserver")

//test
testImplementation "org.wiremock:wiremock"
}

@@ -4,17 +4,16 @@ import ch.qos.logback.classic.LoggerContext;
import com.google.common.collect.ImmutableMap;
import io.kestra.cli.commands.servers.ServerCommandInterface;
import io.kestra.cli.services.StartupHookInterface;
import io.kestra.core.plugins.PluginManager;
import io.kestra.core.contexts.KestraContext;
import io.kestra.core.plugins.PluginRegistry;
import io.kestra.webserver.services.FlowAutoLoaderService;
import io.micronaut.context.ApplicationContext;
import io.micronaut.context.env.yaml.YamlPropertySourceLoader;
import io.micronaut.core.annotation.Introspected;
import io.micronaut.http.uri.UriBuilder;
import io.micronaut.management.endpoint.EndpointDefaultConfiguration;
import io.micronaut.runtime.server.EmbeddedServer;
import jakarta.inject.Provider;
import lombok.extern.slf4j.Slf4j;
import org.apache.http.client.utils.URIBuilder;
import io.kestra.core.utils.Rethrow;
import picocli.CommandLine;

@@ -27,13 +26,10 @@ import java.nio.file.Paths;
import java.text.MessageFormat;
import java.time.temporal.ChronoUnit;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.Callable;
import jakarta.inject.Inject;
import picocli.CommandLine.Command;
import picocli.CommandLine.Option;

@Command(
@CommandLine.Command(
versionProvider = VersionProvider.class,
mixinStandardHelpOptions = true,
showDefaultValues = true
@@ -53,28 +49,22 @@ abstract public class AbstractCommand implements Callable<Integer> {
@Inject
private io.kestra.core.utils.VersionProvider versionProvider;

@Inject
protected Provider<PluginRegistry> pluginRegistryProvider;

@Inject
protected Provider<PluginManager> pluginManagerProvider;

private PluginRegistry pluginRegistry;

@Option(names = {"-v", "--verbose"}, description = "Change log level. Multiple -v options increase the verbosity.", showDefaultValue = CommandLine.Help.Visibility.NEVER)
@CommandLine.Option(names = {"-v", "--verbose"}, description = "Change log level. Multiple -v options increase the verbosity.", showDefaultValue = CommandLine.Help.Visibility.NEVER)
private boolean[] verbose = new boolean[0];

@Option(names = {"-l", "--log-level"}, description = "Change log level (values: ${COMPLETION-CANDIDATES})")
@CommandLine.Option(names = {"-l", "--log-level"}, description = "Change log level (values: ${COMPLETION-CANDIDATES})")
private LogLevel logLevel = LogLevel.INFO;

@Option(names = {"--internal-log"}, description = "Change also log level for internal log")
@CommandLine.Option(names = {"--internal-log"}, description = "Change also log level for internal log")
private boolean internalLog = false;

@Option(names = {"-c", "--config"}, description = "Path to a configuration file")
@CommandLine.Option(names = {"-c", "--config"}, description = "Path to a configuration file")
private Path config = Paths.get(System.getProperty("user.home"), ".kestra/config.yml");

@Option(names = {"-p", "--plugins"}, description = "Path to plugins directory")
protected Path pluginsPath = Optional.ofNullable(System.getenv("KESTRA_PLUGINS_PATH")).map(Paths::get).orElse(null);
@CommandLine.Option(names = {"-p", "--plugins"}, description = "Path to plugins directory")
protected Path pluginsPath = System.getenv("KESTRA_PLUGINS_PATH") != null ? Paths.get(System.getenv("KESTRA_PLUGINS_PATH")) : null;

public enum LogLevel {
TRACE,
@@ -86,7 +76,7 @@ abstract public class AbstractCommand implements Callable<Integer> {

@Override
public Integer call() throws Exception {
Thread.currentThread().setName(this.getClass().getDeclaredAnnotation(Command.class).name());
Thread.currentThread().setName(this.getClass().getDeclaredAnnotation(CommandLine.Command.class).name());
startLogger();
sendServerLog();
if (this.startupHook != null) {
@@ -94,14 +84,8 @@ abstract public class AbstractCommand implements Callable<Integer> {
}

if (this.pluginsPath != null && loadExternalPlugins()) {
pluginRegistry = pluginRegistryProvider.get();
pluginRegistry = pluginRegistry();
pluginRegistry.registerIfAbsent(pluginsPath);

// PluginManager mus only be initialized if a registry is also instantiated
if (isPluginManagerEnabled()) {
PluginManager manager = pluginManagerProvider.get();
manager.start();
}
}

startWebserver();
@@ -118,15 +102,8 @@ abstract public class AbstractCommand implements Callable<Integer> {
return true;
}

/**
* Specifies whether the {@link PluginManager} service must be initialized.
* <p>
* This method can be overridden by concrete commands.
*
* @return {@code true} if the {@link PluginManager} service must be initialized.
*/
protected boolean isPluginManagerEnabled() {
return true;
protected PluginRegistry pluginRegistry() {
return KestraContext.getContext().getPluginRegistry(); // Lazy init
}

private static String message(String message, Object... format) {
@@ -180,6 +157,7 @@ abstract public class AbstractCommand implements Callable<Integer> {
logger.getName().startsWith("io.kestra") &&
!logger.getName().startsWith("io.kestra.ee.runner.kafka.services"))
)
|| logger.getName().startsWith("flow")
)
.forEach(
logger -> logger.setLevel(ch.qos.logback.classic.Level.valueOf(this.logLevel.name()))
@@ -205,9 +183,9 @@ abstract public class AbstractCommand implements Callable<Integer> {
if (this.endpointConfiguration.getPort().isPresent()) {
URI endpoint = null;
try {
endpoint = UriBuilder.of(server.getURL().toURI())
.port(this.endpointConfiguration.getPort().get())
.path("/health")
endpoint = new URIBuilder(server.getURL().toURI())
.setPort(this.endpointConfiguration.getPort().get())
.setPath("/health")
.build();
} catch (URISyntaxException e) {
e.printStackTrace();
@@ -229,12 +207,10 @@ abstract public class AbstractCommand implements Callable<Integer> {
return false;
}

protected void shutdownHook(boolean logShutdown, Rethrow.RunnableChecked<Exception> run) {
protected void shutdownHook(Rethrow.RunnableChecked<Exception> run) {
Runtime.getRuntime().addShutdownHook(new Thread(
() -> {
if (logShutdown) {
log.warn("Receiving shutdown ! Try to graceful exit");
}
log.warn("Receiving shutdown ! Try to graceful exit");
try {
run.run();
} catch (Exception e) {

@@ -88,12 +88,11 @@ public class App implements Callable<Integer> {
.environments(Environment.CLI);

CommandLine cmd = new CommandLine(mainClass, CommandLine.defaultFactory());
continueOnParsingErrors(cmd);

CommandLine.ParseResult parseResult = cmd.parseArgs(args);
List<CommandLine> parsedCommands = parseResult.asCommandLineList();

CommandLine commandLine = parsedCommands.getLast();
CommandLine commandLine = parsedCommands.get(parsedCommands.size() - 1);
Class<?> cls = commandLine.getCommandSpec().userObject().getClass();

if (AbstractCommand.class.isAssignableFrom(cls)) {
@@ -115,17 +114,15 @@ public class App implements Callable<Integer> {
.stream()
.filter(argSpec -> ((Field) argSpec.userObject()).getName().equals("serverPort"))
.findFirst()
.ifPresent(argSpec -> properties.put("micronaut.server.port", argSpec.getValue()));
.ifPresent(argSpec -> {
properties.put("micronaut.server.port", argSpec.getValue());
});

builder.properties(properties);
}
return builder.build();
}

private static void continueOnParsingErrors(CommandLine cmd) {
cmd.getCommandSpec().parser().collectErrors(true);
}

@SuppressWarnings("unchecked")
private static <T> T getPropertiesFromMethod(Class<?> cls, String methodName, Object instance) {
try {

@@ -18,8 +18,6 @@ import picocli.CommandLine;
FlowNamespaceCommand.class,
FlowDotCommand.class,
FlowExportCommand.class,
FlowUpdateCommand.class,
FlowUpdatesCommand.class
}
)
@Slf4j

@@ -33,9 +33,6 @@ public class FlowUpdatesCommand extends AbstractApiCommand {
@CommandLine.Option(names = {"--delete"}, negatable = true, description = "Whether missing should be deleted")
public boolean delete = false;

@CommandLine.Option(names = {"--namespace"}, description = "The parent namespace of the flows, if not set, every namespace are allowed.")
public String namespace;

@SuppressWarnings("deprecation")
@Override
public Integer call() throws Exception {
@@ -61,12 +58,8 @@ public class FlowUpdatesCommand extends AbstractApiCommand {
body = String.join("\n---\n", flows);
}
try(DefaultHttpClient client = client()) {
String namespaceQuery = "";
if (namespace != null) {
namespaceQuery = "&namespace=" + namespace;
}
MutableHttpRequest<String> request = HttpRequest
.POST(apiUri("/flows/bulk") + "?allowNamespaceChild=true&delete=" + delete + namespaceQuery, body).contentType(MediaType.APPLICATION_YAML);
.POST(apiUri("/flows/bulk") + "?delete=" + delete, body).contentType(MediaType.APPLICATION_YAML);

List<UpdateResult> updated = client.toBlocking().retrieve(
this.requestOptions(request),

@@ -39,7 +39,7 @@ public class FlowValidateCommand extends AbstractValidateCommand {
Flow flow = (Flow) object;
List<String> warnings = new ArrayList<>();
warnings.addAll(flowService.deprecationPaths(flow).stream().map(deprecation -> deprecation + " is deprecated").toList());
warnings.addAll(flowService.warnings(flow, this.tenantId));
warnings.addAll(flowService.warnings(flow));
return warnings;
},
(Object object) -> {

@@ -1,37 +1,31 @@
package io.kestra.cli.commands.plugins;

import io.kestra.cli.AbstractCommand;
import io.kestra.cli.App;
import io.micronaut.configuration.picocli.PicocliRunner;
import lombok.SneakyThrows;
import picocli.CommandLine.Command;
import lombok.extern.slf4j.Slf4j;
import io.kestra.cli.AbstractCommand;
import io.kestra.cli.App;
import picocli.CommandLine;

@Command(
@CommandLine.Command(
name = "plugins",
description = "Manage plugins",
mixinStandardHelpOptions = true,
subcommands = {
PluginInstallCommand.class,
PluginUninstallCommand.class,
PluginListCommand.class,
PluginDocCommand.class,
PluginSearchCommand.class
PluginDocCommand.class
}
)
@Slf4j
public class PluginCommand extends AbstractCommand {

@SneakyThrows
@Override
public Integer call() throws Exception {
super.call();

PicocliRunner.call(App.class, "plugins", "--help");

return 0;
}

@Override
protected boolean loadExternalPlugins() {
return false;
}
}

@@ -1,9 +1,9 @@
package io.kestra.cli.commands.plugins;

import com.google.common.base.Charsets;
import com.google.common.io.Files;
import io.kestra.cli.AbstractCommand;
import io.kestra.core.docs.DocumentationGenerator;
import io.kestra.core.plugins.PluginRegistry;
import io.kestra.core.plugins.RegisteredPlugin;
import io.kestra.core.serializers.JacksonMapper;
import io.micronaut.context.ApplicationContext;
@@ -43,10 +43,8 @@ public class PluginDocCommand extends AbstractCommand {
super.call();
DocumentationGenerator documentationGenerator = applicationContext.getBean(DocumentationGenerator.class);

PluginRegistry registry = pluginRegistryProvider.get();
List<RegisteredPlugin> plugins = core ? registry.plugins() : registry.externalPlugins();
List<RegisteredPlugin> plugins = core ? pluginRegistry().plugins() : pluginRegistry().externalPlugins();
boolean hasFailures = false;

for (RegisteredPlugin registeredPlugin : plugins) {
try {
documentationGenerator
@@ -63,7 +61,7 @@ public class PluginDocCommand extends AbstractCommand {
Files
.asCharSink(
file,
StandardCharsets.UTF_8
Charsets.UTF_8
).write(s.getBody());
stdOut("Generate doc in: {0}", file);

@@ -103,10 +101,4 @@ public class PluginDocCommand extends AbstractCommand {

return hasFailures ? 1 : 0;
}

/** {@inheritDoc} **/
@Override
protected boolean isPluginManagerEnabled() {
return false;
}
}

@@ -1,123 +1,98 @@
package io.kestra.cli.commands.plugins;

import io.kestra.core.contexts.MavenPluginRepositoryConfig;
import io.kestra.core.plugins.LocalPluginManager;
import io.kestra.core.plugins.MavenPluginDownloader;
import io.kestra.core.plugins.PluginArtifact;
import io.kestra.core.plugins.PluginCatalogService;
import io.kestra.core.plugins.PluginManager;
import io.micronaut.http.client.HttpClient;
import io.micronaut.http.client.annotation.Client;
import io.micronaut.http.uri.UriBuilder;
import org.apache.commons.io.FilenameUtils;
import io.kestra.cli.AbstractCommand;
import io.kestra.cli.plugins.PluginDownloader;
import io.kestra.cli.plugins.RepositoryConfig;
import io.kestra.core.utils.IdUtils;
import jakarta.inject.Provider;
import org.apache.http.client.utils.URIBuilder;
import picocli.CommandLine;

import java.net.URI;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;

import jakarta.inject.Inject;
import picocli.CommandLine.Command;
import picocli.CommandLine.Parameters;
import picocli.CommandLine.Option;
import picocli.CommandLine.Spec;

@Command(
import static io.kestra.core.utils.Rethrow.throwConsumer;

@CommandLine.Command(
name = "install",
description = "Install plugins"
)
public class PluginInstallCommand extends AbstractCommand {

@Option(names = {"--locally"}, description = "Specifies if plugins must be installed locally. If set to false the installation depends on your Kestra configuration.")
boolean locally = true;

@Option(names = {"--all"}, description = "Install all available plugins")
boolean all = false;

@Parameters(index = "0..*", description = "Plugins to install. Represented as Maven artifact coordinates (i.e., <groupId>:<artifactId>:(<version>|LATEST)")
@CommandLine.Parameters(index = "0..*", description = "Plugins to install. Represented as Maven artifact coordinates.")
List<String> dependencies = new ArrayList<>();

@Option(names = {"--repositories"}, description = "URL to additional Maven repositories")
@CommandLine.Option(names = {"--repositories"}, description = "URL to additional Maven repositories")
private URI[] repositories;

@Spec
@CommandLine.Spec
CommandLine.Model.CommandSpec spec;

@Inject
Provider<MavenPluginDownloader> mavenPluginRepositoryProvider;

@Inject
@Client("api") HttpClient httpClient;
private PluginDownloader pluginDownloader;

@Override
public Integer call() throws Exception {
super.call();

if (this.locally && this.pluginsPath == null) {
if (this.pluginsPath == null) {
throw new CommandLine.ParameterException(this.spec.commandLine(), "Missing required options '--plugins' " +
"or environment variable 'KESTRA_PLUGINS_PATH"
);
}

List<MavenPluginRepositoryConfig> repositoryConfigs = List.of();
if (!pluginsPath.toFile().exists()) {
if (!pluginsPath.toFile().mkdir()) {
throw new RuntimeException("Cannot create directory: " + pluginsPath.toFile().getAbsolutePath());
}
}

if (repositories != null) {
repositoryConfigs = Arrays.stream(repositories)
.map(uri -> {
MavenPluginRepositoryConfig.MavenPluginRepositoryConfigBuilder builder = MavenPluginRepositoryConfig
.builder()
Arrays.stream(repositories)
.forEach(throwConsumer(s -> {
URIBuilder uriBuilder = new URIBuilder(s);

RepositoryConfig.RepositoryConfigBuilder builder = RepositoryConfig.builder()
.id(IdUtils.create());

String userInfo = uri.getUserInfo();
if (userInfo != null) {
String[] userInfoParts = userInfo.split(":");
builder = builder.basicAuth(new MavenPluginRepositoryConfig.BasicAuth(
userInfoParts[0],
userInfoParts[1]
if (uriBuilder.getUserInfo() != null) {
int index = uriBuilder.getUserInfo().indexOf(":");

builder.basicAuth(new RepositoryConfig.BasicAuth(
uriBuilder.getUserInfo().substring(0, index),
uriBuilder.getUserInfo().substring(index + 1)
));

uriBuilder.setUserInfo(null);
}
builder.url(UriBuilder.of(uri).userInfo(null).build().toString());
return builder.build();
}).toList();

builder.url(uriBuilder.build().toString());

pluginDownloader.addRepository(builder.build());
}));
}

if (all) {
PluginCatalogService service = new PluginCatalogService(httpClient, false, true);
dependencies = service.get().stream().map(Objects::toString).toList();
}
List<URL> resolveUrl = pluginDownloader.resolve(dependencies);
stdOut("Resolved Plugin(s) with {0}", resolveUrl);

if (dependencies.isEmpty()) {
stdErr("Error: No plugin to install.");
return CommandLine.ExitCode.OK;
}

final List<PluginArtifact> pluginArtifacts;
try {
pluginArtifacts = dependencies.stream().map(PluginArtifact::fromCoordinates).toList();
} catch (IllegalArgumentException e) {
stdErr(e.getMessage());
return CommandLine.ExitCode.USAGE;
}

try (final PluginManager pluginManager = getPluginManager()) {
List<PluginArtifact> installed = pluginManager.install(
pluginArtifacts,
repositoryConfigs,
false,
pluginsPath
for (URL url: resolveUrl) {
Files.copy(
Paths.get(url.toURI()),
Paths.get(pluginsPath.toString(), FilenameUtils.getName(url.toString())),
StandardCopyOption.REPLACE_EXISTING
);

List<URI> uris = installed.stream().map(PluginArtifact::uri).toList();
stdOut("Successfully installed plugins {0} into {1}", dependencies, uris);
return CommandLine.ExitCode.OK;
}
}

private PluginManager getPluginManager() {
return locally ? new LocalPluginManager(mavenPluginRepositoryProvider.get()) : this.pluginManagerProvider.get();
stdOut("Successfully installed plugins {0} into {1}", dependencies, pluginsPath);

return 0;
}

@Override

@@ -1,31 +1,22 @@
package io.kestra.cli.commands.plugins;

import io.kestra.cli.AbstractCommand;
import io.kestra.core.plugins.PluginRegistry;
import io.kestra.core.plugins.RegisteredPlugin;
import jakarta.inject.Inject;
import jakarta.inject.Provider;
import picocli.CommandLine;
import picocli.CommandLine.Command;
import picocli.CommandLine.Option;
import picocli.CommandLine.Spec;

import java.util.List;

@Command(
@CommandLine.Command(
name = "list",
description = "List all plugins already installed"
)
public class PluginListCommand extends AbstractCommand {
@Spec
@CommandLine.Spec
CommandLine.Model.CommandSpec spec;

@Option(names = {"--core"}, description = "Also write core tasks plugins")
@CommandLine.Option(names = {"--core"}, description = "Also write core tasks plugins")
private boolean core = false;

@Inject
private PluginRegistry registry;

@Override
public Integer call() throws Exception {
super.call();
@@ -36,8 +27,7 @@ public class PluginListCommand extends AbstractCommand {
);
}

List<RegisteredPlugin> plugins = core ? registry.plugins() : registry.externalPlugins();

List<RegisteredPlugin> plugins = core ? pluginRegistry().plugins() : pluginRegistry().externalPlugins();
plugins.forEach(registeredPlugin -> stdOut(registeredPlugin.toString()));

return 0;

@@ -1,149 +0,0 @@
package io.kestra.cli.commands.plugins;

import io.kestra.cli.AbstractCommand;
import io.micronaut.core.type.Argument;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.client.HttpClient;
import io.micronaut.http.client.annotation.Client;
import jakarta.inject.Inject;
import picocli.CommandLine.Command;
import picocli.CommandLine.Parameters;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.util.ArrayList;
import java.util.List;

@Command(
name = "search",
description = "Search for available Kestra plugins"
)
public class PluginSearchCommand extends AbstractCommand {
@Inject
@Client("api")
private HttpClient httpClient;

private static final ObjectMapper MAPPER = new ObjectMapper();
private static final char SPACE = ' ';

@Parameters(index = "0", description = "Search term (optional)", defaultValue = "")
private String searchTerm;

@Override
public Integer call() throws Exception {
super.call();

try {
JsonNode root = fetchPlugins();
List<PluginInfo> plugins = findPlugins(root);
printResults(plugins);
return 0;
} catch (Exception e) {
stdOut("Error processing plugins: {0}", e.getMessage());
return 1;
}
}

private JsonNode fetchPlugins() throws Exception {
String response = httpClient.toBlocking()
.retrieve(
HttpRequest.GET("/v1/plugins")
.header("Accept", "application/json")
);
return MAPPER.readTree(response);
}

private List<PluginInfo> findPlugins(JsonNode root) {
String searchTermLower = searchTerm.toLowerCase();
List<PluginInfo> plugins = new ArrayList<>();

for (JsonNode plugin : root) {
if (matchesSearch(plugin, searchTermLower)) {
plugins.add(new PluginInfo(
plugin.path("name").asText(),
plugin.path("title").asText(),
plugin.path("group").asText(),
plugin.path("version").asText("")
));
}
}

plugins.sort((p1, p2) -> p1.name.compareToIgnoreCase(p2.name));
return plugins;
}

private boolean matchesSearch(JsonNode plugin, String term) {
if (term.isEmpty()) {
return true;
}

return plugin.path("name").asText().toLowerCase().contains(term) ||
plugin.path("title").asText().toLowerCase().contains(term) ||
plugin.path("group").asText().toLowerCase().contains(term);
}

private void printResults(List<PluginInfo> plugins) {
if (plugins.isEmpty()) {
stdOut("No plugins found{0}",
searchTerm.isEmpty() ? "" : " matching '" + searchTerm + "'");
return;
}

stdOut("\nFound {0} plugins{1}",
plugins.size(),
searchTerm.isEmpty() ? "" : " matching '" + searchTerm + "'"
);

printPluginsTable(plugins);
}

private void printPluginsTable(List<PluginInfo> plugins) {
int maxName = 4, maxTitle = 5, maxGroup = 5;
for (PluginInfo plugin : plugins) {
maxName = Math.max(maxName, plugin.name.length());
maxTitle = Math.max(maxTitle, plugin.title.length());
maxGroup = Math.max(maxGroup, plugin.group.length());
}

StringBuilder namePad = new StringBuilder(maxName);
StringBuilder titlePad = new StringBuilder(maxTitle);
StringBuilder groupPad = new StringBuilder(maxGroup);

stdOut("");
printRow(namePad, titlePad, groupPad, "NAME", "TITLE", "GROUP", "VERSION",
maxName, maxTitle, maxGroup);

for (PluginInfo plugin : plugins) {
printRow(namePad, titlePad, groupPad, plugin.name, plugin.title, plugin.group, plugin.version,
maxName, maxTitle, maxGroup);
}
stdOut("");
}

private void printRow(StringBuilder namePad, StringBuilder titlePad, StringBuilder groupPad,
String name, String title, String group, String version,
int maxName, int maxTitle, int maxGroup) {
stdOut("{0} {1} {2} {3}",
pad(namePad, name, maxName),
pad(titlePad, title, maxTitle),
pad(groupPad, group, maxGroup),
version
);
}

private String pad(StringBuilder sb, String str, int length) {
sb.setLength(0);
sb.append(str);
while (sb.length() < length) {
sb.append(SPACE);
}
return sb.toString();
}

private record PluginInfo(String name, String title, String group, String version) {}

@Override
protected boolean loadExternalPlugins() {
return false;
}
}
@@ -1,69 +0,0 @@
package io.kestra.cli.commands.plugins;

import io.kestra.cli.AbstractCommand;
import io.kestra.core.plugins.LocalPluginManager;
import io.kestra.core.plugins.MavenPluginDownloader;
import io.kestra.core.plugins.PluginArtifact;
import io.kestra.core.plugins.PluginManager;
import jakarta.inject.Inject;
import jakarta.inject.Provider;
import picocli.CommandLine;
import picocli.CommandLine.Parameters;
import picocli.CommandLine.Spec;

import java.net.URI;
import java.util.ArrayList;
import java.util.List;

@CommandLine.Command(
name = "uninstall",
description = "Uninstall plugins"
)
public class PluginUninstallCommand extends AbstractCommand {
@Parameters(index = "0..*", description = "The plugins to uninstall. Represented as Maven artifact coordinates (i.e., <groupId>:<artifactId>:(<version>|LATEST)")
List<String> dependencies = new ArrayList<>();

@Spec
CommandLine.Model.CommandSpec spec;

@Inject
Provider<MavenPluginDownloader> mavenPluginRepositoryProvider;

@Override
public Integer call() throws Exception {
super.call();

List<PluginArtifact> pluginArtifacts;
try {
pluginArtifacts = dependencies.stream().map(PluginArtifact::fromCoordinates).toList();
} catch (IllegalArgumentException e) {
stdErr(e.getMessage());
return CommandLine.ExitCode.USAGE;
}

final PluginManager pluginManager;

// If a PLUGIN_PATH is provided, then use the LocalPluginManager
if (pluginsPath != null) {
pluginManager = new LocalPluginManager(mavenPluginRepositoryProvider.get());
} else {
// Otherwise, we delegate to the configured plugin-manager.
pluginManager = this.pluginManagerProvider.get();
}

List<PluginArtifact> uninstalled = pluginManager.uninstall(
pluginArtifacts,
false,
pluginsPath
);

List<URI> uris = uninstalled.stream().map(PluginArtifact::uri).toList();
stdOut("Successfully uninstalled plugins {0} from {1}", dependencies, uris);
return CommandLine.ExitCode.OK;
}

@Override
protected boolean loadExternalPlugins() {
return false;
}
}
@@ -1,20 +1,12 @@
package io.kestra.cli.commands.servers;

import io.kestra.cli.AbstractCommand;
import io.kestra.core.contexts.KestraContext;
import jakarta.annotation.PostConstruct;
import picocli.CommandLine;

abstract public class AbstractServerCommand extends AbstractCommand implements ServerCommandInterface {
@CommandLine.Option(names = {"--port"}, description = "The port to bind")
Integer serverPort;

@Override
public Integer call() throws Exception {
this.shutdownHook(true, () -> KestraContext.getContext().shutdown());
return super.call();
}

protected static int defaultWorkerThread() {
return Runtime.getRuntime().availableProcessors() * 4;
}
@@ -1,6 +1,7 @@
|
||||
package io.kestra.cli.commands.servers;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import io.kestra.core.contexts.KestraContext;
|
||||
import io.kestra.core.models.ServerType;
|
||||
import io.kestra.core.runners.ExecutorInterface;
|
||||
import io.kestra.core.services.SkipExecutionService;
|
||||
@@ -8,6 +9,7 @@ import io.kestra.core.services.StartExecutorService;
|
||||
import io.kestra.core.utils.Await;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import jakarta.inject.Inject;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import picocli.CommandLine;
|
||||
|
||||
import java.util.Collections;
|
||||
@@ -18,6 +20,7 @@ import java.util.Map;
|
||||
name = "executor",
|
||||
description = "Start the Kestra executor"
|
||||
)
|
||||
@Slf4j
|
||||
public class ExecutorCommand extends AbstractServerCommand {
|
||||
@Inject
|
||||
private ApplicationContext applicationContext;
|
||||
@@ -63,10 +66,13 @@ public class ExecutorCommand extends AbstractServerCommand {
|
||||
this.startExecutorService.applyOptions(startExecutors, notStartExecutors);
|
||||
|
||||
super.call();
|
||||
this.shutdownHook(() -> KestraContext.getContext().shutdown());
|
||||
|
||||
ExecutorInterface executorService = applicationContext.getBean(ExecutorInterface.class);
|
||||
executorService.run();
|
||||
|
||||
log.info("Executor started");
|
||||
|
||||
Await.until(() -> !this.applicationContext.isRunning());
|
||||
|
||||
return 0;
|
||||
|
||||
@@ -1,11 +1,13 @@
|
||||
package io.kestra.cli.commands.servers;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import io.kestra.core.contexts.KestraContext;
|
||||
import io.kestra.core.models.ServerType;
|
||||
import io.kestra.core.runners.IndexerInterface;
|
||||
import io.kestra.core.utils.Await;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import jakarta.inject.Inject;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import picocli.CommandLine;
|
||||
|
||||
import java.util.Map;
|
||||
@@ -14,6 +16,7 @@ import java.util.Map;
|
||||
name = "indexer",
|
||||
description = "Start the Kestra indexer"
|
||||
)
|
||||
@Slf4j
|
||||
public class IndexerCommand extends AbstractServerCommand {
|
||||
@Inject
|
||||
private ApplicationContext applicationContext;
|
||||
@@ -28,10 +31,13 @@ public class IndexerCommand extends AbstractServerCommand {
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
this.shutdownHook(() -> KestraContext.getContext().shutdown());
|
||||
|
||||
IndexerInterface indexer = applicationContext.getBean(IndexerInterface.class);
|
||||
indexer.run();
|
||||
|
||||
log.info("Indexer started");
|
||||
|
||||
Await.until(() -> !this.applicationContext.isRunning());
|
||||
|
||||
return 0;
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
package io.kestra.cli.commands.servers;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import io.kestra.core.contexts.KestraContext;
|
||||
import io.kestra.core.models.ServerType;
|
||||
import io.kestra.core.schedulers.AbstractScheduler;
|
||||
import io.kestra.core.utils.Await;
|
||||
@@ -30,10 +31,12 @@ public class SchedulerCommand extends AbstractServerCommand {
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
this.shutdownHook(() -> KestraContext.getContext().shutdown());
|
||||
|
||||
AbstractScheduler scheduler = applicationContext.getBean(AbstractScheduler.class);
|
||||
scheduler.run();
|
||||
|
||||
log.info("Scheduler started");
|
||||
Await.until(() -> !this.applicationContext.isRunning());
|
||||
|
||||
return 0;
|
||||
|
||||
@@ -12,6 +12,7 @@ import io.kestra.core.utils.Await;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import jakarta.annotation.Nullable;
|
||||
import jakarta.inject.Inject;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import picocli.CommandLine;
|
||||
|
||||
import java.io.File;
|
||||
@@ -24,6 +25,7 @@ import java.util.Map;
|
||||
name = "standalone",
|
||||
description = "Start the standalone all-in-one server"
|
||||
)
|
||||
@Slf4j
|
||||
public class StandAloneCommand extends AbstractServerCommand {
|
||||
@CommandLine.Spec
|
||||
CommandLine.Model.CommandSpec spec;
|
||||
@@ -89,11 +91,11 @@ public class StandAloneCommand extends AbstractServerCommand {
|
||||
this.skipExecutionService.setSkipFlows(skipFlows);
|
||||
this.skipExecutionService.setSkipNamespaces(skipNamespaces);
|
||||
this.skipExecutionService.setSkipTenants(skipTenants);
|
||||
|
||||
this.startExecutorService.applyOptions(startExecutors, notStartExecutors);
|
||||
|
||||
KestraContext.getContext().injectWorkerConfigs(workerThread, null);
|
||||
|
||||
super.call();
|
||||
this.shutdownHook(() -> KestraContext.getContext().shutdown());
|
||||
|
||||
if (flowPath != null) {
|
||||
try {
|
||||
@@ -122,6 +124,8 @@ public class StandAloneCommand extends AbstractServerCommand {
|
||||
fileWatcher.startListeningFromConfig();
|
||||
}
|
||||
|
||||
this.shutdownHook(standAloneRunner::close);
|
||||
|
||||
Await.until(() -> !this.applicationContext.isRunning());
|
||||
|
||||
return 0;
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
package io.kestra.cli.commands.servers;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import io.kestra.core.contexts.KestraContext;
|
||||
import io.kestra.core.models.ServerType;
|
||||
import io.kestra.core.runners.ExecutorInterface;
|
||||
import io.kestra.core.runners.IndexerInterface;
|
||||
import io.kestra.core.utils.Await;
|
||||
import io.kestra.core.utils.ExecutorsUtils;
|
||||
@@ -55,11 +57,20 @@ public class WebServerCommand extends AbstractServerCommand {
|
||||
log.info("Starting an embedded indexer, this can be disabled by using `--no-indexer`.");
|
||||
poolExecutor = executorsUtils.cachedThreadPool("webserver-indexer");
|
||||
poolExecutor.execute(applicationContext.getBean(IndexerInterface.class));
|
||||
shutdownHook(false, () -> poolExecutor.shutdown());
|
||||
}
|
||||
|
||||
log.info("Webserver started");
|
||||
this.shutdownHook(() -> {
|
||||
this.close();
|
||||
KestraContext.getContext().shutdown();
|
||||
});
|
||||
Await.until(() -> !this.applicationContext.isRunning());
|
||||
return 0;
|
||||
}
|
||||
|
||||
private void close() {
|
||||
if (this.poolExecutor != null) {
|
||||
this.poolExecutor.shutdown();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,6 +7,7 @@ import io.kestra.core.runners.Worker;
|
||||
import io.kestra.core.utils.Await;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import jakarta.inject.Inject;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import picocli.CommandLine;
|
||||
import picocli.CommandLine.Option;
|
||||
|
||||
@@ -17,6 +18,7 @@ import java.util.UUID;
|
||||
name = "worker",
|
||||
description = "Start the Kestra worker"
|
||||
)
|
||||
@Slf4j
|
||||
public class WorkerCommand extends AbstractServerCommand {
|
||||
|
||||
@Inject
|
||||
@@ -37,11 +39,8 @@ public class WorkerCommand extends AbstractServerCommand {
|
||||
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
|
||||
KestraContext.getContext().injectWorkerConfigs(thread, workerGroupKey);
|
||||
|
||||
super.call();
|
||||
|
||||
this.shutdownHook(() -> KestraContext.getContext().shutdown());
|
||||
if (this.workerGroupKey != null && !this.workerGroupKey.matches("[a-zA-Z0-9_-]+")) {
|
||||
throw new IllegalArgumentException("The --worker-group option must match the [a-zA-Z0-9_-]+ pattern");
|
||||
}
|
||||
@@ -53,6 +52,13 @@ public class WorkerCommand extends AbstractServerCommand {
|
||||
|
||||
worker.run();
|
||||
|
||||
if (this.workerGroupKey != null) {
|
||||
log.info("Worker started with {} thread(s) in group '{}'", this.thread, this.workerGroupKey);
|
||||
}
|
||||
else {
|
||||
log.info("Worker started with {} thread(s)", this.thread);
|
||||
}
|
||||
|
||||
Await.until(() -> !this.applicationContext.isRunning());
|
||||
|
||||
return 0;
|
||||
|
||||
153
cli/src/main/java/io/kestra/cli/plugins/PluginDownloader.java
Normal file
153
cli/src/main/java/io/kestra/cli/plugins/PluginDownloader.java
Normal file
@@ -0,0 +1,153 @@
|
||||
package io.kestra.cli.plugins;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import io.micronaut.context.annotation.Value;
|
||||
import io.micronaut.core.annotation.Nullable;
|
||||
import jakarta.inject.Inject;
|
||||
import jakarta.inject.Singleton;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.maven.repository.internal.MavenRepositorySystemUtils;
|
||||
import org.eclipse.aether.DefaultRepositorySystemSession;
|
||||
import org.eclipse.aether.RepositorySystem;
|
||||
import org.eclipse.aether.RepositorySystemSession;
|
||||
import org.eclipse.aether.artifact.Artifact;
|
||||
import org.eclipse.aether.artifact.DefaultArtifact;
|
||||
import org.eclipse.aether.connector.basic.BasicRepositoryConnectorFactory;
|
||||
import org.eclipse.aether.impl.DefaultServiceLocator;
|
||||
import org.eclipse.aether.repository.LocalRepository;
|
||||
import org.eclipse.aether.repository.RemoteRepository;
|
||||
import org.eclipse.aether.resolution.*;
|
||||
import org.eclipse.aether.spi.connector.RepositoryConnectorFactory;
|
||||
import org.eclipse.aether.spi.connector.transport.TransporterFactory;
|
||||
import org.eclipse.aether.transport.file.FileTransporterFactory;
|
||||
import org.eclipse.aether.transport.http.HttpTransporterFactory;
|
||||
import org.eclipse.aether.util.repository.AuthenticationBuilder;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import java.nio.file.Files;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
@Singleton
|
||||
@Slf4j
|
||||
public class PluginDownloader {
|
||||
private final List<RepositoryConfig> repositoryConfigs;
|
||||
private final RepositorySystem system;
|
||||
private final RepositorySystemSession session;
|
||||
|
||||
@Inject
|
||||
public PluginDownloader(
|
||||
List<RepositoryConfig> repositoryConfigs,
|
||||
@Nullable @Value("${kestra.plugins.local-repository-path}") String localRepositoryPath
|
||||
) {
|
||||
this.repositoryConfigs = repositoryConfigs;
|
||||
this.system = repositorySystem();
|
||||
this.session = repositorySystemSession(system, localRepositoryPath);
|
||||
}
|
||||
|
||||
public void addRepository(RepositoryConfig repositoryConfig) {
|
||||
this.repositoryConfigs.add(repositoryConfig);
|
||||
}
|
||||
|
||||
public List<URL> resolve(List<String> dependencies) throws MalformedURLException, ArtifactResolutionException, VersionRangeResolutionException {
|
||||
List<RemoteRepository> repositories = remoteRepositories();
|
||||
|
||||
List<ArtifactResult> artifactResults = resolveArtifacts(repositories, dependencies);
|
||||
List<URL> localUrls = resolveUrls(artifactResults);
|
||||
log.debug("Resolved Plugin {} with {}", dependencies, localUrls);
|
||||
|
||||
return localUrls;
|
||||
}
|
||||
|
||||
private List<RemoteRepository> remoteRepositories() {
|
||||
return repositoryConfigs
|
||||
.stream()
|
||||
.map(repositoryConfig -> {
|
||||
var build = new RemoteRepository.Builder(
|
||||
repositoryConfig.getId(),
|
||||
"default",
|
||||
repositoryConfig.getUrl()
|
||||
);
|
||||
|
||||
if (repositoryConfig.getBasicAuth() != null) {
|
||||
var authenticationBuilder = new AuthenticationBuilder();
|
||||
authenticationBuilder.addUsername(repositoryConfig.getBasicAuth().getUsername());
|
||||
authenticationBuilder.addPassword(repositoryConfig.getBasicAuth().getPassword());
|
||||
|
||||
build.setAuthentication(authenticationBuilder.build());
|
||||
}
|
||||
|
||||
return build.build();
|
||||
})
|
||||
.toList();
|
||||
}
|
||||
|
||||
private static RepositorySystem repositorySystem() {
|
||||
DefaultServiceLocator locator = MavenRepositorySystemUtils.newServiceLocator();
|
||||
locator.addService(RepositoryConnectorFactory.class, BasicRepositoryConnectorFactory.class);
|
||||
locator.addService(TransporterFactory.class, FileTransporterFactory.class);
|
||||
locator.addService(TransporterFactory.class, HttpTransporterFactory.class);
|
||||
|
||||
return locator.getService(RepositorySystem.class);
|
||||
}
|
||||
|
||||
private RepositorySystemSession repositorySystemSession(RepositorySystem system, String localRepositoryPath) {
|
||||
DefaultRepositorySystemSession session = MavenRepositorySystemUtils.newSession();
|
||||
|
||||
if (localRepositoryPath == null) {
|
||||
try {
|
||||
final String tempDirectory = Files.createTempDirectory(this.getClass().getSimpleName().toLowerCase())
|
||||
.toAbsolutePath()
|
||||
.toString();
|
||||
|
||||
localRepositoryPath = tempDirectory;
|
||||
|
||||
Runtime.getRuntime().addShutdownHook(new Thread(() -> {
|
||||
try {
|
||||
FileUtils.deleteDirectory(new File(tempDirectory));
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}));
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
LocalRepository localRepo = new LocalRepository(localRepositoryPath);
|
||||
session.setLocalRepositoryManager(system.newLocalRepositoryManager(session, localRepo));
|
||||
|
||||
return session;
|
||||
}
|
||||
|
||||
private List<ArtifactResult> resolveArtifacts(List<RemoteRepository> repositories, List<String> dependencies) throws ArtifactResolutionException, VersionRangeResolutionException {
|
||||
List<ArtifactResult> results = new ArrayList<>(dependencies.size());
|
||||
for (String dependency: dependencies) {
|
||||
var artifact = new DefaultArtifact(dependency);
|
||||
var version = system.resolveVersionRange(session, new VersionRangeRequest(artifact, repositories, null));
|
||||
var artifactRequest = new ArtifactRequest(
|
||||
new DefaultArtifact(artifact.getGroupId(), artifact.getArtifactId(), "jar", version.getHighestVersion().toString()),
|
||||
repositories,
|
||||
null
|
||||
);
|
||||
var artifactResult = system.resolveArtifact(session, artifactRequest);
|
||||
results.add(artifactResult);
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
private List<URL> resolveUrls(List<ArtifactResult> artifactResults) throws MalformedURLException {
|
||||
ImmutableList.Builder<URL> urls = ImmutableList.builder();
|
||||
for (ArtifactResult artifactResult : artifactResults) {
|
||||
URL url;
|
||||
url = artifactResult.getArtifact().getFile().toPath().toUri().toURL();
|
||||
urls.add(url);
|
||||
}
|
||||
return urls.build();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,30 @@
|
||||
package io.kestra.cli.plugins;
|
||||
|
||||
import io.micronaut.context.annotation.EachProperty;
|
||||
import io.micronaut.context.annotation.Parameter;
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Getter;
|
||||
|
||||
@EachProperty("kestra.plugins.repositories")
|
||||
@Getter
|
||||
@AllArgsConstructor
|
||||
@Builder
|
||||
public class RepositoryConfig {
|
||||
String id;
|
||||
|
||||
String url;
|
||||
|
||||
BasicAuth basicAuth;
|
||||
|
||||
@Getter
|
||||
@AllArgsConstructor
|
||||
public static class BasicAuth {
|
||||
private String username;
|
||||
private String password;
|
||||
}
|
||||
|
||||
public RepositoryConfig(@Parameter String id) {
|
||||
this.id = id;
|
||||
}
|
||||
}
|
||||
@@ -1,9 +1,6 @@
|
||||
micronaut:
|
||||
application:
|
||||
name: kestra
|
||||
# Disable Micronaut Open Telemetry
|
||||
otel:
|
||||
enabled: false
|
||||
router:
|
||||
static-resources:
|
||||
swagger:
|
||||
@@ -74,13 +71,6 @@ micronaut:
|
||||
type: scheduled
|
||||
core-pool-size: 1
|
||||
|
||||
# Disable OpenTelemetry metrics by default, users that need it must enable it and configure the collector URL.
|
||||
metrics:
|
||||
export:
|
||||
otlp:
|
||||
enabled: false
|
||||
# url: http://localhost:4318/v1/metrics
|
||||
|
||||
jackson:
|
||||
serialization:
|
||||
writeDatesAsTimestamps: false
|
||||
@@ -145,11 +135,6 @@ kestra:
|
||||
initial-delay: 1h
|
||||
fixed-delay: 1h
|
||||
retention: 7d
|
||||
types:
|
||||
- type : io.kestra.core.models.executions.LogEntry
|
||||
retention: 1h
|
||||
- type: io.kestra.core.models.executions.MetricEntry
|
||||
retention: 1h
|
||||
|
||||
plugins:
|
||||
repositories:
|
||||
|
||||
@@ -45,20 +45,4 @@ class AppTest {
|
||||
assertThat(out.toString(), startsWith("Usage: kestra server " + serverType));
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void missingRequiredParamsPrintHelpInsteadOfException() {
|
||||
final ByteArrayOutputStream out = new ByteArrayOutputStream();
|
||||
System.setErr(new PrintStream(out));
|
||||
|
||||
final String[] argsWithMissingParams = new String[]{"flow", "namespace", "update"};
|
||||
|
||||
try (ApplicationContext ctx = App.applicationContext(App.class, argsWithMissingParams)) {
|
||||
new CommandLine(App.class, new MicronautFactory(ctx)).execute(argsWithMissingParams);
|
||||
|
||||
assertThat(out.toString(), startsWith("Missing required parameters: "));
|
||||
assertThat(out.toString(), containsString("Usage: kestra flow namespace update "));
|
||||
assertThat(out.toString(), not(containsString("MissingParameterException: ")));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,7 +5,6 @@ import io.micronaut.context.ApplicationContext;
|
||||
import io.micronaut.context.env.Environment;
|
||||
import io.micronaut.runtime.server.EmbeddedServer;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junitpioneer.jupiter.RetryingTest;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
@@ -16,7 +15,7 @@ import static org.hamcrest.core.Is.is;
|
||||
import static org.hamcrest.core.StringContains.containsString;
|
||||
|
||||
class FlowCreateOrUpdateCommandTest {
|
||||
@RetryingTest(5) // flaky on CI but cannot be reproduced even with 100 repetitions
|
||||
@Test
|
||||
void runWithDelete() {
|
||||
URL directory = FlowCreateOrUpdateCommandTest.class.getClassLoader().getResource("flows");
|
||||
ByteArrayOutputStream out = new ByteArrayOutputStream();
|
||||
|
||||
@@ -109,33 +109,6 @@ class FlowUpdatesCommandTest {
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void invalidWithNamespace() {
|
||||
URL directory = FlowUpdatesCommandTest.class.getClassLoader().getResource("flows");
|
||||
ByteArrayOutputStream out = new ByteArrayOutputStream();
|
||||
System.setErr(new PrintStream(out));
|
||||
|
||||
try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, Environment.TEST)) {
|
||||
|
||||
EmbeddedServer embeddedServer = ctx.getBean(EmbeddedServer.class);
|
||||
embeddedServer.start();
|
||||
|
||||
String[] args = {
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
"myuser:pass:word",
|
||||
"--namespace",
|
||||
"io.kestra.cli",
|
||||
"--delete",
|
||||
directory.getPath(),
|
||||
};
|
||||
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
|
||||
|
||||
assertThat(out.toString(), containsString("Invalid entity: flow.namespace: io.kestra.outsider_quattro_-1 - flow namespace is invalid"));
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void helper() {
|
||||
URL directory = FlowUpdatesCommandTest.class.getClassLoader().getResource("helper");
|
||||
|
||||
@@ -1,27 +0,0 @@
|
||||
package io.kestra.cli.commands.plugins;
|
||||
|
||||
import io.micronaut.configuration.picocli.PicocliRunner;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import io.micronaut.context.env.Environment;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.core.StringContains.containsString;
|
||||
|
||||
class PluginCommandTest {
|
||||
|
||||
@Test
|
||||
void shouldGetHelps() {
|
||||
ByteArrayOutputStream out = new ByteArrayOutputStream();
|
||||
System.setOut(new PrintStream(out));
|
||||
|
||||
try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, Environment.TEST)) {
|
||||
PicocliRunner.call(PluginCommand.class, ctx);
|
||||
|
||||
assertThat(out.toString(), containsString("Usage: kestra plugins"));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -9,6 +9,7 @@ import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.*;
|
||||
@@ -16,7 +17,7 @@ import static org.hamcrest.Matchers.*;
|
||||
class PluginInstallCommandTest {
|
||||
|
||||
@Test
|
||||
void shouldInstallPluginLocallyGivenFixedVersion() throws IOException {
|
||||
void fixedVersion() throws IOException {
|
||||
Path pluginsPath = Files.createTempDirectory(PluginInstallCommandTest.class.getSimpleName());
|
||||
pluginsPath.toFile().deleteOnExit();
|
||||
|
||||
@@ -27,12 +28,12 @@ class PluginInstallCommandTest {
|
||||
List<Path> files = Files.list(pluginsPath).toList();
|
||||
|
||||
assertThat(files.size(), is(1));
|
||||
assertThat(files.getFirst().getFileName().toString(), is("io_kestra_plugin__plugin-notifications__0_6_0.jar"));
|
||||
assertThat(files.getFirst().getFileName().toString(), is("plugin-notifications-0.6.0.jar"));
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void shouldInstallPluginLocallyGivenLatestVersion() throws IOException {
|
||||
void latestVersion() throws IOException {
|
||||
Path pluginsPath = Files.createTempDirectory(PluginInstallCommandTest.class.getSimpleName());
|
||||
pluginsPath.toFile().deleteOnExit();
|
||||
|
||||
@@ -43,13 +44,13 @@ class PluginInstallCommandTest {
|
||||
List<Path> files = Files.list(pluginsPath).toList();
|
||||
|
||||
assertThat(files.size(), is(1));
|
||||
assertThat(files.getFirst().getFileName().toString(), startsWith("io_kestra_plugin__plugin-notifications__"));
|
||||
assertThat(files.getFirst().getFileName().toString(), startsWith("plugin-notifications"));
|
||||
assertThat(files.getFirst().getFileName().toString(), not(containsString("LATEST")));
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void shouldInstallPluginLocallyGivenRangeVersion() throws IOException {
|
||||
void rangeVersion() throws IOException {
|
||||
Path pluginsPath = Files.createTempDirectory(PluginInstallCommandTest.class.getSimpleName());
|
||||
pluginsPath.toFile().deleteOnExit();
|
||||
|
||||
@@ -61,7 +62,7 @@ class PluginInstallCommandTest {
|
||||
List<Path> files = Files.list(pluginsPath).toList();
|
||||
|
||||
assertThat(files.size(), is(1));
|
||||
assertThat(files.getFirst().getFileName().toString(), is("io_kestra_storage__storage-s3__0_12_1.jar"));
|
||||
assertThat(files.getFirst().getFileName().toString(), is("storage-s3-0.12.1.jar"));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,6 +4,7 @@ import io.micronaut.configuration.picocli.PicocliRunner;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import io.micronaut.context.env.Environment;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.junit.jupiter.api.BeforeAll;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
@@ -24,7 +25,7 @@ class PluginListCommandTest {
|
||||
private static final String PLUGIN_TEMPLATE_TEST = "plugin-template-test-0.18.0-SNAPSHOT.jar";
|
||||
|
||||
@Test
|
||||
void shouldListPluginsInstalledLocally() throws IOException, URISyntaxException {
|
||||
void run() throws IOException, URISyntaxException {
|
||||
Path pluginsPath = Files.createTempDirectory(PluginListCommandTest.class.getSimpleName());
|
||||
pluginsPath.toFile().deleteOnExit();
|
||||
|
||||
|
||||
@@ -1,105 +0,0 @@
|
||||
package io.kestra.cli.commands.plugins;
|
||||
|
||||
import com.github.tomakehurst.wiremock.junit5.WireMockTest;
|
||||
import io.micronaut.configuration.picocli.PicocliRunner;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import io.micronaut.context.env.Environment;
|
||||
import org.junit.jupiter.api.AfterEach;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
import java.util.Map;
|
||||
|
||||
import static com.github.tomakehurst.wiremock.client.WireMock.*;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.*;
|
||||
|
||||
@WireMockTest(httpPort = 28181)
|
||||
class PluginSearchCommandTest {
|
||||
private ByteArrayOutputStream outputStreamCaptor;
|
||||
private final PrintStream originalOut = System.out;
|
||||
|
||||
@BeforeEach
|
||||
void setUp() {
|
||||
outputStreamCaptor = new ByteArrayOutputStream();
|
||||
System.setOut(new PrintStream(outputStreamCaptor));
|
||||
}
|
||||
|
||||
@AfterEach
|
||||
void tearDown() {
|
||||
System.setOut(originalOut);
|
||||
}
|
||||
|
||||
@Test
|
||||
void searchWithExactMatch() {
|
||||
stubFor(get(urlEqualTo("/v1/plugins"))
|
||||
.willReturn(aResponse()
|
||||
.withHeader("Content-Type", "application/json")
|
||||
.withBody("""
|
||||
[
|
||||
{
|
||||
"name": "plugin-notifications",
|
||||
"title": "Notifications",
|
||||
"group": "io.kestra.plugin",
|
||||
"version": "0.6.0"
|
||||
},
|
||||
{
|
||||
"name": "plugin-scripts",
|
||||
"title": "Scripts",
|
||||
"group": "io.kestra.plugin",
|
||||
"version": "0.5.0"
|
||||
}
|
||||
]
|
||||
""")));
|
||||
|
||||
try (ApplicationContext ctx = ApplicationContext.builder(Environment.CLI, Environment.TEST)
|
||||
.properties(Map.of("micronaut.http.services.api.url", "http://localhost:28181"))
|
||||
.start()) {
|
||||
String[] args = {"notifications"};
|
||||
PicocliRunner.call(PluginSearchCommand.class, ctx, args);
|
||||
|
||||
String output = outputStreamCaptor.toString().trim();
|
||||
assertThat(output, containsString("Found 1 plugins matching 'notifications'"));
|
||||
assertThat(output, containsString("plugin-notifications"));
|
||||
assertThat(output, not(containsString("plugin-scripts")));
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void searchWithEmptyQuery() {
|
||||
stubFor(get(urlEqualTo("/v1/plugins"))
|
||||
.willReturn(aResponse()
|
||||
.withHeader("Content-Type", "application/json")
|
||||
.withBody("""
|
||||
[
|
||||
{
|
||||
"name": "plugin-notifications",
|
||||
"title": "Notifications",
|
||||
"group": "io.kestra.plugin",
|
||||
"version": "0.6.0"
|
||||
},
|
||||
{
|
||||
"name": "plugin-scripts",
|
||||
"title": "Scripts",
|
||||
"group": "io.kestra.plugin",
|
||||
"version": "0.5.0"
|
||||
}
|
||||
]
|
||||
""")));
|
||||
|
||||
try (ApplicationContext ctx = ApplicationContext.builder(Environment.CLI, Environment.TEST)
|
||||
.properties(Map.of("micronaut.http.services.api.url", "http://localhost:28181"))
|
||||
.start()) {
|
||||
|
||||
String[] args = {""};
|
||||
PicocliRunner.call(PluginSearchCommand.class, ctx, args);
|
||||
|
||||
String output = outputStreamCaptor.toString().trim();
|
||||
assertThat(output, containsString("Found 2 plugins"));
|
||||
assertThat(output, containsString("plugin-notifications"));
|
||||
assertThat(output, containsString("plugin-scripts"));
|
||||
}
|
||||
}
|
||||
}
|
||||
76
codecov.yml
76
codecov.yml
@@ -1,76 +0,0 @@
|
||||
component_management:
|
||||
individual_components:
|
||||
- component_id: cli
|
||||
name: Cli
|
||||
paths:
|
||||
- cli/**
|
||||
- component_id: core
|
||||
name: Core
|
||||
paths:
|
||||
- core/**
|
||||
- component_id: e2e-tests
|
||||
name: End to End
|
||||
paths:
|
||||
- e2e-tests/**
|
||||
- component_id: jdbc
|
||||
name: Jdbc
|
||||
paths:
|
||||
- jdbc/**
|
||||
- component_id: jdbc-h2
|
||||
name: Jdbc H2
|
||||
paths:
|
||||
- jdbc-h2/**
|
||||
- component_id: jdbc-mysql
|
||||
name: Jdbc Mysql
|
||||
paths:
|
||||
- jdbc-mysql/**
|
||||
- component_id: jdbc-postgres
|
||||
name: Jdbc Postgres
|
||||
paths:
|
||||
- jdbc-postgres/**
|
||||
- component_id: model
|
||||
name: Model
|
||||
paths:
|
||||
- model/**
|
||||
- component_id: processor
|
||||
name: Processor
|
||||
paths:
|
||||
- processor/**
|
||||
- component_id: repository-memory
|
||||
name: Repository Memory
|
||||
paths:
|
||||
- repository-memory/**
|
||||
- component_id: runner-memory
|
||||
name: Runner Memory
|
||||
paths:
|
||||
- runner-memory/**
|
||||
- component_id: script
|
||||
name: Script
|
||||
paths:
|
||||
- script/**
|
||||
- component_id: storage-local
|
||||
name: Storage Local
|
||||
paths:
|
||||
- storage-local/**
|
||||
- component_id: tests
|
||||
name: Tests
|
||||
paths:
|
||||
- tests/**
|
||||
- component_id: ui
|
||||
name: Ui
|
||||
paths:
|
||||
- ui/**
|
||||
- component_id: webserver
|
||||
name: Webserver
|
||||
paths:
|
||||
- webserver/**
|
||||
|
||||
flag_management:
|
||||
default_rules:
|
||||
carryforward: true
|
||||
statuses:
|
||||
- type: project
|
||||
target: 80%
|
||||
threshold: 1%
|
||||
- type: patch
|
||||
target: 90%
|
||||
@@ -38,13 +38,6 @@ dependencies {
|
||||
implementation group: 'dev.failsafe', name: 'failsafe'
|
||||
api 'org.apache.httpcomponents.client5:httpclient5'
|
||||
|
||||
// plugins
|
||||
implementation 'org.apache.maven.resolver:maven-resolver-impl'
|
||||
implementation 'org.apache.maven.resolver:maven-resolver-supplier'
|
||||
implementation 'org.apache.maven.resolver:maven-resolver-connector-basic'
|
||||
implementation 'org.apache.maven.resolver:maven-resolver-transport-file'
|
||||
implementation 'org.apache.maven.resolver:maven-resolver-transport-http'
|
||||
|
||||
// scheduler
|
||||
implementation group: 'com.cronutils', name: 'cron-utils'
|
||||
|
||||
@@ -73,7 +66,7 @@ dependencies {
|
||||
testImplementation "io.micronaut:micronaut-http-server-netty"
|
||||
testImplementation "io.micronaut:micronaut-management"
|
||||
|
||||
testImplementation "org.testcontainers:testcontainers:1.20.6"
|
||||
testImplementation "org.testcontainers:junit-jupiter:1.20.6"
|
||||
testImplementation "org.testcontainers:testcontainers:1.20.4"
|
||||
testImplementation "org.testcontainers:junit-jupiter:1.20.4"
|
||||
testImplementation "org.bouncycastle:bcpkix-jdk18on:1.80"
|
||||
}
|
||||
|
||||
@@ -3,6 +3,7 @@ package io.kestra.core.contexts;
|
||||
import io.kestra.core.exceptions.KestraRuntimeException;
|
||||
import io.kestra.core.plugins.DefaultPluginRegistry;
|
||||
import io.kestra.core.plugins.PluginRegistry;
|
||||
import io.kestra.core.plugins.serdes.PluginDeserializer;
|
||||
import io.kestra.core.storages.StorageInterface;
|
||||
import io.kestra.core.storages.StorageInterfaceFactory;
|
||||
import io.micronaut.context.annotation.Bean;
|
||||
@@ -33,7 +34,7 @@ public class KestraBeansFactory {
|
||||
StorageConfig storageConfig;
|
||||
|
||||
@Value("${kestra.storage.type}")
|
||||
protected Optional<String> storageType;
|
||||
Optional<String> storageType;
|
||||
|
||||
@Requires(missingBeans = PluginRegistry.class)
|
||||
@Singleton
|
||||
@@ -41,25 +42,16 @@ public class KestraBeansFactory {
|
||||
return DefaultPluginRegistry.getOrCreate();
|
||||
}
|
||||
|
||||
@Singleton
|
||||
public StorageInterfaceFactory storageInterfaceFactory(final PluginRegistry pluginRegistry){
|
||||
return new StorageInterfaceFactory(pluginRegistry, validator);
|
||||
}
|
||||
|
||||
@Requires(missingBeans = StorageInterface.class)
|
||||
@Singleton
|
||||
@Bean(preDestroy = "close")
|
||||
public StorageInterface storageInterface(final StorageInterfaceFactory storageInterfaceFactory) throws IOException {
|
||||
String pluginId = getStoragePluginId(storageInterfaceFactory);
|
||||
return storageInterfaceFactory.make(null, pluginId, storageConfig.getStorageConfig(pluginId));
|
||||
}
|
||||
|
||||
public String getStoragePluginId(StorageInterfaceFactory storageInterfaceFactory) {
|
||||
return storageType.orElseThrow(() -> new KestraRuntimeException(String.format(
|
||||
public StorageInterface storageInterface(final PluginRegistry pluginRegistry) throws IOException {
|
||||
String pluginId = storageType.orElseThrow(() -> new KestraRuntimeException(String.format(
|
||||
"No storage configured through the application property '%s'. Supported types are: %s"
|
||||
, KESTRA_STORAGE_TYPE_CONFIG,
|
||||
storageInterfaceFactory.getLoggableStorageIds()
|
||||
StorageInterfaceFactory.getLoggableStorageIds(pluginRegistry)
|
||||
)));
|
||||
return StorageInterfaceFactory.make(pluginRegistry, pluginId, storageConfig.getStorageConfig(pluginId), validator);
|
||||
}
|
||||
|
||||
@ConfigurationProperties("kestra")
|
||||
@@ -75,7 +67,7 @@ public class KestraBeansFactory {
|
||||
* @return the configuration.
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
public Map<String, Object> getStorageConfig(String type) {
|
||||
private Map<String, Object> getStorageConfig(String type) {
|
||||
return (Map<String, Object>) storage.get(StringConvention.CAMEL_CASE.format(type));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -10,8 +10,6 @@ import io.micronaut.context.env.Environment;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
import java.util.concurrent.atomic.AtomicReference;
|
||||
@@ -27,11 +25,7 @@ public abstract class KestraContext {
|
||||
private static final AtomicReference<KestraContext> INSTANCE = new AtomicReference<>();
|
||||
|
||||
// Properties
|
||||
public static final String KESTRA_SERVER_TYPE = "kestra.server-type";
|
||||
|
||||
// Those properties are injected bases on the CLI args.
|
||||
private static final String KESTRA_WORKER_MAX_NUM_THREADS = "kestra.worker.max-num-threads";
|
||||
private static final String KESTRA_WORKER_GROUP_KEY = "kestra.worker.group-key";
|
||||
private static final String KESTRA_SERVER_TYPE = "kestra.server-type";
|
||||
|
||||
/**
|
||||
* Gets the current {@link KestraContext}.
|
||||
@@ -60,12 +54,6 @@ public abstract class KestraContext {
|
||||
*/
|
||||
public abstract ServerType getServerType();
|
||||
|
||||
public abstract Optional<Integer> getWorkerMaxNumThreads();
|
||||
|
||||
public abstract Optional<String> getWorkerGroupKey();
|
||||
|
||||
public abstract void injectWorkerConfigs(Integer maxNumThreads, String workerGroupKey);
|
||||
|
||||
/**
|
||||
* Returns the Kestra Version.
|
||||
*
|
||||
@@ -122,34 +110,6 @@ public abstract class KestraContext {
|
||||
.orElse(ServerType.STANDALONE);
|
||||
}
|
||||
|
||||
/** {@inheritDoc} **/
|
||||
@Override
|
||||
public Optional<Integer> getWorkerMaxNumThreads() {
|
||||
return Optional.ofNullable(environment)
|
||||
.flatMap(env -> env.getProperty(KESTRA_WORKER_MAX_NUM_THREADS, Integer.class));
|
||||
}
|
||||
|
||||
/** {@inheritDoc} **/
|
||||
@Override
|
||||
public Optional<String> getWorkerGroupKey() {
|
||||
return Optional.ofNullable(environment)
|
||||
.flatMap(env -> env.getProperty(KESTRA_WORKER_GROUP_KEY, String.class));
|
||||
}
|
||||
/** {@inheritDoc} **/
|
||||
@Override
|
||||
public void injectWorkerConfigs(Integer maxNumThreads, String workerGroupKey) {
|
||||
final Map<String, Object> configs = new HashMap<>();
|
||||
Optional.ofNullable(maxNumThreads)
|
||||
.ifPresent(val -> configs.put(KESTRA_WORKER_MAX_NUM_THREADS, val));
|
||||
|
||||
Optional.ofNullable(workerGroupKey)
|
||||
.ifPresent(val -> configs.put(KESTRA_WORKER_GROUP_KEY, val));
|
||||
|
||||
if (!configs.isEmpty()) {
|
||||
environment.addPropertySource("kestra-runtime", configs);
|
||||
}
|
||||
}
|
||||
|
||||
/** {@inheritDoc} **/
|
||||
@Override
|
||||
public void shutdown() {
|
||||
|
||||
@@ -1,27 +0,0 @@
|
||||
package io.kestra.core.contexts;
|
||||
|
||||
import io.micronaut.context.annotation.ConfigurationProperties;
|
||||
import io.micronaut.context.annotation.EachProperty;
|
||||
import io.micronaut.context.annotation.Parameter;
|
||||
import io.micronaut.core.annotation.Nullable;
|
||||
import lombok.Builder;
|
||||
|
||||
@Builder
|
||||
@EachProperty("kestra.plugins.repositories")
|
||||
public record MavenPluginRepositoryConfig(
|
||||
@Parameter
|
||||
String id,
|
||||
String url,
|
||||
@Nullable
|
||||
BasicAuth basicAuth
|
||||
) {
|
||||
|
||||
@Builder
|
||||
@ConfigurationProperties("basic-auth")
|
||||
public record BasicAuth(
|
||||
String username,
|
||||
String password
|
||||
) {
|
||||
|
||||
}
|
||||
}
|
||||
@@ -1,7 +1,6 @@
|
||||
package io.kestra.core.docs;
|
||||
|
||||
import com.google.common.base.CaseFormat;
|
||||
import io.kestra.core.models.Plugin;
|
||||
import io.kestra.core.models.tasks.retrys.AbstractRetry;
|
||||
import io.kestra.core.models.tasks.runners.TaskRunner;
|
||||
import lombok.AllArgsConstructor;
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
package io.kestra.core.docs;
|
||||
|
||||
import io.kestra.core.plugins.PluginClassAndMetadata;
|
||||
import io.kestra.core.plugins.RegisteredPlugin;
|
||||
import lombok.*;
|
||||
|
||||
import java.util.*;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
@Getter
|
||||
@EqualsAndHashCode
|
||||
@@ -20,18 +21,16 @@ public class ClassPluginDocumentation<T> extends AbstractClassDocumentation<T> {
|
||||
private Map<String, Object> outputsSchema;
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private ClassPluginDocumentation(JsonSchemaGenerator jsonSchemaGenerator, PluginClassAndMetadata<T> plugin, boolean allProperties) {
|
||||
super(jsonSchemaGenerator, plugin.type(), allProperties ? null : plugin.baseClass());
|
||||
private ClassPluginDocumentation(JsonSchemaGenerator jsonSchemaGenerator, RegisteredPlugin plugin, Class<? extends T> cls, Class<T> baseCls, String alias) {
|
||||
super(jsonSchemaGenerator, cls, baseCls);
|
||||
|
||||
// plugins metadata
|
||||
Class<? extends T> cls = plugin.type();
|
||||
|
||||
this.cls = plugin.alias() == null ? cls.getName() : plugin.alias();
|
||||
this.cls = alias == null ? cls.getName() : alias;
|
||||
this.group = plugin.group();
|
||||
this.docLicense = plugin.license();
|
||||
this.pluginTitle = plugin.title();
|
||||
this.icon = plugin.icon();
|
||||
if (plugin.alias() != null) {
|
||||
this.icon = plugin.icon(cls);
|
||||
if (alias != null) {
|
||||
replacement = cls.getName();
|
||||
}
|
||||
|
||||
@@ -39,10 +38,10 @@ public class ClassPluginDocumentation<T> extends AbstractClassDocumentation<T> {
|
||||
this.subGroup = cls.getPackageName().substring(this.group.length() + 1);
|
||||
}
|
||||
|
||||
this.shortName = plugin.alias() == null ? cls.getSimpleName() : plugin.alias().substring(plugin.alias().lastIndexOf('.') + 1);
|
||||
this.shortName = alias == null ? cls.getSimpleName() : alias.substring(alias.lastIndexOf('.') + 1);
|
||||
|
||||
// outputs
|
||||
this.outputsSchema = jsonSchemaGenerator.outputs(allProperties ? null : plugin.baseClass(), cls);
|
||||
this.outputsSchema = jsonSchemaGenerator.outputs(baseCls, cls);
|
||||
|
||||
if (this.outputsSchema.containsKey("$defs")) {
|
||||
this.defs.putAll((Map<String, Object>) this.outputsSchema.get("$defs"));
|
||||
@@ -68,13 +67,17 @@ public class ClassPluginDocumentation<T> extends AbstractClassDocumentation<T> {
|
||||
.toList();
|
||||
}
|
||||
|
||||
if (plugin.alias() != null) {
|
||||
if (alias != null) {
|
||||
this.deprecated = true;
|
||||
}
|
||||
}
|
||||
|
||||
public static <T> ClassPluginDocumentation<T> of(JsonSchemaGenerator jsonSchemaGenerator, PluginClassAndMetadata<T> plugin, boolean allProperties) {
|
||||
return new ClassPluginDocumentation<>(jsonSchemaGenerator, plugin, allProperties);
|
||||
public static <T> ClassPluginDocumentation<T> of(JsonSchemaGenerator jsonSchemaGenerator, RegisteredPlugin plugin, Class<? extends T> cls, Class<T> baseCls) {
|
||||
return new ClassPluginDocumentation<>(jsonSchemaGenerator, plugin, cls, baseCls, null);
|
||||
}
|
||||
|
||||
public static <T> ClassPluginDocumentation<T> of(JsonSchemaGenerator jsonSchemaGenerator, RegisteredPlugin plugin, Class<? extends T> cls, Class<T> baseCls, String alias) {
|
||||
return new ClassPluginDocumentation<>(jsonSchemaGenerator, plugin, cls, baseCls, alias);
|
||||
}
|
||||
|
||||
@AllArgsConstructor
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
package io.kestra.core.docs;
|
||||
|
||||
import com.google.common.base.Charsets;
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import io.kestra.core.models.annotations.PluginSubGroup;
|
||||
import io.kestra.core.models.conditions.Condition;
|
||||
@@ -7,7 +8,6 @@ import io.kestra.core.models.tasks.logs.LogExporter;
|
||||
import io.kestra.core.models.tasks.runners.TaskRunner;
|
||||
import io.kestra.core.models.tasks.Task;
|
||||
import io.kestra.core.models.triggers.AbstractTrigger;
|
||||
import io.kestra.core.plugins.PluginClassAndMetadata;
|
||||
import io.kestra.core.plugins.RegisteredPlugin;
|
||||
import io.kestra.core.runners.pebble.Extension;
|
||||
import io.kestra.core.runners.pebble.JsonWriter;
|
||||
@@ -29,7 +29,6 @@ import org.apache.commons.io.IOUtils;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.Writer;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.*;
|
||||
import java.util.regex.Pattern;
|
||||
import java.util.stream.Collectors;
|
||||
@@ -38,7 +37,7 @@ import static io.kestra.core.utils.Rethrow.throwFunction;
|
||||
|
||||
@Singleton
|
||||
public class DocumentationGenerator {
|
||||
private static final PebbleEngine PEBBLE_ENGINE;
|
||||
private static PebbleEngine pebbleEngine;
|
||||
|
||||
@Inject
|
||||
JsonSchemaGenerator jsonSchemaGenerator;
|
||||
@@ -47,7 +46,7 @@ public class DocumentationGenerator {
|
||||
ClasspathLoader classpathLoader = new ClasspathLoader();
|
||||
classpathLoader.setPrefix("docs/");
|
||||
|
||||
PEBBLE_ENGINE = new PebbleEngine.Builder()
|
||||
pebbleEngine = new PebbleEngine.Builder()
|
||||
.newLineTrimming(false)
|
||||
.loader(classpathLoader)
|
||||
.extension(new AbstractExtension() {
|
||||
@@ -63,7 +62,6 @@ public class DocumentationGenerator {
|
||||
.build();
|
||||
}
|
||||
|
||||
@SuppressWarnings({"unchecked", "rawtypes"})
|
||||
public List<Document> generate(RegisteredPlugin registeredPlugin) throws Exception {
|
||||
ArrayList<Document> result = new ArrayList<>();
|
||||
|
||||
@@ -74,7 +72,7 @@ public class DocumentationGenerator {
|
||||
result.addAll(this.generate(registeredPlugin, registeredPlugin.getConditions(), Condition.class, "conditions"));
|
||||
//noinspection unchecked
|
||||
result.addAll(this.generate(registeredPlugin, registeredPlugin.getTaskRunners(), (Class) TaskRunner.class, "task-runners"));
|
||||
result.addAll(this.generate(registeredPlugin, registeredPlugin.getLogExporters(), (Class) LogExporter.class, "log-exporters"));
|
||||
result.addAll(this.generate(registeredPlugin, registeredPlugin.getLogExporters(), LogExporter.class, "log-exporters"));
|
||||
|
||||
result.addAll(guides(registeredPlugin));
|
||||
|
||||
@@ -218,15 +216,7 @@ public class DocumentationGenerator {
|
||||
private <T> List<Document> generate(RegisteredPlugin registeredPlugin, List<Class<? extends T>> cls, Class<T> baseCls, String type) {
|
||||
return cls
|
||||
.stream()
|
||||
.map(pluginClass -> {
|
||||
PluginClassAndMetadata<T> metadata = PluginClassAndMetadata.create(
|
||||
registeredPlugin,
|
||||
pluginClass,
|
||||
baseCls,
|
||||
null
|
||||
);
|
||||
return ClassPluginDocumentation.of(jsonSchemaGenerator, metadata, true);
|
||||
})
|
||||
.map(r -> ClassPluginDocumentation.of(jsonSchemaGenerator, registeredPlugin, r, baseCls))
|
||||
.map(pluginDocumentation -> {
|
||||
try {
|
||||
return new Document(
|
||||
@@ -256,21 +246,21 @@ public class DocumentationGenerator {
|
||||
classPluginDocumentation.getCls() + ".md";
|
||||
}
|
||||
|
||||
public static String render(ClassPluginDocumentation<?> classPluginDocumentation) throws IOException {
|
||||
public static <T> String render(ClassPluginDocumentation<T> classPluginDocumentation) throws IOException {
|
||||
return render("task", JacksonMapper.toMap(classPluginDocumentation));
|
||||
}
|
||||
|
||||
public static String render(AbstractClassDocumentation classInputDocumentation) throws IOException {
|
||||
public static <T> String render(AbstractClassDocumentation<T> classInputDocumentation) throws IOException {
|
||||
return render("task", JacksonMapper.toMap(classInputDocumentation));
|
||||
}
|
||||
|
||||
public static String render(String templateName, Map<String, Object> vars) throws IOException {
|
||||
public static <T> String render(String templateName, Map<String, Object> vars) throws IOException {
|
||||
String pebbleTemplate = IOUtils.toString(
|
||||
Objects.requireNonNull(DocumentationGenerator.class.getClassLoader().getResourceAsStream("docs/" + templateName + ".peb")),
|
||||
StandardCharsets.UTF_8
|
||||
Charsets.UTF_8
|
||||
);
|
||||
|
||||
PebbleTemplate compiledTemplate = PEBBLE_ENGINE.getLiteralTemplate(pebbleTemplate);
|
||||
PebbleTemplate compiledTemplate = pebbleEngine.getLiteralTemplate(pebbleTemplate);
|
||||
|
||||
Writer writer = new JsonWriter();
|
||||
compiledTemplate.evaluate(writer, vars);
|
||||
|
||||
@@ -1,65 +0,0 @@
|
||||
package io.kestra.core.docs;
|
||||
|
||||
import io.kestra.core.models.dashboards.Dashboard;
|
||||
import io.kestra.core.models.flows.Flow;
|
||||
import io.kestra.core.models.flows.PluginDefault;
|
||||
import io.kestra.core.models.tasks.Task;
|
||||
import io.kestra.core.models.templates.Template;
|
||||
import io.kestra.core.models.triggers.AbstractTrigger;
|
||||
import jakarta.inject.Singleton;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.Optional;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
import java.util.concurrent.ConcurrentMap;
|
||||
|
||||
/**
|
||||
* Service for getting schemas.
|
||||
*/
|
||||
@Singleton
|
||||
public class JsonSchemaCache {
|
||||
|
||||
private final JsonSchemaGenerator jsonSchemaGenerator;
|
||||
|
||||
private final ConcurrentMap<CacheKey, Map<String, Object>> schemaCache = new ConcurrentHashMap<>();
|
||||
|
||||
private final Map<SchemaType, Class<?>> classesBySchemaType = new HashMap<>();
|
||||
|
||||
/**
|
||||
* Creates a new {@link JsonSchemaCache} instance.
|
||||
*
|
||||
* @param jsonSchemaGenerator The {@link JsonSchemaGenerator}.
|
||||
*/
|
||||
public JsonSchemaCache(final JsonSchemaGenerator jsonSchemaGenerator) {
|
||||
this.jsonSchemaGenerator = Objects.requireNonNull(jsonSchemaGenerator, "JsonSchemaGenerator cannot be null");
|
||||
registerClassForType(SchemaType.FLOW, Flow.class);
|
||||
registerClassForType(SchemaType.TEMPLATE, Template.class);
|
||||
registerClassForType(SchemaType.TASK, Task.class);
|
||||
registerClassForType(SchemaType.TRIGGER, AbstractTrigger.class);
|
||||
registerClassForType(SchemaType.PLUGINDEFAULT, PluginDefault.class);
|
||||
registerClassForType(SchemaType.DASHBOARD, Dashboard.class);
|
||||
}
|
||||
|
||||
public Map<String, Object> getSchemaForType(final SchemaType type,
|
||||
final boolean arrayOf) {
|
||||
return schemaCache.computeIfAbsent(new CacheKey(type, arrayOf), (key) -> {
|
||||
|
||||
Class<?> cls = Optional.ofNullable(classesBySchemaType.get(type))
|
||||
.orElseThrow(() -> new IllegalArgumentException("Cannot found schema for type '" + type + "'"));
|
||||
return jsonSchemaGenerator.schemas(cls, arrayOf);
|
||||
});
|
||||
}
|
||||
|
||||
public void registerClassForType(final SchemaType type, final Class<?> clazz) {
|
||||
classesBySchemaType.put(type, clazz);
|
||||
}
|
||||
|
||||
public void clear() {
|
||||
schemaCache.clear();
|
||||
}
|
||||
|
||||
private record CacheKey(SchemaType type, boolean arrayOf) {
|
||||
}
|
||||
}
|
||||
@@ -81,7 +81,7 @@ public class JsonSchemaGenerator {
|
||||
objectNode.put("type", "array");
|
||||
}
|
||||
replaceAnyOfWithOneOf(objectNode);
|
||||
pullDocumentationAndDefaultFromOneOf(objectNode);
|
||||
pullOfDefaultFromOneOf(objectNode);
|
||||
removeRequiredOnPropsWithDefaults(objectNode);
|
||||
|
||||
return JacksonMapper.toMap(objectNode);
|
||||
@@ -122,35 +122,22 @@ public class JsonSchemaGenerator {
|
||||
// This hack exists because for Property we generate a oneOf for properties that are not strings.
|
||||
// By default, the 'default' is in each oneOf which Monaco editor didn't take into account.
|
||||
// So, we pull off the 'default' from any of the oneOf to the parent.
|
||||
// same thing for documentation fields: 'title', 'description', '$deprecated'
|
||||
private void pullDocumentationAndDefaultFromOneOf(ObjectNode objectNode) {
|
||||
private void pullOfDefaultFromOneOf(ObjectNode objectNode) {
|
||||
objectNode.findParents("oneOf").forEach(jsonNode -> {
|
||||
if (jsonNode instanceof ObjectNode oNode) {
|
||||
JsonNode oneOf = oNode.get("oneOf");
|
||||
if (oneOf instanceof ArrayNode arrayNode) {
|
||||
Iterator<JsonNode> it = arrayNode.elements();
|
||||
var nodesToPullUp = new HashMap<String, Optional<JsonNode>>(Map.ofEntries(
|
||||
Map.entry("default", Optional.empty()),
|
||||
Map.entry("title", Optional.empty()),
|
||||
Map.entry("description", Optional.empty()),
|
||||
Map.entry("$deprecated", Optional.empty())
|
||||
));
|
||||
// find nodes to pull up
|
||||
while (it.hasNext() && nodesToPullUp.containsValue(Optional.<JsonNode>empty())) {
|
||||
JsonNode defaultNode = null;
|
||||
while (it.hasNext() && defaultNode == null) {
|
||||
JsonNode next = it.next();
|
||||
if (next instanceof ObjectNode nextAsObj) {
|
||||
nodesToPullUp.entrySet().stream()
|
||||
.filter(node -> node.getValue().isEmpty())
|
||||
.forEach(node -> node
|
||||
.setValue(Optional.ofNullable(
|
||||
nextAsObj.get(node.getKey())
|
||||
)));
|
||||
defaultNode = nextAsObj.get("default");
|
||||
}
|
||||
}
|
||||
// create nodes on parent
|
||||
nodesToPullUp.entrySet().stream()
|
||||
.filter(node -> node.getValue().isPresent())
|
||||
.forEach(node -> oNode.set(node.getKey(), node.getValue().get()));
|
||||
if (defaultNode != null) {
|
||||
oNode.set("default", defaultNode);
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
@@ -331,9 +318,6 @@ public class JsonSchemaGenerator {
|
||||
if (pluginPropertyAnnotation.beta()) {
|
||||
memberAttributes.put("$beta", true);
|
||||
}
|
||||
if (pluginPropertyAnnotation.internalStorageURI()) {
|
||||
memberAttributes.put("$internalStorageURI", true);
|
||||
}
|
||||
}
|
||||
|
||||
Schema schema = member.getAnnotationConsideringFieldAndGetter(Schema.class);
|
||||
@@ -642,7 +626,7 @@ public class JsonSchemaGenerator {
|
||||
try {
|
||||
ObjectNode objectNode = generator.generateSchema(cls);
|
||||
replaceAnyOfWithOneOf(objectNode);
|
||||
pullDocumentationAndDefaultFromOneOf(objectNode);
|
||||
pullOfDefaultFromOneOf(objectNode);
|
||||
removeRequiredOnPropsWithDefaults(objectNode);
|
||||
|
||||
return JacksonMapper.toMap(extractMainRef(objectNode));
|
||||
|
||||
@@ -7,7 +7,6 @@ import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
import java.util.*;
|
||||
import java.util.function.Predicate;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static java.util.function.Predicate.not;
|
||||
@@ -51,12 +50,9 @@ public class Plugin {
|
||||
if (subgroup == null) {
|
||||
plugin.title = registeredPlugin.title();
|
||||
} else {
|
||||
subGroupInfos = registeredPlugin.allClass().stream()
|
||||
.filter(c -> c.getPackageName().contains(subgroup))
|
||||
.min(Comparator.comparingInt(a -> a.getPackageName().length()))
|
||||
.map(clazz -> clazz.getPackage().getDeclaredAnnotation(PluginSubGroup.class))
|
||||
.orElseThrow();
|
||||
plugin.title = !subGroupInfos.title().isEmpty() ? subGroupInfos.title() : subgroup.substring(subgroup.lastIndexOf('.') + 1);
|
||||
subGroupInfos = registeredPlugin.allClass().stream().filter(c -> c.getName().contains(subgroup)).map(clazz -> clazz.getPackage().getDeclaredAnnotation(PluginSubGroup.class)).toList().getFirst();
|
||||
plugin.title = !subGroupInfos.title().isEmpty() ? subGroupInfos.title() : subgroup.substring(subgroup.lastIndexOf('.') + 1);;
|
||||
|
||||
}
|
||||
plugin.group = registeredPlugin.group();
|
||||
plugin.description = subGroupInfos != null && !subGroupInfos.description().isEmpty() ? subGroupInfos.description() : registeredPlugin.description();
|
||||
@@ -78,28 +74,27 @@ public class Plugin {
|
||||
plugin.categories = subGroupInfos != null ?
|
||||
Arrays.stream(subGroupInfos.categories()).toList() :
|
||||
registeredPlugin
|
||||
.allClass()
|
||||
.stream()
|
||||
.map(clazz -> clazz.getPackage().getDeclaredAnnotation(PluginSubGroup.class))
|
||||
.filter(Objects::nonNull)
.flatMap(r -> Arrays.stream(r.categories()))
.distinct()
.toList();
.allClass()
.stream()
.map(clazz -> clazz.getPackage().getDeclaredAnnotation(PluginSubGroup.class))
.filter(Objects::nonNull)
.flatMap(r -> Arrays.stream(r.categories()))
.distinct()
.toList();

plugin.subGroup = subgroup;

Predicate<Class<?>> packagePredicate = c -> subgroup == null || c.getPackageName().equals(subgroup);
plugin.tasks = filterAndGetClassName(registeredPlugin.getTasks(), includeDeprecated, packagePredicate).stream().toList();
plugin.triggers = filterAndGetClassName(registeredPlugin.getTriggers(), includeDeprecated, packagePredicate).stream().toList();
plugin.conditions = filterAndGetClassName(registeredPlugin.getConditions(), includeDeprecated, packagePredicate).stream().toList();
plugin.storages = filterAndGetClassName(registeredPlugin.getStorages(), includeDeprecated, packagePredicate).stream().toList();
plugin.secrets = filterAndGetClassName(registeredPlugin.getSecrets(), includeDeprecated, packagePredicate).stream().toList();
plugin.taskRunners = filterAndGetClassName(registeredPlugin.getTaskRunners(), includeDeprecated, packagePredicate).stream().toList();
plugin.apps = filterAndGetClassName(registeredPlugin.getApps(), includeDeprecated, packagePredicate).stream().toList();
plugin.appBlocks = filterAndGetClassName(registeredPlugin.getAppBlocks(), includeDeprecated, packagePredicate).stream().toList();
plugin.charts = filterAndGetClassName(registeredPlugin.getCharts(), includeDeprecated, packagePredicate).stream().toList();
plugin.dataFilters = filterAndGetClassName(registeredPlugin.getDataFilters(), includeDeprecated, packagePredicate).stream().toList();
plugin.logExporters = filterAndGetClassName(registeredPlugin.getLogExporters(), includeDeprecated, packagePredicate).stream().toList();
plugin.tasks = filterAndGetClassName(registeredPlugin.getTasks(), includeDeprecated).stream().filter(c -> subgroup == null || c.startsWith(subgroup)).toList();
plugin.triggers = filterAndGetClassName(registeredPlugin.getTriggers(), includeDeprecated).stream().filter(c -> subgroup == null || c.startsWith(subgroup)).toList();
plugin.conditions = filterAndGetClassName(registeredPlugin.getConditions(), includeDeprecated).stream().filter(c -> subgroup == null || c.startsWith(subgroup)).toList();
plugin.storages = filterAndGetClassName(registeredPlugin.getStorages(), includeDeprecated).stream().filter(c -> subgroup == null || c.startsWith(subgroup)).toList();
plugin.secrets = filterAndGetClassName(registeredPlugin.getSecrets(), includeDeprecated).stream().filter(c -> subgroup == null || c.startsWith(subgroup)).toList();
plugin.taskRunners = filterAndGetClassName(registeredPlugin.getTaskRunners(), includeDeprecated).stream().filter(c -> subgroup == null || c.startsWith(subgroup)).toList();
plugin.apps = filterAndGetClassName(registeredPlugin.getApps(), includeDeprecated).stream().filter(c -> subgroup == null || c.startsWith(subgroup)).toList();
plugin.appBlocks = filterAndGetClassName(registeredPlugin.getAppBlocks(), includeDeprecated).stream().filter(c -> subgroup == null || c.startsWith(subgroup)).toList();
plugin.charts = filterAndGetClassName(registeredPlugin.getCharts(), includeDeprecated).stream().filter(c -> subgroup == null || c.startsWith(subgroup)).toList();
plugin.dataFilters = filterAndGetClassName(registeredPlugin.getDataFilters(), includeDeprecated).stream().filter(c -> subgroup == null || c.startsWith(subgroup)).toList();
plugin.logExporters = filterAndGetClassName(registeredPlugin.getLogExporters(), includeDeprecated).stream().filter(c -> subgroup == null || c.startsWith(subgroup)).toList();

return plugin;
}

@@ -108,16 +103,15 @@ public class Plugin {
* Filters the given list of class all internal Plugin, as well as, all legacy org.kestra classes.
* Those classes are only filtered from the documentation to ensure backward compatibility.
*
* @param list The list of classes?
* @param list The list of classes?
* @param includeDeprecated whether to include deprecated plugins or not
* @return a filtered streams.
* @return a filtered streams.
*/
private static List<String> filterAndGetClassName(final List<? extends Class<?>> list, boolean includeDeprecated, Predicate<Class<?>> clazzFilter) {
private static List<String> filterAndGetClassName(final List<? extends Class<?>> list, boolean includeDeprecated) {
return list
.stream()
.filter(not(io.kestra.core.models.Plugin::isInternal))
.filter(p -> includeDeprecated || !io.kestra.core.models.Plugin.isDeprecated(p))
.filter(clazzFilter)
.map(Class::getName)
.filter(c -> !c.startsWith("org.kestra."))
.toList();

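The hunk above replaces the old name-prefix check (c.startsWith(subgroup)) with a Predicate<Class<?>> compared against the package name before the classes are mapped to strings. A minimal sketch of that filtering idea follows; the inputs and the stand-in method are hypothetical, only the predicate shape and the org.kestra exclusion mirror the diff.

import java.util.List;
import java.util.function.Predicate;

class SubgroupFilterSketch {
    // Hypothetical stand-in for filterAndGetClassName: apply the package predicate
    // before mapping classes to names, then drop legacy org.kestra names.
    static List<String> filterAndGetClassName(List<? extends Class<?>> classes,
                                              Predicate<Class<?>> packagePredicate) {
        return classes.stream()
            .filter(packagePredicate)
            .map(Class::getName)
            .filter(name -> !name.startsWith("org.kestra."))
            .toList();
    }

    public static void main(String[] args) {
        String subgroup = "java.util"; // assuming the subgroup is a package name
        Predicate<Class<?>> packagePredicate =
            c -> subgroup == null || c.getPackageName().equals(subgroup);

        // java.util.ArrayList matches the predicate, java.lang.String does not
        System.out.println(filterAndGetClassName(
            List.of(java.util.ArrayList.class, String.class), packagePredicate));
    }
}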
@@ -1,20 +1,11 @@
package io.kestra.core.docs;

import com.fasterxml.jackson.annotation.JsonCreator;
import io.kestra.core.utils.Enums;

public enum SchemaType {
FLOW,
TEMPLATE,
TASK,
TRIGGER,
PLUGINDEFAULT,
APPS,
DASHBOARD;

@JsonCreator
public static SchemaType fromString(final String value) {
return Enums.getForNameIgnoreCase(value, SchemaType.class);
}
flow,
template,
task,
trigger,
plugindefault,
apps,
dashboard
}

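The SchemaType hunk above trades lowercase constants for uppercase ones plus a case-insensitive @JsonCreator. A small sketch of that deserialization pattern, with the Kestra Enums helper replaced by a plain uppercase valueOf (an assumption, not the helper's actual behaviour):

import java.util.Locale;

import com.fasterxml.jackson.annotation.JsonCreator;

public enum SchemaTypeSketch {
    FLOW, TEMPLATE, TASK, TRIGGER, PLUGINDEFAULT, APPS, DASHBOARD;

    // Accepts "flow", "Flow" or "FLOW" when deserializing JSON
    @JsonCreator
    public static SchemaTypeSketch fromString(final String value) {
        return valueOf(value.toUpperCase(Locale.ROOT));
    }
}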
@@ -7,8 +7,6 @@ public enum CrudEventType {
DELETE,
LOGIN,
LOGOUT,
IMPERSONATE,
LOGIN_FAILURE,
ACCOUNT_LOCKED
IMPERSONATE
}

@@ -142,22 +142,12 @@ public class HttpRequest {
public abstract static class RequestBody {
public abstract HttpEntity to() throws IOException;

public abstract Object getContent() throws IOException;

public abstract Charset getCharset() throws IOException;

public abstract String getContentType() throws IOException;

protected ContentType entityContentType() throws IOException {
return this.getCharset() != null ? ContentType.create(this.getContentType(), this.getCharset()) : ContentType.create(this.getContentType());
}

public static RequestBody from(HttpEntity entity) throws IOException {
if (entity == null) {
return null;
}

Charset charset = entity.getContentEncoding() != null ? Charset.forName(entity.getContentEncoding()) : StandardCharsets.UTF_8;
Charset charset = Charset.forName(entity.getContentEncoding());

if (entity.getContentType().equals(ContentType.APPLICATION_OCTET_STREAM.getMimeType())) {
return ByteArrayRequestBody.builder()
@@ -182,80 +172,71 @@ public class HttpRequest {
|
||||
.build();
|
||||
}
|
||||
|
||||
return ByteArrayRequestBody.builder()
|
||||
.charset(charset)
|
||||
.contentType(entity.getContentType())
|
||||
.content(entity.getContent().readAllBytes())
|
||||
.build();
|
||||
throw new IllegalArgumentException("Unsupported Content-Type: " + entity.getContentType());
|
||||
}
|
||||
}
|
||||
|
||||
@Getter
|
||||
@AllArgsConstructor
|
||||
@SuperBuilder
|
||||
public static class InputStreamRequestBody extends RequestBody {
|
||||
@Builder.Default
|
||||
private String contentType = ContentType.APPLICATION_OCTET_STREAM.getMimeType();
|
||||
|
||||
private Charset charset;
|
||||
@Builder.Default
|
||||
private Charset charset = StandardCharsets.UTF_8;
|
||||
|
||||
private InputStream content;
|
||||
|
||||
public HttpEntity to() throws IOException {
|
||||
return new InputStreamEntity(content, this.entityContentType());
|
||||
public HttpEntity to() {
|
||||
return new InputStreamEntity(content, ContentType.create(contentType, charset));
|
||||
}
|
||||
}
|
||||
|
||||
@Getter
|
||||
@AllArgsConstructor
|
||||
@SuperBuilder
|
||||
public static class StringRequestBody extends RequestBody {
|
||||
@Builder.Default
|
||||
private String contentType = ContentType.TEXT_PLAIN.getMimeType();
|
||||
|
||||
private Charset charset;
|
||||
@Builder.Default
|
||||
private Charset charset = StandardCharsets.UTF_8;
|
||||
|
||||
private String content;
|
||||
|
||||
public HttpEntity to() throws IOException {
|
||||
return new StringEntity(this.content, this.entityContentType());
|
||||
public HttpEntity to() {
|
||||
return new StringEntity(this.content, ContentType.create(contentType, charset));
|
||||
}
|
||||
}
|
||||
|
||||
@Getter
|
||||
@AllArgsConstructor
|
||||
@SuperBuilder
|
||||
public static class ByteArrayRequestBody extends RequestBody {
|
||||
@Builder.Default
|
||||
private String contentType = ContentType.APPLICATION_OCTET_STREAM.getMimeType();
|
||||
|
||||
private Charset charset;
|
||||
@Builder.Default
|
||||
private Charset charset = StandardCharsets.UTF_8;
|
||||
|
||||
private byte[] content;
|
||||
|
||||
public HttpEntity to() throws IOException {
|
||||
return new ByteArrayEntity(content, this.entityContentType());
|
||||
public HttpEntity to() {
|
||||
return new ByteArrayEntity(content, ContentType.create(contentType, charset));
|
||||
}
|
||||
}
|
||||
|
||||
@Getter
|
||||
@AllArgsConstructor
|
||||
@SuperBuilder
|
||||
public static class JsonRequestBody extends RequestBody {
|
||||
private Charset charset;
|
||||
@Builder.Default
|
||||
private Charset charset = StandardCharsets.UTF_8;
|
||||
|
||||
private Object content;
|
||||
|
||||
@Override
|
||||
public String getContentType() throws IOException {
|
||||
return ContentType.APPLICATION_JSON.getMimeType();
|
||||
}
|
||||
|
||||
public HttpEntity to() throws IOException {
|
||||
try {
|
||||
return new StringEntity(
|
||||
JacksonMapper.ofJson().writeValueAsString(content),
|
||||
this.charset != null ? ContentType.APPLICATION_JSON.withCharset(this.charset) : ContentType.APPLICATION_JSON
|
||||
ContentType.APPLICATION_JSON.withCharset(this.charset)
|
||||
);
|
||||
} catch (JsonProcessingException e) {
|
||||
throw new IOException(e);
|
||||
@@ -263,49 +244,37 @@ public class HttpRequest {
|
||||
}
|
||||
}
|
||||
|
||||
@Getter
|
||||
@AllArgsConstructor
|
||||
@SuperBuilder
|
||||
public static class UrlEncodedRequestBody extends RequestBody {
|
||||
private Charset charset;
|
||||
@Builder.Default
|
||||
private Charset charset = StandardCharsets.UTF_8;
|
||||
|
||||
private Map<String, Object> content;
|
||||
|
||||
@Override
|
||||
public String getContentType() throws IOException {
|
||||
return ContentType.APPLICATION_FORM_URLENCODED.getMimeType();
|
||||
}
|
||||
|
||||
public HttpEntity to() throws IOException {
|
||||
List<BasicNameValuePair> list = this.content.entrySet()
|
||||
.stream()
|
||||
.map(e -> new BasicNameValuePair(e.getKey(), e.getValue().toString()))
|
||||
.toList();
|
||||
|
||||
return this.charset != null ? new UrlEncodedFormEntity(list, this.charset) : new UrlEncodedFormEntity(list);
|
||||
return new UrlEncodedFormEntity(
|
||||
this.content .entrySet()
|
||||
.stream()
|
||||
.map(e -> new BasicNameValuePair(e.getKey(), e.getValue().toString()))
|
||||
.toList(),
|
||||
this.charset
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@Getter
|
||||
@AllArgsConstructor
|
||||
@SuperBuilder
|
||||
public static class MultipartRequestBody extends RequestBody {
|
||||
private Charset charset;
|
||||
@Builder.Default
|
||||
private Charset charset = StandardCharsets.UTF_8;
|
||||
|
||||
private Map<String, Object> content;
|
||||
|
||||
@Override
|
||||
public String getContentType() throws IOException {
|
||||
return ContentType.MULTIPART_MIXED.getMimeType();
|
||||
}
|
||||
|
||||
public HttpEntity to() throws IOException {
|
||||
MultipartEntityBuilder builder = MultipartEntityBuilder
|
||||
.create();
|
||||
|
||||
if (this.charset != null) {
|
||||
builder.setCharset(this.charset);
|
||||
}
|
||||
.create()
|
||||
.setCharset(this.charset);
|
||||
|
||||
content.forEach((key, value) -> {
|
||||
switch (value) {
|
||||
|
||||
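The HttpRequest hunks above move the body classes between a nullable charset with an entityContentType() helper and @Builder.Default UTF-8 charsets. A hedged sketch of the null-safe variant, using Apache HttpClient 5 core entities; the class shape is illustrative, not the exact Kestra one.

import java.nio.charset.Charset;

import org.apache.hc.core5.http.ContentType;
import org.apache.hc.core5.http.HttpEntity;
import org.apache.hc.core5.http.io.entity.StringEntity;

class StringBodySketch {
    private final String contentType = ContentType.TEXT_PLAIN.getMimeType();
    private final Charset charset;   // may be null: fall back to the mime type's default
    private final String content;

    StringBodySketch(String content, Charset charset) {
        this.content = content;
        this.charset = charset;
    }

    // Mirrors entityContentType(): only attach a charset when one was provided.
    ContentType entityContentType() {
        return charset != null
            ? ContentType.create(contentType, charset)
            : ContentType.create(contentType);
    }

    HttpEntity to() {
        return new StringEntity(content, entityContentType());
    }
}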
@@ -54,7 +54,7 @@ public class HttpResponse<T> {
null
)
.headers(HttpService.toHttpHeaders(response.getHeaders()))
.body(response instanceof ClassicHttpResponse classicHttpResponse && classicHttpResponse.getEntity() != null ?
.body(response instanceof ClassicHttpResponse classicHttpResponse ?
IOUtils.toByteArray(classicHttpResponse.getEntity().getContent()) :
null
)

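One side of the HttpResponse hunk above guards against responses that carry no entity before reading the body. A small sketch of that guard with Apache HttpClient 5 and commons-io; the method name is illustrative.

import java.io.IOException;

import org.apache.commons.io.IOUtils;
import org.apache.hc.core5.http.ClassicHttpResponse;
import org.apache.hc.core5.http.HttpResponse;

class ResponseBodySketch {
    // Only read the entity when the response is classic AND actually has one (e.g. not a 204).
    static byte[] bodyOrNull(HttpResponse response) throws IOException {
        return response instanceof ClassicHttpResponse classic && classic.getEntity() != null
            ? IOUtils.toByteArray(classic.getEntity().getContent())
            : null;
    }
}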
@@ -29,7 +29,6 @@ import org.apache.hc.core5.http.io.HttpClientResponseHandler;
|
||||
import org.apache.hc.core5.http.io.entity.EntityUtils;
|
||||
import org.apache.hc.core5.ssl.SSLContexts;
|
||||
import org.apache.hc.core5.util.Timeout;
|
||||
import org.codehaus.plexus.util.StringUtils;
|
||||
|
||||
import java.io.Closeable;
|
||||
import java.io.IOException;
|
||||
@@ -88,49 +87,47 @@ public class HttpClient implements Closeable {
|
||||
|
||||
// Timeout
|
||||
if (this.configuration.getTimeout() != null) {
|
||||
var connectTimeout = runContext.render(this.configuration.getTimeout().getConnectTimeout()).as(Duration.class);
|
||||
connectTimeout.ifPresent(duration -> connectionConfig.setConnectTimeout(Timeout.of(duration)));
|
||||
var connectTiemout = runContext.render(this.configuration.getTimeout().getConnectTimeout()).as(Duration.class);
|
||||
connectTiemout.ifPresent(duration -> connectionConfig.setConnectTimeout(Timeout.of(duration)));
|
||||
|
||||
var readIdleTimeout = runContext.render(this.configuration.getTimeout().getReadIdleTimeout()).as(Duration.class);
|
||||
readIdleTimeout.ifPresent(duration -> connectionConfig.setSocketTimeout(Timeout.of(duration)));
|
||||
var readIdleTiemout = runContext.render(this.configuration.getTimeout().getReadIdleTimeout()).as(Duration.class);
|
||||
readIdleTiemout.ifPresent(duration -> connectionConfig.setSocketTimeout(Timeout.of(duration)));
|
||||
}
|
||||
|
||||
// proxy
|
||||
if (this.configuration.getProxy() != null && configuration.getProxy().getAddress() != null) {
|
||||
String proxyAddress = runContext.render(configuration.getProxy().getAddress()).as(String.class).orElse(null);
|
||||
SocketAddress proxyAddr = new InetSocketAddress(
|
||||
runContext.render(configuration.getProxy().getAddress()).as(String.class).orElse(null),
|
||||
runContext.render(configuration.getProxy().getPort()).as(Integer.class).orElse(null)
|
||||
);
|
||||
|
||||
if (StringUtils.isNotEmpty(proxyAddress)) {
|
||||
int port = runContext.render(configuration.getProxy().getPort()).as(Integer.class).orElseThrow();
|
||||
SocketAddress proxyAddr = new InetSocketAddress(
|
||||
proxyAddress,
|
||||
port
|
||||
);
|
||||
Proxy proxy = new Proxy(runContext.render(configuration.getProxy().getType()).as(Proxy.Type.class).orElse(null), proxyAddr);
|
||||
|
||||
Proxy proxy = new Proxy(runContext.render(configuration.getProxy().getType()).as(Proxy.Type.class).orElse(null), proxyAddr);
|
||||
|
||||
builder.setProxySelector(new ProxySelector() {
|
||||
@Override
|
||||
public void connectFailed(URI uri, SocketAddress sa, IOException e) {
|
||||
/* ignore */
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Proxy> select(URI uri) {
|
||||
return List.of(proxy);
|
||||
}
|
||||
});
|
||||
|
||||
if (this.configuration.getProxy().getUsername() != null && this.configuration.getProxy().getPassword() != null) {
|
||||
builder.setProxyAuthenticationStrategy(new DefaultAuthenticationStrategy());
|
||||
|
||||
credentialsStore.setCredentials(
|
||||
new AuthScope(proxyAddress, port),
|
||||
new UsernamePasswordCredentials(
|
||||
runContext.render(this.configuration.getProxy().getUsername()).as(String.class).orElseThrow(),
|
||||
runContext.render(this.configuration.getProxy().getPassword()).as(String.class).orElseThrow().toCharArray()
|
||||
)
|
||||
);
|
||||
builder.setProxySelector(new ProxySelector() {
|
||||
@Override
|
||||
public void connectFailed(URI uri, SocketAddress sa, IOException e) {
|
||||
/* ignore */
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Proxy> select(URI uri) {
|
||||
return List.of(proxy);
|
||||
}
|
||||
});
|
||||
|
||||
if (this.configuration.getProxy().getUsername() != null && this.configuration.getProxy().getPassword() != null) {
|
||||
builder.setProxyAuthenticationStrategy(new DefaultAuthenticationStrategy());
|
||||
|
||||
credentialsStore.setCredentials(
|
||||
new AuthScope(
|
||||
runContext.render(this.configuration.getProxy().getAddress()).as(String.class).orElse(null),
|
||||
runContext.render(this.configuration.getProxy().getPort()).as(Integer.class).orElse(null)
|
||||
),
|
||||
new UsernamePasswordCredentials(
|
||||
runContext.render(this.configuration.getProxy().getUsername()).as(String.class).orElseThrow(),
|
||||
runContext.render(this.configuration.getProxy().getPassword()).as(String.class).orElseThrow().toCharArray()
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
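One side of the proxy hunk above installs a ProxySelector that returns the same Proxy for every URI and swallows connection failures. A standalone sketch of that selector, with a placeholder proxy address:

import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.Proxy;
import java.net.ProxySelector;
import java.net.SocketAddress;
import java.net.URI;
import java.util.List;

class FixedProxySelector extends ProxySelector {
    private final Proxy proxy;

    FixedProxySelector(String host, int port) {
        // HTTP proxy at a placeholder address
        this.proxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress(host, port));
    }

    @Override
    public List<Proxy> select(URI uri) {
        return List.of(proxy);   // always the same proxy, as in the hunk above
    }

    @Override
    public void connectFailed(URI uri, SocketAddress sa, IOException ioe) {
        /* ignore, mirroring the diff */
    }
}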
@@ -23,9 +23,9 @@ public class RunContextResponseInterceptor implements HttpResponseInterceptor {
response instanceof BasicClassicHttpResponse httpResponse
) {
try {
// FIXME temporary fix for https://github.com/kestra-io/kestra/issues/8092
runContext.logger().debug(
"Request " + httpClientContext.getRequest().getUri() + " from '{}' with the response code '{}'",
"Request '{}' from '{}' with the response code '{}'",
httpClientContext.getRequest().getUri(),
httpClientContext.getEndpointDetails().getRemoteAddress(),
response.getCode()
);

@@ -3,6 +3,7 @@ package io.kestra.core.http.client.configurations;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.models.property.Property;
import io.kestra.core.runners.RunContext;
import lombok.NoArgsConstructor;
import lombok.experimental.SuperBuilder;
@@ -16,7 +17,7 @@ import org.apache.hc.client5.http.impl.classic.HttpClientBuilder;
@SuperBuilder(toBuilder = true)
@NoArgsConstructor
public abstract class AbstractAuthConfiguration {
public abstract AuthType getType();
public abstract Property<AuthType> getType();

public abstract void configure(HttpClientBuilder builder, RunContext runContext) throws IllegalVariableEvaluationException;

@@ -22,7 +22,7 @@ public class BasicAuthConfiguration extends AbstractAuthConfiguration {
@NotNull
@JsonInclude
@Builder.Default
protected AuthType type = AuthType.BASIC;
protected Property<AuthType> type = Property.of(AuthType.BASIC);

@Schema(title = "The username for HTTP basic authentication.")
private Property<String> username;

@@ -21,7 +21,7 @@ public class BearerAuthConfiguration extends AbstractAuthConfiguration {
@NotNull
@JsonInclude
@Builder.Default
protected AuthType type = AuthType.BEARER;
protected Property<AuthType> type = Property.of(AuthType.BEARER);

@Schema(title = "The token for bearer token authentication.")
private Property<String> token;

@@ -2,17 +2,18 @@ package io.kestra.core.http.client.configurations;
|
||||
|
||||
import io.kestra.core.models.annotations.PluginProperty;
|
||||
import io.kestra.core.models.property.Property;
|
||||
import io.micronaut.http.client.HttpClientConfiguration;
|
||||
import io.micronaut.logging.LogLevel;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import lombok.Builder;
|
||||
import lombok.Getter;
|
||||
import lombok.Setter;
|
||||
import lombok.extern.jackson.Jacksonized;
|
||||
|
||||
import java.net.Proxy;
|
||||
import java.nio.charset.Charset;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.time.Duration;
|
||||
import java.time.temporal.ChronoUnit;
|
||||
|
||||
@Builder(toBuilder = true)
|
||||
@Getter
|
||||
@@ -29,7 +30,6 @@ public class HttpConfiguration {
|
||||
@Schema(title = "The authentification to use.")
|
||||
private AbstractAuthConfiguration auth;
|
||||
|
||||
@Setter
|
||||
@Schema(title = "The SSL request options")
|
||||
private SslOptions ssl;
|
||||
|
||||
@@ -37,7 +37,6 @@ public class HttpConfiguration {
|
||||
@Builder.Default
|
||||
private Property<Boolean> followRedirects = Property.of(true);
|
||||
|
||||
@Setter
|
||||
@Schema(title = "If true, allow a failed response code (response code >= 400)")
|
||||
@Builder.Default
|
||||
private Property<Boolean> allowFailed = Property.of(false);
|
||||
@@ -63,12 +62,14 @@ public class HttpConfiguration {
|
||||
private final Duration connectTimeout;
|
||||
|
||||
@Schema(title = "The maximum time allowed for reading data from the server before failing.")
|
||||
@Builder.Default
|
||||
@Deprecated
|
||||
private final Duration readTimeout;
|
||||
private final Duration readTimeout = Duration.ofSeconds(HttpClientConfiguration.DEFAULT_READ_TIMEOUT_SECONDS);
|
||||
|
||||
@Schema(title = "The type of proxy to use.")
|
||||
@Builder.Default
|
||||
@Deprecated
|
||||
private final Proxy.Type proxyType;
|
||||
private final Proxy.Type proxyType = Proxy.Type.DIRECT;
|
||||
|
||||
@Schema(title = "The address of the proxy server.")
|
||||
@Deprecated
|
||||
@@ -101,16 +102,19 @@ public class HttpConfiguration {
|
||||
|
||||
// Deprecated properties with no equivalent value to be kept, silently ignore
|
||||
@Schema(title = "The time allowed for a read connection to remain idle before closing it.")
|
||||
@Builder.Default
|
||||
@Deprecated
|
||||
private final Duration readIdleTimeout;
|
||||
private final Duration readIdleTimeout = Duration.of(HttpClientConfiguration.DEFAULT_READ_IDLE_TIMEOUT_MINUTES, ChronoUnit.MINUTES);
|
||||
|
||||
@Schema(title = "The time an idle connection can remain in the client's connection pool before being closed.")
|
||||
@Builder.Default
|
||||
@Deprecated
|
||||
private final Duration connectionPoolIdleTimeout;
|
||||
private final Duration connectionPoolIdleTimeout = Duration.ofSeconds(HttpClientConfiguration.DEFAULT_CONNECTION_POOL_IDLE_TIMEOUT_SECONDS);
|
||||
|
||||
@Schema(title = "The maximum content length of the response.")
|
||||
@Builder.Default
|
||||
@Deprecated
|
||||
private final Integer maxContentLength;
|
||||
private final Integer maxContentLength = HttpClientConfiguration.DEFAULT_MAX_CONTENT_LENGTH;
|
||||
|
||||
public static class HttpConfigurationBuilder {
|
||||
@Deprecated
|
||||
|
||||
@@ -4,13 +4,11 @@ import io.kestra.core.models.property.Property;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import lombok.Builder;
|
||||
import lombok.Getter;
|
||||
import lombok.extern.jackson.Jacksonized;
|
||||
|
||||
import java.net.Proxy;
|
||||
|
||||
@Getter
|
||||
@Builder(toBuilder = true)
|
||||
@Jacksonized
|
||||
public class ProxyConfiguration {
|
||||
@Schema(title = "The type of proxy to use.")
|
||||
@Builder.Default
|
||||
|
||||
@@ -1,17 +0,0 @@
package io.kestra.core.log;

import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.core.boolex.EvaluationException;
import ch.qos.logback.core.boolex.EventEvaluatorBase;

public class KestraLogFilter extends EventEvaluatorBase<ILoggingEvent> {
@Override
public boolean evaluate(ILoggingEvent event) throws NullPointerException, EvaluationException {
var message = event.getMessage();
// as this filter is called very often, for perf,
// we use startWith and do all checks successfully instead of using a more elegant construct like Stream...
return message.startsWith("outOfOrder mode is active. Migration of schema") ||
message.startsWith("Version mismatch : Database version is older than what dialect POSTGRES supports") ||
message.startsWith("Failed to bind as java.util.concurrent.Executors$AutoShutdownDelegatedExecutorService is unsupported.");
}
}
@@ -3,7 +3,6 @@ package io.kestra.core.models;
import io.kestra.core.utils.MapUtils;
import jakarta.validation.constraints.NotNull;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
@@ -19,7 +18,6 @@ public record Label(@NotNull String key, @NotNull String value) {
public static final String RESTARTED = SYSTEM_PREFIX + "restarted";
public static final String REPLAY = SYSTEM_PREFIX + "replay";
public static final String REPLAYED = SYSTEM_PREFIX + "replayed";
public static final String SIMULATED_EXECUTION = SYSTEM_PREFIX + "simulatedExecution";

/**
* Static helper method for converting a list of labels to a nested map.
@@ -48,19 +46,4 @@ public record Label(@NotNull String key, @NotNull String value) {
.map(entry -> new Label(entry.getKey(), entry.getValue()))
.toList();
}

/**
* Static helper method for converting a label string to a map.
*
* @param label The label string.
* @return The map of key/value labels.
*/
public static Map<String, String> from(String label) {
Map<String, String> map = new HashMap<>();
String[] keyValueArray = label.split(":");
if (keyValueArray.length == 2) {
map.put(keyValueArray[0], keyValueArray[1]);
}
return map;
}
}

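The removed Label.from(String) helper above turns a "key:value" string into a single-entry map and silently ignores malformed input. A compact sketch of that parsing, matching the hunk:

import java.util.HashMap;
import java.util.Map;

class LabelParseSketch {
    // Splits "key:value" into a one-entry map; anything else yields an empty map,
    // matching the removed helper shown in the hunk above.
    static Map<String, String> from(String label) {
        Map<String, String> map = new HashMap<>();
        String[] keyValue = label.split(":");
        if (keyValue.length == 2) {
            map.put(keyValue[0], keyValue[1]);
        }
        return map;
    }

    public static void main(String[] args) {
        System.out.println(from("team:data"));   // {team=data}
        System.out.println(from("malformed"));   // {}
    }
}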
@@ -1,16 +0,0 @@
package io.kestra.core.models;

import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.validation.constraints.Pattern;

/**
* Interface that can be implemented by classes supporting plugin versioning.
*
* @see Plugin
*/
public interface PluginVersioning {

@Pattern(regexp="\\d+\\.\\d+\\.\\d+(-[a-zA-Z0-9-]+)?|([a-zA-Z0-9]+)")
@Schema(title = "The version of the plugin to use.")
String getVersion();
}
@@ -1,292 +0,0 @@
|
||||
package io.kestra.core.models;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonCreator;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import com.fasterxml.jackson.annotation.JsonValue;
|
||||
import io.kestra.core.models.dashboards.filters.*;
|
||||
import io.kestra.core.utils.Enums;
|
||||
import lombok.Builder;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
@Builder
|
||||
public record QueryFilter(
|
||||
Field field,
|
||||
Op operation,
|
||||
Object value
|
||||
) {
|
||||
|
||||
@JsonCreator
|
||||
public QueryFilter(
|
||||
@JsonProperty("field") Field field,
|
||||
@JsonProperty("operation") Op operation,
|
||||
@JsonProperty("value") Object value
|
||||
) {
|
||||
this.field = field;
|
||||
this.operation = operation;
|
||||
this.value = value;
|
||||
}
|
||||
|
||||
public enum Op {
|
||||
EQUALS,
|
||||
NOT_EQUALS,
|
||||
GREATER_THAN,
|
||||
LESS_THAN,
|
||||
GREATER_THAN_OR_EQUAL_TO,
|
||||
LESS_THAN_OR_EQUAL_TO,
|
||||
IN,
|
||||
NOT_IN,
|
||||
STARTS_WITH,
|
||||
ENDS_WITH,
|
||||
CONTAINS,
|
||||
REGEX;
|
||||
}
|
||||
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public <T extends Enum<T>> AbstractFilter<T> toDashboardFilterBuilder(T field, Object value) {
|
||||
switch (this.operation) {
|
||||
case EQUALS:
|
||||
return EqualTo.<T>builder().field(field).value(value).build();
|
||||
case NOT_EQUALS:
|
||||
return NotEqualTo.<T>builder().field(field).value(value).build();
|
||||
case GREATER_THAN:
|
||||
return GreaterThan.<T>builder().field(field).value(value).build();
|
||||
case LESS_THAN:
|
||||
return LessThan.<T>builder().field(field).value(value).build();
|
||||
case GREATER_THAN_OR_EQUAL_TO:
|
||||
return GreaterThanOrEqualTo.<T>builder().field(field).value(value).build();
|
||||
case LESS_THAN_OR_EQUAL_TO:
|
||||
return LessThanOrEqualTo.<T>builder().field(field).value(value).build();
|
||||
case IN:
|
||||
return In.<T>builder().field(field).values((List<Object>) value).build();
|
||||
case NOT_IN:
|
||||
return NotIn.<T>builder().field(field).values((List<Object>) value).build();
|
||||
case STARTS_WITH:
|
||||
return StartsWith.<T>builder().field(field).value(value.toString()).build();
|
||||
case ENDS_WITH:
|
||||
return EndsWith.<T>builder().field(field).value(value.toString()).build();
|
||||
case CONTAINS:
|
||||
return Contains.<T>builder().field(field).value(value.toString()).build();
|
||||
case REGEX:
|
||||
return Regex.<T>builder().field(field).value(value.toString()).build();
|
||||
default:
|
||||
throw new IllegalArgumentException("Unsupported operation: " + this.operation);
|
||||
}
|
||||
}
|
||||
|
||||
public enum Field {
|
||||
QUERY("q") {
|
||||
@Override
|
||||
public List<Op> supportedOp() {
|
||||
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.REGEX);
|
||||
}
|
||||
},
|
||||
SCOPE("scope") {
|
||||
@Override
|
||||
public List<Op> supportedOp() {
|
||||
return List.of(Op.EQUALS, Op.NOT_EQUALS);
|
||||
}
|
||||
},
|
||||
NAMESPACE("namespace") {
|
||||
@Override
|
||||
public List<Op> supportedOp() {
|
||||
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.CONTAINS, Op.STARTS_WITH, Op.ENDS_WITH, Op.REGEX);
|
||||
}
|
||||
},
|
||||
LABELS("labels") {
|
||||
@Override
|
||||
public List<Op> supportedOp() {
|
||||
return List.of(Op.EQUALS, Op.NOT_EQUALS);
|
||||
}
|
||||
},
|
||||
FLOW_ID("flowId") {
|
||||
@Override
|
||||
public List<Op> supportedOp() {
|
||||
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.CONTAINS, Op.IN, Op.NOT_IN);
|
||||
}
|
||||
},
|
||||
START_DATE("startDate") {
|
||||
@Override
|
||||
public List<Op> supportedOp() {
|
||||
return List.of(Op.GREATER_THAN, Op.LESS_THAN, Op.EQUALS, Op.NOT_EQUALS);
|
||||
}
|
||||
},
|
||||
END_DATE("endDate") {
|
||||
@Override
|
||||
public List<Op> supportedOp() {
|
||||
return List.of(Op.GREATER_THAN, Op.LESS_THAN, Op.EQUALS, Op.NOT_EQUALS);
|
||||
}
|
||||
},
|
||||
STATE("state") {
|
||||
@Override
|
||||
public List<Op> supportedOp() {
|
||||
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.IN, Op.NOT_IN);
|
||||
}
|
||||
},
|
||||
TIME_RANGE("timeRange") {
|
||||
@Override
|
||||
public List<Op> supportedOp() {
|
||||
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.CONTAINS, Op.STARTS_WITH,
|
||||
Op.ENDS_WITH, Op.IN, Op.NOT_IN, Op.REGEX);
|
||||
}
|
||||
},
|
||||
TRIGGER_EXECUTION_ID("triggerExecutionId") {
|
||||
@Override
|
||||
public List<Op> supportedOp() {
|
||||
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.CONTAINS, Op.STARTS_WITH, Op.ENDS_WITH, Op.IN, Op.NOT_IN);
|
||||
}
|
||||
},
|
||||
TRIGGER_ID("triggerId") {
|
||||
@Override
|
||||
public List<Op> supportedOp() {
|
||||
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.CONTAINS, Op.STARTS_WITH, Op.ENDS_WITH, Op.IN, Op.NOT_IN);
|
||||
}
|
||||
},
|
||||
CHILD_FILTER("childFilter") {
|
||||
@Override
|
||||
public List<Op> supportedOp() {
|
||||
return List.of(Op.EQUALS, Op.NOT_EQUALS);
|
||||
}
|
||||
},
|
||||
WORKER_ID("workerId") {
|
||||
@Override
|
||||
public List<Op> supportedOp() {
|
||||
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.CONTAINS, Op.STARTS_WITH, Op.ENDS_WITH, Op.IN, Op.NOT_IN);
|
||||
}
|
||||
},
|
||||
EXISTING_ONLY("existingOnly") {
|
||||
@Override
|
||||
public List<Op> supportedOp() {
|
||||
return List.of(Op.EQUALS, Op.NOT_EQUALS);
|
||||
}
|
||||
},
|
||||
MIN_LEVEL("level") {
|
||||
@Override
|
||||
public List<Op> supportedOp() {
|
||||
return List.of(Op.EQUALS, Op.NOT_EQUALS);
|
||||
}
|
||||
};
|
||||
|
||||
private static final Map<String, Field> BY_VALUE = Arrays.stream(values())
|
||||
.collect(Collectors.toMap(Field::value, Function.identity()));
|
||||
|
||||
public abstract List<Op> supportedOp();
|
||||
|
||||
private final String value;
|
||||
|
||||
Field(String value) {
|
||||
this.value = value;
|
||||
}
|
||||
|
||||
@JsonCreator
|
||||
public static Field fromString(String value) {
|
||||
return Enums.fromString(value, BY_VALUE, "field");
|
||||
}
|
||||
|
||||
@JsonValue
|
||||
public String value() {
|
||||
return value;
|
||||
}
|
||||
}
|
||||
|
||||
public enum Resource {
|
||||
FLOW {
|
||||
@Override
|
||||
public List<Field> supportedField() {
|
||||
return List.of(Field.LABELS, Field.NAMESPACE, Field.QUERY, Field.SCOPE);
|
||||
}
|
||||
},
|
||||
NAMESPACE {
|
||||
@Override
|
||||
public List<Field> supportedField() {
|
||||
return List.of(Field.EXISTING_ONLY);
|
||||
}
|
||||
},
|
||||
EXECUTION {
|
||||
@Override
|
||||
public List<Field> supportedField() {
|
||||
return List.of(
|
||||
Field.QUERY, Field.SCOPE, Field.FLOW_ID, Field.START_DATE, Field.END_DATE, Field.TIME_RANGE,
|
||||
Field.STATE, Field.LABELS, Field.TRIGGER_EXECUTION_ID, Field.CHILD_FILTER,
|
||||
Field.NAMESPACE
|
||||
);
|
||||
}
|
||||
},
|
||||
LOG {
|
||||
@Override
|
||||
public List<Field> supportedField() {
|
||||
return List.of(Field.NAMESPACE, Field.START_DATE, Field.END_DATE,
|
||||
Field.FLOW_ID, Field.TRIGGER_ID, Field.MIN_LEVEL
|
||||
);
|
||||
}
|
||||
},
|
||||
TASK {
|
||||
@Override
|
||||
public List<Field> supportedField() {
|
||||
return List.of(Field.NAMESPACE, Field.QUERY, Field.END_DATE, Field.FLOW_ID, Field.START_DATE,
|
||||
Field.STATE, Field.LABELS, Field.TRIGGER_EXECUTION_ID, Field.CHILD_FILTER
|
||||
);
|
||||
}
|
||||
},
|
||||
TEMPLATE {
|
||||
@Override
|
||||
public List<Field> supportedField() {
|
||||
return List.of(Field.NAMESPACE, Field.QUERY);
|
||||
}
|
||||
},
|
||||
TRIGGER {
|
||||
@Override
|
||||
public List<Field> supportedField() {
|
||||
return List.of(Field.QUERY, Field.NAMESPACE, Field.WORKER_ID, Field.FLOW_ID
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
public abstract List<Field> supportedField();
|
||||
|
||||
/**
|
||||
* Converts {@code Resource} enums to a list of {@code ResourceField},
|
||||
* including fields and their supported operations.
|
||||
*
|
||||
* @return List of {@code ResourceField} with resource names, fields, and operations.
|
||||
*/
|
||||
public static List<ResourceField> asResourceList() {
|
||||
return Arrays.stream(values())
|
||||
.map(Resource::toResourceField)
|
||||
.toList();
|
||||
}
|
||||
|
||||
private static ResourceField toResourceField(Resource resource) {
|
||||
List<FieldOp> fieldOps = resource.supportedField().stream()
|
||||
.map(Resource::toFieldInfo)
|
||||
.toList();
|
||||
return new ResourceField(resource.name().toLowerCase(), fieldOps);
|
||||
}
|
||||
|
||||
private static FieldOp toFieldInfo(Field field) {
|
||||
List<Operation> operations = field.supportedOp().stream()
|
||||
.map(Resource::toOperation)
|
||||
.toList();
|
||||
return new FieldOp(field.name().toLowerCase(), field.value(), operations);
|
||||
}
|
||||
|
||||
private static Operation toOperation(Op op) {
|
||||
return new Operation(op.name(), op.name());
|
||||
}
|
||||
}
|
||||
|
||||
public record ResourceField(String name, List<FieldOp> fields) {
|
||||
}
|
||||
|
||||
public record FieldOp(String name, String value, List<Operation> operations) {
|
||||
}
|
||||
|
||||
public record Operation(String name, String value) {
|
||||
}
|
||||
|
||||
}
|
||||
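The QueryFilter record above pairs each Field with the list of operations it supports. A reduced sketch of that supported-operation lookup, keeping only two fields and three operations for illustration:

import java.util.List;

class QueryFilterCheckSketch {
    enum Op { EQUALS, NOT_EQUALS, REGEX }

    enum Field {
        QUERY(List.of(Op.EQUALS, Op.NOT_EQUALS, Op.REGEX)),
        SCOPE(List.of(Op.EQUALS, Op.NOT_EQUALS));

        private final List<Op> supported;

        Field(List<Op> supported) {
            this.supported = supported;
        }

        // Mirrors supportedOp(): reject operations a field does not declare
        boolean supports(Op op) {
            return supported.contains(op);
        }
    }

    public static void main(String[] args) {
        System.out.println(Field.QUERY.supports(Op.REGEX));  // true
        System.out.println(Field.SCOPE.supports(Op.REGEX));  // false
    }
}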
@@ -4,7 +4,6 @@ import com.google.common.annotations.VisibleForTesting;
|
||||
import io.kestra.core.repositories.ServiceInstanceRepositoryInterface;
|
||||
import io.kestra.core.server.Service;
|
||||
import io.kestra.core.server.ServiceInstance;
|
||||
import io.kestra.core.server.ServiceType;
|
||||
|
||||
import java.math.BigDecimal;
|
||||
import java.math.RoundingMode;
|
||||
@@ -61,7 +60,7 @@ public record ServiceUsage(
|
||||
final Duration interval) {
|
||||
|
||||
List<DailyServiceStatistics> statistics = Arrays
|
||||
.stream(ServiceType.values())
|
||||
.stream(Service.ServiceType.values())
|
||||
.map(type -> of(from, to, repository, type, interval))
|
||||
.toList();
|
||||
return new ServiceUsage(statistics);
|
||||
@@ -70,13 +69,13 @@ public record ServiceUsage(
|
||||
private static DailyServiceStatistics of(final Instant from,
|
||||
final Instant to,
|
||||
final ServiceInstanceRepositoryInterface repository,
|
||||
final ServiceType serviceType,
|
||||
final Service.ServiceType serviceType,
|
||||
final Duration interval) {
|
||||
return of(serviceType, interval, repository.findAllInstancesBetween(serviceType, from, to));
|
||||
}
|
||||
|
||||
@VisibleForTesting
|
||||
static DailyServiceStatistics of(final ServiceType serviceType,
|
||||
static DailyServiceStatistics of(final Service.ServiceType serviceType,
|
||||
final Duration interval,
|
||||
final List<ServiceInstance> instances) {
|
||||
// Compute the number of running service per time-interval.
|
||||
|
||||
@@ -1,9 +1,13 @@
|
||||
package io.kestra.core.models.dashboards;
|
||||
|
||||
import io.kestra.core.models.QueryFilter;
|
||||
import com.fasterxml.jackson.annotation.JsonSubTypes;
|
||||
import com.fasterxml.jackson.annotation.JsonTypeInfo;
|
||||
import com.fasterxml.jackson.annotation.JsonTypeName;
|
||||
import io.kestra.core.models.annotations.Plugin;
|
||||
import io.kestra.core.models.dashboards.filters.AbstractFilter;
|
||||
import io.kestra.core.repositories.QueryBuilderInterface;
|
||||
import io.kestra.plugin.core.dashboard.data.Executions;
|
||||
import io.kestra.plugin.core.dashboard.data.Logs;
|
||||
import jakarta.validation.constraints.NotBlank;
|
||||
import jakarta.validation.constraints.NotNull;
|
||||
import jakarta.validation.constraints.Pattern;
|
||||
@@ -13,7 +17,6 @@ import lombok.NoArgsConstructor;
|
||||
import lombok.Setter;
|
||||
import lombok.experimental.SuperBuilder;
|
||||
|
||||
import java.time.ZonedDateTime;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
@@ -44,6 +47,6 @@ public abstract class DataFilter<F extends Enum<F>, C extends ColumnDescriptor<F
|
||||
|
||||
public abstract Class<? extends QueryBuilderInterface<F>> repositoryClass();
|
||||
|
||||
public abstract void setGlobalFilter(List<QueryFilter> queryFilterList, ZonedDateTime startDate, ZonedDateTime endDate);
|
||||
public abstract void setGlobalFilter(GlobalFilter globalFilter);
|
||||
|
||||
}
|
||||
|
||||
@@ -1,13 +1,11 @@
|
||||
package io.kestra.webserver.models;
|
||||
package io.kestra.core.models.dashboards;
|
||||
|
||||
import io.kestra.core.models.QueryFilter;
|
||||
import lombok.Getter;
|
||||
import lombok.NoArgsConstructor;
|
||||
import lombok.Setter;
|
||||
import lombok.experimental.SuperBuilder;
|
||||
|
||||
import java.time.ZonedDateTime;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
@Getter
|
||||
@@ -21,5 +19,4 @@ public class GlobalFilter {
|
||||
private Integer pageNumber;
|
||||
private String namespace;
|
||||
private Map<String, String> labels;
|
||||
private List<QueryFilter> filters;
|
||||
}
|
||||
}
|
||||
@@ -122,10 +122,6 @@ public class Execution implements DeletedInterface, TenantInterface {
|
||||
return newExecution(flow, null, labels, Optional.empty());
|
||||
}
|
||||
|
||||
public List<Label> getLabels() {
|
||||
return Optional.ofNullable(this.labels).orElse(new ArrayList<>());
|
||||
}
|
||||
|
||||
/**
|
||||
* Factory method for constructing a new {@link Execution} object for the given {@link Flow} and
|
||||
* inputs.
|
||||
@@ -820,20 +816,11 @@ public class Execution implements DeletedInterface, TenantInterface {
|
||||
));
|
||||
|
||||
Map<String, Object> result = new HashMap<>();
|
||||
this.taskRunList.stream()
|
||||
.filter(taskRun -> taskRun.getOutputs() != null)
|
||||
.collect(Collectors.groupingBy(taskRun -> taskRun.getTaskId()))
|
||||
.forEach((taskId, taskRuns) -> {
|
||||
Map<String, Object> taskOutputs = new HashMap<>();
|
||||
for (TaskRun current : taskRuns) {
|
||||
if (current.getIteration() != null) {
|
||||
taskOutputs = MapUtils.merge(taskOutputs, outputs(current, byIds));
|
||||
} else {
|
||||
taskOutputs.putAll(outputs(current, byIds));
|
||||
}
|
||||
}
|
||||
result.put(taskId, taskOutputs);
|
||||
});
|
||||
for (TaskRun current : this.taskRunList) {
|
||||
if (current.getOutputs() != null) {
|
||||
result = MapUtils.merge(result, outputs(current, byIds));
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
@@ -846,17 +833,18 @@ public class Execution implements DeletedInterface, TenantInterface {
|
||||
|
||||
if (parents.isEmpty()) {
|
||||
if (taskRun.getValue() == null) {
|
||||
return taskRun.getOutputs();
|
||||
return Map.of(taskRun.getTaskId(), taskRun.getOutputs());
|
||||
} else {
|
||||
return Map.of(taskRun.getValue(), taskRun.getOutputs());
|
||||
return Map.of(taskRun.getTaskId(),
|
||||
Map.of(taskRun.getValue(), taskRun.getOutputs()));
|
||||
}
|
||||
}
|
||||
|
||||
Map<String, Object> result = HashMap.newHashMap(1);
|
||||
Map<String, Object> result = MapUtils.newHashMap(1);
|
||||
Map<String, Object> current = result;
|
||||
|
||||
for (TaskRun t : parents) {
|
||||
HashMap<String, Object> item = HashMap.newHashMap(1);
|
||||
HashMap<String, Object> item = MapUtils.newHashMap(1);
|
||||
current.put(t.getValue(), item);
|
||||
current = item;
|
||||
}
|
||||
@@ -869,7 +857,7 @@ public class Execution implements DeletedInterface, TenantInterface {
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
return Map.of(taskRun.getTaskId(), result);
|
||||
}
|
||||
|
||||
|
||||
|
||||
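One side of the outputs hunk above groups task runs by task id and merges each group's outputs under that id. A self-contained sketch of the grouping step, with a shallow putAll standing in for MapUtils.merge:

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

class OutputsMergeSketch {
    record TaskRun(String taskId, Map<String, Object> outputs) {}

    public static void main(String[] args) {
        List<TaskRun> runs = List.of(
            new TaskRun("extract", Map.of("rows", 10)),
            new TaskRun("load", Map.of("ok", true)),
            new TaskRun("load", Map.of("retries", 1)));

        Map<String, Object> result = new HashMap<>();
        runs.stream()
            .filter(r -> r.outputs() != null)
            .collect(Collectors.groupingBy(TaskRun::taskId))
            .forEach((taskId, grouped) -> {
                Map<String, Object> taskOutputs = new HashMap<>();
                grouped.forEach(r -> taskOutputs.putAll(r.outputs())); // shallow stand-in for a deep merge
                result.put(taskId, taskOutputs);
            });

        // prints something like {extract={rows=10}, load={ok=true, retries=1}} (map order may vary)
        System.out.println(result);
    }
}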
@@ -28,7 +28,6 @@ import io.kestra.core.serializers.ListOrMapOfLabelDeserializer;
|
||||
import io.kestra.core.serializers.ListOrMapOfLabelSerializer;
|
||||
import io.kestra.core.services.FlowService;
|
||||
import io.kestra.core.utils.IdUtils;
|
||||
import io.kestra.core.utils.ListUtils;
|
||||
import io.kestra.core.validations.FlowValidation;
|
||||
import io.micronaut.core.annotation.Introspected;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
@@ -38,6 +37,8 @@ import jakarta.validation.Valid;
|
||||
import jakarta.validation.constraints.NotEmpty;
|
||||
import lombok.*;
|
||||
import lombok.experimental.SuperBuilder;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.util.*;
|
||||
import java.util.stream.Collectors;
|
||||
@@ -94,9 +95,6 @@ public class Flow extends AbstractFlow implements HasUID {
|
||||
@Deprecated
|
||||
List<Listener> listeners;
|
||||
|
||||
@Valid
|
||||
List<Task> afterExecution;
|
||||
|
||||
@Valid
|
||||
List<AbstractTrigger> triggers;
|
||||
|
||||
@@ -136,6 +134,11 @@ public class Flow extends AbstractFlow implements HasUID {
|
||||
List<SLA> sla;
|
||||
|
||||
|
||||
public Logger logger() {
|
||||
return LoggerFactory.getLogger("flow." + this.id);
|
||||
}
|
||||
|
||||
|
||||
/** {@inheritDoc **/
|
||||
@Override
|
||||
@JsonIgnore
|
||||
@@ -201,10 +204,10 @@ public class Flow extends AbstractFlow implements HasUID {
|
||||
|
||||
public Stream<Task> allTasks() {
|
||||
return Stream.of(
|
||||
this.tasks != null ? this.tasks : Collections.<Task>emptyList(),
|
||||
this.errors != null ? this.errors : Collections.<Task>emptyList(),
|
||||
this._finally != null ? this._finally : Collections.<Task>emptyList(),
|
||||
this.afterExecutionTasks()
|
||||
this.tasks != null ? this.tasks : new ArrayList<Task>(),
|
||||
this.errors != null ? this.errors : new ArrayList<Task>(),
|
||||
this._finally != null ? this._finally : new ArrayList<Task>(),
|
||||
this.listenersTasks()
|
||||
)
|
||||
.flatMap(Collection::stream);
|
||||
}
|
||||
@@ -284,14 +287,6 @@ public class Flow extends AbstractFlow implements HasUID {
|
||||
.orElse(null);
|
||||
}
|
||||
|
||||
public AbstractTrigger findTriggerByTriggerId(String triggerId) {
|
||||
return this.triggers
|
||||
.stream()
|
||||
.filter(trigger -> trigger.getId().equals(triggerId))
|
||||
.findFirst()
|
||||
.orElse(null);
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated should not be used
|
||||
*/
|
||||
@@ -334,11 +329,15 @@ public class Flow extends AbstractFlow implements HasUID {
|
||||
}
|
||||
}
|
||||
|
||||
private List<Task> afterExecutionTasks() {
|
||||
return ListUtils.concat(
|
||||
ListUtils.emptyOnNull(this.getListeners()).stream().flatMap(listener -> listener.getTasks().stream()).toList(),
|
||||
this.getAfterExecution()
|
||||
);
|
||||
private List<Task> listenersTasks() {
|
||||
if (this.getListeners() == null) {
|
||||
return new ArrayList<>();
|
||||
}
|
||||
|
||||
return this.getListeners()
|
||||
.stream()
|
||||
.flatMap(listener -> listener.getTasks().stream())
|
||||
.toList();
|
||||
}
|
||||
|
||||
public boolean equalsWithoutRevision(Flow o) {
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
package io.kestra.core.models.flows;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import io.kestra.core.models.tasks.Task;
|
||||
import io.kestra.core.models.tasks.TaskForExecution;
|
||||
import io.kestra.core.models.triggers.AbstractTriggerForExecution;
|
||||
import io.kestra.core.utils.ListUtils;
|
||||
@@ -30,9 +29,6 @@ public class FlowForExecution extends AbstractFlow {
|
||||
@JsonProperty("finally")
|
||||
List<TaskForExecution> _finally;
|
||||
|
||||
@Valid
|
||||
List<TaskForExecution> afterExecution;
|
||||
|
||||
@Valid
|
||||
List<AbstractTriggerForExecution> triggers;
|
||||
|
||||
@@ -46,7 +42,6 @@ public class FlowForExecution extends AbstractFlow {
|
||||
.tasks(flow.getTasks().stream().map(TaskForExecution::of).toList())
|
||||
.errors(ListUtils.emptyOnNull(flow.getErrors()).stream().map(TaskForExecution::of).toList())
|
||||
._finally(ListUtils.emptyOnNull(flow.getFinally()).stream().map(TaskForExecution::of).toList())
|
||||
.afterExecution(ListUtils.emptyOnNull(flow.getAfterExecution()).stream().map(TaskForExecution::of).toList())
|
||||
.triggers(ListUtils.emptyOnNull(flow.getTriggers()).stream().map(AbstractTriggerForExecution::of).toList())
|
||||
.disabled(flow.isDisabled())
|
||||
.deleted(flow.isDeleted())
|
||||
|
||||
@@ -31,7 +31,6 @@ public class FlowWithSource extends Flow implements HasSource {
|
||||
.errors(this.errors)
|
||||
._finally(this._finally)
|
||||
.listeners(this.listeners)
|
||||
.afterExecution(this.afterExecution)
|
||||
.triggers(this.triggers)
|
||||
.pluginDefaults(this.pluginDefaults)
|
||||
.disabled(this.disabled)
|
||||
@@ -73,7 +72,6 @@ public class FlowWithSource extends Flow implements HasSource {
|
||||
.tasks(flow.tasks)
|
||||
.errors(flow.errors)
|
||||
._finally(flow._finally)
|
||||
.afterExecution(flow.afterExecution)
|
||||
.listeners(flow.listeners)
|
||||
.triggers(flow.triggers)
|
||||
.pluginDefaults(flow.pluginDefaults)
|
||||
|
||||
@@ -55,7 +55,6 @@ public abstract class AbstractGraph {
|
||||
|
||||
public enum BranchType {
|
||||
ERROR,
|
||||
FINALLY,
|
||||
AFTER_EXECUTION
|
||||
FINALLY
|
||||
}
|
||||
}
|
||||
|
||||
@@ -33,9 +33,6 @@ public class GraphCluster extends AbstractGraph {
|
||||
return _finally;
|
||||
}
|
||||
|
||||
@JsonIgnore
|
||||
private final GraphClusterAfterExecution afterExecution;
|
||||
|
||||
@JsonIgnore
|
||||
private final GraphClusterEnd end;
|
||||
|
||||
@@ -53,17 +50,14 @@ public class GraphCluster extends AbstractGraph {
|
||||
this.relationType = null;
|
||||
this.root = new GraphClusterRoot();
|
||||
this._finally = new GraphClusterFinally();
|
||||
this.afterExecution = new GraphClusterAfterExecution();
|
||||
this.end = new GraphClusterEnd();
|
||||
this.taskNode = null;
|
||||
|
||||
this.addNode(this.root);
|
||||
this.addNode(this._finally);
|
||||
this.addNode(this.afterExecution);
|
||||
this.addNode(this.end);
|
||||
|
||||
this.addEdge(this.getFinally(), this.getAfterExecution(), new Relation());
|
||||
this.addEdge(this.getAfterExecution(), this.getEnd(), new Relation());
|
||||
this.addEdge(this.getFinally(), this.getEnd(), new Relation());
|
||||
}
|
||||
|
||||
public GraphCluster(Task task, TaskRun taskRun, List<String> values, RelationType relationType) {
|
||||
@@ -80,17 +74,14 @@ public class GraphCluster extends AbstractGraph {
|
||||
this.relationType = relationType;
|
||||
this.root = new GraphClusterRoot();
|
||||
this._finally = new GraphClusterFinally();
|
||||
this.afterExecution = new GraphClusterAfterExecution();
|
||||
this.end = new GraphClusterEnd();
|
||||
this.taskNode = taskNode;
|
||||
|
||||
this.addNode(this.root);
|
||||
this.addNode(this._finally);
|
||||
this.addNode(this.afterExecution);
|
||||
this.addNode(this.end);
|
||||
|
||||
this.addEdge(this.getFinally(), this.getAfterExecution(), new Relation());
|
||||
this.addEdge(this.getAfterExecution(), this.getEnd(), new Relation());
|
||||
this.addEdge(this.getFinally(), this.getEnd(), new Relation());
|
||||
}
|
||||
|
||||
public void addNode(AbstractGraph node) {
|
||||
|
||||
@@ -1,11 +0,0 @@
|
||||
package io.kestra.core.models.hierarchies;
|
||||
|
||||
import io.kestra.core.utils.IdUtils;
|
||||
import lombok.Getter;
|
||||
|
||||
@Getter
|
||||
public class GraphClusterAfterExecution extends AbstractGraph {
|
||||
public GraphClusterAfterExecution() {
|
||||
super("after-execution-" + IdUtils.create());
|
||||
}
|
||||
}
|
||||
@@ -5,7 +5,6 @@ public enum RelationType {
|
||||
CHOICE,
|
||||
ERROR,
|
||||
FINALLY,
|
||||
AFTER_EXECUTION,
|
||||
PARALLEL,
|
||||
DYNAMIC
|
||||
}
|
||||
|
||||
@@ -90,10 +90,5 @@ public class SubflowGraphTask extends AbstractGraphTask {
|
||||
public String getType() {
|
||||
return ((TaskInterface) subflowTask).getType();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getVersion() {
|
||||
return ((TaskInterface) subflowTask).getVersion();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -22,8 +22,6 @@ import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
|
||||
import static io.kestra.core.utils.Rethrow.throwFunction;
|
||||
|
||||
/**
|
||||
* Define a plugin properties that will be rendered and converted to a target type at use time.
|
||||
*
|
||||
@@ -138,31 +136,12 @@ public class Property<T> {
|
||||
*
|
||||
* @see io.kestra.core.runners.RunContextProperty#asList(Class, Map)
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
public static <T, I> T asList(Property<T> property, RunContext runContext, Class<I> itemClazz, Map<String, Object> variables) throws IllegalVariableEvaluationException {
|
||||
if (property.value == null) {
|
||||
String rendered = runContext.render(property.expression, variables);
|
||||
JavaType type = MAPPER.getTypeFactory().constructCollectionLikeType(List.class, itemClazz);
|
||||
try {
|
||||
String trimmedExpression = property.expression.trim();
|
||||
// We need to detect if the expression is already a list or if it's a pebble expression (for eg. referencing a variable containing a list).
|
||||
// Doing that allows us to, if it's an expression, first render then read it as a list.
|
||||
if (trimmedExpression.startsWith("{{") && trimmedExpression.endsWith("}}")) {
|
||||
property.value = MAPPER.readValue(runContext.render(property.expression, variables), type);
|
||||
}
|
||||
// Otherwise, if it's already a list, we read it as a list first then render it from run context which handle list rendering by rendering each item of the list
|
||||
else {
|
||||
List<?> asRawList = MAPPER.readValue(property.expression, List.class);
|
||||
property.value = (T) asRawList.stream()
|
||||
.map(throwFunction(item -> {
|
||||
if (item instanceof String str) {
|
||||
return MAPPER.convertValue(runContext.render(str, variables), itemClazz);
|
||||
} else if (item instanceof Map map) {
|
||||
return MAPPER.convertValue(runContext.render(map, variables), itemClazz);
|
||||
}
|
||||
return item;
|
||||
}))
|
||||
.toList();
|
||||
}
|
||||
property.value = MAPPER.readValue(rendered, type);
|
||||
} catch (JsonProcessingException e) {
|
||||
throw new IllegalVariableEvaluationException(e);
|
||||
}
|
||||
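The asList hunk above first checks whether the whole property is a Pebble expression ({{ ... }}), rendering it before parsing, and otherwise parses the literal list first so each item can be rendered individually. A hedged sketch of that detection with Jackson only; rendering is stubbed out as identity.

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.util.List;

class ExpressionOrListSketch {
    private static final ObjectMapper MAPPER = new ObjectMapper();

    // Stand-in for runContext.render(...): a real implementation would evaluate Pebble.
    static String render(String expression) {
        return expression;
    }

    static List<?> readList(String expression) throws JsonProcessingException {
        String trimmed = expression.trim();
        if (trimmed.startsWith("{{") && trimmed.endsWith("}}")) {
            // Whole property is an expression: render first, then parse the result as a list.
            return MAPPER.readValue(render(expression), List.class);
        }
        // Already a literal list: parse it, items would then be rendered one by one.
        return MAPPER.readValue(expression, List.class);
    }

    public static void main(String[] args) throws JsonProcessingException {
        System.out.println(readList("[\"a\", \"b\"]")); // [a, b]
    }
}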
@@ -190,7 +169,6 @@ public class Property<T> {
|
||||
*
|
||||
* @see io.kestra.core.runners.RunContextProperty#asMap(Class, Class, Map)
|
||||
*/
|
||||
@SuppressWarnings({"rawtypes", "unchecked"})
|
||||
public static <T, K,V> T asMap(Property<T> property, RunContext runContext, Class<K> keyClass, Class<V> valueClass, Map<String, Object> variables) throws IllegalVariableEvaluationException {
|
||||
if (property.value == null) {
|
||||
JavaType targetMapType = MAPPER.getTypeFactory().constructMapType(Map.class, keyClass, valueClass);
|
||||
|
||||
@@ -1,5 +0,0 @@
package io.kestra.core.models.tasks;

public enum FileExistComportment {
OVERWRITE, FAIL, WARN, IGNORE
}
@@ -1,5 +1,6 @@
|
||||
package io.kestra.core.models.tasks;
|
||||
|
||||
import io.kestra.core.models.annotations.PluginProperty;
|
||||
import io.kestra.core.models.property.Property;
|
||||
import io.micronaut.core.annotation.Introspected;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
@@ -10,14 +11,12 @@ import lombok.NoArgsConstructor;
|
||||
|
||||
import java.util.List;
|
||||
import jakarta.validation.Valid;
|
||||
import lombok.extern.jackson.Jacksonized;
|
||||
|
||||
@Builder
|
||||
@Getter
|
||||
@NoArgsConstructor
|
||||
@AllArgsConstructor
|
||||
@Introspected
|
||||
@Jacksonized
|
||||
public class NamespaceFiles {
|
||||
@Schema(
|
||||
title = "Whether to enable namespace files to be loaded into the working directory. If explicitly set to `true` in a task, it will load all [Namespace Files](https://kestra.io/docs/developer-guide/namespace-files) into the task's working directory. Note that this property is by default set to `true` so that you can specify only the `include` and `exclude` properties to filter the files to load without having to explicitly set `enabled` to `true`."
|
||||
@@ -36,16 +35,4 @@ public class NamespaceFiles {
|
||||
)
|
||||
@Valid
|
||||
private Property<List<String>> exclude;
|
||||
|
||||
@Schema(
|
||||
title = "A list of namespaces in which searching files. The files are loaded in the namespace order, and only the latest version of a file is kept. Meaning if a file is present in the first and second namespace, only the file present on the second namespace will be loaded."
|
||||
)
|
||||
@Builder.Default
|
||||
private Property<List<String>> namespaces = Property.of(List.of("{{flow.namespace}}"));
|
||||
|
||||
@Schema(
|
||||
title = "Comportment of the task if a file already exist in the working directory."
|
||||
)
|
||||
@Builder.Default
|
||||
private Property<FileExistComportment> ifExists = Property.of(FileExistComportment.OVERWRITE);
|
||||
}
|
||||
|
||||
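The NamespaceFiles hunk above adds an ifExists property typed by the new FileExistComportment enum, defaulting to OVERWRITE. A hypothetical sketch of how such a setting could resolve a file collision; this is not the actual Kestra implementation, and the WARN behaviour is an assumption.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;

class IfExistsSketch {
    enum FileExistComportment { OVERWRITE, FAIL, WARN, IGNORE }

    // Hypothetical handler for a file that may already exist in the working directory.
    static void copy(Path source, Path target, FileExistComportment ifExists) throws IOException {
        if (Files.exists(target)) {
            switch (ifExists) {
                case OVERWRITE -> Files.copy(source, target, StandardCopyOption.REPLACE_EXISTING);
                case FAIL -> throw new IllegalStateException(target + " already exists");
                case WARN -> System.err.println("Keeping existing file: " + target); // assumed: warn and keep
                case IGNORE -> { /* keep the existing file silently */ }
            }
        } else {
            Files.copy(source, target);
        }
    }
}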
@@ -31,8 +31,6 @@ abstract public class Task implements TaskInterface {
|
||||
|
||||
protected String type;
|
||||
|
||||
protected String version;
|
||||
|
||||
private String description;
|
||||
|
||||
@Valid
|
||||
|
||||
@@ -16,8 +16,6 @@ public class TaskForExecution implements TaskInterface {
|
||||
|
||||
protected String type;
|
||||
|
||||
protected String version;
|
||||
|
||||
protected List<TaskForExecution> tasks;
|
||||
|
||||
protected List<Input<?>> inputs;
|
||||
|
||||
@@ -2,14 +2,12 @@ package io.kestra.core.models.tasks;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonInclude;
|
||||
import io.kestra.core.models.Plugin;
|
||||
import io.kestra.core.models.PluginVersioning;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import jakarta.validation.constraints.NotBlank;
|
||||
import jakarta.validation.constraints.NotNull;
|
||||
import jakarta.validation.constraints.Pattern;
|
||||
|
||||
@JsonInclude(JsonInclude.Include.NON_DEFAULT)
|
||||
public interface TaskInterface extends Plugin, PluginVersioning {
|
||||
public interface TaskInterface extends Plugin {
|
||||
@NotNull
|
||||
@NotBlank
|
||||
@Pattern(regexp="^[a-zA-Z0-9][a-zA-Z0-9_-]*")
|
||||
@@ -18,6 +16,5 @@ public interface TaskInterface extends Plugin, PluginVersioning {
|
||||
@NotNull
|
||||
@NotBlank
|
||||
@Pattern(regexp="\\p{javaJavaIdentifierStart}\\p{javaJavaIdentifierPart}*(\\.\\p{javaJavaIdentifierStart}\\p{javaJavaIdentifierPart}*)*")
|
||||
@Schema(title = "The class name of this task.")
String getType();
}

Some files were not shown because too many files have changed in this diff