Mirror of https://github.com/kestra-io/kestra.git (synced 2025-12-26 14:00:23 -05:00)

Compare commits — comparing feat/execu... and feat/not-n... (1 commit, 5e83253ca3)
.devcontainer/Dockerfile
@@ -1,82 +0,0 @@
FROM ubuntu:24.04

ARG BUILDPLATFORM
ARG DEBIAN_FRONTEND=noninteractive

USER root
WORKDIR /root

RUN apt update && apt install -y \
    apt-transport-https ca-certificates gnupg curl wget git zip unzip less zsh net-tools iputils-ping jq lsof

ENV HOME="/root"

# --------------------------------------
# Git
# --------------------------------------
# Add the devcontainer workspace folder as a safe directory so that the git
# version control system can be used on the container's file system.
RUN git config --global --add safe.directory "/workspaces/kestra"
# --------------------------------------

# --------------------------------------
# Oh my zsh
# --------------------------------------
RUN sh -c "$(curl -fsSL https://raw.githubusercontent.com/ohmyzsh/ohmyzsh/master/tools/install.sh)" -- \
    -t robbyrussell \
    -p git -p node -p npm

ENV SHELL=/bin/zsh
# --------------------------------------

# --------------------------------------
# Java
# --------------------------------------
ARG OS_ARCHITECTURE

RUN mkdir -p /usr/java
RUN echo "Building on platform: $BUILDPLATFORM"
RUN case "$BUILDPLATFORM" in \
        "linux/amd64") OS_ARCHITECTURE="x64_linux" ;; \
        "linux/arm64") OS_ARCHITECTURE="aarch64_linux" ;; \
        "darwin/amd64") OS_ARCHITECTURE="x64_mac" ;; \
        "darwin/arm64") OS_ARCHITECTURE="aarch64_mac" ;; \
        *) echo "Unsupported BUILDPLATFORM: $BUILDPLATFORM" && exit 1 ;; \
    esac && \
    wget "https://github.com/adoptium/temurin21-binaries/releases/download/jdk-21.0.7%2B6/OpenJDK21U-jdk_${OS_ARCHITECTURE}_hotspot_21.0.7_6.tar.gz" && \
    mv OpenJDK21U-jdk_${OS_ARCHITECTURE}_hotspot_21.0.7_6.tar.gz openjdk-21.0.7.tar.gz
RUN tar -xzvf openjdk-21.0.7.tar.gz && \
    mv jdk-21.0.7+6 jdk-21 && \
    mv jdk-21 /usr/java/
ENV JAVA_HOME=/usr/java/jdk-21
ENV PATH="$PATH:$JAVA_HOME/bin"
# Will load a custom configuration file for Micronaut
ENV MICRONAUT_ENVIRONMENTS=local,override
# Sets the path where plugin JARs are saved; it is loaded during the startup process
ENV KESTRA_PLUGINS_PATH="/workspaces/kestra/local/plugins"
# --------------------------------------

# --------------------------------------
# Node.js
# --------------------------------------
RUN curl -fsSL https://deb.nodesource.com/setup_22.x -o nodesource_setup.sh \
    && bash nodesource_setup.sh && apt install -y nodejs
# Increases the JavaScript heap to 4GB to prevent out-of-memory errors during startup
ENV NODE_OPTIONS=--max-old-space-size=4096
# --------------------------------------

# --------------------------------------
# Python
# --------------------------------------
RUN apt install -y python3 pip python3-venv
# --------------------------------------

# --------------------------------------
# SSH
# --------------------------------------
RUN mkdir -p ~/.ssh
RUN touch ~/.ssh/config
RUN echo "Host github.com" >> ~/.ssh/config \
    && echo "  IdentityFile ~/.ssh/id_ed25519" >> ~/.ssh/config
RUN touch ~/.ssh/id_ed25519
# --------------------------------------
.devcontainer/README.md
@@ -1,143 +0,0 @@
# Kestra Devcontainer

This devcontainer gives anyone using VSCode a quick and easy way to get up and running with this project and start development on either the frontend or backend. It bootstraps a Docker container for you to develop inside of, without the need to set up the environment manually.

---

## INSTRUCTIONS

### Setup:

This devcontainer follows the approach described in the contributing guide, so take a look at it first to get an idea of what the setup is like: https://kestra.io/docs/getting-started/contributing

Once you have this repo cloned to your local system, install the VSCode extension [Remote Development](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.vscode-remote-extensionpack).

Then run the following command from the command palette:
`Dev Containers: Open Folder in Container...` and select your Kestra root folder.

This will put you inside a Docker container ready for development.

NOTE: you'll need to wait for the Gradle build to finish and compile the Java files, but this process should happen automatically within VSCode.

In the meantime, you can move on to the next step...

---

### Development:

- (Optional) By default, your dev server will target `localhost:8080`. If your backend is running elsewhere, create `.env.development.local` under the `ui` folder with this content:

```
VITE_APP_API_URL={myApiUrl}
```

- Navigate into the `ui` folder and run `npm install` to install the dependencies for the frontend project.

- Now go to the `cli/src/main/resources` folder and create an `application-override.yml` file.

Now you have two choices:

`Local mode`:

Runs the Kestra server in local mode, which uses an H2 database, so this is the only config you'd need:

```yaml
micronaut:
  server:
    cors:
      enabled: true
      configurations:
        all:
          allowedOrigins:
            - http://localhost:5173
```

You can then open a new terminal and run the following command to start the backend server: `./gradlew runLocal`
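Once `runLocal` reports that the server has started, you can optionally confirm the backend is reachable before wiring up the frontend. This is only a sanity-check sketch; it assumes the default port 8080 and the `/ui/` path used elsewhere in this repository's CI scripts:

```sh
# Poll the local Kestra UI until it answers with HTTP 200 (assumes the default port 8080).
until [ "$(curl -s -o /dev/null -w '%{http_code}' http://localhost:8080/ui/)" = "200" ]; do
    echo "Waiting for Kestra to start..."
    sleep 2
done
echo "Kestra is up at http://localhost:8080/ui/"
```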
`Standalone mode`:

Runs the Kestra server in standalone mode, which uses Postgres. Make sure a local Postgres instance is already running on localhost:

```yaml
kestra:
  repository:
    type: postgres
  storage:
    type: local
    local:
      base-path: "/app/storage"
  queue:
    type: postgres
  tasks:
    tmp-dir:
      path: /tmp/kestra-wd/tmp
  anonymous-usage-report:
    enabled: false

datasources:
  postgres:
    # You must use the "host.docker.internal" host when connecting to a docker container outside of your devcontainer; localhost would only point back to this devcontainer.
    url: jdbc:postgresql://host.docker.internal:5432/kestra
    driverClassName: org.postgresql.Driver
    username: kestra
    password: k3str4

flyway:
  datasources:
    postgres:
      enabled: true
      locations:
        - classpath:migrations/postgres
      # We must ignore missing migrations as we may delete the wrong ones or delete those that are not used anymore.
      ignore-migration-patterns: "*:missing,*:future"
      out-of-order: true

micronaut:
  server:
    cors:
      enabled: true
      configurations:
        all:
          allowedOrigins:
            - http://localhost:5173
```

Then add the following settings to the `.vscode/launch.json` file:

```json
{
    "version": "0.2.0",
    "configurations": [
        {
            "type": "java",
            "name": "Kestra Standalone",
            "request": "launch",
            "mainClass": "io.kestra.cli.App",
            "projectName": "cli",
            "args": "server standalone"
        }
    ]
}
```

You can then use the VSCode `Run and Debug` view to start the Kestra server.

Additionally, if you're doing frontend development, you can run `npm run dev` from the `ui` folder once the above is running (which provides a backend) and access your application at `localhost:5173`. This has the benefit of watching your changes and hot-reloading on frontend changes.

#### Plugins
If you want your plugins to be loaded inside your devcontainer, point the `source` field to a folder containing JARs of the plugins you want to embed, in the following snippet in `devcontainer.json`:
```
"mounts": [
    {
        "source": "/absolute/path/to/your/local/jar/plugins/folder",
        "target": "/workspaces/kestra/local/plugins",
        "type": "bind"
    }
],
```

---

### GIT

If you want to commit to GitHub, navigate to the `~/.ssh` folder and either create a new SSH key or overwrite the existing `id_ed25519` file by pasting an existing SSH key from your local machine into it. You will then need to change the permissions of the file by running `chmod 600 id_ed25519`. This will allow you to push to GitHub.
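As a minimal sketch of those steps from a terminal inside the container (the email comment is a placeholder; generating a new key is only one of the two options above, the other being to paste in an existing private key):

```sh
cd ~/.ssh
# Option A: generate a fresh key (then register the .pub key on GitHub);
# Option B: instead, paste an existing private key from your machine into id_ed25519.
ssh-keygen -t ed25519 -f ~/.ssh/id_ed25519 -C "you@example.com"
chmod 600 id_ed25519
ssh -T git@github.com   # should greet you once GitHub knows the key
```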
---
.devcontainer/devcontainer.json
@@ -1,46 +0,0 @@
{
    "name": "kestra",
    "build": {
        "context": ".",
        "dockerfile": "Dockerfile"
    },
    "workspaceFolder": "/workspaces/kestra",
    "forwardPorts": [5173, 8080],
    "customizations": {
        "vscode": {
            "settings": {
                "terminal.integrated.profiles.linux": {
                    "zsh": {
                        "path": "/bin/zsh"
                    }
                },
                "workbench.iconTheme": "vscode-icons",
                "editor.tabSize": 4,
                "editor.formatOnSave": true,
                "files.insertFinalNewline": true,
                "editor.defaultFormatter": "esbenp.prettier-vscode",
                "telemetry.telemetryLevel": "off",
                "editor.bracketPairColorization.enabled": true,
                "editor.guides.bracketPairs": "active"
            },
            "extensions": [
                "redhat.vscode-yaml",
                "dbaeumer.vscode-eslint",
                "vscode-icons-team.vscode-icons",
                "eamodio.gitlens",
                "esbenp.prettier-vscode",
                "aaron-bond.better-comments",
                "codeandstuff.package-json-upgrade",
                "andys8.jest-snippets",
                "oderwat.indent-rainbow",
                "evondev.indent-rainbow-palettes",
                "formulahendry.auto-rename-tag",
                "IronGeek.vscode-env",
                "yoavbls.pretty-ts-errors",
                "github.vscode-github-actions",
                "vscjava.vscode-java-pack",
                "docker.docker"
            ]
        }
    }
}
.github/CONTRIBUTING.md (9 changes, vendored)

@@ -37,10 +37,6 @@ The following dependencies are required to build Kestra locally:
- Docker & Docker Compose
- an IDE (Intellij IDEA, Eclipse or VS Code)

Thanks to the Kestra community, if using VSCode, you can also start development on either the frontend or backend with a bootstrapped docker container, without the need to manually set up the environment.

Check out the [README](../.devcontainer/README.md) for set-up instructions and the associated [Dockerfile](../.devcontainer/Dockerfile) in the repository to get started.

To start contributing:
- [Fork](https://docs.github.com/en/github/getting-started-with-github/fork-a-repo) the repository
- Clone the fork on your workstation:
@@ -50,7 +46,7 @@ git clone git@github.com:{YOUR_USERNAME}/kestra.git
cd kestra
```

#### Develop on the backend
#### Develop backend
The backend is made with [Micronaut](https://micronaut.io).

Open the cloned repository in your favorite IDE. In most decent IDEs, the Gradle build will be detected and all dependencies will be downloaded.
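If you prefer the terminal over IDE run configurations, a minimal sketch for starting the backend once the Gradle build has finished (this reuses the `runLocal` task described in the devcontainer README; it is not the only way to start the server):

```sh
# Start a local Kestra server (H2-backed) from the repository root.
./gradlew runLocal
```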
@@ -76,10 +72,11 @@ python3 -m pip install virtualenv
```

#### Develop on the frontend
#### Develop frontend
The frontend is made with [Vue.js](https://vuejs.org/) and located in the `/ui` folder.

- `npm install`
- create a file `ui/.env.development.local` with content `VITE_APP_API_URL=http://localhost:8080` (or your actual server url)
- `npm run dev` will start the development server with hot reload.
- The server starts by default on port 5173 and is reachable on `http://localhost:5173`
- You can run `npm run build` in order to build the front-end that will be delivered from the backend, without running the `npm run dev` above.
.github/actions/generate-translations/action.yml (57 lines, vendored, new file)

@@ -0,0 +1,57 @@
name: Generate Translations
description: "Requires the environment variable OPENAI_API_KEY to be set. This action will generate translations for the UI."

inputs:
  github-token:
    description: 'GitHub Token'
    required: true

runs:
  using: composite

  steps:
    - name: Checkout code
      uses: actions/checkout@v4
      with:
        ref: ${{ github.head_ref }}

    - name: Set up Python
      uses: actions/setup-python@v5
      with:
        python-version: "3.x"

    - name: Install Python dependencies
      shell: bash
      run: pip install gitpython openai

    - name: Generate translations
      shell: bash
      run: python ui/src/translations/generate_translations.py

    - name: Set up Node
      uses: actions/setup-node@v4
      with:
        node-version: "20.x"

    - name: Check keys matching
      shell: bash
      run: node ui/src/translations/check.js

    - name: Set up Git
      shell: bash
      run: |
        git config --global user.name "GitHub Action"
        git config --global user.email "actions@github.com"

    - name: Check for changes and commit
      env:
        GH_TOKEN: ${{ inputs.github-token }}
      shell: bash
      run: |
        git add ui/src/translations/*.json
        if git diff --cached --quiet; then
          echo "No changes to commit. Exiting with success."
          exit 0
        fi
        git commit -m "chore(translations): auto generate values for languages other than english"
        git push origin ${{ github.head_ref }}
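For reference, a sketch of how a workflow job can invoke this composite action; the job name is illustrative, while the inputs mirror how `pull-request.yml` below calls it:

```yaml
jobs:
  translations:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Translations - Generate translations
        uses: ./.github/actions/generate-translations
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
        env:
          OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
```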
.github/dependabot.yml (26 changes, vendored)

@@ -1,31 +1,26 @@
# See GitHub's docs for more information on this file:
# https://docs.github.com/en/free-pro-team@latest/github/administering-a-repository/configuration-options-for-dependency-updates

version: 2
updates:
  # Maintain dependencies for GitHub Actions
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      # Check for updates to GitHub Actions every week
      interval: "weekly"
      day: "wednesday"
      time: "08:00"
      timezone: "Europe/Paris"
    open-pull-requests-limit: 50
    labels:
      - "dependency-upgrade"
    open-pull-requests-limit: 50

  # Maintain dependencies for Gradle modules
  - package-ecosystem: "gradle"
    directory: "/"
    schedule:
      # Check for updates to Gradle modules every week
      interval: "weekly"
      day: "wednesday"
      time: "08:00"
      timezone: "Europe/Paris"
    open-pull-requests-limit: 50
    labels:
      - "dependency-upgrade"
    open-pull-requests-limit: 50

  # Maintain dependencies for NPM modules
  - package-ecosystem: "npm"
@@ -36,15 +31,8 @@ updates:
      time: "08:00"
      timezone: "Europe/Paris"
    open-pull-requests-limit: 50
    labels:
      - "dependency-upgrade"
    labels: ["dependency-upgrade"]
    ignore:
      # Ignore updates of version 1.x, as we're using the beta of 2.x (still in beta)
      # Ignore updates of version 1.x, as we're using beta of 2.x
      - dependency-name: "vue-virtual-scroller"
        versions:
          - "1.x"

      # Ignore updates to monaco-yaml, version is pinned to 5.3.1 due to patch-package script additions
      - dependency-name: "monaco-yaml"
        versions:
          - ">=5.3.2"
        versions: ["1.x"]
.github/workflows/auto-translate-ui-keys.yml (67 lines, vendored, deleted)

@@ -1,67 +0,0 @@
name: Auto-Translate UI keys and create PR

on:
  schedule:
    - cron: "0 9-21/3 * * *" # Every 3 hours from 9 AM to 9 PM
  workflow_dispatch:
    inputs:
      retranslate_modified_keys:
        description: "Whether to re-translate modified keys even if they already have translations."
        type: choice
        options:
          - "false"
          - "true"
        default: "false"
        required: false

jobs:
  translations:
    name: Translations
    runs-on: ubuntu-latest
    timeout-minutes: 10
    steps:
      - uses: actions/checkout@v5
        name: Checkout
        with:
          fetch-depth: 0

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.x"

      - name: Install Python dependencies
        run: pip install gitpython openai

      - name: Generate translations
        run: python ui/src/translations/generate_translations.py ${{ github.event.inputs.retranslate_modified_keys }}
        env:
          OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}

      - name: Set up Node
        uses: actions/setup-node@v4
        with:
          node-version: "20.x"

      - name: Set up Git
        run: |
          git config --global user.name "GitHub Action"
          git config --global user.email "actions@github.com"

      - name: Commit and create PR
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          BRANCH_NAME="chore/update-translations-$(date +%s)"
          git checkout -b $BRANCH_NAME
          git add ui/src/translations/*.json
          if git diff --cached --quiet; then
            echo "No changes to commit. Exiting with success."
            exit 0
          fi
          git commit -m "chore(core): localize to languages other than english" -m "Extended localization support by adding translations for multiple languages using English as the base. This enhances accessibility and usability for non-English-speaking users while keeping English as the source reference."
          git push -u origin $BRANCH_NAME || (git push origin --delete $BRANCH_NAME && git push -u origin $BRANCH_NAME)
          gh pr create --title "Translations from en.json" --body $'This PR was created automatically by a GitHub Action.\n\nSomeone from the @kestra-io/frontend team needs to review and merge.' --base ${{ github.ref_name }} --head $BRANCH_NAME

      - name: Check keys matching
        run: node ui/src/translations/check.js
.github/workflows/codeql-analysis.yml (4 changes, vendored)

@@ -27,7 +27,7 @@ jobs:
    steps:
      - name: Checkout repository
        uses: actions/checkout@v5
        uses: actions/checkout@v4
        with:
          # We must fetch at least the immediate parents so that if this is
          # a pull request then we can checkout the head.
@@ -62,7 +62,7 @@ jobs:
      - name: Build with Gradle
        if: ${{ matrix.language == 'java' }}
        run: ./gradlew testClasses -x :ui:assembleFrontend
        run: ./gradlew testClasses -x :ui:installFrontend -x :ui:assembleFrontend

      # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
      # If this step fails, then you should remove it and run the build manually (see below)
142
.github/workflows/docker.yml
vendored
Normal file
142
.github/workflows/docker.yml
vendored
Normal file
@@ -0,0 +1,142 @@
|
||||
name: Create Docker images on Release
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
retag-latest:
|
||||
description: 'Retag latest Docker images'
|
||||
required: true
|
||||
type: string
|
||||
default: "true"
|
||||
options:
|
||||
- "true"
|
||||
- "false"
|
||||
release-tag:
|
||||
description: 'Kestra Release Tag'
|
||||
required: false
|
||||
type: string
|
||||
plugin-version:
|
||||
description: 'Plugin version'
|
||||
required: false
|
||||
type: string
|
||||
default: "LATEST"
|
||||
env:
|
||||
PLUGIN_VERSION: ${{ github.event.inputs.plugin-version != null && github.event.inputs.plugin-version || 'LATEST' }}
|
||||
jobs:
|
||||
plugins:
|
||||
name: List Plugins
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
plugins: ${{ steps.plugins.outputs.plugins }}
|
||||
steps:
|
||||
# Checkout
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
# Get Plugins List
|
||||
- name: Get Plugins List
|
||||
uses: ./.github/actions/plugins-list
|
||||
id: plugins
|
||||
with:
|
||||
plugin-version: ${{ env.PLUGIN_VERSION }}
|
||||
docker:
|
||||
name: Publish Docker
|
||||
needs: [ plugins ]
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
image:
|
||||
- name: "-no-plugins"
|
||||
plugins: ""
|
||||
packages: ""
|
||||
python-libs: ""
|
||||
- name: ""
|
||||
plugins: ${{needs.plugins.outputs.plugins}}
|
||||
packages: python3 python3-venv python-is-python3 python3-pip nodejs npm curl zip unzip
|
||||
python-libs: kestra
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
# Vars
|
||||
- name: Set image name
|
||||
id: vars
|
||||
run: |
|
||||
if [[ "${{ inputs.release-tag }}" == "" ]]; then
|
||||
TAG=${GITHUB_REF#refs/*/}
|
||||
echo "tag=${TAG}" >> $GITHUB_OUTPUT
|
||||
else
|
||||
TAG="${{ inputs.release-tag }}"
|
||||
echo "tag=${TAG}" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
if [[ "${{ env.PLUGIN_VERSION }}" == *"-SNAPSHOT" ]]; then
|
||||
echo "plugins=--repositories=https://s01.oss.sonatype.org/content/repositories/snapshots ${{ matrix.image.plugins }}" >> $GITHUB_OUTPUT;
|
||||
else
|
||||
echo "plugins=${{ matrix.image.plugins }}" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
# Download release
|
||||
- name: Download release
|
||||
uses: robinraju/release-downloader@v1.11
|
||||
with:
|
||||
tag: ${{steps.vars.outputs.tag}}
|
||||
fileName: 'kestra-*'
|
||||
out-file-path: build/executable
|
||||
|
||||
- name: Copy exe to image
|
||||
run: |
|
||||
cp build/executable/* docker/app/kestra && chmod +x docker/app/kestra
|
||||
|
||||
# Docker setup
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
# Docker Login
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_PASSWORD }}
|
||||
|
||||
# Docker Build and push
|
||||
- name: Push to Docker Hub
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
tags: ${{ format('kestra/kestra:{0}{1}', steps.vars.outputs.tag, matrix.image.name) }}
|
||||
platforms: linux/amd64,linux/arm64
|
||||
build-args: |
|
||||
KESTRA_PLUGINS=${{ steps.vars.outputs.plugins }}
|
||||
APT_PACKAGES=${{ matrix.image.packages }}
|
||||
PYTHON_LIBRARIES=${{ matrix.image.python-libs }}
|
||||
|
||||
- name: Install regctl
|
||||
if: github.event.inputs.retag-latest == 'true'
|
||||
uses: regclient/actions/regctl-installer@main
|
||||
|
||||
- name: Retag to latest
|
||||
if: github.event.inputs.retag-latest == 'true'
|
||||
run: |
|
||||
regctl image copy ${{ format('kestra/kestra:{0}{1}', steps.vars.outputs.tag, matrix.image.name) }} ${{ format('kestra/kestra:latest{0}', matrix.image.name) }}
|
||||
|
||||
end:
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- docker
|
||||
if: always()
|
||||
env:
|
||||
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
|
||||
steps:
|
||||
|
||||
# Slack
|
||||
- name: Slack notification
|
||||
uses: Gamesight/slack-workflow-status@master
|
||||
if: ${{ always() && env.SLACK_WEBHOOK_URL != 0 }}
|
||||
with:
|
||||
repo_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
slack_webhook_url: ${{ secrets.SLACK_WEBHOOK_URL }}
|
||||
name: GitHub Actions
|
||||
icon_emoji: ':github-actions:'
|
||||
channel: 'C02DQ1A7JLR' # _int_git channel
|
||||
188
.github/workflows/e2e.yml
vendored
188
.github/workflows/e2e.yml
vendored
@@ -1,86 +1,158 @@
|
||||
name: 'E2E tests revival'
|
||||
description: 'New E2E tests implementation started by Roman. Based on playwright in npm UI project, tests Kestra OSS develop docker image. These tests are written from zero, lets make them unflaky from the start!.'
|
||||
name: 'Reusable Workflow for Running End-to-End Tests'
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 * * * *" # Every hour
|
||||
workflow_call:
|
||||
inputs:
|
||||
noInputYet:
|
||||
description: 'not input yet.'
|
||||
required: false
|
||||
tags:
|
||||
description: "Tags used for filtering tests to include for QA."
|
||||
type: string
|
||||
default: "no input"
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
noInputYet:
|
||||
description: 'not input yet.'
|
||||
required: false
|
||||
required: true
|
||||
docker-artifact-name:
|
||||
description: "The GitHub artifact containing the Kestra docker image."
|
||||
type: string
|
||||
default: "no input"
|
||||
required: false
|
||||
docker-image-tag:
|
||||
description: "The Docker image Tag for Kestra"
|
||||
default: 'kestra/kestra:develop'
|
||||
type: string
|
||||
required: true
|
||||
backend:
|
||||
description: "The Kestra backend type to be used for E2E tests."
|
||||
type: string
|
||||
required: true
|
||||
default: "postgres"
|
||||
secrets:
|
||||
GITHUB_AUTH_TOKEN:
|
||||
description: "The GitHub Token."
|
||||
required: true
|
||||
GOOGLE_SERVICE_ACCOUNT:
|
||||
description: "The Google Service Account."
|
||||
required: false
|
||||
jobs:
|
||||
check:
|
||||
timeout-minutes: 15
|
||||
timeout-minutes: 60
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
|
||||
E2E_TEST_DOCKER_DIR: ./kestra/e2e-tests/docker
|
||||
KESTRA_BASE_URL: http://127.27.27.27:8080/ui/
|
||||
steps:
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ github.token }}
|
||||
|
||||
# Checkout kestra
|
||||
- name: Checkout kestra
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
path: kestra
|
||||
|
||||
# Setup build
|
||||
- uses: kestra-io/actions/.github/actions/setup-build@main
|
||||
name: Setup - Build
|
||||
id: build
|
||||
with:
|
||||
java-enabled: true
|
||||
node-enabled: true
|
||||
python-enabled: true
|
||||
|
||||
- name: Install Npm dependencies
|
||||
run: |
|
||||
cd kestra/ui
|
||||
npm i
|
||||
npx playwright install --with-deps chromium
|
||||
# Get Docker Image
|
||||
- name: Download Kestra Image
|
||||
if: inputs.docker-artifact-name != ''
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: ${{ inputs.docker-artifact-name }}
|
||||
path: /tmp
|
||||
|
||||
- name: Run E2E Tests
|
||||
- name: Load Kestra Image
|
||||
if: inputs.docker-artifact-name != ''
|
||||
run: |
|
||||
docker load --input /tmp/${{ inputs.docker-artifact-name }}.tar
|
||||
|
||||
# Docker Compose
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v3
|
||||
if: inputs.docker-artifact-name == ''
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ github.token }}
|
||||
|
||||
# Build configuration
|
||||
- name: Create additional application configuration
|
||||
run: |
|
||||
touch ${{ env.E2E_TEST_DOCKER_DIR }}/data/application-secrets.yml
|
||||
|
||||
- name: Setup additional application configuration
|
||||
if: env.APPLICATION_SECRETS != null
|
||||
env:
|
||||
APPLICATION_SECRETS: ${{ secrets.APPLICATION_SECRETS }}
|
||||
run: |
|
||||
echo $APPLICATION_SECRETS | base64 -d > ${{ env.E2E_TEST_DOCKER_DIR }}/data/application-secrets.yml
|
||||
|
||||
# Deploy Docker Compose Stack
|
||||
- name: Run Kestra (${{ inputs.backend }})
|
||||
env:
|
||||
KESTRA_DOCKER_IMAGE: ${{ inputs.docker-image-tag }}
|
||||
run: |
|
||||
cd ${{ env.E2E_TEST_DOCKER_DIR }}
|
||||
echo "KESTRA_DOCKER_IMAGE=$KESTRA_DOCKER_IMAGE" >> .env
|
||||
docker compose -f docker-compose-${{ inputs.backend }}.yml up -d
|
||||
|
||||
- name: Install Playwright Deps
|
||||
run: |
|
||||
cd kestra
|
||||
sh build-and-start-e2e-tests.sh
|
||||
./gradlew playwright --args="install-deps"
|
||||
|
||||
- name: Upload Playwright Report as Github artifact
|
||||
# 'With this report, you can analyze locally the results of the tests. see https://playwright.dev/docs/ci-intro#html-report'
|
||||
uses: actions/upload-artifact@v4
|
||||
if: ${{ !cancelled() }}
|
||||
# Run E2E Tests
|
||||
- name: Wait For Kestra UI
|
||||
run: |
|
||||
# Start time
|
||||
START_TIME=$(date +%s)
|
||||
# Timeout duration in seconds (5 minutes)
|
||||
TIMEOUT_DURATION=$((5 * 60))
|
||||
while [ $(curl -s -L -o /dev/null -w %{http_code} $KESTRA_BASE_URL) != 200 ]; do
|
||||
echo -e $(date) "\tKestra server HTTP state: " $(curl -k -L -s -o /dev/null -w %{http_code} $KESTRA_BASE_URL) " (waiting for 200)";
|
||||
# Check the elapsed time
|
||||
CURRENT_TIME=$(date +%s)
|
||||
ELAPSED_TIME=$((CURRENT_TIME - START_TIME))
|
||||
# Break the loop if the elapsed time exceeds the timeout duration
|
||||
if [ $ELAPSED_TIME -ge $TIMEOUT_DURATION ]; then
|
||||
echo "Timeout reached: Exiting after 5 minutes."
|
||||
exit 1;
|
||||
fi
|
||||
sleep 2;
|
||||
done;
|
||||
echo "Kestra is running: $KESTRA_BASE_URL 🚀";
|
||||
continue-on-error: true
|
||||
|
||||
- name: Run E2E Tests (${{ inputs.tags }})
|
||||
if: inputs.tags != ''
|
||||
run: |
|
||||
cd kestra
|
||||
./gradlew e2eTestsCheck -P tags=${{ inputs.tags }}
|
||||
|
||||
- name: Run E2E Tests
|
||||
if: inputs.tags == ''
|
||||
run: |
|
||||
cd kestra
|
||||
./gradlew e2eTestsCheck
|
||||
|
||||
# Allure check
|
||||
- name: Auth to Google Cloud
|
||||
id: auth
|
||||
if: ${{ !cancelled() && env.GOOGLE_SERVICE_ACCOUNT != 0 }}
|
||||
uses: 'google-github-actions/auth@v2'
|
||||
with:
|
||||
name: playwright-report
|
||||
path: kestra/ui/playwright-report/
|
||||
retention-days: 7
|
||||
# Allure check
|
||||
# TODO I don't know what it should do
|
||||
# - uses: rlespinasse/github-slug-action@v5
|
||||
# name: Allure - Generate slug variables
|
||||
#
|
||||
# - name: Allure - Publish report
|
||||
# uses: andrcuns/allure-publish-action@v2.9.0
|
||||
# if: always() && env.GOOGLE_SERVICE_ACCOUNT != ''
|
||||
# continue-on-error: true
|
||||
# env:
|
||||
# GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_AUTH_TOKEN }}
|
||||
# JAVA_HOME: /usr/lib/jvm/default-jvm/
|
||||
# with:
|
||||
# storageType: gcs
|
||||
# resultsGlob: "**/build/allure-results"
|
||||
# bucket: internal-kestra-host
|
||||
# baseUrl: "https://internal.dev.kestra.io"
|
||||
# prefix: ${{ format('{0}/{1}', github.repository, 'allure/java') }}
|
||||
# copyLatest: true
|
||||
# ignoreMissingResults: true
|
||||
credentials_json: '${{ secrets.GOOGLE_SERVICE_ACCOUNT }}'
|
||||
|
||||
- uses: rlespinasse/github-slug-action@v5
|
||||
|
||||
- name: Publish allure report
|
||||
uses: andrcuns/allure-publish-action@v2.9.0
|
||||
if: ${{ !cancelled() && env.GOOGLE_SERVICE_ACCOUNT != 0 }}
|
||||
env:
|
||||
GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_AUTH_TOKEN }}
|
||||
JAVA_HOME: /usr/lib/jvm/default-jvm/
|
||||
with:
|
||||
storageType: gcs
|
||||
resultsGlob: build/allure-results
|
||||
bucket: internal-kestra-host
|
||||
baseUrl: "https://internal.dev.kestra.io"
|
||||
prefix: ${{ format('{0}/{1}/{2}', github.repository, env.GITHUB_HEAD_REF_SLUG != '' && env.GITHUB_HEAD_REF_SLUG || github.ref_name, 'allure/playwright') }}
|
||||
copyLatest: true
|
||||
ignoreMissingResults: true
|
||||
|
||||
4
.github/workflows/gradle-release-plugins.yml
vendored
4
.github/workflows/gradle-release-plugins.yml
vendored
@@ -21,12 +21,12 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
# Checkout
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
# Checkout GitHub Actions
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
repository: kestra-io/actions
|
||||
path: actions
|
||||
|
||||
17
.github/workflows/gradle-release.yml
vendored
17
.github/workflows/gradle-release.yml
vendored
@@ -27,19 +27,18 @@ jobs:
|
||||
echo "Invalid release version. Must match regex: ^[0-9]+(\.[0-9]+)\.0-rc[01](-SNAPSHOT)?$"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
|
||||
if ! [[ "$NEXT_VERSION" =~ ^[0-9]+(\.[0-9]+)\.0-SNAPSHOT$ ]]; then
|
||||
echo "Invalid next version. Must match regex: ^[0-9]+(\.[0-9]+)\.0-SNAPSHOT$"
|
||||
exit 1;
|
||||
fi
|
||||
# Checkout
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
path: kestra
|
||||
|
||||
# Checkout GitHub Actions
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
repository: kestra-io/actions
|
||||
path: actions
|
||||
@@ -63,20 +62,18 @@ jobs:
|
||||
- name: Run Gradle Release
|
||||
env:
|
||||
GITHUB_PAT: ${{ secrets.GH_PERSONAL_TOKEN }}
|
||||
run: |
|
||||
run: |
|
||||
# Extract the major and minor versions
|
||||
BASE_VERSION=$(echo "$RELEASE_VERSION" | sed -E 's/^([0-9]+\.[0-9]+)\..*/\1/')
|
||||
PUSH_RELEASE_BRANCH="releases/v${BASE_VERSION}.x"
|
||||
|
||||
cd kestra
|
||||
|
||||
|
||||
# Create and push release branch
|
||||
git checkout -b "$PUSH_RELEASE_BRANCH";
|
||||
git push -u origin "$PUSH_RELEASE_BRANCH";
|
||||
|
||||
|
||||
# Run gradle release
|
||||
git checkout develop;
|
||||
|
||||
|
||||
if [[ "$RELEASE_VERSION" == *"-SNAPSHOT" ]]; then
|
||||
# -SNAPSHOT qualifier maybe used to test release-candidates
|
||||
./gradlew release -Prelease.useAutomaticVersion=true \
|
||||
|
||||
26
.github/workflows/main.yml
vendored
26
.github/workflows/main.yml
vendored
@@ -4,8 +4,9 @@ on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
plugin-version:
|
||||
description: "plugins version"
|
||||
required: false
|
||||
description: "Kestra version"
|
||||
default: 'LATEST'
|
||||
required: true
|
||||
type: string
|
||||
push:
|
||||
branches:
|
||||
@@ -17,7 +18,7 @@ on:
|
||||
- v*
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}-main
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
@@ -30,10 +31,9 @@ jobs:
|
||||
release:
|
||||
name: Release
|
||||
needs: [tests]
|
||||
if: "!startsWith(github.ref, 'refs/heads/releases')"
|
||||
uses: ./.github/workflows/workflow-release.yml
|
||||
with:
|
||||
plugin-version: ${{ inputs.plugin-version != '' && inputs.plugin-version || (github.ref == 'refs/heads/develop' && 'LATEST-SNAPSHOT' || 'LATEST') }}
|
||||
plugin-version: ${{ github.event.inputs.plugin-version != null && github.event.inputs.plugin-version || 'LATEST' }}
|
||||
secrets:
|
||||
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }}
|
||||
@@ -42,8 +42,7 @@ jobs:
|
||||
SONATYPE_GPG_KEYID: ${{ secrets.SONATYPE_GPG_KEYID }}
|
||||
SONATYPE_GPG_PASSWORD: ${{ secrets.SONATYPE_GPG_PASSWORD }}
|
||||
SONATYPE_GPG_FILE: ${{ secrets.SONATYPE_GPG_FILE }}
|
||||
GH_PERSONAL_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}
|
||||
SLACK_RELEASES_WEBHOOK_URL: ${{ secrets.SLACK_RELEASES_WEBHOOK_URL }}
|
||||
|
||||
end:
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
@@ -52,14 +51,15 @@ jobs:
|
||||
env:
|
||||
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
|
||||
steps:
|
||||
- name: Trigger EE Workflow
|
||||
uses: peter-evans/repository-dispatch@v3
|
||||
if: github.ref == 'refs/heads/develop' && needs.release.result == 'success'
|
||||
# Update
|
||||
- name: Github - Update internal
|
||||
uses: benc-uk/workflow-dispatch@v1
|
||||
if: github.ref == 'refs/heads/develop' && needs.docker.result == 'success'
|
||||
with:
|
||||
workflow: oss-build.yml
|
||||
repo: kestra-io/infra
|
||||
ref: master
|
||||
token: ${{ secrets.GH_PERSONAL_TOKEN }}
|
||||
repository: kestra-io/kestra-ee
|
||||
event-type: "oss-updated"
|
||||
|
||||
|
||||
# Slack
|
||||
- name: Slack - Notification
|
||||
|
||||
36
.github/workflows/pull-request.yml
vendored
36
.github/workflows/pull-request.yml
vendored
@@ -6,15 +6,11 @@ on:
|
||||
- develop
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref_name }}-pr
|
||||
group: ${{ github.workflow }}-${{ github.ref_name }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
# ********************************************************************************************************************
|
||||
# File changes detection
|
||||
# ********************************************************************************************************************
|
||||
file-changes:
|
||||
if: ${{ github.event.pull_request.draft == false }}
|
||||
name: File changes detection
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 60
|
||||
@@ -29,16 +25,34 @@ jobs:
|
||||
filters: |
|
||||
ui:
|
||||
- 'ui/**'
|
||||
translations:
|
||||
- 'ui/src/translations/**'
|
||||
backend:
|
||||
- '!{ui,.github}/**'
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
# ********************************************************************************************************************
|
||||
# Tests
|
||||
# ********************************************************************************************************************
|
||||
translations:
|
||||
name: 'Translations - Generate translations'
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 60
|
||||
needs: file-changes
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
name: Checkout - Current ref
|
||||
if: "needs.file-changes.outputs.translations == 'true'"
|
||||
|
||||
- id: generate-translations
|
||||
name: Translations - Generate translations
|
||||
if: "needs.file-changes.outputs.translations == 'true'"
|
||||
uses: ./.github/actions/generate-translations
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
env:
|
||||
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
|
||||
|
||||
frontend:
|
||||
name: Frontend - Tests
|
||||
needs: [file-changes]
|
||||
needs: [file-changes, translations]
|
||||
if: "needs.file-changes.outputs.ui == 'true'"
|
||||
uses: ./.github/workflows/workflow-frontend-test.yml
|
||||
secrets:
|
||||
@@ -56,10 +70,6 @@ jobs:
|
||||
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
|
||||
GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
|
||||
|
||||
e2e-tests:
|
||||
name: E2E - Tests
|
||||
uses: ./.github/workflows/e2e.yml
|
||||
|
||||
end:
|
||||
name: End
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
2
.github/workflows/setversion-tag-plugins.yml
vendored
2
.github/workflows/setversion-tag-plugins.yml
vendored
@@ -17,7 +17,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
# Checkout
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
|
||||
11
.github/workflows/setversion-tag.yml
vendored
11
.github/workflows/setversion-tag.yml
vendored
@@ -22,19 +22,20 @@ jobs:
|
||||
echo "Invalid release version. Must match regex: ^[0-9]+(\.[0-9]+)(\.[0-9]+)-(rc[0-9])?(-SNAPSHOT)?$"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
|
||||
CURRENT_BRANCH="{{ github.ref }}"
|
||||
|
||||
# Extract the major and minor versions
|
||||
BASE_VERSION=$(echo "$RELEASE_VERSION" | sed -E 's/^([0-9]+\.[0-9]+)\..*/\1/')
|
||||
RELEASE_BRANCH="refs/heads/releases/v${BASE_VERSION}.x"
|
||||
|
||||
CURRENT_BRANCH="$GITHUB_REF"
|
||||
|
||||
if ! [[ "$CURRENT_BRANCH" == "$RELEASE_BRANCH" ]]; then
|
||||
echo "Invalid release branch. Expected $RELEASE_BRANCH, was $CURRENT_BRANCH"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Checkout
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
@@ -54,4 +55,4 @@ jobs:
|
||||
git commit -m"chore(version): update to version '$RELEASE_VERSION'"
|
||||
git push
|
||||
git tag -a "v$RELEASE_VERSION" -m"v$RELEASE_VERSION"
|
||||
git push --tags
|
||||
git push origin "v$RELEASE_VERSION"
|
||||
50
.github/workflows/vulnerabilities-check.yml
vendored
50
.github/workflows/vulnerabilities-check.yml
vendored
@@ -8,21 +8,18 @@ on:
|
||||
env:
|
||||
JAVA_VERSION: '21'
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
dependency-check:
|
||||
name: Dependency Check
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
# Checkout
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
# Checkout GitHub Actions
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
repository: kestra-io/actions
|
||||
path: actions
|
||||
@@ -40,7 +37,7 @@ jobs:
|
||||
- name: Npm - Install
|
||||
shell: bash
|
||||
working-directory: ui
|
||||
run: npm ci
|
||||
run: npm install
|
||||
|
||||
# Run OWASP dependency check plugin
|
||||
- name: Gradle Dependency Check
|
||||
@@ -60,18 +57,14 @@ jobs:
|
||||
develop-image-check:
|
||||
name: Image Check (develop)
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
actions: read
|
||||
steps:
|
||||
# Checkout
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
# Checkout GitHub Actions
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
repository: kestra-io/actions
|
||||
path: actions
|
||||
@@ -87,36 +80,24 @@ jobs:
|
||||
|
||||
# Run Trivy image scan for Docker vulnerabilities, see https://github.com/aquasecurity/trivy-action
|
||||
- name: Docker Vulnerabilities Check
|
||||
uses: aquasecurity/trivy-action@0.32.0
|
||||
uses: aquasecurity/trivy-action@0.29.0
|
||||
with:
|
||||
image-ref: kestra/kestra:develop
|
||||
format: 'template'
|
||||
template: '@/contrib/sarif.tpl'
|
||||
severity: 'CRITICAL,HIGH'
|
||||
output: 'trivy-results.sarif'
|
||||
format: table
|
||||
skip-dirs: /app/plugins
|
||||
|
||||
- name: Upload Trivy scan results to GitHub Security tab
|
||||
uses: github/codeql-action/upload-sarif@v3
|
||||
with:
|
||||
sarif_file: 'trivy-results.sarif'
|
||||
category: docker-
|
||||
scanners: vuln
|
||||
|
||||
latest-image-check:
|
||||
name: Image Check (latest)
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
actions: read
|
||||
steps:
|
||||
# Checkout
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
# Checkout GitHub Actions
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
repository: kestra-io/actions
|
||||
path: actions
|
||||
@@ -132,16 +113,9 @@ jobs:
|
||||
|
||||
# Run Trivy image scan for Docker vulnerabilities, see https://github.com/aquasecurity/trivy-action
|
||||
- name: Docker Vulnerabilities Check
|
||||
uses: aquasecurity/trivy-action@0.32.0
|
||||
uses: aquasecurity/trivy-action@0.29.0
|
||||
with:
|
||||
image-ref: kestra/kestra:latest
|
||||
format: table
|
||||
skip-dirs: /app/plugins
|
||||
scanners: vuln
|
||||
severity: 'CRITICAL,HIGH'
|
||||
output: 'trivy-results.sarif'
|
||||
|
||||
- name: Upload Trivy scan results to GitHub Security tab
|
||||
uses: github/codeql-action/upload-sarif@v3
|
||||
with:
|
||||
sarif_file: 'trivy-results.sarif'
|
||||
scanners: vuln
|
||||
12
.github/workflows/workflow-backend-test.yml
vendored
12
.github/workflows/workflow-backend-test.yml
vendored
@@ -16,11 +16,6 @@ on:
|
||||
description: 'Google Service Account'
|
||||
required: true
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
checks: write
|
||||
actions: read
|
||||
|
||||
jobs:
|
||||
test:
|
||||
name: Backend - Tests
|
||||
@@ -29,10 +24,8 @@ jobs:
|
||||
GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
|
||||
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/checkout@v4
|
||||
name: Checkout - Current ref
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
# Setup build
|
||||
- uses: kestra-io/actions/.github/actions/setup-build@main
|
||||
@@ -61,7 +54,7 @@ jobs:
|
||||
|
||||
# report test
|
||||
- name: Test - Publish Test Results
|
||||
uses: dorny/test-reporter@v2
|
||||
uses: dorny/test-reporter@v1
|
||||
if: always()
|
||||
with:
|
||||
name: Java Tests Report
|
||||
@@ -70,7 +63,6 @@ jobs:
|
||||
list-suites: 'failed'
|
||||
list-tests: 'failed'
|
||||
fail-on-error: 'false'
|
||||
token: ${{ secrets.GITHUB_AUTH_TOKEN }}
|
||||
|
||||
# Sonar
|
||||
- name: Test - Analyze with Sonar
|
||||
|
||||
73
.github/workflows/workflow-build-artifacts.yml
vendored
73
.github/workflows/workflow-build-artifacts.yml
vendored
@@ -1,7 +1,23 @@
|
||||
name: Build Artifacts
|
||||
|
||||
on:
|
||||
workflow_call: {}
|
||||
workflow_call:
|
||||
inputs:
|
||||
plugin-version:
|
||||
description: "Kestra version"
|
||||
default: 'LATEST'
|
||||
required: true
|
||||
type: string
|
||||
outputs:
|
||||
docker-tag:
|
||||
value: ${{ jobs.build.outputs.docker-tag }}
|
||||
description: "The Docker image Tag for Kestra"
|
||||
docker-artifact-name:
|
||||
value: ${{ jobs.build.outputs.docker-artifact-name }}
|
||||
description: "The GitHub artifact containing the Kestra docker image name."
|
||||
plugins:
|
||||
value: ${{ jobs.build.outputs.plugins }}
|
||||
description: "The Kestra plugins list used for the build."
|
||||
|
||||
jobs:
|
||||
build:
|
||||
@@ -15,7 +31,7 @@ jobs:
|
||||
PLUGIN_VERSION: ${{ github.event.inputs.plugin-version != null && github.event.inputs.plugin-version || 'LATEST' }}
|
||||
steps:
|
||||
- name: Checkout - Current ref
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
@@ -52,7 +68,7 @@ jobs:
|
||||
if [[ $TAG = "master" || $TAG == v* ]]; then
|
||||
echo "plugins=$PLUGINS" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "plugins=--repositories=https://central.sonatype.com/repository/maven-snapshots/ $PLUGINS" >> $GITHUB_OUTPUT
|
||||
echo "plugins=--repositories=https://s01.oss.sonatype.org/content/repositories/snapshots $PLUGINS" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
# Build
|
||||
@@ -66,6 +82,50 @@ jobs:
|
||||
run: |
|
||||
cp build/executable/* docker/app/kestra && chmod +x docker/app/kestra
|
||||
|
||||
# Docker Tag
|
||||
- name: Setup - Docker vars
|
||||
id: vars
|
||||
shell: bash
|
||||
run: |
|
||||
TAG=${GITHUB_REF#refs/*/}
|
||||
if [[ $TAG = "master" ]]
|
||||
then
|
||||
TAG="latest";
|
||||
elif [[ $TAG = "develop" ]]
|
||||
then
|
||||
TAG="develop";
|
||||
elif [[ $TAG = v* ]]
|
||||
then
|
||||
TAG="${TAG}";
|
||||
else
|
||||
TAG="build-${{ github.run_id }}";
|
||||
fi
|
||||
echo "tag=${TAG}" >> $GITHUB_OUTPUT
|
||||
echo "artifact=docker-kestra-${TAG}" >> $GITHUB_OUTPUT
|
||||
|
||||
# Docker setup
|
||||
- name: Docker - Setup QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
- name: Docker - Setup Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
# Docker Build
|
||||
- name: Docker - Build & export image
|
||||
uses: docker/build-push-action@v6
|
||||
if: "!startsWith(github.ref, 'refs/tags/v')"
|
||||
with:
|
||||
context: .
|
||||
push: false
|
||||
file: Dockerfile
|
||||
tags: |
|
||||
kestra/kestra:${{ steps.vars.outputs.tag }}
|
||||
build-args: |
|
||||
KESTRA_PLUGINS=${{ steps.plugins.outputs.plugins }}
|
||||
APT_PACKAGES=${{ env.DOCKER_APT_PACKAGES }}
|
||||
PYTHON_LIBRARIES=${{ env.DOCKER_PYTHON_LIBRARIES }}
|
||||
outputs: type=docker,dest=/tmp/${{ steps.vars.outputs.artifact }}.tar
|
||||
|
||||
# Upload artifacts
|
||||
- name: Artifacts - Upload JAR
|
||||
uses: actions/upload-artifact@v4
|
||||
@@ -78,3 +138,10 @@ jobs:
|
||||
with:
|
||||
name: exe
|
||||
path: build/executable/
|
||||
|
||||
- name: Artifacts - Upload Docker
|
||||
uses: actions/upload-artifact@v4
|
||||
if: "!startsWith(github.ref, 'refs/tags/v')"
|
||||
with:
|
||||
name: ${{ steps.vars.outputs.artifact }}
|
||||
path: /tmp/${{ steps.vars.outputs.artifact }}.tar
|
||||
|
||||
58
.github/workflows/workflow-frontend-test.yml
vendored
58
.github/workflows/workflow-frontend-test.yml
vendored
@@ -19,27 +19,14 @@ jobs:
|
||||
name: Frontend - Tests
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- name: Cache Node Modules
|
||||
id: cache-node-modules
|
||||
uses: actions/cache@v4
|
||||
- id: checkout
|
||||
name: Checkout - Current ref
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
path: |
|
||||
ui/node_modules
|
||||
key: modules-${{ hashFiles('ui/package-lock.json') }}
|
||||
|
||||
- name: Cache Playwright Binaries
|
||||
id: cache-playwright
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: |
|
||||
~/.cache/ms-playwright
|
||||
key: playwright-${{ hashFiles('ui/package-lock.json') }}
|
||||
ref: ${{ github.head_ref }}
|
||||
|
||||
- name: Npm - install
|
||||
if: steps.cache-node-modules.outputs.cache-hit != 'true'
|
||||
shell: bash
|
||||
working-directory: ui
|
||||
run: npm ci
|
||||
|
||||
@@ -51,20 +38,47 @@ jobs:
|
||||
workdir: ui
|
||||
|
||||
- name: Npm - Run build
|
||||
shell: bash
|
||||
working-directory: ui
|
||||
env:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
run: npm run build
|
||||
|
||||
- name: Run front-end unit tests
|
||||
shell: bash
|
||||
working-directory: ui
|
||||
run: npm run test:unit -- --coverage
|
||||
run: npm run test:cicd
|
||||
|
||||
- name: Storybook - Install Playwright
|
||||
shell: bash
|
||||
working-directory: ui
|
||||
if: steps.cache-playwright.outputs.cache-hit != 'true'
|
||||
run: npx playwright install --with-deps
|
||||
|
||||
- name: Run storybook component tests
|
||||
- name: Storybook - Build
|
||||
shell: bash
|
||||
working-directory: ui
|
||||
run: npm run test:storybook -- --coverage
|
||||
run: npm run build-storybook --quiet
|
||||
|
||||
- name: Storybook - Run tests
|
||||
shell: bash
|
||||
working-directory: ui
|
||||
run: |
|
||||
npx concurrently -k -s first -n "SB,TEST" -c "magenta,blue" \
|
||||
"npx http-server storybook-static --port 6006 --silent" \
|
||||
"npx wait-on tcp:127.0.0.1:6006 && npm run test:storybook"
|
||||
|
||||
- name: Codecov - Upload coverage reports
|
||||
uses: codecov/codecov-action@v5
|
||||
if: ${{ !cancelled() && github.event.pull_request.head.repo.full_name == github.repository }}
|
||||
continue-on-error: true
|
||||
with:
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
flags: frontend
|
||||
|
||||
- name: Codecov - Upload test results
|
||||
uses: codecov/test-results-action@v1
|
||||
if: ${{ !cancelled() }}
|
||||
continue-on-error: true
|
||||
with:
|
||||
token: ${{ secrets.CODECOV_TOKEN && github.event.pull_request.head.repo.full_name == github.repository }}
|
||||
flags: frontend
|
||||
66
.github/workflows/workflow-github-release.yml
vendored
66
.github/workflows/workflow-github-release.yml
vendored
@@ -1,74 +1,41 @@
|
||||
name: Github - Release
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
workflow_call:
|
||||
secrets:
|
||||
GH_PERSONAL_TOKEN:
|
||||
description: "The Github personal token."
|
||||
required: true
|
||||
SLACK_RELEASES_WEBHOOK_URL:
|
||||
description: "The Slack webhook URL."
|
||||
required: true
|
||||
|
||||
|
||||
|
||||
jobs:
|
||||
publish:
|
||||
name: Github - Release
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
# Check out
|
||||
- name: Checkout - Repository
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
fetch-depth: 0
|
||||
submodules: true
|
||||
|
||||
# Checkout GitHub Actions
|
||||
- name: Checkout - Actions
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
repository: kestra-io/actions
|
||||
sparse-checkout-cone-mode: true
|
||||
path: actions
|
||||
sparse-checkout: |
|
||||
.github/actions
|
||||
|
||||
# Download Exec
|
||||
# Must be done after checkout actions
|
||||
- name: Artifacts - Download executable
|
||||
uses: actions/download-artifact@v5
|
||||
uses: actions/download-artifact@v4
|
||||
if: startsWith(github.ref, 'refs/tags/v')
|
||||
with:
|
||||
name: exe
|
||||
path: build/executable
|
||||
|
||||
- name: Check if current tag is latest
|
||||
id: is_latest
|
||||
run: |
|
||||
latest_tag=$(git tag | grep -E '^v[0-9]+\.[0-9]+\.[0-9]+$' | sed 's/^v//' | sort -V | tail -n1)
|
||||
current_tag="${GITHUB_REF_NAME#v}"
|
||||
if [ "$current_tag" = "$latest_tag" ]; then
|
||||
echo "latest=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "latest=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
env:
|
||||
GITHUB_REF_NAME: ${{ github.ref_name }}
|
||||
|
||||
# GitHub Release
|
||||
- name: Create GitHub release
|
||||
uses: ./actions/.github/actions/github-release
|
||||
if: ${{ startsWith(github.ref, 'refs/tags/v') }}
|
||||
env:
|
||||
MAKE_LATEST: ${{ steps.is_latest.outputs.latest }}
|
||||
GITHUB_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}
|
||||
SLACK_RELEASES_WEBHOOK_URL: ${{ secrets.SLACK_RELEASES_WEBHOOK_URL }}
|
||||
- name: GitHub - Create release
|
||||
id: create_github_release
|
||||
uses: "marvinpinto/action-automatic-releases@latest"
|
||||
if: startsWith(github.ref, 'refs/tags/v')
|
||||
continue-on-error: true
|
||||
with:
|
||||
repo_token: "${{ secrets.GITHUB_TOKEN }}"
|
||||
prerelease: false
|
||||
files: |
|
||||
build/executable/*
|
||||
|
||||
# Trigger gha workflow to bump helm chart version
|
||||
- name: GitHub - Trigger the Helm chart version bump
|
||||
uses: peter-evans/repository-dispatch@v3
|
||||
if: steps.create_github_release.conclusion == 'success'
|
||||
with:
|
||||
token: ${{ secrets.GH_PERSONAL_TOKEN }}
|
||||
repository: kestra-io/helm-charts
|
||||
@@ -78,11 +45,4 @@ jobs:
|
||||
"new_version": "${{ github.ref_name }}",
|
||||
"github_repository": "${{ github.repository }}",
|
||||
"github_actor": "${{ github.actor }}"
|
||||
}
|
||||
|
||||
- name: Merge Release Notes
|
||||
if: ${{ startsWith(github.ref, 'refs/tags/v') }}
|
||||
uses: ./actions/.github/actions/github-release-note-merge
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}
|
||||
RELEASE_TAG: ${{ github.ref_name }}
|
||||
}
|
||||
206
.github/workflows/workflow-publish-docker.yml
vendored
206
.github/workflows/workflow-publish-docker.yml
vendored
@@ -1,45 +1,7 @@
|
||||
name: Create Docker images on Release
|
||||
name: Publish - Docker
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
retag-latest:
|
||||
description: 'Retag latest Docker images'
|
||||
required: true
|
||||
type: choice
|
||||
default: "false"
|
||||
options:
|
||||
- "true"
|
||||
- "false"
|
||||
release-tag:
|
||||
description: 'Kestra Release Tag (by default, deduced with the ref)'
|
||||
required: false
|
||||
type: string
|
||||
plugin-version:
|
||||
description: 'Plugin version'
|
||||
required: false
|
||||
type: string
|
||||
default: "LATEST"
|
||||
force-download-artifact:
|
||||
description: 'Force download artifact'
|
||||
required: false
|
||||
type: choice
|
||||
default: "true"
|
||||
options:
|
||||
- "true"
|
||||
- "false"
|
||||
workflow_call:
|
||||
inputs:
|
||||
plugin-version:
|
||||
description: "Plugin version"
|
||||
default: 'LATEST'
|
||||
required: false
|
||||
type: string
|
||||
force-download-artifact:
|
||||
description: 'Force download artifact'
|
||||
required: false
|
||||
type: string
|
||||
default: "true"
|
||||
secrets:
|
||||
DOCKERHUB_USERNAME:
|
||||
description: "The Dockerhub username."
|
||||
@@ -48,153 +10,79 @@ on:
|
||||
description: "The Dockerhub password."
|
||||
required: true
|
||||
|
||||
env:
|
||||
PLUGIN_VERSION: ${{ inputs.plugin-version != null && inputs.plugin-version || 'LATEST' }}
|
||||
jobs:
|
||||
plugins:
|
||||
name: List Plugins
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
plugins: ${{ steps.plugins.outputs.plugins }}
|
||||
steps:
|
||||
# Checkout
|
||||
- uses: actions/checkout@v5
|
||||
|
||||
# Get Plugins List
|
||||
- name: Get Plugins List
|
||||
uses: ./.github/actions/plugins-list
|
||||
id: plugins
|
||||
with: # remap LATEST-SNAPSHOT to LATEST
|
||||
plugin-version: ${{ env.PLUGIN_VERSION == 'LATEST-SNAPSHOT' && 'LATEST' || env.PLUGIN_VERSION }}
|
||||
|
||||
# ********************************************************************************************************************
|
||||
# Build
|
||||
# ********************************************************************************************************************
|
||||
build-artifacts:
|
||||
name: Build Artifacts
|
||||
if: ${{ inputs.force-download-artifact == 'true' }}
|
||||
name: Build - Artifacts
|
||||
uses: ./.github/workflows/workflow-build-artifacts.yml
|
||||
with:
|
||||
plugin-version: ${{ github.event.inputs.plugin-version != null && github.event.inputs.plugin-version || 'LATEST' }}
|
||||
|
||||
docker:
|
||||
name: Publish Docker
|
||||
needs: [ plugins, build-artifacts ]
|
||||
if: always()
|
||||
publish:
|
||||
name: Publish - Docker
|
||||
needs: build-artifacts
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
image:
|
||||
- name: "-no-plugins"
|
||||
plugins: ""
|
||||
packages: jattach
|
||||
python-libs: ""
|
||||
- name: ""
|
||||
plugins: ${{needs.plugins.outputs.plugins}}
|
||||
packages: python3 python-is-python3 python3-pip curl jattach
|
||||
python-libs: kestra
|
||||
- tag: ${{ needs.build-artifacts.outputs.docker-tag }}-no-plugins
|
||||
packages: ""
|
||||
python-libraries: ""
|
||||
|
||||
- tag: ${{ needs.build-artifacts.outputs.docker-tag }}
|
||||
plugins: ${{ needs.build-artifacts.outputs.plugins }}
|
||||
packages: python3 python3-venv python-is-python3 python3-pip nodejs npm curl zip unzip
|
||||
python-libraries: kestra
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
|
||||
# Vars
|
||||
- name: Set image name
|
||||
id: vars
|
||||
run: |
|
||||
if [[ "${{ inputs.release-tag }}" == "" ]]; then
|
||||
TAG=${GITHUB_REF#refs/*/}
|
||||
echo "tag=${TAG}" >> $GITHUB_OUTPUT
|
||||
else
|
||||
TAG="${{ inputs.release-tag }}"
|
||||
echo "tag=${TAG}" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
if [[ $GITHUB_REF == refs/tags/* ]]; then
|
||||
if [[ $TAG =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
|
||||
# this will remove the patch version number
|
||||
MINOR_SEMVER=${TAG%.*}
|
||||
echo "minor_semver=${MINOR_SEMVER}" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "Tag '$TAG' is not a valid semver (vMAJOR.MINOR.PATCH), skipping minor_semver"
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ "${{ env.PLUGIN_VERSION }}" == *"-SNAPSHOT" ]]; then
|
||||
echo "plugins=--repositories=https://central.sonatype.com/repository/maven-snapshots/ ${{ matrix.image.plugins }}" >> $GITHUB_OUTPUT;
|
||||
else
|
||||
echo "plugins=${{ matrix.image.plugins }}" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
# Download executable from artifact
|
||||
- name: Artifacts - Download executable
|
||||
uses: actions/download-artifact@v5
|
||||
with:
|
||||
name: exe
|
||||
path: build/executable
|
||||
|
||||
- name: Copy exe to image
|
||||
run: |
|
||||
cp build/executable/* docker/app/kestra && chmod +x docker/app/kestra
|
||||
- name: Checkout - Current ref
|
||||
uses: actions/checkout@v4
|
||||
|
||||
# Docker setup
|
||||
- name: Set up QEMU
|
||||
- name: Docker - Setup QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
- name: Docker - Fix Qemu
|
||||
shell: bash
|
||||
run: |
|
||||
docker run --rm --privileged multiarch/qemu-user-static --reset -p yes -c yes
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
- name: Docker - Setup Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
# Docker Login
|
||||
- name: Login to DockerHub
|
||||
- name: Docker - Login to DockerHub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_PASSWORD }}
|
||||
|
||||
# Vars
|
||||
- name: Docker - Set image name
|
||||
shell: bash
|
||||
id: vars
|
||||
run: |
|
||||
TAG=${GITHUB_REF#refs/*/}
|
||||
if [[ $TAG = "master" || $TAG == v* ]]; then
|
||||
echo "plugins=${{ matrix.image.plugins }}" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "plugins=--repositories=https://s01.oss.sonatype.org/content/repositories/snapshots ${{ matrix.image.plugins }}" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
# Build Docker Image
|
||||
- name: Artifacts - Download executable
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: exe
|
||||
path: build/executable
|
||||
|
||||
- name: Docker - Copy exe to image
|
||||
shell: bash
|
||||
run: |
|
||||
cp build/executable/* docker/app/kestra && chmod +x docker/app/kestra
|
||||
|
||||
# Docker Build and push
|
||||
- name: Push to Docker Hub
|
||||
- name: Docker - Build image
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
tags: ${{ format('kestra/kestra:{0}{1}', steps.vars.outputs.tag, matrix.image.name) }}
|
||||
tags: kestra/kestra:${{ matrix.image.tag }}
|
||||
platforms: linux/amd64,linux/arm64
|
||||
build-args: |
|
||||
KESTRA_PLUGINS=${{ steps.vars.outputs.plugins }}
|
||||
APT_PACKAGES=${{ matrix.image.packages }}
|
||||
PYTHON_LIBRARIES=${{ matrix.image.python-libs }}
|
||||
|
||||
- name: Install regctl
|
||||
if: startsWith(github.ref, 'refs/tags/v')
|
||||
uses: regclient/actions/regctl-installer@main
|
||||
|
||||
- name: Retag to minor semver version
|
||||
if: startsWith(github.ref, 'refs/tags/v') && steps.vars.outputs.minor_semver != ''
|
||||
run: |
|
||||
regctl image copy ${{ format('kestra/kestra:{0}{1}', steps.vars.outputs.tag, matrix.image.name) }} ${{ format('kestra/kestra:{0}{1}', steps.vars.outputs.minor_semver, matrix.image.name) }}
|
||||
|
||||
- name: Retag to latest
|
||||
if: startsWith(github.ref, 'refs/tags/v') && inputs.retag-latest == 'true'
|
||||
run: |
|
||||
regctl image copy ${{ format('kestra/kestra:{0}{1}', steps.vars.outputs.tag, matrix.image.name) }} ${{ format('kestra/kestra:latest{0}', matrix.image.name) }}
|
||||
|
||||
end:
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- docker
|
||||
if: always()
|
||||
env:
|
||||
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
|
||||
steps:
|
||||
|
||||
# Slack
|
||||
- name: Slack notification
|
||||
uses: Gamesight/slack-workflow-status@master
|
||||
if: ${{ always() && env.SLACK_WEBHOOK_URL != 0 }}
|
||||
with:
|
||||
repo_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
slack_webhook_url: ${{ secrets.SLACK_WEBHOOK_URL }}
|
||||
name: GitHub Actions
|
||||
icon_emoji: ':github-actions:'
|
||||
channel: 'C02DQ1A7JLR' # _int_git channel
|
||||
PYTHON_LIBRARIES=${{ matrix.image.python-libraries }}
|
||||
|
||||
.github/workflows/workflow-publish-maven.yml (8 changed lines, vendored)
@@ -25,7 +25,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout - Current ref
uses: actions/checkout@v5
uses: actions/checkout@v4

# Setup build
- name: Setup - Build
@@ -39,8 +39,8 @@ jobs:
- name: Publish - Release package to Maven Central
shell: bash
env:
ORG_GRADLE_PROJECT_mavenCentralUsername: ${{ secrets.SONATYPE_USER }}
ORG_GRADLE_PROJECT_mavenCentralPassword: ${{ secrets.SONATYPE_PASSWORD }}
ORG_GRADLE_PROJECT_sonatypeUsername: ${{ secrets.SONATYPE_USER }}
ORG_GRADLE_PROJECT_sonatypePassword: ${{ secrets.SONATYPE_PASSWORD }}
SONATYPE_GPG_KEYID: ${{ secrets.SONATYPE_GPG_KEYID }}
SONATYPE_GPG_PASSWORD: ${{ secrets.SONATYPE_GPG_PASSWORD }}
SONATYPE_GPG_FILE: ${{ secrets.SONATYPE_GPG_FILE}}
@@ -50,7 +50,7 @@ jobs:
echo "signing.password=${SONATYPE_GPG_PASSWORD}" >> ~/.gradle/gradle.properties
echo "signing.secretKeyRingFile=${HOME}/.gradle/secring.gpg" >> ~/.gradle/gradle.properties
echo ${SONATYPE_GPG_FILE} | base64 -d > ~/.gradle/secring.gpg
./gradlew publishToMavenCentral
./gradlew publishToSonatype ${{ startsWith(github.ref, 'refs/tags/v') && 'closeAndReleaseSonatypeStagingRepository' || '' }}

# Gradle dependency
- name: Java - Gradle dependency graph
@@ -1,16 +0,0 @@
name: Pull Request - Delete Docker

on:
pull_request:
types: [closed]

jobs:
publish:
name: Pull Request - Delete Docker
if: github.repository == github.event.pull_request.head.repo.full_name # prevent running on forks
runs-on: ubuntu-latest
steps:
- uses: dataaxiom/ghcr-cleanup-action@v1
with:
package: kestra-pr
delete-tags: ${{ github.event.pull_request.number }}
@@ -1,78 +0,0 @@
name: Pull Request - Publish Docker

on:
pull_request:
branches:
- develop

jobs:
build-artifacts:
name: Build Artifacts
if: github.repository == github.event.pull_request.head.repo.full_name # prevent running on forks
uses: ./.github/workflows/workflow-build-artifacts.yml

publish:
name: Publish Docker
if: github.repository == github.event.pull_request.head.repo.full_name # prevent running on forks
runs-on: ubuntu-latest
needs: build-artifacts
env:
GITHUB_IMAGE_PATH: "ghcr.io/kestra-io/kestra-pr"
steps:
- name: Checkout - Current ref
uses: actions/checkout@v5
with:
fetch-depth: 0

# Docker setup
- name: Docker - Setup QEMU
uses: docker/setup-qemu-action@v3

- name: Docker - Setup Docker Buildx
uses: docker/setup-buildx-action@v3

# Docker Login
- name: Login to GHCR
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}

# Build Docker Image
- name: Artifacts - Download executable
uses: actions/download-artifact@v5
with:
name: exe
path: build/executable

- name: Docker - Copy exe to image
shell: bash
run: |
cp build/executable/* docker/app/kestra && chmod +x docker/app/kestra

- name: Docker - Build image
uses: docker/build-push-action@v6
with:
context: .
file: ./Dockerfile.pr
push: true
tags: ${{ env.GITHUB_IMAGE_PATH }}:${{ github.event.pull_request.number }}
platforms: linux/amd64,linux/arm64

# Add comment on pull request
- name: Add comment to PR
uses: actions/github-script@v7
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
await github.rest.issues.createComment({
issue_number: context.issue.number,
owner: context.repo.owner,
repo: context.repo.repo,
body: `**🐋 Docker image**: \`${{ env.GITHUB_IMAGE_PATH }}:${{ github.event.pull_request.number }}\`\n` +
`\n` +
`\`\`\`bash\n` +
`docker run --pull=always --rm -it -p 8080:8080 --user=root -v /var/run/docker.sock:/var/run/docker.sock -v /tmp:/tmp ${{ env.GITHUB_IMAGE_PATH }}:${{ github.event.pull_request.number }} server local\n` +
`\`\`\``
})
.github/workflows/workflow-release.yml (37 changed lines, vendored)
@@ -1,22 +1,10 @@
name: Release

on:
workflow_dispatch:
inputs:
plugin-version:
description: "plugins version"
default: 'LATEST'
required: false
type: string
publish-docker:
description: "Publish Docker image"
default: 'false'
required: false
type: string
workflow_call:
inputs:
plugin-version:
description: "plugins version"
description: "Kestra version"
default: 'LATEST'
required: false
type: string
@@ -42,28 +30,13 @@ on:
SONATYPE_GPG_FILE:
description: "The Sonatype GPG file."
required: true
GH_PERSONAL_TOKEN:
description: "GH personnal Token."
required: true
SLACK_RELEASES_WEBHOOK_URL:
description: "Slack webhook for releases channel."
required: true
jobs:
build-artifacts:
name: Build - Artifacts
uses: ./.github/workflows/workflow-build-artifacts.yml

Docker:
name: Publish Docker
needs: build-artifacts
uses: ./.github/workflows/workflow-publish-docker.yml
if: github.ref == 'refs/heads/develop' || inputs.publish-docker == 'true'
with:
force-download-artifact: 'false'
plugin-version: ${{ inputs.plugin-version != null && inputs.plugin-version || 'LATEST' }}
secrets:
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }}
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }}

Maven:
name: Publish Maven
@@ -77,9 +50,7 @@ jobs:

Github:
name: Github Release
needs: build-artifacts
if: startsWith(github.ref, 'refs/tags/v')
uses: ./.github/workflows/workflow-github-release.yml
secrets:
GH_PERSONAL_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}
SLACK_RELEASES_WEBHOOK_URL: ${{ secrets.SLACK_RELEASES_WEBHOOK_URL }}
GH_PERSONAL_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}
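The hunks above show that the release pipeline is a reusable workflow with `workflow_dispatch`/`workflow_call` triggers and `plugin-version` and `publish-docker` inputs. A minimal sketch of a manual trigger with the GitHub CLI; repository and branch are assumptions for illustration:

```bash
# Hedged sketch: dispatch the release workflow with the two inputs shown above.
gh workflow run workflow-release.yml \
  --repo kestra-io/kestra \
  --ref develop \
  -f plugin-version=LATEST \
  -f publish-docker=true
```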
.github/workflows/workflow-test.yml (25 changed lines, vendored)
@@ -19,31 +19,8 @@ on:
value: ${{ jobs.set-backend-status.outputs.backend_status }}

jobs:
file-changes:
name: File changes detection
runs-on: ubuntu-latest
timeout-minutes: 60
outputs:
ui: ${{ steps.changes.outputs.ui }}
backend: ${{ steps.changes.outputs.backend }}
steps:
- uses: actions/checkout@v5
if: "!startsWith(github.ref, 'refs/tags/v')"
- uses: dorny/paths-filter@v3
if: "!startsWith(github.ref, 'refs/tags/v')"
id: changes
with:
filters: |
ui:
- 'ui/**'
backend:
- '!{ui,.github}/**'
token: ${{ secrets.GITHUB_TOKEN }}

frontend:
name: Frontend - Tests
needs: file-changes
if: "needs.file-changes.outputs.ui == 'true' || startsWith(github.ref, 'refs/tags/v')"
uses: ./.github/workflows/workflow-frontend-test.yml
secrets:
GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -52,8 +29,6 @@ jobs:

backend:
name: Backend - Tests
needs: file-changes
if: "needs.file-changes.outputs.backend == 'true' || startsWith(github.ref, 'refs/tags/v')"
uses: ./.github/workflows/workflow-backend-test.yml
secrets:
GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
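The removed `dorny/paths-filter` step gated the frontend and backend test jobs on which paths changed: `ui/**` selects UI changes, while `!{ui,.github}/**` matches anything outside `ui/` and `.github/`. A rough local approximation of that decision, assuming you compare against the develop branch:

```bash
# Hedged sketch: approximate the ui/backend path filters locally with git.
changed=$(git diff --name-only origin/develop...HEAD)
if echo "$changed" | grep -q '^ui/'; then echo "ui: true"; else echo "ui: false"; fi
if echo "$changed" | grep -v -e '^ui/' -e '^\.github/' | grep -q .; then
  echo "backend: true"
else
  echo "backend: false"
fi
```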
.gitignore (3 changed lines, vendored)
@@ -37,7 +37,6 @@ ui/coverage
ui/stats.html
ui/.frontend-gradle-plugin
ui/utils/CHANGELOG.md
ui/test-report.junit.xml

### Docker
/.env
@@ -58,5 +57,3 @@ core/src/main/resources/gradle.properties
**/allure-results/*

*storybook.log
storybook-static
/jmh-benchmarks/src/main/resources/gradle.properties
.plugins (21 changed lines)
@@ -3,12 +3,10 @@
|
||||
# Format: <RepositoryName>:<GroupId>:<ArtifactId>:<Version>
|
||||
#
|
||||
# Uncomment the lines corresponding to the plugins to be installed:
|
||||
#plugin-ai:io.kestra.plugin:plugin-ai:LATEST
|
||||
#plugin-airbyte:io.kestra.plugin:plugin-airbyte:LATEST
|
||||
#plugin-airflow:io.kestra.plugin:plugin-airflow:LATEST
|
||||
#plugin-amqp:io.kestra.plugin:plugin-amqp:LATEST
|
||||
#plugin-ansible:io.kestra.plugin:plugin-ansible:LATEST
|
||||
#plugin-anthropic:io.kestra.plugin:plugin-anthropic:LATEST
|
||||
#plugin-aws:io.kestra.plugin:plugin-aws:LATEST
|
||||
#plugin-azure:io.kestra.plugin:plugin-azure:LATEST
|
||||
#plugin-cassandra:io.kestra.plugin:plugin-cassandra:LATEST
|
||||
@@ -19,7 +17,6 @@
|
||||
#plugin-databricks:io.kestra.plugin:plugin-databricks:LATEST
|
||||
#plugin-datahub:io.kestra.plugin:plugin-datahub:LATEST
|
||||
#plugin-dataform:io.kestra.plugin:plugin-dataform:LATEST
|
||||
#plugin-datagen:io.kestra.plugin:plugin-datagen:LATEST
|
||||
#plugin-dbt:io.kestra.plugin:plugin-dbt:LATEST
|
||||
#plugin-debezium:io.kestra.plugin:plugin-debezium-db2:LATEST
|
||||
#plugin-debezium:io.kestra.plugin:plugin-debezium-mongodb:LATEST
|
||||
@@ -27,22 +24,17 @@
|
||||
#plugin-debezium:io.kestra.plugin:plugin-debezium-oracle:LATEST
|
||||
#plugin-debezium:io.kestra.plugin:plugin-debezium-postgres:LATEST
|
||||
#plugin-debezium:io.kestra.plugin:plugin-debezium-sqlserver:LATEST
|
||||
#plugin-deepseek:io.kestra.plugin:plugin-deepseek:LATEST
|
||||
#plugin-docker:io.kestra.plugin:plugin-docker:LATEST
|
||||
#plugin-elasticsearch:io.kestra.plugin:plugin-elasticsearch:LATEST
|
||||
#plugin-fivetran:io.kestra.plugin:plugin-fivetran:LATEST
|
||||
#plugin-fs:io.kestra.plugin:plugin-fs:LATEST
|
||||
#plugin-gcp:io.kestra.plugin:plugin-gcp:LATEST
|
||||
#plugin-gemini:io.kestra.plugin:plugin-gemini:LATEST
|
||||
#plugin-git:io.kestra.plugin:plugin-git:LATEST
|
||||
#plugin-github:io.kestra.plugin:plugin-github:LATEST
|
||||
#plugin-googleworkspace:io.kestra.plugin:plugin-googleworkspace:LATEST
|
||||
#plugin-graalvm:io.kestra.plugin:plugin-graalvm:LATEST
|
||||
#plugin-graphql:io.kestra.plugin:plugin-graphql:LATEST
|
||||
#plugin-hightouch:io.kestra.plugin:plugin-hightouch:LATEST
|
||||
#plugin-hubspot:io.kestra.plugin:plugin-hubspot:LATEST
|
||||
#plugin-huggingface:io.kestra.plugin:plugin-huggingface:LATEST
|
||||
#plugin-influxdb:io.kestra.plugin:plugin-influxdb:LATEST
|
||||
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-as400:LATEST
|
||||
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-clickhouse:LATEST
|
||||
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-db2:LATEST
|
||||
@@ -63,43 +55,31 @@
|
||||
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-arrow-flight:LATEST
|
||||
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-sqlite:LATEST
|
||||
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-sybase:LATEST
|
||||
#plugin-jenkins:io.kestra.plugin:plugin-jenkins:LATEST
|
||||
#plugin-jira:io.kestra.plugin:plugin-jira:LATEST
|
||||
#plugin-kafka:io.kestra.plugin:plugin-kafka:LATEST
|
||||
#plugin-kestra:io.kestra.plugin:plugin-kestra:LATEST
|
||||
#plugin-kubernetes:io.kestra.plugin:plugin-kubernetes:LATEST
|
||||
#plugin-ldap:io.kestra.plugin:plugin-ldap:LATEST
|
||||
#plugin-linear:io.kestra.plugin:plugin-linear:LATEST
|
||||
#plugin-malloy:io.kestra.plugin:plugin-malloy:LATEST
|
||||
#plugin-meilisearch:io.kestra.plugin:plugin-meilisearch:LATEST
|
||||
#plugin-minio:io.kestra.plugin:plugin-minio:LATEST
|
||||
#plugin-mistral:io.kestra.plugin:plugin-mistral:LATEST
|
||||
#plugin-modal:io.kestra.plugin:plugin-modal:LATEST
|
||||
#plugin-mongodb:io.kestra.plugin:plugin-mongodb:LATEST
|
||||
#plugin-mqtt:io.kestra.plugin:plugin-mqtt:LATEST
|
||||
#plugin-nats:io.kestra.plugin:plugin-nats:LATEST
|
||||
#plugin-neo4j:io.kestra.plugin:plugin-neo4j:LATEST
|
||||
#plugin-notifications:io.kestra.plugin:plugin-notifications:LATEST
|
||||
#plugin-notion:io.kestra.plugin:plugin-notion:LATEST
|
||||
#plugin-ollama:io.kestra.plugin:plugin-ollama:LATEST
|
||||
#plugin-openai:io.kestra.plugin:plugin-openai:LATEST
|
||||
#plugin-opensearch:io.kestra.plugin:plugin-opensearch:LATEST
|
||||
#plugin-perplexity:io.kestra.plugin:plugin-perplexity:LATEST
|
||||
#plugin-powerbi:io.kestra.plugin:plugin-powerbi:LATEST
|
||||
#plugin-pulsar:io.kestra.plugin:plugin-pulsar:LATEST
|
||||
#plugin-redis:io.kestra.plugin:plugin-redis:LATEST
|
||||
#plugin-scripts:io.kestra.plugin:plugin-script-bun:LATEST
|
||||
#plugin-scripts:io.kestra.plugin:plugin-script-deno:LATEST
|
||||
#plugin-scripts:io.kestra.plugin:plugin-script-go:LATEST
|
||||
#plugin-scripts:io.kestra.plugin:plugin-script-groovy:LATEST
|
||||
#plugin-scripts:io.kestra.plugin:plugin-script-jbang:LATEST
|
||||
#plugin-scripts:io.kestra.plugin:plugin-script-julia:LATEST
|
||||
#plugin-scripts:io.kestra.plugin:plugin-script-jython:LATEST
|
||||
#plugin-scripts:io.kestra.plugin:plugin-script-lua:LATEST
|
||||
#plugin-scripts:io.kestra.plugin:plugin-script-nashorn:LATEST
|
||||
#plugin-scripts:io.kestra.plugin:plugin-script-node:LATEST
|
||||
#plugin-scripts:io.kestra.plugin:plugin-script-perl:LATEST
|
||||
#plugin-scripts:io.kestra.plugin:plugin-script-php:LATEST
|
||||
#plugin-scripts:io.kestra.plugin:plugin-script-powershell:LATEST
|
||||
#plugin-scripts:io.kestra.plugin:plugin-script-python:LATEST
|
||||
#plugin-scripts:io.kestra.plugin:plugin-script-r:LATEST
|
||||
@@ -107,7 +87,6 @@
|
||||
#plugin-scripts:io.kestra.plugin:plugin-script-shell:LATEST
|
||||
#plugin-serdes:io.kestra.plugin:plugin-serdes:LATEST
|
||||
#plugin-servicenow:io.kestra.plugin:plugin-servicenow:LATEST
|
||||
#plugin-sifflet:io.kestra.plugin:plugin-sifflet:LATEST
|
||||
#plugin-singer:io.kestra.plugin:plugin-singer:LATEST
|
||||
#plugin-soda:io.kestra.plugin:plugin-soda:LATEST
|
||||
#plugin-solace:io.kestra.plugin:plugin-solace:LATEST
|
||||
|
||||
AGENTS.md (305 changed lines)
@@ -1,305 +0,0 @@
|
||||
# Kestra AGENTS.md
|
||||
|
||||
This file provides guidance for AI coding agents working on the Kestra project. Kestra is an open-source data orchestration and scheduling platform built with Java (Micronaut) and Vue.js.
|
||||
|
||||
## Repository Layout
|
||||
|
||||
- **`core/`**: Core Kestra framework and task definitions
|
||||
- **`cli/`**: Command-line interface and server implementation
|
||||
- **`webserver/`**: REST API server implementation
|
||||
- **`ui/`**: Vue.js frontend application
|
||||
- **`jdbc-*`**: Database connector modules (H2, MySQL, PostgreSQL)
|
||||
- **`script/`**: Script execution engine
|
||||
- **`storage-local/`**: Local file storage implementation
|
||||
- **`repository-memory/`**: In-memory repository implementation
|
||||
- **`runner-memory/`**: In-memory execution runner
|
||||
- **`processor/`**: Task processing engine
|
||||
- **`model/`**: Data models and Data Transfer Objects
|
||||
- **`platform/`**: Platform-specific implementations
|
||||
- **`tests/`**: Integration test framework
|
||||
- **`e2e-tests/`**: End-to-end testing suite
|
||||
|
||||
## Development Environment
|
||||
|
||||
### Prerequisites
|
||||
|
||||
- Java 21+
|
||||
- Node.js 22+ and npm
|
||||
- Python 3, pip, and python venv
|
||||
- Docker & Docker Compose
|
||||
- Gradle (wrapper included)
|
||||
|
||||
### Quick Setup with Devcontainer
|
||||
|
||||
The easiest way to get started is using the provided devcontainer:
|
||||
|
||||
1. Install VSCode Remote Development extension
|
||||
2. Run `Dev Containers: Open Folder in Container...` from command palette
|
||||
3. Select the Kestra root folder
|
||||
4. Wait for Gradle build to complete
|
||||
|
||||
### Manual Setup
|
||||
|
||||
1. Clone the repository
|
||||
2. Run `./gradlew build` to build the backend
|
||||
3. Navigate to `ui/` and run `npm install`
|
||||
4. Create configuration files as described below
|
||||
|
||||
## Configuration Files
|
||||
|
||||
### Backend Configuration
|
||||
|
||||
Create `cli/src/main/resources/application-override.yml`:
|
||||
|
||||
**Local Mode (H2 database):**
|
||||
|
||||
```yaml
|
||||
micronaut:
|
||||
server:
|
||||
cors:
|
||||
enabled: true
|
||||
configurations:
|
||||
all:
|
||||
allowedOrigins:
|
||||
- http://localhost:5173
|
||||
```
|
||||
|
||||
**Standalone Mode (PostgreSQL):**
|
||||
|
||||
```yaml
|
||||
kestra:
|
||||
repository:
|
||||
type: postgres
|
||||
storage:
|
||||
type: local
|
||||
local:
|
||||
base-path: "/app/storage"
|
||||
queue:
|
||||
type: postgres
|
||||
tasks:
|
||||
tmp-dir:
|
||||
path: /tmp/kestra-wd/tmp
|
||||
anonymous-usage-report:
|
||||
enabled: false
|
||||
|
||||
datasources:
|
||||
postgres:
|
||||
url: jdbc:postgresql://host.docker.internal:5432/kestra
|
||||
driverClassName: org.postgresql.Driver
|
||||
username: kestra
|
||||
password: k3str4
|
||||
|
||||
flyway:
|
||||
datasources:
|
||||
postgres:
|
||||
enabled: true
|
||||
locations:
|
||||
- classpath:migrations/postgres
|
||||
ignore-migration-patterns: "*:missing,*:future"
|
||||
out-of-order: true
|
||||
|
||||
micronaut:
|
||||
server:
|
||||
cors:
|
||||
enabled: true
|
||||
configurations:
|
||||
all:
|
||||
allowedOrigins:
|
||||
- http://localhost:5173
|
||||
```
|
||||
|
||||
### Frontend Configuration
|
||||
|
||||
Create `ui/.env.development.local` for environment variables.
|
||||
|
||||
## Running the Application
|
||||
|
||||
### Backend
|
||||
|
||||
- **Local mode**: `./gradlew runLocal` (uses H2 database)
|
||||
- **Standalone mode**: Use VSCode Run and Debug with main class `io.kestra.cli.App` and args `server standalone`
|
||||
|
||||
### Frontend
|
||||
|
||||
- Navigate to `ui/` directory
|
||||
- Run `npm run dev` for development server (port 5173)
|
||||
- Run `npm run build` for production build
|
||||
|
||||
## Building and Testing
|
||||
|
||||
### Backend
|
||||
|
||||
```bash
|
||||
# Build the project
|
||||
./gradlew build
|
||||
|
||||
# Run tests
|
||||
./gradlew test
|
||||
|
||||
# Run specific module tests
|
||||
./gradlew :core:test
|
||||
|
||||
# Clean build
|
||||
./gradlew clean build
|
||||
```
|
||||
|
||||
### Frontend
|
||||
|
||||
```bash
|
||||
cd ui
|
||||
npm install
|
||||
npm run test
|
||||
npm run lint
|
||||
npm run build
|
||||
```
|
||||
|
||||
### End-to-End Tests
|
||||
|
||||
```bash
|
||||
# Build and start E2E tests
|
||||
./build-and-start-e2e-tests.sh
|
||||
|
||||
# Or use the Makefile
|
||||
make install
|
||||
make install-plugins
|
||||
make start-standalone-postgres
|
||||
```
|
||||
|
||||
## Development Guidelines
|
||||
|
||||
### Java Backend
|
||||
|
||||
- Use Java 21 features
|
||||
- Follow Micronaut framework patterns
|
||||
- Add Swagger annotations for API documentation
|
||||
- Use annotation processors (enable in IDE)
|
||||
- Set `MICRONAUT_ENVIRONMENTS=local,override` for custom config
|
||||
- Set `KESTRA_PLUGINS_PATH` for custom plugin loading
|
||||
|
||||
### Vue.js Frontend
|
||||
|
||||
- Vue 3 with Composition API
|
||||
- TypeScript for type safety
|
||||
- Vite for build tooling
|
||||
- ESLint and Prettier for code quality
|
||||
- Component-based architecture in `src/components/`
|
||||
|
||||
### Code Style
|
||||
|
||||
- Follow `.editorconfig` settings
|
||||
- Use 4 spaces for Java, 2 spaces for YAML/JSON/CSS
|
||||
- Enable format on save in VSCode
|
||||
- Use Prettier for frontend code formatting
|
||||
|
||||
## Testing Strategy
|
||||
|
||||
### Backend Testing
|
||||
|
||||
- Unit tests in `src/test/java/`
|
||||
- Integration tests in `tests/` module
|
||||
- Use Micronaut test framework
|
||||
- Test both local and standalone modes
|
||||
|
||||
### Frontend Testing
|
||||
- Unit tests with Jest
|
||||
- E2E tests with Playwright
|
||||
- Component testing with Storybook
|
||||
- Run `npm run test:unit` and `npm run test:e2e`
|
||||
|
||||
## Plugin Development
|
||||
|
||||
### Creating Plugins
|
||||
|
||||
- Follow the [Plugin Developer Guide](https://kestra.io/docs/plugin-developer-guide/)
|
||||
- Place JAR files in `KESTRA_PLUGINS_PATH`
|
||||
- Use the plugin template structure
|
||||
- Test with both local and standalone modes
|
||||
|
||||
### Plugin Loading
|
||||
|
||||
- Set `KESTRA_PLUGINS_PATH` environment variable
|
||||
- Use devcontainer mounts for local development
|
||||
- Plugins are loaded at startup
|
||||
|
||||
## Common Issues and Solutions
|
||||
|
||||
### JavaScript Heap Out of Memory
|
||||
|
||||
Set `NODE_OPTIONS=--max-old-space-size=4096` environment variable.
|
||||
|
||||
### CORS Issues
|
||||
|
||||
Ensure backend CORS is configured for `http://localhost:5173` when using frontend dev server.
|
||||
|
||||
### Database Connection Issues
|
||||
|
||||
- Use `host.docker.internal` instead of `localhost` when connecting from devcontainer
|
||||
- Verify PostgreSQL is running and accessible
|
||||
- Check database credentials and permissions
|
||||
|
||||
### Gradle Build Issues
|
||||
|
||||
- Clear Gradle cache: `./gradlew clean`
|
||||
- Check Java version compatibility
|
||||
- Verify all dependencies are available
|
||||
|
||||
## Pull Request Guidelines
|
||||
|
||||
### Before Submitting
|
||||
|
||||
1. Run all tests: `./gradlew test` and `npm test`
|
||||
2. Check code formatting: `./gradlew spotlessCheck`
|
||||
3. Verify CORS configuration if changing API
|
||||
4. Test both local and standalone modes
|
||||
5. Update documentation for user-facing changes
|
||||
|
||||
### Commit Messages
|
||||
|
||||
- Follow conventional commit format
|
||||
- Use present tense ("Add feature" not "Added feature")
|
||||
- Reference issue numbers when applicable
|
||||
- Keep commits focused and atomic
|
||||
|
||||
### Review Checklist
|
||||
|
||||
- [ ] All tests pass
|
||||
- [ ] Code follows project style guidelines
|
||||
- [ ] Documentation is updated
|
||||
- [ ] No breaking changes without migration guide
|
||||
- [ ] CORS properly configured if API changes
|
||||
- [ ] Both local and standalone modes tested
|
||||
|
||||
## Useful Commands
|
||||
|
||||
```bash
|
||||
# Quick development commands
|
||||
./gradlew runLocal # Start local backend
|
||||
./gradlew :ui:build # Build frontend
|
||||
./gradlew clean build # Clean rebuild
|
||||
npm run dev # Start frontend dev server
|
||||
make install # Install Kestra locally
|
||||
make start-standalone-postgres # Start with PostgreSQL
|
||||
|
||||
# Testing commands
|
||||
./gradlew test # Run all backend tests
|
||||
./gradlew :core:test # Run specific module tests
|
||||
npm run test # Run frontend tests
|
||||
npm run lint # Lint frontend code
|
||||
```
|
||||
|
||||
## Getting Help
|
||||
|
||||
- Open a [GitHub issue](https://github.com/kestra-io/kestra/issues)
|
||||
- Join the [Kestra Slack community](https://kestra.io/slack)
|
||||
- Check the [main documentation](https://kestra.io/docs)
|
||||
|
||||
## Environment Variables
|
||||
|
||||
| Variable | Description | Default |
|
||||
|----------|-------------|---------|
|
||||
| `MICRONAUT_ENVIRONMENTS` | Custom config environments | `local,override` |
|
||||
| `KESTRA_PLUGINS_PATH` | Path to custom plugins | `/workspaces/kestra/local/plugins` |
|
||||
| `NODE_OPTIONS` | Node.js options | `--max-old-space-size=4096` |
|
||||
| `JAVA_HOME` | Java installation path | `/usr/java/jdk-21` |
|
||||
|
||||
Remember: Always test your changes in both local and standalone modes, and ensure CORS is properly configured for frontend development.
|
||||
@@ -16,9 +16,8 @@ RUN apt-get update -y && \
if [ -n "${APT_PACKAGES}" ]; then apt-get install -y --no-install-recommends ${APT_PACKAGES}; fi && \
apt-get clean && \
rm -rf /var/lib/apt/lists/* /var/tmp/* /tmp/* && \
curl -LsSf https://astral.sh/uv/0.6.17/install.sh | sh && mv /root/.local/bin/uv /bin && mv /root/.local/bin/uvx /bin && \
if [ -n "${KESTRA_PLUGINS}" ]; then /app/kestra plugins install ${KESTRA_PLUGINS} && rm -rf /tmp/*; fi && \
if [ -n "${PYTHON_LIBRARIES}" ]; then uv pip install --system ${PYTHON_LIBRARIES}; fi && \
if [ -n "${PYTHON_LIBRARIES}" ]; then pip install ${PYTHON_LIBRARIES}; fi && \
chown -R kestra:kestra /app

USER kestra

@@ -1,7 +0,0 @@
FROM kestra/kestra:develop

USER root

COPY --chown=kestra:kestra docker /

USER kestra
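The Dockerfile hunk above installs optional APT packages, Kestra plugins, and Python libraries from build arguments, switching from plain `pip` to `uv pip install --system`. A minimal local build sketch with those build args; the image tag and plugin coordinate below are illustrative assumptions, not values from this diff:

```bash
# Hedged sketch: build a local image with the same build arguments the CI passes.
docker build \
  --build-arg "APT_PACKAGES=python3 python3-venv python-is-python3 python3-pip curl" \
  --build-arg "PYTHON_LIBRARIES=kestra" \
  --build-arg "KESTRA_PLUGINS=io.kestra.plugin:plugin-notifications:LATEST" \
  -t kestra/kestra:local-test .
```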
Makefile (27 changed lines)
@@ -77,7 +77,7 @@ install-plugins:
else \
${KESTRA_BASEDIR}/bin/kestra plugins install $$CURRENT_PLUGIN \
--plugins ${KESTRA_BASEDIR}/plugins \
--repositories=https://central.sonatype.com/repository/maven-snapshots || exit 1; \
--repositories=https://s01.oss.sonatype.org/content/repositories/snapshots || exit 1; \
fi \
done < $$PLUGIN_LIST

@@ -89,7 +89,7 @@ build-docker: build-exec
--compress \
--rm \
-f ./Dockerfile \
--build-arg="APT_PACKAGES=python3 python3-venv python-is-python3 python3-pip nodejs npm curl zip unzip jattach" \
--build-arg="APT_PACKAGES=python3 python3-venv python-is-python3 python3-pip nodejs npm curl zip unzip" \
--build-arg="PYTHON_LIBRARIES=kestra" \
-t ${DOCKER_IMAGE}:${VERSION} ${DOCKER_PATH} || exit 1 ;

@@ -130,6 +130,9 @@ datasources:
username: kestra
password: k3str4
kestra:
server:
basic-auth:
enabled: false
encryption:
secret-key: 3ywuDa/Ec61VHkOX3RlI9gYq7CaD0mv0Pf3DHtAXA6U=
repository:
@@ -178,8 +181,8 @@ clone-plugins:
@echo "Using PLUGIN_GIT_DIR: $(PLUGIN_GIT_DIR)"
@mkdir -p "$(PLUGIN_GIT_DIR)"
@echo "Fetching repository list from GitHub..."
@REPOS=$$(gh repo list kestra-io -L 1000 --json name | jq -r .[].name | sort | grep "^plugin-"); \
for repo in $$REPOS; do \
@REPOS=$(gh repo list kestra-io -L 1000 --json name | jq -r .[].name | sort | grep "^plugin-") \
for repo in $$REPOS; do \
if [[ $$repo == plugin-* ]]; then \
if [ -d "$(PLUGIN_GIT_DIR)/$$repo" ]; then \
echo "Skipping: $$repo (Already cloned)"; \
@@ -191,22 +194,6 @@ clone-plugins:
done
@echo "Done!"

# Pull every plugins in main or master branch
pull-plugins:
@echo "🔍 Pulling repositories in '$(PLUGIN_GIT_DIR)'..."
@for repo in "$(PLUGIN_GIT_DIR)"/*; do \
if [ -d "$$repo/.git" ]; then \
branch=$$(git -C "$$repo" rev-parse --abbrev-ref HEAD); \
if [[ "$$branch" == "master" || "$$branch" == "main" ]]; then \
echo "🔄 Pulling: $$(basename "$$repo") (branch: $$branch)"; \
git -C "$$repo" pull; \
else \
echo "❌ Skipping: $$(basename "$$repo") (Not on master or main branch, currently on $$branch)"; \
fi; \
fi; \
done
@echo "✅ Done pulling!"

# Update all plugins jar
build-plugins:
@echo "🔍 Scanning repositories in '$(PLUGIN_GIT_DIR)'..."
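One detail worth noting in the `clone-plugins` hunk: inside a Make recipe, `$$(...)` is required so the command substitution is run by the shell rather than expanded by Make, and the assignment has to stay on the same logical shell line as the loop that uses it, hence the trailing `;` and `\`. As a sketch, the shell command that the corrected recipe line effectively executes (after Make turns `$$` into `$`) looks roughly like this:

```bash
# Hedged sketch: what the recipe runs once Make has expanded $$ to $.
REPOS=$(gh repo list kestra-io -L 1000 --json name | jq -r '.[].name' | sort | grep '^plugin-')
for repo in $REPOS; do
  echo "found: $repo"   # the real recipe clones each repository here
done
```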
README.md (13 changed lines)
@@ -24,13 +24,6 @@
<a href="https://www.youtube.com/@kestra-io"><img height="25" src="https://kestra.io/youtube.svg" alt="youtube" /></a>
</p>

<p align="center">
<a href="https://trendshift.io/repositories/2714" target="_blank">
<img src="https://trendshift.io/api/badge/repositories/2714" alt="kestra-io%2Fkestra | Trendshift" width="250" height="55"/>
</a>
<a href="https://www.producthunt.com/posts/kestra?embed=true&utm_source=badge-top-post-badge&utm_medium=badge&utm_souce=badge-kestra" target="_blank"><img src="https://api.producthunt.com/widgets/embed-image/v1/top-post-badge.svg?post_id=612077&theme=light&period=daily&t=1740737506162" alt="Kestra - All-in-one automation & orchestration platform | Product Hunt" style="width: 250px; height: 54px;" width="250" height="54" /></a>
</p>

<p align="center">
<a href="https://go.kestra.io/video/product-overview" target="_blank">
<img src="https://kestra.io/startvideo.png" alt="Get started in 4 minutes with Kestra" width="640px" />
@@ -54,7 +47,7 @@ Kestra is an open-source, event-driven orchestration platform that makes both **
- **Structure & Resilience**: tame chaos and bring resilience to your workflows with **namespaces**, **labels**, **subflows**, **retries**, **timeout**, **error handling**, **inputs**, **outputs** that generate artifacts in the UI, **variables**, **conditional branching**, **advanced scheduling**, **event triggers**, **backfills**, **dynamic tasks**, **sequential and parallel tasks**, and skip tasks or triggers when needed by setting the flag `disabled` to `true`.

🧑‍💻 The YAML definition gets automatically adjusted any time you make changes to a workflow from the UI or via an API call. Therefore, the orchestration logic is **always managed declaratively in code**, even if you modify your workflows in other ways (UI, CI/CD, Terraform, API calls).
🧑‍💻 The YAML definition gets automatically adjusted any time you make changes to a workflow from the UI or via an API call. Therefore, the orchestration logic is **always managed declaratively in code**, even if you modify your workflows in other ways (UI, CI/CD, Terraform, API calls).

<p align="center">
@@ -65,6 +58,10 @@ Kestra is an open-source, event-driven orchestration platform that makes both **

## 🚀 Quick Start

### Try the Live Demo

Try Kestra with our [**Live Demo**](https://demo.kestra.io/ui/login?auto). No installation required!

### Get Started Locally in 5 Minutes

#### Launch Kestra in Docker
@@ -1,47 +0,0 @@
#!/bin/bash
set -e

# E2E main script that can be run on a dev computer or in the CI
# it will build the backend of the current git repo and the frontend
# create a docker image out of it
# run tests on this image

LOCAL_IMAGE_VERSION="local-e2e-$(date +%s)"

echo "Running E2E"
echo "Start time: $(date '+%Y-%m-%d %H:%M:%S')"
start_time=$(date +%s)

echo ""
echo "Building the image for this current repository"
make clean
make build-docker VERSION=$LOCAL_IMAGE_VERSION

end_time=$(date +%s)
elapsed=$(( end_time - start_time ))

echo ""
echo "building elapsed time: ${elapsed} seconds"
echo ""
echo "Start time: $(date '+%Y-%m-%d %H:%M:%S')"
start_time2=$(date +%s)

echo "cd ./ui"
cd ./ui
echo "npm i"
npm i

echo 'sh ./run-e2e-tests.sh --kestra-docker-image-to-test "kestra/kestra:$LOCAL_IMAGE_VERSION"'
./run-e2e-tests.sh --kestra-docker-image-to-test "kestra/kestra:$LOCAL_IMAGE_VERSION"

end_time2=$(date +%s)
elapsed2=$(( end_time2 - start_time2 ))
echo ""
echo "Tests elapsed time: ${elapsed2} seconds"
echo ""
total_elapsed=$(( elapsed + elapsed2 ))
echo "Total elapsed time: ${total_elapsed} seconds"
echo ""

exit 0
build.gradle (337 changed lines)
@@ -16,12 +16,12 @@ plugins {
|
||||
id "java"
|
||||
id 'java-library'
|
||||
id "idea"
|
||||
id "com.gradleup.shadow" version "8.3.9"
|
||||
id "com.gradleup.shadow" version "8.3.6"
|
||||
id "application"
|
||||
|
||||
// test
|
||||
id "com.adarshr.test-logger" version "4.0.0"
|
||||
id "org.sonarqube" version "6.2.0.5505"
|
||||
id "org.sonarqube" version "6.0.1.5171"
|
||||
id 'jacoco-report-aggregation'
|
||||
|
||||
// helper
|
||||
@@ -31,13 +31,15 @@ plugins {
|
||||
id 'com.github.node-gradle.node' version '7.1.0'
|
||||
|
||||
// release
|
||||
id "io.github.gradle-nexus.publish-plugin" version "2.0.0"
|
||||
id 'net.researchgate.release' version '3.1.0'
|
||||
id "com.gorylenko.gradle-git-properties" version "2.5.2"
|
||||
id "com.gorylenko.gradle-git-properties" version "2.4.2"
|
||||
id 'signing'
|
||||
id "com.vanniktech.maven.publish" version "0.34.0"
|
||||
id 'ru.vyarus.pom' version '3.0.0' apply false
|
||||
id 'ru.vyarus.github-info' version '2.0.0' apply false
|
||||
|
||||
// OWASP dependency check
|
||||
id "org.owasp.dependencycheck" version "12.1.3" apply false
|
||||
id "org.owasp.dependencycheck" version "12.0.2" apply false
|
||||
}
|
||||
|
||||
idea {
|
||||
@@ -71,13 +73,8 @@ dependencies {
|
||||
* Dependencies
|
||||
**********************************************************************************************************************/
|
||||
allprojects {
|
||||
|
||||
tasks.withType(GenerateModuleMetadata).configureEach {
|
||||
suppressedValidationErrors.add('enforced-platform')
|
||||
}
|
||||
|
||||
if (it.name != 'platform') {
|
||||
group = "io.kestra"
|
||||
group "io.kestra"
|
||||
|
||||
java {
|
||||
sourceCompatibility = targetJavaVersion
|
||||
@@ -124,6 +121,7 @@ allprojects {
|
||||
micronaut "io.micronaut:micronaut-management"
|
||||
micronaut "io.micrometer:micrometer-core"
|
||||
micronaut "io.micronaut.micrometer:micronaut-micrometer-registry-prometheus"
|
||||
micronaut "io.micronaut.micrometer:micronaut-micrometer-registry-otlp"
|
||||
micronaut "io.micronaut:micronaut-http-client"
|
||||
micronaut "io.micronaut.reactor:micronaut-reactor-http-client"
|
||||
micronaut "io.micronaut.tracing:micronaut-tracing-opentelemetry-http"
|
||||
@@ -148,7 +146,6 @@ allprojects {
|
||||
implementation group: 'com.fasterxml.jackson.module', name: 'jackson-module-parameter-names'
|
||||
implementation group: 'com.fasterxml.jackson.datatype', name: 'jackson-datatype-guava'
|
||||
implementation group: 'com.fasterxml.jackson.datatype', name: 'jackson-datatype-jsr310'
|
||||
implementation group: 'com.fasterxml.uuid', name: 'java-uuid-generator'
|
||||
|
||||
// kestra
|
||||
implementation group: 'com.devskiller.friendly-id', name: 'friendly-id'
|
||||
@@ -169,7 +166,7 @@ allprojects {
|
||||
* Test
|
||||
**********************************************************************************************************************/
|
||||
subprojects {
|
||||
if (it.name != 'platform' && it.name != 'jmh-benchmarks') {
|
||||
if (it.name != 'platform') {
|
||||
apply plugin: "com.adarshr.test-logger"
|
||||
|
||||
java {
|
||||
@@ -200,9 +197,6 @@ subprojects {
|
||||
testImplementation 'org.hamcrest:hamcrest'
|
||||
testImplementation 'org.hamcrest:hamcrest-library'
|
||||
testImplementation 'org.exparity:hamcrest-date'
|
||||
|
||||
//assertj
|
||||
testImplementation 'org.assertj:assertj-core'
|
||||
}
|
||||
|
||||
test {
|
||||
@@ -220,19 +214,19 @@ subprojects {
|
||||
environment 'SECRET_WEBHOOK_KEY', "secretKey".bytes.encodeBase64().toString()
|
||||
environment 'SECRET_NON_B64_SECRET', "some secret value"
|
||||
environment 'SECRET_PASSWORD', "cGFzc3dvcmQ="
|
||||
environment 'ENV_TEST1', "true"
|
||||
environment 'ENV_TEST2', "Pass by env"
|
||||
environment 'KESTRA_TEST1', "true"
|
||||
environment 'KESTRA_TEST2', "Pass by env"
|
||||
}
|
||||
|
||||
testlogger {
|
||||
theme = 'mocha-parallel'
|
||||
showExceptions = true
|
||||
showFullStackTraces = true
|
||||
showCauses = true
|
||||
slowThreshold = 2000
|
||||
showStandardStreams = true
|
||||
showPassedStandardStreams = false
|
||||
showSkippedStandardStreams = true
|
||||
theme 'mocha-parallel'
|
||||
showExceptions true
|
||||
showFullStackTraces true
|
||||
showCauses true
|
||||
slowThreshold 2000
|
||||
showStandardStreams true
|
||||
showPassedStandardStreams false
|
||||
showSkippedStandardStreams true
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -272,7 +266,7 @@ subprojects {
|
||||
* Allure Reports
|
||||
**********************************************************************************************************************/
|
||||
subprojects {
|
||||
if (it.name != 'platform' && it.name != 'jmh-benchmarks') {
|
||||
if (it.name != 'platform') {
|
||||
dependencies {
|
||||
testImplementation platform("io.qameta.allure:allure-bom")
|
||||
testImplementation "io.qameta.allure:allure-junit5"
|
||||
@@ -286,7 +280,7 @@ subprojects {
|
||||
}
|
||||
|
||||
dependencies {
|
||||
agent "org.aspectj:aspectjweaver:1.9.24"
|
||||
agent "org.aspectj:aspectjweaver:1.9.22.1"
|
||||
}
|
||||
|
||||
test {
|
||||
@@ -299,7 +293,7 @@ subprojects {
|
||||
* Jacoco
|
||||
**********************************************************************************************************************/
|
||||
subprojects {
|
||||
if (it.name != 'platform' && it.name != 'jmh-benchmarks') {
|
||||
if (it.name != 'platform') {
|
||||
apply plugin: 'jacoco'
|
||||
|
||||
test {
|
||||
@@ -410,7 +404,7 @@ jar {
|
||||
shadowJar {
|
||||
archiveClassifier.set(null)
|
||||
mergeServiceFiles()
|
||||
zip64 = true
|
||||
zip64 true
|
||||
}
|
||||
|
||||
distZip.dependsOn shadowJar
|
||||
@@ -418,7 +412,6 @@ distTar.dependsOn shadowJar
|
||||
startScripts.dependsOn shadowJar
|
||||
startShadowScripts.dependsOn jar
|
||||
shadowJar.dependsOn 'ui:assembleFrontend'
|
||||
shadowJar.dependsOn jar
|
||||
|
||||
/**********************************************************************************************************************\
|
||||
* Executable Jar
|
||||
@@ -427,8 +420,8 @@ def executableDir = layout.buildDirectory.dir("executable")
|
||||
def executable = layout.buildDirectory.file("executable/${project.name}-${project.version}").get().asFile
|
||||
|
||||
tasks.register('writeExecutableJar') {
|
||||
group = "build"
|
||||
description = "Write an executable jar from shadow jar"
|
||||
group "build"
|
||||
description "Write an executable jar from shadow jar"
|
||||
dependsOn = [shadowJar]
|
||||
|
||||
final shadowJarFile = tasks.shadowJar.outputs.files.singleFile
|
||||
@@ -454,8 +447,8 @@ tasks.register('writeExecutableJar') {
|
||||
}
|
||||
|
||||
tasks.register('executableJar', Zip) {
|
||||
group = "build"
|
||||
description = "Zip the executable jar"
|
||||
group "build"
|
||||
description "Zip the executable jar"
|
||||
dependsOn = [writeExecutableJar]
|
||||
|
||||
archiveFileName = "${project.name}-${project.version}.zip"
|
||||
@@ -477,174 +470,115 @@ tasks.register('runLocal', JavaExec) {
|
||||
args 'server', 'local', '--plugins', 'local/plugins'
|
||||
}
|
||||
|
||||
tasks.register('runStandalone', JavaExec) {
|
||||
group = "application"
|
||||
description = "Run Kestra as server local"
|
||||
classpath = project(":cli").sourceSets.main.runtimeClasspath
|
||||
mainClass = mainClassName
|
||||
environment 'MICRONAUT_ENVIRONMENTS', 'override'
|
||||
args 'server', 'standalone', '--plugins', 'local/plugins'
|
||||
}
|
||||
|
||||
/**********************************************************************************************************************\
|
||||
* Publish
|
||||
**********************************************************************************************************************/
|
||||
subprojects {subProject ->
|
||||
|
||||
if (subProject.name != 'jmh-benchmarks' && subProject.name != rootProject.name) {
|
||||
apply plugin: 'signing'
|
||||
apply plugin: "com.vanniktech.maven.publish"
|
||||
|
||||
javadoc {
|
||||
options {
|
||||
locale = 'en_US'
|
||||
encoding = 'UTF-8'
|
||||
addStringOption("Xdoclint:none", "-quiet")
|
||||
}
|
||||
}
|
||||
|
||||
tasks.register('sourcesJar', Jar) {
|
||||
dependsOn = [':core:copyGradleProperties']
|
||||
dependsOn = [':ui:assembleFrontend']
|
||||
archiveClassifier.set('sources')
|
||||
from sourceSets.main.allSource
|
||||
}
|
||||
sourcesJar.dependsOn ':core:copyGradleProperties'
|
||||
sourcesJar.dependsOn ':ui:assembleFrontend'
|
||||
|
||||
tasks.register('javadocJar', Jar) {
|
||||
archiveClassifier.set('javadoc')
|
||||
from javadoc
|
||||
}
|
||||
|
||||
tasks.register('testsJar', Jar) {
|
||||
group = 'build'
|
||||
description = 'Build the tests jar'
|
||||
|
||||
archiveClassifier.set('tests')
|
||||
if (sourceSets.matching { it.name == 'test'}) {
|
||||
from sourceSets.named('test').get().output
|
||||
}
|
||||
}
|
||||
|
||||
//These modules should not be published
|
||||
def unpublishedModules = ["jdbc-mysql", "jdbc-postgres", "webserver"]
|
||||
if (subProject.name in unpublishedModules){
|
||||
return
|
||||
}
|
||||
|
||||
mavenPublishing {
|
||||
publishToMavenCentral(true)
|
||||
signAllPublications()
|
||||
|
||||
coordinates(
|
||||
"${rootProject.group}",
|
||||
subProject.name == "cli" ? rootProject.name : subProject.name,
|
||||
"${rootProject.version}"
|
||||
)
|
||||
|
||||
pom {
|
||||
name = project.name
|
||||
description = "${project.group}:${project.name}:${rootProject.version}"
|
||||
url = "https://github.com/kestra-io/${rootProject.name}"
|
||||
|
||||
licenses {
|
||||
license {
|
||||
name = "The Apache License, Version 2.0"
|
||||
url = "http://www.apache.org/licenses/LICENSE-2.0.txt"
|
||||
}
|
||||
}
|
||||
developers {
|
||||
developer {
|
||||
id = "tchiotludo"
|
||||
name = "Ludovic Dehon"
|
||||
email = "ldehon@kestra.io"
|
||||
}
|
||||
}
|
||||
scm {
|
||||
connection = 'scm:git:'
|
||||
url = "https://github.com/kestra-io/${rootProject.name}"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
afterEvaluate {
|
||||
publishing {
|
||||
publications {
|
||||
withType(MavenPublication).configureEach { publication ->
|
||||
|
||||
if (subProject.name == "platform") {
|
||||
// Clear all artifacts except the BOM
|
||||
publication.artifacts.clear()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (subProject.name == 'cli') {
|
||||
|
||||
/* Make sure the special publication is wired *after* every plugin */
|
||||
subProject.afterEvaluate {
|
||||
/* 1. Remove the default java component so Gradle stops expecting
|
||||
the standard cli-*.jar, sources, javadoc, etc. */
|
||||
components.removeAll { it.name == "java" }
|
||||
|
||||
/* 2. Replace the publication’s artifacts with shadow + exec */
|
||||
publishing.publications.withType(MavenPublication).configureEach { pub ->
|
||||
pub.artifacts.clear()
|
||||
|
||||
// main shadow JAR built at root
|
||||
pub.artifact(rootProject.tasks.named("shadowJar").get()) {
|
||||
extension = "jar"
|
||||
}
|
||||
|
||||
// executable ZIP built at root
|
||||
pub.artifact(rootProject.tasks.named("executableJar").get().archiveFile) {
|
||||
classifier = "exec"
|
||||
extension = "zip"
|
||||
}
|
||||
pub.artifact(tasks.named("sourcesJar").get())
|
||||
pub.artifact(tasks.named("javadocJar").get())
|
||||
|
||||
}
|
||||
|
||||
/* 3. Disable Gradle-module metadata for this publication to
|
||||
avoid the “artifact removed from java component” error. */
|
||||
tasks.withType(GenerateModuleMetadata).configureEach { it.enabled = false }
|
||||
|
||||
/* 4. Make every publish task in :cli wait for the two artifacts */
|
||||
tasks.matching { it.name.startsWith("publish") }.configureEach {
|
||||
dependsOn rootProject.tasks.named("shadowJar")
|
||||
dependsOn rootProject.tasks.named("executableJar")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (subProject.name != 'platform' && subProject.name != 'cli') {
|
||||
// only if a test source set actually exists (avoids empty artifacts)
|
||||
def hasTests = subProject.extensions.findByName('sourceSets')?.findByName('test') != null
|
||||
|
||||
if (hasTests) {
|
||||
// wire the artifact onto every Maven publication of this subproject
|
||||
publishing {
|
||||
publications {
|
||||
withType(MavenPublication).configureEach { pub ->
|
||||
// keep the normal java component + sources/javadoc already configured
|
||||
pub.artifact(subProject.tasks.named('testsJar').get())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// make sure publish tasks build the tests jar first
|
||||
tasks.matching { it.name.startsWith('publish') }.configureEach {
|
||||
dependsOn subProject.tasks.named('testsJar')
|
||||
}
|
||||
}
|
||||
nexusPublishing {
|
||||
repositoryDescription = "${project.group}:${rootProject.name}:${project.version}"
|
||||
useStaging = !project.version.endsWith("-SNAPSHOT")
|
||||
repositories {
|
||||
sonatype {
|
||||
nexusUrl.set(uri("https://s01.oss.sonatype.org/service/local/"))
|
||||
snapshotRepositoryUrl.set(uri("https://s01.oss.sonatype.org/content/repositories/snapshots/"))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
subprojects {
|
||||
apply plugin: "maven-publish"
|
||||
apply plugin: 'signing'
|
||||
apply plugin: 'ru.vyarus.pom'
|
||||
apply plugin: 'ru.vyarus.github-info'
|
||||
|
||||
javadoc {
|
||||
options {
|
||||
locale = 'en_US'
|
||||
encoding = 'UTF-8'
|
||||
addStringOption("Xdoclint:none", "-quiet")
|
||||
}
|
||||
}
|
||||
|
||||
tasks.register('sourcesJar', Jar) {
|
||||
dependsOn = [':core:copyGradleProperties']
|
||||
dependsOn = [':ui:assembleFrontend']
|
||||
archiveClassifier.set('sources')
|
||||
from sourceSets.main.allSource
|
||||
}
|
||||
sourcesJar.dependsOn ':core:copyGradleProperties'
|
||||
sourcesJar.dependsOn ':ui:assembleFrontend'
|
||||
|
||||
tasks.register('javadocJar', Jar) {
|
||||
archiveClassifier.set('javadoc')
|
||||
from javadoc
|
||||
}
|
||||
|
||||
tasks.register('testsJar', Jar) {
|
||||
group = 'build'
|
||||
description = 'Build the tests jar'
|
||||
|
||||
archiveClassifier.set('tests')
|
||||
if (sourceSets.matching { it.name == 'test'}) {
|
||||
from sourceSets.named('test').get().output
|
||||
}
|
||||
}
|
||||
|
||||
github {
|
||||
user 'kestra-io'
|
||||
license 'Apache'
|
||||
repository 'kestra'
|
||||
site 'https://kestra.io'
|
||||
}
|
||||
|
||||
maven.pom {
|
||||
description = 'The modern, scalable orchestrator & scheduler open source platform'
|
||||
|
||||
developers {
|
||||
developer {
|
||||
id = "tchiotludo"
|
||||
name = "Ludovic Dehon"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
publishing {
|
||||
publications {
|
||||
sonatypePublication(MavenPublication) {
|
||||
version project.version
|
||||
|
||||
if (project.name.contains('cli')) {
|
||||
groupId "io.kestra"
|
||||
artifactId "kestra"
|
||||
|
||||
artifact shadowJar
|
||||
artifact executableJar
|
||||
} else if (project.name.contains('platform')){
|
||||
groupId project.group
|
||||
artifactId project.name
|
||||
} else {
|
||||
from components.java
|
||||
|
||||
groupId project.group
|
||||
artifactId project.name
|
||||
|
||||
artifact sourcesJar
|
||||
artifact javadocJar
|
||||
artifact testsJar
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
signing {
|
||||
// only sign JARs that we publish to Sonatype
|
||||
required { gradle.taskGraph.hasTask("publishSonatypePublicationPublicationToSonatypeRepository") }
|
||||
sign publishing.publications.sonatypePublication
|
||||
}
|
||||
|
||||
tasks.withType(GenerateModuleMetadata).configureEach {
|
||||
// Suppression this validation error as we want to enforce the Kestra platform
|
||||
suppressedValidationErrors.add('enforced-platform')
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**********************************************************************************************************************\
|
||||
@@ -663,10 +597,11 @@ release {
|
||||
}
|
||||
|
||||
// Dynamically set properties with default values
|
||||
failOnSnapshotDependencies = providers.gradleProperty("release.failOnSnapshotDependencies")
|
||||
.map(val -> Boolean.parseBoolean(val))
|
||||
.getOrElse(true)
|
||||
failOnSnapshotDependencies = (project.hasProperty('release.failOnSnapshotDependencies')
|
||||
? project.property('release.failOnSnapshotDependencies').toBoolean()
|
||||
: true)
|
||||
|
||||
pushReleaseVersionBranch = providers.gradleProperty("release.pushReleaseVersionBranch")
|
||||
.getOrElse(null)
|
||||
pushReleaseVersionBranch = (project.hasProperty('release.pushReleaseVersionBranch')
|
||||
? project.property('release.pushReleaseVersionBranch').toString()
|
||||
: null)
|
||||
}
|
||||
|
||||
@@ -12,9 +12,18 @@ dependencies {
implementation 'ch.qos.logback.contrib:logback-json-classic'
implementation 'ch.qos.logback.contrib:logback-jackson'

// OTLP metrics
implementation "io.micronaut.micrometer:micronaut-micrometer-registry-otlp"

// plugins
implementation 'org.eclipse.aether:aether-api'
implementation 'org.eclipse.aether:aether-spi'
implementation 'org.eclipse.aether:aether-util'
implementation 'org.eclipse.aether:aether-impl'
implementation 'org.eclipse.aether:aether-connector-basic'
implementation 'org.eclipse.aether:aether-transport-file'
implementation 'org.eclipse.aether:aether-transport-http'
implementation('org.apache.maven:maven-aether-provider') {
// sisu dependency injector is not used
exclude group: 'org.eclipse.sisu'
}
// aether still use javax.inject
compileOnly 'javax.inject:javax.inject:1'

@@ -34,7 +43,4 @@ dependencies {
implementation project(":storage-local")

implementation project(":webserver")

//test
testImplementation "org.wiremock:wiremock-jetty12"
}
}
@@ -14,15 +14,16 @@ import io.micronaut.http.netty.body.NettyJsonHandler;
|
||||
import io.micronaut.json.JsonMapper;
|
||||
import jakarta.inject.Inject;
|
||||
import jakarta.inject.Named;
|
||||
import lombok.Builder;
|
||||
import lombok.Value;
|
||||
import lombok.extern.jackson.Jacksonized;
|
||||
import picocli.CommandLine;
|
||||
|
||||
import java.net.URISyntaxException;
|
||||
import java.net.URL;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import lombok.Builder;
|
||||
import lombok.Value;
|
||||
import lombok.extern.jackson.Jacksonized;
|
||||
import picocli.CommandLine;
|
||||
|
||||
public abstract class AbstractApiCommand extends AbstractCommand {
|
||||
@CommandLine.Option(names = {"--server"}, description = "Kestra server url", defaultValue = "http://localhost:8080")
|
||||
@@ -34,7 +35,7 @@ public abstract class AbstractApiCommand extends AbstractCommand {
|
||||
@CommandLine.Option(names = {"--user"}, paramLabel = "<user:password>", description = "Server user and password")
|
||||
protected String user;
|
||||
|
||||
@CommandLine.Option(names = {"--tenant"}, description = "Tenant identifier (EE only)")
|
||||
@CommandLine.Option(names = {"--tenant"}, description = "Tenant identifier (EE only, when multi-tenancy is enabled)")
|
||||
protected String tenantId;
|
||||
|
||||
@CommandLine.Option(names = {"--api-token"}, description = "API Token (EE only).")
|
||||
@@ -45,18 +46,8 @@ public abstract class AbstractApiCommand extends AbstractCommand {
|
||||
@Nullable
|
||||
private HttpClientConfiguration httpClientConfiguration;
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*/
|
||||
protected boolean loadExternalPlugins() {
|
||||
return false;
|
||||
}
|
||||
|
||||
protected DefaultHttpClient client() throws URISyntaxException {
|
||||
DefaultHttpClient defaultHttpClient = DefaultHttpClient.builder()
|
||||
.uri(server.toURI())
|
||||
.configuration(httpClientConfiguration != null ? httpClientConfiguration : new DefaultHttpClientConfiguration())
|
||||
.build();
|
||||
DefaultHttpClient defaultHttpClient = new DefaultHttpClient(server.toURI(), httpClientConfiguration != null ? httpClientConfiguration : new DefaultHttpClientConfiguration());
|
||||
MessageBodyHandlerRegistry defaultHandlerRegistry = defaultHttpClient.getHandlerRegistry();
|
||||
if (defaultHandlerRegistry instanceof ContextlessMessageBodyHandlerRegistry modifiableRegistry) {
|
||||
modifiableRegistry.add(MediaType.TEXT_JSON_TYPE, new NettyJsonHandler<>(JsonMapper.createDefault()));
|
||||
@@ -84,12 +75,12 @@ public abstract class AbstractApiCommand extends AbstractCommand {
return request;
}

protected String apiUri(String path, String tenantId) {
protected String apiUri(String path) {
if (path == null || !path.startsWith("/")) {
throw new IllegalArgumentException("'path' must be non-null and start with '/'");
}

return "/api/v1/" + tenantId + path;
return tenantId == null ? "/api/v1" + path : "/api/v1/" + tenantId + path;
}

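// A minimal standalone sketch of how the apiUri variants above compose the final URL;
// ApiUriSketch, buildApiUri and the "main" tenant value are hypothetical names used only
// for illustration, not taken from the diff:
class ApiUriSketch {
    // Mirrors the single-argument form: the tenant segment is inserted only when a
    // tenant id is configured, otherwise the path is appended directly to /api/v1.
    static String buildApiUri(String tenantId, String path) {
        if (path == null || !path.startsWith("/")) {
            throw new IllegalArgumentException("'path' must be non-null and start with '/'");
        }
        return tenantId == null ? "/api/v1" + path : "/api/v1/" + tenantId + path;
    }

    public static void main(String[] args) {
        System.out.println(buildApiUri(null, "/flows/validate"));   // /api/v1/flows/validate
        System.out.println(buildApiUri("main", "/flows/validate")); // /api/v1/main/flows/validate
    }
}
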
@Builder
|
||||
|
||||
@@ -4,17 +4,16 @@ import ch.qos.logback.classic.LoggerContext;
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import io.kestra.cli.commands.servers.ServerCommandInterface;
|
||||
import io.kestra.cli.services.StartupHookInterface;
|
||||
import io.kestra.core.plugins.PluginManager;
|
||||
import io.kestra.core.contexts.KestraContext;
|
||||
import io.kestra.core.plugins.PluginRegistry;
|
||||
import io.kestra.webserver.services.FlowAutoLoaderService;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import io.micronaut.context.env.yaml.YamlPropertySourceLoader;
|
||||
import io.micronaut.core.annotation.Introspected;
|
||||
import io.micronaut.http.uri.UriBuilder;
|
||||
import io.micronaut.management.endpoint.EndpointDefaultConfiguration;
|
||||
import io.micronaut.runtime.server.EmbeddedServer;
|
||||
import jakarta.inject.Provider;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.apache.http.client.utils.URIBuilder;
|
||||
import io.kestra.core.utils.Rethrow;
|
||||
import picocli.CommandLine;
|
||||
|
||||
@@ -27,20 +26,17 @@ import java.nio.file.Paths;
|
||||
import java.text.MessageFormat;
|
||||
import java.time.temporal.ChronoUnit;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.concurrent.Callable;
|
||||
import jakarta.inject.Inject;
|
||||
import picocli.CommandLine.Command;
|
||||
import picocli.CommandLine.Option;
|
||||
|
||||
@Command(
|
||||
@CommandLine.Command(
|
||||
versionProvider = VersionProvider.class,
|
||||
mixinStandardHelpOptions = true,
|
||||
showDefaultValues = true
|
||||
)
|
||||
@Slf4j
|
||||
@Introspected
|
||||
public abstract class AbstractCommand implements Callable<Integer> {
|
||||
abstract public class AbstractCommand implements Callable<Integer> {
|
||||
@Inject
|
||||
private ApplicationContext applicationContext;
|
||||
|
||||
@@ -53,28 +49,22 @@ public abstract class AbstractCommand implements Callable<Integer> {
|
||||
@Inject
|
||||
private io.kestra.core.utils.VersionProvider versionProvider;
|
||||
|
||||
@Inject
|
||||
protected Provider<PluginRegistry> pluginRegistryProvider;
|
||||
|
||||
@Inject
|
||||
protected Provider<PluginManager> pluginManagerProvider;
|
||||
|
||||
private PluginRegistry pluginRegistry;
|
||||
|
||||
@Option(names = {"-v", "--verbose"}, description = "Change log level. Multiple -v options increase the verbosity.", showDefaultValue = CommandLine.Help.Visibility.NEVER)
|
||||
@CommandLine.Option(names = {"-v", "--verbose"}, description = "Change log level. Multiple -v options increase the verbosity.", showDefaultValue = CommandLine.Help.Visibility.NEVER)
|
||||
private boolean[] verbose = new boolean[0];
|
||||
|
||||
@Option(names = {"-l", "--log-level"}, description = "Change log level (values: ${COMPLETION-CANDIDATES})")
|
||||
@CommandLine.Option(names = {"-l", "--log-level"}, description = "Change log level (values: ${COMPLETION-CANDIDATES})")
|
||||
private LogLevel logLevel = LogLevel.INFO;
|
||||
|
||||
@Option(names = {"--internal-log"}, description = "Change also log level for internal log")
|
||||
@CommandLine.Option(names = {"--internal-log"}, description = "Change also log level for internal log")
|
||||
private boolean internalLog = false;
|
||||
|
||||
@Option(names = {"-c", "--config"}, description = "Path to a configuration file")
|
||||
@CommandLine.Option(names = {"-c", "--config"}, description = "Path to a configuration file")
|
||||
private Path config = Paths.get(System.getProperty("user.home"), ".kestra/config.yml");
|
||||
|
||||
@Option(names = {"-p", "--plugins"}, description = "Path to plugins directory")
|
||||
protected Path pluginsPath = Optional.ofNullable(System.getenv("KESTRA_PLUGINS_PATH")).map(Paths::get).orElse(null);
|
||||
@CommandLine.Option(names = {"-p", "--plugins"}, description = "Path to plugins directory")
|
||||
protected Path pluginsPath = System.getenv("KESTRA_PLUGINS_PATH") != null ? Paths.get(System.getenv("KESTRA_PLUGINS_PATH")) : null;
|
||||
|
||||
public enum LogLevel {
|
||||
TRACE,
|
||||
@@ -86,22 +76,16 @@ public abstract class AbstractCommand implements Callable<Integer> {
|
||||
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
Thread.currentThread().setName(this.getClass().getDeclaredAnnotation(Command.class).name());
|
||||
Thread.currentThread().setName(this.getClass().getDeclaredAnnotation(CommandLine.Command.class).name());
|
||||
startLogger();
|
||||
sendServerLog();
|
||||
if (this.startupHook != null) {
|
||||
this.startupHook.start(this);
|
||||
}
|
||||
|
||||
if (pluginRegistryProvider != null && this.pluginsPath != null && loadExternalPlugins()) {
|
||||
pluginRegistry = pluginRegistryProvider.get();
|
||||
if (this.pluginsPath != null && loadExternalPlugins()) {
|
||||
pluginRegistry = pluginRegistry();
|
||||
pluginRegistry.registerIfAbsent(pluginsPath);
|
||||
|
||||
// PluginManager must only be initialized if a registry is also instantiated
|
||||
if (isPluginManagerEnabled()) {
|
||||
PluginManager manager = pluginManagerProvider.get();
|
||||
manager.start();
|
||||
}
|
||||
}
|
||||
|
||||
startWebserver();
|
||||
@@ -118,15 +102,8 @@ public abstract class AbstractCommand implements Callable<Integer> {
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Specifies whether the {@link PluginManager} service must be initialized.
|
||||
* <p>
|
||||
* This method can be overridden by concrete commands.
|
||||
*
|
||||
* @return {@code true} if the {@link PluginManager} service must be initialized.
|
||||
*/
|
||||
protected boolean isPluginManagerEnabled() {
|
||||
return true;
|
||||
protected PluginRegistry pluginRegistry() {
|
||||
return KestraContext.getContext().getPluginRegistry(); // Lazy init
|
||||
}
|
||||
|
||||
private static String message(String message, Object... format) {
|
||||
@@ -180,6 +157,7 @@ public abstract class AbstractCommand implements Callable<Integer> {
|
||||
logger.getName().startsWith("io.kestra") &&
|
||||
!logger.getName().startsWith("io.kestra.ee.runner.kafka.services"))
|
||||
)
|
||||
|| logger.getName().startsWith("flow")
|
||||
)
|
||||
.forEach(
|
||||
logger -> logger.setLevel(ch.qos.logback.classic.Level.valueOf(this.logLevel.name()))
|
||||
@@ -205,9 +183,9 @@ public abstract class AbstractCommand implements Callable<Integer> {
|
||||
if (this.endpointConfiguration.getPort().isPresent()) {
|
||||
URI endpoint = null;
|
||||
try {
|
||||
endpoint = UriBuilder.of(server.getURL().toURI())
|
||||
.port(this.endpointConfiguration.getPort().get())
|
||||
.path("/health")
|
||||
endpoint = new URIBuilder(server.getURL().toURI())
|
||||
.setPort(this.endpointConfiguration.getPort().get())
|
||||
.setPath("/health")
|
||||
.build();
|
||||
} catch (URISyntaxException e) {
|
||||
e.printStackTrace();
|
||||
@@ -229,12 +207,10 @@ public abstract class AbstractCommand implements Callable<Integer> {
return false;
}

protected void shutdownHook(boolean logShutdown, Rethrow.RunnableChecked<Exception> run) {
protected void shutdownHook(Rethrow.RunnableChecked<Exception> run) {
Runtime.getRuntime().addShutdownHook(new Thread(
() -> {
if (logShutdown) {
log.warn("Receiving shutdown ! Try to graceful exit");
}
log.warn("Receiving shutdown ! Try to graceful exit");
try {
run.run();
} catch (Exception e) {

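// A small self-contained sketch of the shutdown-hook pattern used in the hunk above,
// assuming only the JDK (no Kestra types); GracefulShutdownSketch and the cleanup
// lambda are hypothetical placeholders for illustration:
class GracefulShutdownSketch {
    // Registers a JVM shutdown hook that logs, runs a cleanup action and swallows
    // failures, mirroring shutdownHook(Rethrow.RunnableChecked) shown in the diff.
    static void registerShutdownHook(Runnable cleanup) {
        Runtime.getRuntime().addShutdownHook(new Thread(() -> {
            System.err.println("Receiving shutdown ! Try to graceful exit");
            try {
                cleanup.run();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }));
    }

    public static void main(String[] args) {
        registerShutdownHook(() -> System.err.println("closing resources"));
    }
}
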
@@ -1,6 +1,5 @@
|
||||
package io.kestra.cli;
|
||||
|
||||
import io.kestra.cli.services.TenantIdSelectorService;
|
||||
import io.kestra.core.models.validations.ModelValidator;
|
||||
import io.kestra.core.models.validations.ValidateConstraintViolation;
|
||||
import io.kestra.core.serializers.YamlParser;
|
||||
@@ -10,7 +9,6 @@ import io.micronaut.http.MediaType;
|
||||
import io.micronaut.http.MutableHttpRequest;
|
||||
import io.micronaut.http.client.exceptions.HttpClientResponseException;
|
||||
import io.micronaut.http.client.netty.DefaultHttpClient;
|
||||
import jakarta.inject.Inject;
|
||||
import picocli.CommandLine;
|
||||
|
||||
import java.io.IOException;
|
||||
@@ -33,15 +31,6 @@ public abstract class AbstractValidateCommand extends AbstractApiCommand {
|
||||
@CommandLine.Parameters(index = "0", description = "the directory containing files to check")
|
||||
protected Path directory;
|
||||
|
||||
@Inject
|
||||
private TenantIdSelectorService tenantService;
|
||||
|
||||
/** {@inheritDoc} **/
|
||||
@Override
|
||||
protected boolean loadExternalPlugins() {
|
||||
return local;
|
||||
}
|
||||
|
||||
public static void handleException(ConstraintViolationException e, String resource) {
|
||||
stdErr("\t@|fg(red) Unable to parse {0} due to the following error(s):|@", resource);
|
||||
e.getConstraintViolations()
|
||||
@@ -79,9 +68,10 @@ public abstract class AbstractValidateCommand extends AbstractApiCommand {
|
||||
}
|
||||
}
|
||||
|
||||
// bug in micronaut, we can't inject ModelValidator, so we inject from implementation
|
||||
// bug in micronaut, we can't inject YamlFlowParser & ModelValidator, so we inject from implementation
|
||||
public Integer call(
|
||||
Class<?> cls,
|
||||
YamlParser yamlParser,
|
||||
ModelValidator modelValidator,
|
||||
Function<Object, String> identity,
|
||||
Function<Object, List<String>> warningsFunction,
|
||||
@@ -98,7 +88,7 @@ public abstract class AbstractValidateCommand extends AbstractApiCommand {
|
||||
.filter(YamlParser::isValidExtension)
|
||||
.forEach(path -> {
|
||||
try {
|
||||
Object parse = YamlParser.parse(path.toFile(), cls);
|
||||
Object parse = yamlParser.parse(path.toFile(), cls);
|
||||
modelValidator.validate(parse);
|
||||
stdOut("@|green \u2713|@ - " + identity.apply(parse));
|
||||
List<String> warnings = warningsFunction.apply(parse);
|
||||
@@ -117,7 +107,7 @@ public abstract class AbstractValidateCommand extends AbstractApiCommand {
|
||||
|
||||
try(DefaultHttpClient client = client()) {
|
||||
MutableHttpRequest<String> request = HttpRequest
|
||||
.POST(apiUri("/flows/validate", tenantService.getTenantId(tenantId)), body).contentType(MediaType.APPLICATION_YAML);
|
||||
.POST(apiUri("/flows/validate"), body).contentType(MediaType.APPLICATION_YAML);
|
||||
|
||||
List<ValidateConstraintViolation> validations = client.toBlocking().retrieve(
|
||||
this.requestOptions(request),
|
||||
|
||||
@@ -2,7 +2,6 @@ package io.kestra.cli;
|
||||
|
||||
import io.kestra.cli.commands.configs.sys.ConfigCommand;
|
||||
import io.kestra.cli.commands.flows.FlowCommand;
|
||||
import io.kestra.cli.commands.migrations.MigrationCommand;
|
||||
import io.kestra.cli.commands.namespaces.NamespaceCommand;
|
||||
import io.kestra.cli.commands.plugins.PluginCommand;
|
||||
import io.kestra.cli.commands.servers.ServerCommand;
|
||||
@@ -43,7 +42,6 @@ import java.util.concurrent.Callable;
|
||||
SysCommand.class,
|
||||
ConfigCommand.class,
|
||||
NamespaceCommand.class,
|
||||
MigrationCommand.class,
|
||||
}
|
||||
)
|
||||
@Introspected
|
||||
@@ -66,14 +64,8 @@ public class App implements Callable<Integer> {
|
||||
ApplicationContext applicationContext = App.applicationContext(cls, args);
|
||||
|
||||
// Call Picocli command
|
||||
int exitCode = 0;
|
||||
try {
|
||||
exitCode = new CommandLine(cls, new MicronautFactory(applicationContext)).execute(args);
|
||||
} catch (CommandLine.InitializationException e){
|
||||
System.err.println("Could not initialize picoli ComandLine, err: " + e.getMessage());
|
||||
e.printStackTrace();
|
||||
exitCode = 1;
|
||||
}
|
||||
int exitCode = new CommandLine(cls, new MicronautFactory(applicationContext)).execute(args);
|
||||
|
||||
applicationContext.close();
|
||||
|
||||
// exit code
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
package io.kestra.core.validations;
|
||||
package io.kestra.cli;
|
||||
|
||||
import io.micronaut.context.annotation.Context;
|
||||
import io.micronaut.context.annotation.Requires;
|
||||
@@ -18,8 +18,6 @@ import picocli.CommandLine;
|
||||
FlowNamespaceCommand.class,
|
||||
FlowDotCommand.class,
|
||||
FlowExportCommand.class,
|
||||
FlowUpdateCommand.class,
|
||||
FlowUpdatesCommand.class
|
||||
}
|
||||
)
|
||||
@Slf4j
|
||||
|
||||
@@ -2,13 +2,11 @@ package io.kestra.cli.commands.flows;
|
||||
|
||||
import io.kestra.cli.AbstractApiCommand;
|
||||
import io.kestra.cli.AbstractValidateCommand;
|
||||
import io.kestra.cli.services.TenantIdSelectorService;
|
||||
import io.micronaut.http.HttpRequest;
|
||||
import io.micronaut.http.MediaType;
|
||||
import io.micronaut.http.MutableHttpRequest;
|
||||
import io.micronaut.http.client.exceptions.HttpClientResponseException;
|
||||
import io.micronaut.http.client.netty.DefaultHttpClient;
|
||||
import jakarta.inject.Inject;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import picocli.CommandLine;
|
||||
|
||||
@@ -25,9 +23,6 @@ public class FlowCreateCommand extends AbstractApiCommand {
|
||||
@CommandLine.Parameters(index = "0", description = "The file containing the flow")
|
||||
public Path flowFile;
|
||||
|
||||
@Inject
|
||||
private TenantIdSelectorService tenantService;
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
@@ -39,7 +34,7 @@ public class FlowCreateCommand extends AbstractApiCommand {
|
||||
|
||||
try(DefaultHttpClient client = client()) {
|
||||
MutableHttpRequest<String> request = HttpRequest
|
||||
.POST(apiUri("/flows", tenantService.getTenantId(tenantId)), body).contentType(MediaType.APPLICATION_YAML);
|
||||
.POST(apiUri("/flows"), body).contentType(MediaType.APPLICATION_YAML);
|
||||
|
||||
client.toBlocking().retrieve(
|
||||
this.requestOptions(request),
|
||||
|
||||
@@ -2,12 +2,10 @@ package io.kestra.cli.commands.flows;
|
||||
|
||||
import io.kestra.cli.AbstractApiCommand;
|
||||
import io.kestra.cli.AbstractValidateCommand;
|
||||
import io.kestra.cli.services.TenantIdSelectorService;
|
||||
import io.micronaut.http.HttpRequest;
|
||||
import io.micronaut.http.MutableHttpRequest;
|
||||
import io.micronaut.http.client.exceptions.HttpClientResponseException;
|
||||
import io.micronaut.http.client.netty.DefaultHttpClient;
|
||||
import jakarta.inject.Inject;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import picocli.CommandLine;
|
||||
|
||||
@@ -25,9 +23,6 @@ public class FlowDeleteCommand extends AbstractApiCommand {
|
||||
@CommandLine.Parameters(index = "1", description = "The ID of the flow")
|
||||
public String id;
|
||||
|
||||
@Inject
|
||||
private TenantIdSelectorService tenantService;
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
@@ -35,7 +30,7 @@ public class FlowDeleteCommand extends AbstractApiCommand {
|
||||
|
||||
try(DefaultHttpClient client = client()) {
|
||||
MutableHttpRequest<String> request = HttpRequest
|
||||
.DELETE(apiUri("/flows/" + namespace + "/" + id, tenantService.getTenantId(tenantId)));
|
||||
.DELETE(apiUri("/flows/" + namespace + "/" + id ));
|
||||
|
||||
client.toBlocking().exchange(
|
||||
this.requestOptions(request)
|
||||
|
||||
@@ -29,7 +29,8 @@ public class FlowDotCommand extends AbstractCommand {
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
|
||||
Flow flow = YamlParser.parse(file.toFile(), Flow.class);
|
||||
YamlParser parser = applicationContext.getBean(YamlParser.class);
|
||||
Flow flow = parser.parse(file.toFile(), Flow.class);
|
||||
|
||||
GraphCluster graph = GraphUtils.of(flow, null);
|
||||
|
||||
|
||||
@@ -20,6 +20,9 @@ public class FlowExpandCommand extends AbstractCommand {
|
||||
@CommandLine.Parameters(index = "0", description = "The flow file to expand")
|
||||
private Path file;
|
||||
|
||||
@Inject
|
||||
private YamlParser yamlParser;
|
||||
|
||||
@Inject
|
||||
private ModelValidator modelValidator;
|
||||
|
||||
@@ -28,7 +31,7 @@ public class FlowExpandCommand extends AbstractCommand {
|
||||
super.call();
|
||||
stdErr("Warning, this functionality is deprecated and will be removed at some point.");
|
||||
String content = IncludeHelperExpander.expand(Files.readString(file), file.getParent());
|
||||
Flow flow = YamlParser.parse(content, Flow.class);
|
||||
Flow flow = yamlParser.parse(content, Flow.class);
|
||||
modelValidator.validate(flow);
|
||||
stdOut(content);
|
||||
return 0;
|
||||
|
||||
@@ -2,7 +2,7 @@ package io.kestra.cli.commands.flows;
|
||||
|
||||
import io.kestra.cli.AbstractApiCommand;
|
||||
import io.kestra.cli.AbstractValidateCommand;
|
||||
import io.kestra.cli.services.TenantIdSelectorService;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import io.micronaut.http.HttpRequest;
|
||||
import io.micronaut.http.HttpResponse;
|
||||
import io.micronaut.http.MediaType;
|
||||
@@ -25,8 +25,9 @@ import java.nio.file.Path;
|
||||
public class FlowExportCommand extends AbstractApiCommand {
|
||||
private static final String DEFAULT_FILE_NAME = "flows.zip";
|
||||
|
||||
// @FIXME: keep this for a bug in Micronaut that requires @Inject on the top-level command in order to inject into abstract classes
|
||||
@Inject
|
||||
private TenantIdSelectorService tenantService;
|
||||
private ApplicationContext applicationContext;
|
||||
|
||||
@CommandLine.Option(names = {"--namespace"}, description = "The namespace of flows to export")
|
||||
public String namespace;
|
||||
@@ -40,7 +41,7 @@ public class FlowExportCommand extends AbstractApiCommand {
|
||||
|
||||
try(DefaultHttpClient client = client()) {
|
||||
MutableHttpRequest<Object> request = HttpRequest
|
||||
.GET(apiUri("/flows/export/by-query", tenantService.getTenantId(tenantId)) + (namespace != null ? "?namespace=" + namespace : ""))
|
||||
.GET(apiUri("/flows/export/by-query") + (namespace != null ? "?namespace=" + namespace : ""))
|
||||
.accept(MediaType.APPLICATION_OCTET_STREAM);
|
||||
|
||||
HttpResponse<byte[]> response = client.toBlocking().exchange(this.requestOptions(request), byte[].class);
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
package io.kestra.cli.commands.flows;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import io.kestra.cli.AbstractApiCommand;
|
||||
import io.kestra.cli.services.TenantIdSelectorService;
|
||||
import io.kestra.cli.AbstractCommand;
|
||||
import io.kestra.core.models.flows.Flow;
|
||||
import io.kestra.core.repositories.FlowRepositoryInterface;
|
||||
import io.kestra.core.repositories.LocalFlowRepositoryLoader;
|
||||
@@ -31,7 +30,7 @@ import java.util.concurrent.TimeoutException;
|
||||
description = "Test a flow"
|
||||
)
|
||||
@Slf4j
|
||||
public class FlowTestCommand extends AbstractApiCommand {
|
||||
public class FlowTestCommand extends AbstractCommand {
|
||||
@Inject
|
||||
private ApplicationContext applicationContext;
|
||||
|
||||
@@ -77,7 +76,6 @@ public class FlowTestCommand extends AbstractApiCommand {
|
||||
FlowRepositoryInterface flowRepository = applicationContext.getBean(FlowRepositoryInterface.class);
|
||||
FlowInputOutput flowInputOutput = applicationContext.getBean(FlowInputOutput.class);
|
||||
RunnerUtils runnerUtils = applicationContext.getBean(RunnerUtils.class);
|
||||
TenantIdSelectorService tenantService = applicationContext.getBean(TenantIdSelectorService.class);
|
||||
|
||||
Map<String, Object> inputs = new HashMap<>();
|
||||
|
||||
@@ -91,7 +89,7 @@ public class FlowTestCommand extends AbstractApiCommand {
|
||||
|
||||
try {
|
||||
runner.run();
|
||||
repositoryLoader.load(tenantService.getTenantId(tenantId), file.toFile());
|
||||
repositoryLoader.load(file.toFile());
|
||||
|
||||
List<Flow> all = flowRepository.findAllForAllTenants();
|
||||
if (all.size() != 1) {
|
||||
|
||||
@@ -2,13 +2,11 @@ package io.kestra.cli.commands.flows;
|
||||
|
||||
import io.kestra.cli.AbstractApiCommand;
|
||||
import io.kestra.cli.AbstractValidateCommand;
|
||||
import io.kestra.cli.services.TenantIdSelectorService;
|
||||
import io.micronaut.http.HttpRequest;
|
||||
import io.micronaut.http.MediaType;
|
||||
import io.micronaut.http.MutableHttpRequest;
|
||||
import io.micronaut.http.client.exceptions.HttpClientResponseException;
|
||||
import io.micronaut.http.client.netty.DefaultHttpClient;
|
||||
import jakarta.inject.Inject;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import picocli.CommandLine;
|
||||
|
||||
@@ -31,9 +29,6 @@ public class FlowUpdateCommand extends AbstractApiCommand {
|
||||
@CommandLine.Parameters(index = "2", description = "The ID of the flow")
|
||||
public String id;
|
||||
|
||||
@Inject
|
||||
private TenantIdSelectorService tenantService;
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
@@ -45,7 +40,7 @@ public class FlowUpdateCommand extends AbstractApiCommand {
|
||||
|
||||
try(DefaultHttpClient client = client()) {
|
||||
MutableHttpRequest<String> request = HttpRequest
|
||||
.PUT(apiUri("/flows/" + namespace + "/" + id, tenantService.getTenantId(tenantId)), body).contentType(MediaType.APPLICATION_YAML);
|
||||
.PUT(apiUri("/flows/" + namespace + "/" + id ), body).contentType(MediaType.APPLICATION_YAML);
|
||||
|
||||
client.toBlocking().retrieve(
|
||||
this.requestOptions(request),
|
||||
|
||||
@@ -2,7 +2,6 @@ package io.kestra.cli.commands.flows;
|
||||
|
||||
import io.kestra.cli.AbstractApiCommand;
|
||||
import io.kestra.cli.AbstractValidateCommand;
|
||||
import io.kestra.cli.services.TenantIdSelectorService;
|
||||
import io.kestra.core.serializers.YamlParser;
|
||||
import io.micronaut.core.type.Argument;
|
||||
import io.micronaut.http.HttpRequest;
|
||||
@@ -10,7 +9,6 @@ import io.micronaut.http.MediaType;
|
||||
import io.micronaut.http.MutableHttpRequest;
|
||||
import io.micronaut.http.client.exceptions.HttpClientResponseException;
|
||||
import io.micronaut.http.client.netty.DefaultHttpClient;
|
||||
import jakarta.inject.Inject;
|
||||
import jakarta.validation.ConstraintViolationException;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import picocli.CommandLine;
|
||||
@@ -35,12 +33,6 @@ public class FlowUpdatesCommand extends AbstractApiCommand {
|
||||
@CommandLine.Option(names = {"--delete"}, negatable = true, description = "Whether missing should be deleted")
|
||||
public boolean delete = false;
|
||||
|
||||
@CommandLine.Option(names = {"--namespace"}, description = "The parent namespace of the flows, if not set, every namespace are allowed.")
|
||||
public String namespace;
|
||||
|
||||
@Inject
|
||||
private TenantIdSelectorService tenantIdSelectorService;
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
@@ -66,12 +58,8 @@ public class FlowUpdatesCommand extends AbstractApiCommand {
|
||||
body = String.join("\n---\n", flows);
|
||||
}
|
||||
try(DefaultHttpClient client = client()) {
|
||||
String namespaceQuery = "";
|
||||
if (namespace != null) {
|
||||
namespaceQuery = "&namespace=" + namespace;
|
||||
}
|
||||
MutableHttpRequest<String> request = HttpRequest
|
||||
.POST(apiUri("/flows/bulk", tenantIdSelectorService.getTenantId(tenantId)) + "?allowNamespaceChild=true&delete=" + delete + namespaceQuery, body).contentType(MediaType.APPLICATION_YAML);
|
||||
.POST(apiUri("/flows/bulk") + "?delete=" + delete, body).contentType(MediaType.APPLICATION_YAML);
|
||||
|
||||
List<UpdateResult> updated = client.toBlocking().retrieve(
|
||||
this.requestOptions(request),
|
||||
@@ -92,9 +80,4 @@ public class FlowUpdatesCommand extends AbstractApiCommand {
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean loadExternalPlugins() {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
package io.kestra.cli.commands.flows;
|
||||
|
||||
import io.kestra.cli.AbstractValidateCommand;
|
||||
import io.kestra.cli.services.TenantIdSelectorService;
|
||||
import io.kestra.core.models.flows.FlowWithSource;
|
||||
import io.kestra.core.models.flows.Flow;
|
||||
import io.kestra.core.models.validations.ModelValidator;
|
||||
import io.kestra.core.serializers.YamlParser;
|
||||
import io.kestra.core.services.FlowService;
|
||||
import jakarta.inject.Inject;
|
||||
import picocli.CommandLine;
|
||||
@@ -16,6 +16,8 @@ import java.util.List;
|
||||
description = "Validate a flow"
|
||||
)
|
||||
public class FlowValidateCommand extends AbstractValidateCommand {
|
||||
@Inject
|
||||
private YamlParser yamlParser;
|
||||
|
||||
@Inject
|
||||
private ModelValidator modelValidator;
|
||||
@@ -23,28 +25,26 @@ public class FlowValidateCommand extends AbstractValidateCommand {
|
||||
@Inject
|
||||
private FlowService flowService;
|
||||
|
||||
@Inject
|
||||
private TenantIdSelectorService tenantService;
|
||||
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
return this.call(
|
||||
FlowWithSource.class,
|
||||
Flow.class,
|
||||
yamlParser,
|
||||
modelValidator,
|
||||
(Object object) -> {
|
||||
FlowWithSource flow = (FlowWithSource) object;
|
||||
Flow flow = (Flow) object;
|
||||
return flow.getNamespace() + " / " + flow.getId();
|
||||
},
|
||||
(Object object) -> {
|
||||
FlowWithSource flow = (FlowWithSource) object;
|
||||
Flow flow = (Flow) object;
|
||||
List<String> warnings = new ArrayList<>();
|
||||
warnings.addAll(flowService.deprecationPaths(flow).stream().map(deprecation -> deprecation + " is deprecated").toList());
|
||||
warnings.addAll(flowService.warnings(flow, tenantService.getTenantId(tenantId)));
|
||||
warnings.addAll(flowService.warnings(flow));
|
||||
return warnings;
|
||||
},
|
||||
(Object object) -> {
|
||||
FlowWithSource flow = (FlowWithSource) object;
|
||||
return flowService.relocations(flow.sourceOrGenerateIfNull()).stream().map(relocation -> relocation.from() + " is replaced by " + relocation.to()).toList();
|
||||
Flow flow = (Flow) object;
|
||||
return flowService.relocations(flow.generateSource()).stream().map(relocation -> relocation.from() + " is replaced by " + relocation.to()).toList();
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
@@ -3,7 +3,6 @@ package io.kestra.cli.commands.flows.namespaces;
|
||||
import io.kestra.cli.AbstractValidateCommand;
|
||||
import io.kestra.cli.commands.AbstractServiceNamespaceUpdateCommand;
|
||||
import io.kestra.cli.commands.flows.IncludeHelperExpander;
|
||||
import io.kestra.cli.services.TenantIdSelectorService;
|
||||
import io.kestra.core.serializers.YamlParser;
|
||||
import io.micronaut.core.type.Argument;
|
||||
import io.micronaut.http.HttpRequest;
|
||||
@@ -28,13 +27,12 @@ import java.util.List;
|
||||
)
|
||||
@Slf4j
|
||||
public class FlowNamespaceUpdateCommand extends AbstractServiceNamespaceUpdateCommand {
|
||||
@Inject
|
||||
public YamlParser yamlParser;
|
||||
|
||||
@CommandLine.Option(names = {"--override-namespaces"}, negatable = true, description = "Replace namespace of all flows by the one provided")
|
||||
public boolean override = false;
|
||||
|
||||
@Inject
|
||||
private TenantIdSelectorService tenantService;
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
@@ -64,7 +62,7 @@ public class FlowNamespaceUpdateCommand extends AbstractServiceNamespaceUpdateCo
|
||||
}
|
||||
try(DefaultHttpClient client = client()) {
|
||||
MutableHttpRequest<String> request = HttpRequest
|
||||
.POST(apiUri("/flows/", tenantService.getTenantId(tenantId)) + namespace + "?delete=" + delete, body).contentType(MediaType.APPLICATION_YAML);
|
||||
.POST(apiUri("/flows/") + namespace + "?delete=" + delete, body).contentType(MediaType.APPLICATION_YAML);
|
||||
|
||||
List<UpdateResult> updated = client.toBlocking().retrieve(
|
||||
this.requestOptions(request),
|
||||
|
||||
@@ -1,29 +0,0 @@
|
||||
package io.kestra.cli.commands.migrations;
|
||||
|
||||
import io.kestra.cli.AbstractCommand;
|
||||
import io.kestra.cli.App;
|
||||
import io.micronaut.configuration.picocli.PicocliRunner;
|
||||
import lombok.SneakyThrows;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import picocli.CommandLine;
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "migrate",
|
||||
description = "handle migrations",
|
||||
mixinStandardHelpOptions = true,
|
||||
subcommands = {
|
||||
TenantMigrationCommand.class,
|
||||
}
|
||||
)
|
||||
@Slf4j
|
||||
public class MigrationCommand extends AbstractCommand {
|
||||
@SneakyThrows
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
|
||||
PicocliRunner.call(App.class, "migrate", "--help");
|
||||
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
@@ -1,49 +0,0 @@
|
||||
package io.kestra.cli.commands.migrations;
|
||||
|
||||
import io.kestra.cli.AbstractCommand;
|
||||
import io.kestra.core.repositories.TenantMigrationInterface;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import jakarta.inject.Inject;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import picocli.CommandLine;
|
||||
import picocli.CommandLine.Option;
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "default-tenant",
|
||||
description = "migrate every elements from no tenant to the main tenant"
|
||||
)
|
||||
@Slf4j
|
||||
public class TenantMigrationCommand extends AbstractCommand {
|
||||
@Inject
|
||||
private ApplicationContext applicationContext;
|
||||
|
||||
@Option(names = "--tenant-id", description = "tenant identifier")
|
||||
String tenantId;
|
||||
|
||||
@Option(names = "--tenant-name", description = "tenant name")
|
||||
String tenantName;
|
||||
|
||||
@Option(names = "--dry-run", description = "Preview only, do not update")
|
||||
boolean dryRun;
|
||||
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
|
||||
if (dryRun) {
|
||||
System.out.println("🧪 Dry-run mode enabled. No changes will be applied.");
|
||||
}
|
||||
|
||||
TenantMigrationService migrationService = this.applicationContext.getBean(TenantMigrationService.class);
|
||||
try {
|
||||
migrationService.migrateTenant(tenantId, tenantName, dryRun);
|
||||
System.out.println("✅ Tenant migration complete.");
|
||||
} catch (Exception e) {
|
||||
System.err.println("❌ Tenant migration failed: " + e.getMessage());
|
||||
e.printStackTrace();
|
||||
return 1;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,56 +0,0 @@
|
||||
package io.kestra.cli.commands.migrations;
|
||||
|
||||
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
|
||||
|
||||
import com.github.javaparser.utils.Log;
|
||||
import io.kestra.core.exceptions.KestraRuntimeException;
|
||||
import io.kestra.core.models.flows.FlowInterface;
|
||||
import io.kestra.core.queues.QueueException;
|
||||
import io.kestra.core.queues.QueueFactoryInterface;
|
||||
import io.kestra.core.queues.QueueInterface;
|
||||
import io.kestra.core.repositories.FlowRepositoryInterface;
|
||||
import io.kestra.core.repositories.TenantMigrationInterface;
|
||||
import jakarta.inject.Inject;
|
||||
import jakarta.inject.Named;
|
||||
import jakarta.inject.Singleton;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
|
||||
@Singleton
|
||||
@Slf4j
|
||||
public class TenantMigrationService {
|
||||
|
||||
@Inject
|
||||
private TenantMigrationInterface tenantMigrationInterface;
|
||||
|
||||
@Inject
|
||||
private FlowRepositoryInterface flowRepository;
|
||||
|
||||
@Inject
|
||||
@Named(QueueFactoryInterface.FLOW_NAMED)
|
||||
private QueueInterface<FlowInterface> flowQueue;
|
||||
|
||||
public void migrateTenant(String tenantId, String tenantName, boolean dryRun) {
|
||||
if (StringUtils.isNotBlank(tenantId) && !MAIN_TENANT.equals(tenantId)){
|
||||
throw new KestraRuntimeException("Tenant configuration is an enterprise feature. It can only be main in OSS");
|
||||
}
|
||||
|
||||
Log.info("🔁 Starting tenant migration...");
|
||||
tenantMigrationInterface.migrateTenant(MAIN_TENANT, dryRun);
|
||||
migrateQueue(dryRun);
|
||||
}
|
||||
|
||||
protected void migrateQueue(boolean dryRun) {
|
||||
if (!dryRun){
|
||||
log.info("🔁 Starting restoring queue...");
|
||||
flowRepository.findAllWithSourceForAllTenants().forEach(flow -> {
|
||||
try {
|
||||
flowQueue.emit(flow);
|
||||
} catch (QueueException e) {
|
||||
log.warn("Unable to send the flow {} to the queue", flow.uid(), e);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@@ -2,14 +2,12 @@ package io.kestra.cli.commands.namespaces.files;
|
||||
|
||||
import io.kestra.cli.AbstractApiCommand;
|
||||
import io.kestra.cli.AbstractValidateCommand;
|
||||
import io.kestra.cli.services.TenantIdSelectorService;
|
||||
import io.kestra.core.utils.KestraIgnore;
|
||||
import io.micronaut.http.HttpRequest;
|
||||
import io.micronaut.http.MediaType;
|
||||
import io.micronaut.http.client.exceptions.HttpClientResponseException;
|
||||
import io.micronaut.http.client.multipart.MultipartBody;
|
||||
import io.micronaut.http.client.netty.DefaultHttpClient;
|
||||
import jakarta.inject.Inject;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import picocli.CommandLine;
|
||||
|
||||
@@ -36,9 +34,6 @@ public class NamespaceFilesUpdateCommand extends AbstractApiCommand {
|
||||
@CommandLine.Option(names = {"--delete"}, negatable = true, description = "Whether missing should be deleted")
|
||||
public boolean delete = false;
|
||||
|
||||
@Inject
|
||||
private TenantIdSelectorService tenantService;
|
||||
|
||||
private static final String KESTRA_IGNORE_FILE = ".kestraignore";
|
||||
|
||||
@Override
|
||||
@@ -49,7 +44,7 @@ public class NamespaceFilesUpdateCommand extends AbstractApiCommand {
|
||||
|
||||
try (var files = Files.walk(from); DefaultHttpClient client = client()) {
|
||||
if (delete) {
|
||||
client.toBlocking().exchange(this.requestOptions(HttpRequest.DELETE(apiUri("/namespaces/", tenantService.getTenantId(tenantId)) + namespace + "/files?path=" + to, null)));
|
||||
client.toBlocking().exchange(this.requestOptions(HttpRequest.DELETE(apiUri("/namespaces/") + namespace + "/files?path=" + to, null)));
|
||||
}
|
||||
|
||||
KestraIgnore kestraIgnore = new KestraIgnore(from);
|
||||
@@ -67,7 +62,7 @@ public class NamespaceFilesUpdateCommand extends AbstractApiCommand {
|
||||
client.toBlocking().exchange(
|
||||
this.requestOptions(
|
||||
HttpRequest.POST(
|
||||
apiUri("/namespaces/", tenantService.getTenantId(tenantId)) + namespace + "/files?path=" + destination,
|
||||
apiUri("/namespaces/") + namespace + "/files?path=" + destination,
|
||||
body
|
||||
).contentType(MediaType.MULTIPART_FORM_DATA)
|
||||
)
|
||||
|
||||
@@ -3,13 +3,11 @@ package io.kestra.cli.commands.namespaces.kv;
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import io.kestra.cli.AbstractApiCommand;
|
||||
import io.kestra.cli.services.TenantIdSelectorService;
|
||||
import io.kestra.core.serializers.JacksonMapper;
|
||||
import io.micronaut.http.HttpRequest;
|
||||
import io.micronaut.http.MediaType;
|
||||
import io.micronaut.http.MutableHttpRequest;
|
||||
import io.micronaut.http.client.netty.DefaultHttpClient;
|
||||
import jakarta.inject.Inject;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import picocli.CommandLine;
|
||||
import picocli.CommandLine.Option;
|
||||
@@ -44,9 +42,6 @@ public class KvUpdateCommand extends AbstractApiCommand {
|
||||
@Option(names = {"-f", "--file-value"}, description = "The file from which to read the value to set. If this is provided, it will take precedence over any specified value.")
|
||||
public Path fileValue;
|
||||
|
||||
@Inject
|
||||
private TenantIdSelectorService tenantService;
|
||||
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
@@ -61,7 +56,7 @@ public class KvUpdateCommand extends AbstractApiCommand {

Duration ttl = expiration == null ? null : Duration.parse(expiration);
MutableHttpRequest<String> request = HttpRequest
.PUT(apiUri("/namespaces/", tenantService.getTenantId(tenantId)) + namespace + "/kv/" + key, value)
.PUT(apiUri("/namespaces/") + namespace + "/kv/" + key, value)
.contentType(MediaType.APPLICATION_JSON_TYPE);

if (ttl != null) {

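// The expiration value above is handed to java.time.Duration.parse, which expects an
// ISO-8601 duration string. A minimal sketch of that format; KvTtlSketch and the sample
// values are illustrative only, not taken from the Kestra CLI:
class KvTtlSketch {
    public static void main(String[] args) {
        java.time.Duration fiveMinutes = java.time.Duration.parse("PT5M"); // 5 minutes
        java.time.Duration twoDays = java.time.Duration.parse("P2D");      // 2 days
        System.out.println(fiveMinutes.toSeconds()); // 300
        System.out.println(twoDays.toHours());       // 48
    }
}
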
@@ -1,37 +1,31 @@
|
||||
package io.kestra.cli.commands.plugins;
|
||||
|
||||
import io.kestra.cli.AbstractCommand;
|
||||
import io.kestra.cli.App;
|
||||
import io.micronaut.configuration.picocli.PicocliRunner;
|
||||
import lombok.SneakyThrows;
|
||||
import picocli.CommandLine.Command;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import io.kestra.cli.AbstractCommand;
|
||||
import io.kestra.cli.App;
|
||||
import picocli.CommandLine;
|
||||
|
||||
@Command(
|
||||
@CommandLine.Command(
|
||||
name = "plugins",
|
||||
description = "Manage plugins",
|
||||
mixinStandardHelpOptions = true,
|
||||
subcommands = {
|
||||
PluginInstallCommand.class,
|
||||
PluginUninstallCommand.class,
|
||||
PluginListCommand.class,
|
||||
PluginDocCommand.class,
|
||||
PluginSearchCommand.class
|
||||
PluginDocCommand.class
|
||||
}
|
||||
)
|
||||
@Slf4j
|
||||
public class PluginCommand extends AbstractCommand {
|
||||
|
||||
@SneakyThrows
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
|
||||
PicocliRunner.call(App.class, "plugins", "--help");
|
||||
PicocliRunner.call(App.class, "plugins", "--help");
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean loadExternalPlugins() {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,7 +3,6 @@ package io.kestra.cli.commands.plugins;
|
||||
import com.google.common.io.Files;
|
||||
import io.kestra.cli.AbstractCommand;
|
||||
import io.kestra.core.docs.DocumentationGenerator;
|
||||
import io.kestra.core.plugins.PluginRegistry;
|
||||
import io.kestra.core.plugins.RegisteredPlugin;
|
||||
import io.kestra.core.serializers.JacksonMapper;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
@@ -18,8 +17,6 @@ import java.nio.file.Paths;
|
||||
import java.util.Base64;
|
||||
import java.util.List;
|
||||
|
||||
import static io.kestra.core.models.Plugin.isDeprecated;
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "doc",
|
||||
description = "Generate documentation for all plugins currently installed"
|
||||
@@ -40,23 +37,13 @@ public class PluginDocCommand extends AbstractCommand {
|
||||
@CommandLine.Option(names = {"--schema"}, description = "Also write JSON Schema for each task")
|
||||
private boolean schema = false;
|
||||
|
||||
@CommandLine.Option(names = {"--skip-deprecated"},description = "Skip deprecated plugins when generating documentations")
|
||||
private boolean skipDeprecated = false;
|
||||
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
DocumentationGenerator documentationGenerator = applicationContext.getBean(DocumentationGenerator.class);
|
||||
|
||||
PluginRegistry registry = pluginRegistryProvider.get();
|
||||
List<RegisteredPlugin> plugins = core ? registry.plugins() : registry.externalPlugins();
|
||||
if (skipDeprecated) {
|
||||
plugins = plugins.stream()
|
||||
.filter(plugin -> !isDeprecated(plugin.getClass()))
|
||||
.toList();
|
||||
}
|
||||
List<RegisteredPlugin> plugins = core ? pluginRegistry().plugins() : pluginRegistry().externalPlugins();
|
||||
boolean hasFailures = false;
|
||||
|
||||
for (RegisteredPlugin registeredPlugin : plugins) {
|
||||
try {
|
||||
documentationGenerator
|
||||
@@ -113,10 +100,4 @@ public class PluginDocCommand extends AbstractCommand {
|
||||
|
||||
return hasFailures ? 1 : 0;
|
||||
}
|
||||
|
||||
/** {@inheritDoc} **/
|
||||
@Override
|
||||
protected boolean isPluginManagerEnabled() {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,133 +1,98 @@
|
||||
package io.kestra.cli.commands.plugins;
|
||||
|
||||
import io.kestra.core.contexts.MavenPluginRepositoryConfig;
|
||||
import io.kestra.core.exceptions.KestraRuntimeException;
|
||||
import io.kestra.core.plugins.LocalPluginManager;
|
||||
import io.kestra.core.plugins.MavenPluginDownloader;
|
||||
import io.kestra.core.plugins.PluginArtifact;
|
||||
import io.kestra.core.plugins.PluginCatalogService;
|
||||
import io.kestra.core.plugins.PluginManager;
|
||||
import io.micronaut.http.client.HttpClient;
|
||||
import io.micronaut.http.client.annotation.Client;
|
||||
import io.micronaut.http.uri.UriBuilder;
|
||||
import org.apache.commons.io.FilenameUtils;
|
||||
import io.kestra.cli.AbstractCommand;
|
||||
import io.kestra.cli.plugins.PluginDownloader;
|
||||
import io.kestra.cli.plugins.RepositoryConfig;
|
||||
import io.kestra.core.utils.IdUtils;
|
||||
import jakarta.inject.Provider;
|
||||
import org.apache.http.client.utils.URIBuilder;
|
||||
import picocli.CommandLine;
|
||||
|
||||
import java.net.URI;
|
||||
import java.net.URL;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Paths;
|
||||
import java.nio.file.StandardCopyOption;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
||||
import jakarta.inject.Inject;
|
||||
import picocli.CommandLine.Command;
|
||||
import picocli.CommandLine.Parameters;
|
||||
import picocli.CommandLine.Option;
|
||||
import picocli.CommandLine.Spec;
|
||||
|
||||
@Command(
|
||||
import static io.kestra.core.utils.Rethrow.throwConsumer;
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "install",
|
||||
description = "Install plugins"
|
||||
)
|
||||
public class PluginInstallCommand extends AbstractCommand {
|
||||
|
||||
@Option(names = {"--locally"}, description = "Specifies if plugins must be installed locally. If set to false the installation depends on your Kestra configuration.")
|
||||
boolean locally = true;
|
||||
|
||||
@Option(names = {"--all"}, description = "Install all available plugins")
|
||||
boolean all = false;
|
||||
|
||||
@Parameters(index = "0..*", description = "Plugins to install. Represented as Maven artifact coordinates (i.e., <groupId>:<artifactId>:(<version>|LATEST)")
|
||||
@CommandLine.Parameters(index = "0..*", description = "Plugins to install. Represented as Maven artifact coordinates.")
|
||||
List<String> dependencies = new ArrayList<>();
|
||||
|
||||
@Option(names = {"--repositories"}, description = "URL to additional Maven repositories")
|
||||
@CommandLine.Option(names = {"--repositories"}, description = "URL to additional Maven repositories")
|
||||
private URI[] repositories;
|
||||
|
||||
@Spec
|
||||
@CommandLine.Spec
|
||||
CommandLine.Model.CommandSpec spec;
|
||||
|
||||
@Inject
|
||||
Provider<MavenPluginDownloader> mavenPluginRepositoryProvider;
|
||||
|
||||
@Inject
|
||||
Provider<PluginCatalogService> pluginCatalogService;
|
||||
private PluginDownloader pluginDownloader;
|
||||
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
|
||||
if (this.locally && this.pluginsPath == null) {
|
||||
if (this.pluginsPath == null) {
|
||||
throw new CommandLine.ParameterException(this.spec.commandLine(), "Missing required options '--plugins' " +
|
||||
"or environment variable 'KESTRA_PLUGINS_PATH"
|
||||
);
|
||||
}
|
||||
|
||||
List<MavenPluginRepositoryConfig> repositoryConfigs = List.of();
|
||||
if (!pluginsPath.toFile().exists()) {
|
||||
if (!pluginsPath.toFile().mkdir()) {
|
||||
throw new RuntimeException("Cannot create directory: " + pluginsPath.toFile().getAbsolutePath());
|
||||
}
|
||||
}
|
||||
|
||||
if (repositories != null) {
repositoryConfigs = Arrays.stream(repositories)
.map(uri -> {
MavenPluginRepositoryConfig.MavenPluginRepositoryConfigBuilder builder = MavenPluginRepositoryConfig
.builder()
Arrays.stream(repositories)
.forEach(throwConsumer(s -> {
URIBuilder uriBuilder = new URIBuilder(s);

RepositoryConfig.RepositoryConfigBuilder builder = RepositoryConfig.builder()
.id(IdUtils.create());

String userInfo = uri.getUserInfo();
if (userInfo != null) {
String[] userInfoParts = userInfo.split(":");
builder = builder.basicAuth(new MavenPluginRepositoryConfig.BasicAuth(
userInfoParts[0],
userInfoParts[1]
if (uriBuilder.getUserInfo() != null) {
int index = uriBuilder.getUserInfo().indexOf(":");

builder.basicAuth(new RepositoryConfig.BasicAuth(
uriBuilder.getUserInfo().substring(0, index),
uriBuilder.getUserInfo().substring(index + 1)
));

uriBuilder.setUserInfo(null);
}
builder.url(UriBuilder.of(uri).userInfo(null).build().toString());
return builder.build();
}).toList();

builder.url(uriBuilder.build().toString());

pluginDownloader.addRepository(builder.build());
}));
}

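// Both variants above pull optional basic-auth credentials out of the repository URI's
// user-info part (user:password@host) before stripping it from the URL. A minimal
// standalone sketch of that extraction, assuming only the JDK; RepositoryAuthSketch,
// its BasicAuth record and the sample URI are hypothetical, not the Kestra classes:
class RepositoryAuthSketch {
    record BasicAuth(String username, String password) {}

    static BasicAuth basicAuthFrom(java.net.URI uri) {
        String userInfo = uri.getUserInfo();
        if (userInfo == null) {
            return null;
        }
        // split "user:password" at the first colon
        int index = userInfo.indexOf(':');
        return new BasicAuth(userInfo.substring(0, index), userInfo.substring(index + 1));
    }

    public static void main(String[] args) {
        java.net.URI uri = java.net.URI.create("https://user:secret@repo.example.org/maven");
        System.out.println(basicAuthFrom(uri)); // BasicAuth[username=user, password=secret]
    }
}
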
if (all) {
|
||||
PluginCatalogService service = pluginCatalogService.get();
|
||||
dependencies = service.get().stream().map(Objects::toString).toList();
|
||||
List<URL> resolveUrl = pluginDownloader.resolve(dependencies);
|
||||
stdOut("Resolved Plugin(s) with {0}", resolveUrl);
|
||||
|
||||
for (URL url: resolveUrl) {
|
||||
Files.copy(
|
||||
Paths.get(url.toURI()),
|
||||
Paths.get(pluginsPath.toString(), FilenameUtils.getName(url.toString())),
|
||||
StandardCopyOption.REPLACE_EXISTING
|
||||
);
|
||||
}
|
||||
|
||||
if (dependencies.isEmpty()) {
|
||||
stdErr("Error: No plugin to install.");
|
||||
return CommandLine.ExitCode.OK;
|
||||
}
|
||||
stdOut("Successfully installed plugins {0} into {1}", dependencies, pluginsPath);
|
||||
|
||||
final List<PluginArtifact> pluginArtifacts;
|
||||
try {
|
||||
pluginArtifacts = dependencies.stream().map(PluginArtifact::fromCoordinates).toList();
|
||||
} catch (IllegalArgumentException e) {
|
||||
stdErr(e.getMessage());
|
||||
return CommandLine.ExitCode.USAGE;
|
||||
}
|
||||
|
||||
try (final PluginManager pluginManager = getPluginManager()) {
|
||||
|
||||
List<PluginArtifact> installed;
|
||||
if (all) {
|
||||
installed = new ArrayList<>(pluginArtifacts.size());
|
||||
for (PluginArtifact pluginArtifact : pluginArtifacts) {
|
||||
try {
|
||||
installed.add(pluginManager.install(pluginArtifact, repositoryConfigs, false, pluginsPath));
|
||||
} catch (KestraRuntimeException e) {
|
||||
String cause = e.getCause() != null ? e.getCause().getMessage() : e.getMessage();
|
||||
stdErr("Failed to install plugin {0}. Cause: {1}", pluginArtifact, cause);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
installed = pluginManager.install(pluginArtifacts, repositoryConfigs, false, pluginsPath);
|
||||
}
|
||||
|
||||
List<URI> uris = installed.stream().map(PluginArtifact::uri).toList();
|
||||
stdOut("Successfully installed plugins {0} into {1}", dependencies, uris);
|
||||
return CommandLine.ExitCode.OK;
|
||||
}
|
||||
}
|
||||
|
||||
private PluginManager getPluginManager() {
|
||||
return locally ? new LocalPluginManager(mavenPluginRepositoryProvider.get()) : this.pluginManagerProvider.get();
|
||||
return 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
||||
@@ -1,31 +1,22 @@
|
||||
package io.kestra.cli.commands.plugins;
|
||||
|
||||
import io.kestra.cli.AbstractCommand;
|
||||
import io.kestra.core.plugins.PluginRegistry;
|
||||
import io.kestra.core.plugins.RegisteredPlugin;
|
||||
import jakarta.inject.Inject;
|
||||
import jakarta.inject.Provider;
|
||||
import picocli.CommandLine;
|
||||
import picocli.CommandLine.Command;
|
||||
import picocli.CommandLine.Option;
|
||||
import picocli.CommandLine.Spec;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
@Command(
|
||||
@CommandLine.Command(
|
||||
name = "list",
|
||||
description = "List all plugins already installed"
|
||||
)
|
||||
public class PluginListCommand extends AbstractCommand {
|
||||
@Spec
|
||||
@CommandLine.Spec
|
||||
CommandLine.Model.CommandSpec spec;
|
||||
|
||||
@Option(names = {"--core"}, description = "Also write core tasks plugins")
|
||||
@CommandLine.Option(names = {"--core"}, description = "Also write core tasks plugins")
|
||||
private boolean core = false;
|
||||
|
||||
@Inject
|
||||
private PluginRegistry registry;
|
||||
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
@@ -36,8 +27,7 @@ public class PluginListCommand extends AbstractCommand {
|
||||
);
|
||||
}
|
||||
|
||||
List<RegisteredPlugin> plugins = core ? registry.plugins() : registry.externalPlugins();
|
||||
|
||||
List<RegisteredPlugin> plugins = core ? pluginRegistry().plugins() : pluginRegistry().externalPlugins();
|
||||
plugins.forEach(registeredPlugin -> stdOut(registeredPlugin.toString()));
|
||||
|
||||
return 0;
|
||||
|
||||
@@ -1,149 +0,0 @@
|
||||
package io.kestra.cli.commands.plugins;
|
||||
|
||||
import io.kestra.cli.AbstractCommand;
|
||||
import io.micronaut.core.type.Argument;
|
||||
import io.micronaut.http.HttpRequest;
|
||||
import io.micronaut.http.client.HttpClient;
|
||||
import io.micronaut.http.client.annotation.Client;
|
||||
import jakarta.inject.Inject;
|
||||
import picocli.CommandLine.Command;
|
||||
import picocli.CommandLine.Parameters;
|
||||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
@Command(
|
||||
name = "search",
|
||||
description = "Search for available Kestra plugins"
|
||||
)
|
||||
public class PluginSearchCommand extends AbstractCommand {
|
||||
@Inject
|
||||
@Client("api")
|
||||
private HttpClient httpClient;
|
||||
|
||||
private static final ObjectMapper MAPPER = new ObjectMapper();
|
||||
private static final char SPACE = ' ';
|
||||
|
||||
@Parameters(index = "0", description = "Search term (optional)", defaultValue = "")
|
||||
private String searchTerm;
|
||||
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
|
||||
try {
|
||||
JsonNode root = fetchPlugins();
|
||||
List<PluginInfo> plugins = findPlugins(root);
|
||||
printResults(plugins);
|
||||
return 0;
|
||||
} catch (Exception e) {
|
||||
stdOut("Error processing plugins: {0}", e.getMessage());
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
|
||||
private JsonNode fetchPlugins() throws Exception {
|
||||
String response = httpClient.toBlocking()
|
||||
.retrieve(
|
||||
HttpRequest.GET("/v1/plugins")
|
||||
.header("Accept", "application/json")
|
||||
);
|
||||
return MAPPER.readTree(response);
|
||||
}
|
||||
|
||||
private List<PluginInfo> findPlugins(JsonNode root) {
|
||||
String searchTermLower = searchTerm.toLowerCase();
|
||||
List<PluginInfo> plugins = new ArrayList<>();
|
||||
|
||||
for (JsonNode plugin : root) {
|
||||
if (matchesSearch(plugin, searchTermLower)) {
|
||||
plugins.add(new PluginInfo(
|
||||
plugin.path("name").asText(),
|
||||
plugin.path("title").asText(),
|
||||
plugin.path("group").asText(),
|
||||
plugin.path("version").asText("")
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
plugins.sort((p1, p2) -> p1.name.compareToIgnoreCase(p2.name));
|
||||
return plugins;
|
||||
}
|
||||
|
||||
private boolean matchesSearch(JsonNode plugin, String term) {
|
||||
if (term.isEmpty()) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return plugin.path("name").asText().toLowerCase().contains(term) ||
|
||||
plugin.path("title").asText().toLowerCase().contains(term) ||
|
||||
plugin.path("group").asText().toLowerCase().contains(term);
|
||||
}
|
||||
|
||||
private void printResults(List<PluginInfo> plugins) {
|
||||
if (plugins.isEmpty()) {
|
||||
stdOut("No plugins found{0}",
|
||||
searchTerm.isEmpty() ? "" : " matching '" + searchTerm + "'");
|
||||
return;
|
||||
}
|
||||
|
||||
stdOut("\nFound {0} plugins{1}",
|
||||
plugins.size(),
|
||||
searchTerm.isEmpty() ? "" : " matching '" + searchTerm + "'"
|
||||
);
|
||||
|
||||
printPluginsTable(plugins);
|
||||
}
|
||||
|
||||
private void printPluginsTable(List<PluginInfo> plugins) {
|
||||
int maxName = 4, maxTitle = 5, maxGroup = 5;
|
||||
for (PluginInfo plugin : plugins) {
|
||||
maxName = Math.max(maxName, plugin.name.length());
|
||||
maxTitle = Math.max(maxTitle, plugin.title.length());
|
||||
maxGroup = Math.max(maxGroup, plugin.group.length());
|
||||
}
|
||||
|
||||
StringBuilder namePad = new StringBuilder(maxName);
|
||||
StringBuilder titlePad = new StringBuilder(maxTitle);
|
||||
StringBuilder groupPad = new StringBuilder(maxGroup);
|
||||
|
||||
stdOut("");
|
||||
printRow(namePad, titlePad, groupPad, "NAME", "TITLE", "GROUP", "VERSION",
|
||||
maxName, maxTitle, maxGroup);
|
||||
|
||||
for (PluginInfo plugin : plugins) {
|
||||
printRow(namePad, titlePad, groupPad, plugin.name, plugin.title, plugin.group, plugin.version,
|
||||
maxName, maxTitle, maxGroup);
|
||||
}
|
||||
stdOut("");
|
||||
}
|
||||
|
||||
private void printRow(StringBuilder namePad, StringBuilder titlePad, StringBuilder groupPad,
|
||||
String name, String title, String group, String version,
|
||||
int maxName, int maxTitle, int maxGroup) {
|
||||
stdOut("{0} {1} {2} {3}",
|
||||
pad(namePad, name, maxName),
|
||||
pad(titlePad, title, maxTitle),
|
||||
pad(groupPad, group, maxGroup),
|
||||
version
|
||||
);
|
||||
}
|
||||
|
||||
private String pad(StringBuilder sb, String str, int length) {
|
||||
sb.setLength(0);
|
||||
sb.append(str);
|
||||
while (sb.length() < length) {
|
||||
sb.append(SPACE);
|
||||
}
|
||||
return sb.toString();
|
||||
}
|
||||
|
||||
private record PluginInfo(String name, String title, String group, String version) {}
|
||||
|
||||
@Override
|
||||
protected boolean loadExternalPlugins() {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
@@ -1,69 +0,0 @@
package io.kestra.cli.commands.plugins;

import io.kestra.cli.AbstractCommand;
import io.kestra.core.plugins.LocalPluginManager;
import io.kestra.core.plugins.MavenPluginDownloader;
import io.kestra.core.plugins.PluginArtifact;
import io.kestra.core.plugins.PluginManager;
import jakarta.inject.Inject;
import jakarta.inject.Provider;
import picocli.CommandLine;
import picocli.CommandLine.Parameters;
import picocli.CommandLine.Spec;

import java.net.URI;
import java.util.ArrayList;
import java.util.List;

@CommandLine.Command(
    name = "uninstall",
    description = "Uninstall plugins"
)
public class PluginUninstallCommand extends AbstractCommand {
    @Parameters(index = "0..*", description = "The plugins to uninstall, represented as Maven artifact coordinates (i.e., <groupId>:<artifactId>:(<version>|LATEST))")
    List<String> dependencies = new ArrayList<>();

    @Spec
    CommandLine.Model.CommandSpec spec;

    @Inject
    Provider<MavenPluginDownloader> mavenPluginRepositoryProvider;

    @Override
    public Integer call() throws Exception {
        super.call();

        List<PluginArtifact> pluginArtifacts;
        try {
            pluginArtifacts = dependencies.stream().map(PluginArtifact::fromCoordinates).toList();
        } catch (IllegalArgumentException e) {
            stdErr(e.getMessage());
            return CommandLine.ExitCode.USAGE;
        }

        final PluginManager pluginManager;

        // If a PLUGIN_PATH is provided, then use the LocalPluginManager
        if (pluginsPath != null) {
            pluginManager = new LocalPluginManager(mavenPluginRepositoryProvider.get());
        } else {
            // Otherwise, we delegate to the configured plugin-manager.
            pluginManager = this.pluginManagerProvider.get();
        }

        List<PluginArtifact> uninstalled = pluginManager.uninstall(
            pluginArtifacts,
            false,
            pluginsPath
        );

        List<URI> uris = uninstalled.stream().map(PluginArtifact::uri).toList();
        stdOut("Successfully uninstalled plugins {0} from {1}", dependencies, uris);
        return CommandLine.ExitCode.OK;
    }

    @Override
    protected boolean loadExternalPlugins() {
        return false;
    }
}
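For reference, a hedged sketch of the coordinate format this removed command accepted; the artifact id below is a hypothetical example, only the <groupId>:<artifactId>:(<version>|LATEST) shape comes from the parameter description above:

    // PluginArtifact.fromCoordinates is the parser used in call() above; the coordinate
    // string is an illustrative assumption, not taken from this diff.
    PluginArtifact artifact = PluginArtifact.fromCoordinates("io.kestra.plugin:plugin-notifications:LATEST");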
@@ -1,21 +1,13 @@
package io.kestra.cli.commands.servers;

import io.kestra.cli.AbstractCommand;
import io.kestra.core.contexts.KestraContext;
import jakarta.annotation.PostConstruct;
import picocli.CommandLine;

abstract public class AbstractServerCommand extends AbstractCommand implements ServerCommandInterface {
    @CommandLine.Option(names = {"--port"}, description = "The port to bind")
    Integer serverPort;

    @Override
    public Integer call() throws Exception {
        this.shutdownHook(true, () -> KestraContext.getContext().shutdown());
        return super.call();
    }

    protected static int defaultWorkerThread() {
        return Runtime.getRuntime().availableProcessors() * 8;
        return Runtime.getRuntime().availableProcessors() * 4;
    }
}
@@ -1,6 +1,7 @@
|
||||
package io.kestra.cli.commands.servers;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import io.kestra.core.contexts.KestraContext;
|
||||
import io.kestra.core.models.ServerType;
|
||||
import io.kestra.core.runners.ExecutorInterface;
|
||||
import io.kestra.core.services.SkipExecutionService;
|
||||
@@ -8,6 +9,7 @@ import io.kestra.core.services.StartExecutorService;
|
||||
import io.kestra.core.utils.Await;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import jakarta.inject.Inject;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import picocli.CommandLine;
|
||||
|
||||
import java.util.Collections;
|
||||
@@ -18,6 +20,7 @@ import java.util.Map;
|
||||
name = "executor",
|
||||
description = "Start the Kestra executor"
|
||||
)
|
||||
@Slf4j
|
||||
public class ExecutorCommand extends AbstractServerCommand {
|
||||
@Inject
|
||||
private ApplicationContext applicationContext;
|
||||
@@ -63,10 +66,13 @@ public class ExecutorCommand extends AbstractServerCommand {
|
||||
this.startExecutorService.applyOptions(startExecutors, notStartExecutors);
|
||||
|
||||
super.call();
|
||||
this.shutdownHook(() -> KestraContext.getContext().shutdown());
|
||||
|
||||
ExecutorInterface executorService = applicationContext.getBean(ExecutorInterface.class);
|
||||
executorService.run();
|
||||
|
||||
log.info("Executor started");
|
||||
|
||||
Await.until(() -> !this.applicationContext.isRunning());
|
||||
|
||||
return 0;
|
||||
|
||||
@@ -1,11 +1,13 @@
|
||||
package io.kestra.cli.commands.servers;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import io.kestra.core.contexts.KestraContext;
|
||||
import io.kestra.core.models.ServerType;
|
||||
import io.kestra.core.runners.IndexerInterface;
|
||||
import io.kestra.core.utils.Await;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import jakarta.inject.Inject;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import picocli.CommandLine;
|
||||
|
||||
import java.util.Map;
|
||||
@@ -14,6 +16,7 @@ import java.util.Map;
|
||||
name = "indexer",
|
||||
description = "Start the Kestra indexer"
|
||||
)
|
||||
@Slf4j
|
||||
public class IndexerCommand extends AbstractServerCommand {
|
||||
@Inject
|
||||
private ApplicationContext applicationContext;
|
||||
@@ -28,10 +31,13 @@ public class IndexerCommand extends AbstractServerCommand {
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
this.shutdownHook(() -> KestraContext.getContext().shutdown());
|
||||
|
||||
IndexerInterface indexer = applicationContext.getBean(IndexerInterface.class);
|
||||
indexer.run();
|
||||
|
||||
log.info("Indexer started");
|
||||
|
||||
Await.until(() -> !this.applicationContext.isRunning());
|
||||
|
||||
return 0;
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
package io.kestra.cli.commands.servers;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import io.kestra.core.contexts.KestraContext;
|
||||
import io.kestra.core.models.ServerType;
|
||||
import io.kestra.core.schedulers.AbstractScheduler;
|
||||
import io.kestra.core.utils.Await;
|
||||
@@ -30,10 +31,12 @@ public class SchedulerCommand extends AbstractServerCommand {
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
this.shutdownHook(() -> KestraContext.getContext().shutdown());
|
||||
|
||||
AbstractScheduler scheduler = applicationContext.getBean(AbstractScheduler.class);
|
||||
scheduler.run();
|
||||
|
||||
log.info("Scheduler started");
|
||||
Await.until(() -> !this.applicationContext.isRunning());
|
||||
|
||||
return 0;
|
||||
|
||||
@@ -2,7 +2,6 @@ package io.kestra.cli.commands.servers;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import io.kestra.cli.services.FileChangedEventListener;
|
||||
import io.kestra.cli.services.TenantIdSelectorService;
|
||||
import io.kestra.core.contexts.KestraContext;
|
||||
import io.kestra.core.models.ServerType;
|
||||
import io.kestra.core.repositories.LocalFlowRepositoryLoader;
|
||||
@@ -13,6 +12,7 @@ import io.kestra.core.utils.Await;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import jakarta.annotation.Nullable;
|
||||
import jakarta.inject.Inject;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import picocli.CommandLine;
|
||||
|
||||
import java.io.File;
|
||||
@@ -25,6 +25,7 @@ import java.util.Map;
|
||||
name = "standalone",
|
||||
description = "Start the standalone all-in-one server"
|
||||
)
|
||||
@Slf4j
|
||||
public class StandAloneCommand extends AbstractServerCommand {
|
||||
@CommandLine.Spec
|
||||
CommandLine.Model.CommandSpec spec;
|
||||
@@ -45,10 +46,7 @@ public class StandAloneCommand extends AbstractServerCommand {
|
||||
@CommandLine.Option(names = {"-f", "--flow-path"}, description = "the flow path containing flow to inject at startup (when running with a memory flow repository)")
|
||||
private File flowPath;
|
||||
|
||||
@CommandLine.Option(names = "--tenant", description = "Tenant identifier, Required to load flows from path with the enterprise edition")
|
||||
private String tenantId;
|
||||
|
||||
@CommandLine.Option(names = {"--worker-thread"}, description = "the number of worker threads, defaults to eight times the number of available processors. Set it to 0 to avoid starting a worker.")
|
||||
@CommandLine.Option(names = {"--worker-thread"}, description = "the number of worker threads, defaults to four times the number of available processors. Set it to 0 to avoid starting a worker.")
|
||||
private int workerThread = defaultWorkerThread();
|
||||
|
||||
@CommandLine.Option(names = {"--skip-executions"}, split=",", description = "a list of execution identifiers to skip, separated by a coma; for troubleshooting purpose only")
|
||||
@@ -93,17 +91,16 @@ public class StandAloneCommand extends AbstractServerCommand {
|
||||
this.skipExecutionService.setSkipFlows(skipFlows);
|
||||
this.skipExecutionService.setSkipNamespaces(skipNamespaces);
|
||||
this.skipExecutionService.setSkipTenants(skipTenants);
|
||||
|
||||
this.startExecutorService.applyOptions(startExecutors, notStartExecutors);
|
||||
|
||||
KestraContext.getContext().injectWorkerConfigs(workerThread, null);
|
||||
|
||||
super.call();
|
||||
this.shutdownHook(() -> KestraContext.getContext().shutdown());
|
||||
|
||||
if (flowPath != null) {
|
||||
try {
|
||||
LocalFlowRepositoryLoader localFlowRepositoryLoader = applicationContext.getBean(LocalFlowRepositoryLoader.class);
|
||||
TenantIdSelectorService tenantIdSelectorService = applicationContext.getBean(TenantIdSelectorService.class);
|
||||
localFlowRepositoryLoader.load(tenantIdSelectorService.getTenantId(this.tenantId), this.flowPath);
|
||||
localFlowRepositoryLoader.load(this.flowPath);
|
||||
} catch (IOException e) {
|
||||
throw new CommandLine.ParameterException(this.spec.commandLine(), "Invalid flow path", e);
|
||||
}
|
||||
@@ -127,6 +124,8 @@ public class StandAloneCommand extends AbstractServerCommand {
|
||||
fileWatcher.startListeningFromConfig();
|
||||
}
|
||||
|
||||
this.shutdownHook(standAloneRunner::close);
|
||||
|
||||
Await.until(() -> !this.applicationContext.isRunning());
|
||||
|
||||
return 0;
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
package io.kestra.cli.commands.servers;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import io.kestra.core.contexts.KestraContext;
|
||||
import io.kestra.core.models.ServerType;
|
||||
import io.kestra.core.runners.ExecutorInterface;
|
||||
import io.kestra.core.runners.IndexerInterface;
|
||||
import io.kestra.core.utils.Await;
|
||||
import io.kestra.core.utils.ExecutorsUtils;
|
||||
@@ -55,11 +57,20 @@ public class WebServerCommand extends AbstractServerCommand {
|
||||
log.info("Starting an embedded indexer, this can be disabled by using `--no-indexer`.");
|
||||
poolExecutor = executorsUtils.cachedThreadPool("webserver-indexer");
|
||||
poolExecutor.execute(applicationContext.getBean(IndexerInterface.class));
|
||||
shutdownHook(false, () -> poolExecutor.shutdown());
|
||||
}
|
||||
|
||||
log.info("Webserver started");
|
||||
this.shutdownHook(() -> {
|
||||
this.close();
|
||||
KestraContext.getContext().shutdown();
|
||||
});
|
||||
Await.until(() -> !this.applicationContext.isRunning());
|
||||
return 0;
|
||||
}
|
||||
|
||||
private void close() {
|
||||
if (this.poolExecutor != null) {
|
||||
this.poolExecutor.shutdown();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,6 +7,7 @@ import io.kestra.core.runners.Worker;
|
||||
import io.kestra.core.utils.Await;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import jakarta.inject.Inject;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import picocli.CommandLine;
|
||||
import picocli.CommandLine.Option;
|
||||
|
||||
@@ -17,12 +18,13 @@ import java.util.UUID;
|
||||
name = "worker",
|
||||
description = "Start the Kestra worker"
|
||||
)
|
||||
@Slf4j
|
||||
public class WorkerCommand extends AbstractServerCommand {
|
||||
|
||||
@Inject
|
||||
private ApplicationContext applicationContext;
|
||||
|
||||
@Option(names = {"-t", "--thread"}, description = "The max number of worker threads, defaults to eight times the number of available processors")
|
||||
@Option(names = {"-t", "--thread"}, description = "The max number of worker threads, defaults to four times the number of available processors")
|
||||
private int thread = defaultWorkerThread();
|
||||
|
||||
@Option(names = {"-g", "--worker-group"}, description = "The worker group key, must match the regex [a-zA-Z0-9_-]+ (EE only)")
|
||||
@@ -37,11 +39,8 @@ public class WorkerCommand extends AbstractServerCommand {
|
||||
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
|
||||
KestraContext.getContext().injectWorkerConfigs(thread, workerGroupKey);
|
||||
|
||||
super.call();
|
||||
|
||||
this.shutdownHook(() -> KestraContext.getContext().shutdown());
|
||||
if (this.workerGroupKey != null && !this.workerGroupKey.matches("[a-zA-Z0-9_-]+")) {
|
||||
throw new IllegalArgumentException("The --worker-group option must match the [a-zA-Z0-9_-]+ pattern");
|
||||
}
|
||||
@@ -53,6 +52,13 @@ public class WorkerCommand extends AbstractServerCommand {
|
||||
|
||||
worker.run();
|
||||
|
||||
if (this.workerGroupKey != null) {
|
||||
log.info("Worker started with {} thread(s) in group '{}'", this.thread, this.workerGroupKey);
|
||||
}
|
||||
else {
|
||||
log.info("Worker started with {} thread(s)", this.thread);
|
||||
}
|
||||
|
||||
Await.until(() -> !this.applicationContext.isRunning());
|
||||
|
||||
return 0;
|
||||
|
||||
@@ -2,7 +2,6 @@ package io.kestra.cli.commands.sys;
|
||||
|
||||
import io.kestra.cli.AbstractCommand;
|
||||
import io.kestra.core.models.flows.Flow;
|
||||
import io.kestra.core.models.flows.GenericFlow;
|
||||
import io.kestra.core.repositories.FlowRepositoryInterface;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import jakarta.inject.Inject;
|
||||
@@ -10,7 +9,6 @@ import lombok.extern.slf4j.Slf4j;
|
||||
import picocli.CommandLine;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "reindex",
|
||||
@@ -35,8 +33,8 @@ public class ReindexCommand extends AbstractCommand {
|
||||
List<Flow> allFlow = flowRepository.findAllForAllTenants();
|
||||
allFlow.stream()
|
||||
.map(flow -> flowRepository.findByIdWithSource(flow.getTenantId(), flow.getNamespace(), flow.getId()).orElse(null))
|
||||
.filter(Objects::nonNull)
|
||||
.forEach(flow -> flowRepository.update(GenericFlow.of(flow), flow));
|
||||
.filter(flow -> flow != null)
|
||||
.forEach(flow -> flowRepository.update(flow.toFlow(), flow.toFlow(), flow.getSource(), flow.toFlow()));
|
||||
|
||||
stdOut("Successfully reindex " + allFlow.size() + " flow(s).");
|
||||
}
|
||||
|
||||
@@ -2,8 +2,8 @@ package io.kestra.cli.commands.templates;
|
||||
|
||||
import io.kestra.cli.AbstractApiCommand;
|
||||
import io.kestra.cli.AbstractValidateCommand;
|
||||
import io.kestra.cli.services.TenantIdSelectorService;
|
||||
import io.kestra.core.models.templates.TemplateEnabled;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import io.micronaut.http.HttpRequest;
|
||||
import io.micronaut.http.HttpResponse;
|
||||
import io.micronaut.http.MediaType;
|
||||
@@ -27,8 +27,9 @@ import java.nio.file.Path;
|
||||
public class TemplateExportCommand extends AbstractApiCommand {
|
||||
private static final String DEFAULT_FILE_NAME = "templates.zip";
|
||||
|
||||
// @FIXME: Keep this to work around a Micronaut bug that requires @Inject on the top-level command in order to inject into abstract classes
|
||||
@Inject
|
||||
private TenantIdSelectorService tenantService;
|
||||
private ApplicationContext applicationContext;
|
||||
|
||||
@CommandLine.Option(names = {"--namespace"}, description = "The namespace of templates to export")
|
||||
public String namespace;
|
||||
@@ -42,7 +43,7 @@ public class TemplateExportCommand extends AbstractApiCommand {
|
||||
|
||||
try(DefaultHttpClient client = client()) {
|
||||
MutableHttpRequest<Object> request = HttpRequest
|
||||
.GET(apiUri("/templates/export/by-query", tenantService.getTenantId(tenantId)) + (namespace != null ? "?namespace=" + namespace : ""))
|
||||
.GET(apiUri("/templates/export/by-query") + (namespace != null ? "?namespace=" + namespace : ""))
|
||||
.accept(MediaType.APPLICATION_OCTET_STREAM);
|
||||
|
||||
HttpResponse<byte[]> response = client.toBlocking().exchange(this.requestOptions(request), byte[].class);
|
||||
|
||||
@@ -4,6 +4,7 @@ import io.kestra.cli.AbstractValidateCommand;
|
||||
import io.kestra.core.models.templates.Template;
|
||||
import io.kestra.core.models.templates.TemplateEnabled;
|
||||
import io.kestra.core.models.validations.ModelValidator;
|
||||
import io.kestra.core.serializers.YamlParser;
|
||||
import jakarta.inject.Inject;
|
||||
import picocli.CommandLine;
|
||||
|
||||
@@ -15,6 +16,8 @@ import java.util.Collections;
|
||||
)
|
||||
@TemplateEnabled
|
||||
public class TemplateValidateCommand extends AbstractValidateCommand {
|
||||
@Inject
|
||||
private YamlParser yamlParser;
|
||||
|
||||
@Inject
|
||||
private ModelValidator modelValidator;
|
||||
@@ -23,6 +26,7 @@ public class TemplateValidateCommand extends AbstractValidateCommand {
|
||||
public Integer call() throws Exception {
|
||||
return this.call(
|
||||
Template.class,
|
||||
yamlParser,
|
||||
modelValidator,
|
||||
(Object object) -> {
|
||||
Template template = (Template) object;
|
||||
|
||||
@@ -2,7 +2,6 @@ package io.kestra.cli.commands.templates.namespaces;
|
||||
|
||||
import io.kestra.cli.AbstractValidateCommand;
|
||||
import io.kestra.cli.commands.AbstractServiceNamespaceUpdateCommand;
|
||||
import io.kestra.cli.services.TenantIdSelectorService;
|
||||
import io.kestra.core.models.templates.Template;
|
||||
import io.kestra.core.models.templates.TemplateEnabled;
|
||||
import io.kestra.core.serializers.YamlParser;
|
||||
@@ -28,9 +27,8 @@ import jakarta.validation.ConstraintViolationException;
|
||||
@Slf4j
|
||||
@TemplateEnabled
|
||||
public class TemplateNamespaceUpdateCommand extends AbstractServiceNamespaceUpdateCommand {
|
||||
|
||||
@Inject
|
||||
private TenantIdSelectorService tenantService;
|
||||
public YamlParser yamlParser;
|
||||
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
@@ -40,7 +38,7 @@ public class TemplateNamespaceUpdateCommand extends AbstractServiceNamespaceUpda
|
||||
List<Template> templates = files
|
||||
.filter(Files::isRegularFile)
|
||||
.filter(YamlParser::isValidExtension)
|
||||
.map(path -> YamlParser.parse(path.toFile(), Template.class))
|
||||
.map(path -> yamlParser.parse(path.toFile(), Template.class))
|
||||
.toList();
|
||||
|
||||
if (templates.isEmpty()) {
|
||||
@@ -49,7 +47,7 @@ public class TemplateNamespaceUpdateCommand extends AbstractServiceNamespaceUpda
|
||||
|
||||
try (DefaultHttpClient client = client()) {
|
||||
MutableHttpRequest<List<Template>> request = HttpRequest
|
||||
.POST(apiUri("/templates/", tenantService.getTenantId(tenantId)) + namespace + "?delete=" + delete, templates);
|
||||
.POST(apiUri("/templates/") + namespace + "?delete=" + delete, templates);
|
||||
|
||||
List<UpdateResult> updated = client.toBlocking().retrieve(
|
||||
this.requestOptions(request),
|
||||
|
||||
153 cli/src/main/java/io/kestra/cli/plugins/PluginDownloader.java (Normal file)
@@ -0,0 +1,153 @@
|
||||
package io.kestra.cli.plugins;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import io.micronaut.context.annotation.Value;
|
||||
import io.micronaut.core.annotation.Nullable;
|
||||
import jakarta.inject.Inject;
|
||||
import jakarta.inject.Singleton;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.maven.repository.internal.MavenRepositorySystemUtils;
|
||||
import org.eclipse.aether.DefaultRepositorySystemSession;
|
||||
import org.eclipse.aether.RepositorySystem;
|
||||
import org.eclipse.aether.RepositorySystemSession;
|
||||
import org.eclipse.aether.artifact.Artifact;
|
||||
import org.eclipse.aether.artifact.DefaultArtifact;
|
||||
import org.eclipse.aether.connector.basic.BasicRepositoryConnectorFactory;
|
||||
import org.eclipse.aether.impl.DefaultServiceLocator;
|
||||
import org.eclipse.aether.repository.LocalRepository;
|
||||
import org.eclipse.aether.repository.RemoteRepository;
|
||||
import org.eclipse.aether.resolution.*;
|
||||
import org.eclipse.aether.spi.connector.RepositoryConnectorFactory;
|
||||
import org.eclipse.aether.spi.connector.transport.TransporterFactory;
|
||||
import org.eclipse.aether.transport.file.FileTransporterFactory;
|
||||
import org.eclipse.aether.transport.http.HttpTransporterFactory;
|
||||
import org.eclipse.aether.util.repository.AuthenticationBuilder;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import java.nio.file.Files;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
@Singleton
|
||||
@Slf4j
|
||||
public class PluginDownloader {
|
||||
private final List<RepositoryConfig> repositoryConfigs;
|
||||
private final RepositorySystem system;
|
||||
private final RepositorySystemSession session;
|
||||
|
||||
@Inject
|
||||
public PluginDownloader(
|
||||
List<RepositoryConfig> repositoryConfigs,
|
||||
@Nullable @Value("${kestra.plugins.local-repository-path}") String localRepositoryPath
|
||||
) {
|
||||
this.repositoryConfigs = repositoryConfigs;
|
||||
this.system = repositorySystem();
|
||||
this.session = repositorySystemSession(system, localRepositoryPath);
|
||||
}
|
||||
|
||||
public void addRepository(RepositoryConfig repositoryConfig) {
|
||||
this.repositoryConfigs.add(repositoryConfig);
|
||||
}
|
||||
|
||||
public List<URL> resolve(List<String> dependencies) throws MalformedURLException, ArtifactResolutionException, VersionRangeResolutionException {
|
||||
List<RemoteRepository> repositories = remoteRepositories();
|
||||
|
||||
List<ArtifactResult> artifactResults = resolveArtifacts(repositories, dependencies);
|
||||
List<URL> localUrls = resolveUrls(artifactResults);
|
||||
log.debug("Resolved Plugin {} with {}", dependencies, localUrls);
|
||||
|
||||
return localUrls;
|
||||
}
|
||||
|
||||
private List<RemoteRepository> remoteRepositories() {
|
||||
return repositoryConfigs
|
||||
.stream()
|
||||
.map(repositoryConfig -> {
|
||||
var build = new RemoteRepository.Builder(
|
||||
repositoryConfig.getId(),
|
||||
"default",
|
||||
repositoryConfig.getUrl()
|
||||
);
|
||||
|
||||
if (repositoryConfig.getBasicAuth() != null) {
|
||||
var authenticationBuilder = new AuthenticationBuilder();
|
||||
authenticationBuilder.addUsername(repositoryConfig.getBasicAuth().getUsername());
|
||||
authenticationBuilder.addPassword(repositoryConfig.getBasicAuth().getPassword());
|
||||
|
||||
build.setAuthentication(authenticationBuilder.build());
|
||||
}
|
||||
|
||||
return build.build();
|
||||
})
|
||||
.toList();
|
||||
}
|
||||
|
||||
private static RepositorySystem repositorySystem() {
|
||||
DefaultServiceLocator locator = MavenRepositorySystemUtils.newServiceLocator();
|
||||
locator.addService(RepositoryConnectorFactory.class, BasicRepositoryConnectorFactory.class);
|
||||
locator.addService(TransporterFactory.class, FileTransporterFactory.class);
|
||||
locator.addService(TransporterFactory.class, HttpTransporterFactory.class);
|
||||
|
||||
return locator.getService(RepositorySystem.class);
|
||||
}
|
||||
|
||||
private RepositorySystemSession repositorySystemSession(RepositorySystem system, String localRepositoryPath) {
|
||||
DefaultRepositorySystemSession session = MavenRepositorySystemUtils.newSession();
|
||||
|
||||
if (localRepositoryPath == null) {
|
||||
try {
|
||||
final String tempDirectory = Files.createTempDirectory(this.getClass().getSimpleName().toLowerCase())
|
||||
.toAbsolutePath()
|
||||
.toString();
|
||||
|
||||
localRepositoryPath = tempDirectory;
|
||||
|
||||
Runtime.getRuntime().addShutdownHook(new Thread(() -> {
|
||||
try {
|
||||
FileUtils.deleteDirectory(new File(tempDirectory));
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}));
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
LocalRepository localRepo = new LocalRepository(localRepositoryPath);
|
||||
session.setLocalRepositoryManager(system.newLocalRepositoryManager(session, localRepo));
|
||||
|
||||
return session;
|
||||
}
|
||||
|
||||
private List<ArtifactResult> resolveArtifacts(List<RemoteRepository> repositories, List<String> dependencies) throws ArtifactResolutionException, VersionRangeResolutionException {
|
||||
List<ArtifactResult> results = new ArrayList<>(dependencies.size());
|
||||
for (String dependency: dependencies) {
|
||||
var artifact = new DefaultArtifact(dependency);
|
||||
var version = system.resolveVersionRange(session, new VersionRangeRequest(artifact, repositories, null));
|
||||
var artifactRequest = new ArtifactRequest(
|
||||
new DefaultArtifact(artifact.getGroupId(), artifact.getArtifactId(), "jar", version.getHighestVersion().toString()),
|
||||
repositories,
|
||||
null
|
||||
);
|
||||
var artifactResult = system.resolveArtifact(session, artifactRequest);
|
||||
results.add(artifactResult);
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
private List<URL> resolveUrls(List<ArtifactResult> artifactResults) throws MalformedURLException {
|
||||
ImmutableList.Builder<URL> urls = ImmutableList.builder();
|
||||
for (ArtifactResult artifactResult : artifactResults) {
|
||||
URL url;
|
||||
url = artifactResult.getArtifact().getFile().toPath().toUri().toURL();
|
||||
urls.add(url);
|
||||
}
|
||||
return urls.build();
|
||||
}
|
||||
}
|
||||
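A hedged usage sketch of the PluginDownloader class added above; the repository URL, plugin coordinates, and version range are illustrative assumptions, not taken from this diff:

    import io.kestra.cli.plugins.PluginDownloader;
    import io.kestra.cli.plugins.RepositoryConfig;

    import java.net.URL;
    import java.util.ArrayList;
    import java.util.List;

    class PluginDownloaderSketch {
        static List<URL> resolveExample() throws Exception {
            // RepositoryConfig is @Builder-annotated, so a repository can be defined inline.
            RepositoryConfig central = RepositoryConfig.builder()
                .id("central")
                .url("https://repo.maven.apache.org/maven2/")
                .build();

            // A null local-repository path makes the downloader create a temporary directory
            // that the shutdown hook in repositorySystemSession() deletes on exit.
            PluginDownloader downloader = new PluginDownloader(new ArrayList<>(List.of(central)), null);

            // Each coordinate is resolved to its highest matching version and returned as a
            // file URL pointing at the downloaded plugin JAR.
            return downloader.resolve(List.of("io.kestra.plugin:plugin-notifications:[1.0,)"));
        }
    }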
@@ -0,0 +1,30 @@
package io.kestra.cli.plugins;

import io.micronaut.context.annotation.EachProperty;
import io.micronaut.context.annotation.Parameter;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Getter;

@EachProperty("kestra.plugins.repositories")
@Getter
@AllArgsConstructor
@Builder
public class RepositoryConfig {
    String id;

    String url;

    BasicAuth basicAuth;

    @Getter
    @AllArgsConstructor
    public static class BasicAuth {
        private String username;
        private String password;
    }

    public RepositoryConfig(@Parameter String id) {
        this.id = id;
    }
}
@@ -1,27 +1,27 @@
|
||||
package io.kestra.cli.services;
|
||||
|
||||
import io.kestra.core.exceptions.FlowProcessingException;
|
||||
import io.kestra.core.models.flows.FlowInterface;
|
||||
import io.kestra.core.models.flows.Flow;
|
||||
import io.kestra.core.models.flows.FlowWithPath;
|
||||
import io.kestra.core.models.flows.FlowWithSource;
|
||||
import io.kestra.core.models.flows.GenericFlow;
|
||||
import io.kestra.core.models.validations.ModelValidator;
|
||||
import io.kestra.core.repositories.FlowRepositoryInterface;
|
||||
import io.kestra.core.serializers.YamlParser;
|
||||
import io.kestra.core.services.FlowListenersInterface;
|
||||
import io.kestra.core.services.PluginDefaultService;
|
||||
import io.micronaut.context.annotation.Requires;
|
||||
import io.micronaut.context.annotation.Value;
|
||||
import io.micronaut.scheduling.io.watch.FileWatchConfiguration;
|
||||
import jakarta.annotation.Nullable;
|
||||
import jakarta.inject.Inject;
|
||||
import jakarta.inject.Singleton;
|
||||
import jakarta.validation.ConstraintViolationException;
|
||||
import java.util.concurrent.CopyOnWriteArrayList;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
import javax.annotation.Nullable;
|
||||
import java.io.IOException;
|
||||
import java.nio.charset.Charset;
|
||||
import java.nio.file.*;
|
||||
import java.nio.file.attribute.BasicFileAttributes;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
|
||||
@@ -40,18 +40,26 @@ public class FileChangedEventListener {
|
||||
@Inject
|
||||
private PluginDefaultService pluginDefaultService;
|
||||
|
||||
@Inject
|
||||
private YamlParser yamlParser;
|
||||
|
||||
@Inject
|
||||
private ModelValidator modelValidator;
|
||||
|
||||
@Inject
|
||||
protected FlowListenersInterface flowListeners;
|
||||
|
||||
@Nullable
|
||||
@Value("${micronaut.io.watch.tenantId}")
|
||||
private String tenantId;
|
||||
|
||||
FlowFilesManager flowFilesManager;
|
||||
|
||||
private List<FlowWithPath> flows = new CopyOnWriteArrayList<>();
|
||||
private List<FlowWithPath> flows = new ArrayList<>();
|
||||
|
||||
private boolean isStarted = false;
|
||||
|
||||
|
||||
@Inject
|
||||
public FileChangedEventListener(@Nullable FileWatchConfiguration fileWatchConfiguration, @Nullable WatchService watchService) {
|
||||
this.fileWatchConfiguration = fileWatchConfiguration;
|
||||
@@ -60,7 +68,7 @@ public class FileChangedEventListener {
|
||||
|
||||
public void startListeningFromConfig() throws IOException, InterruptedException {
|
||||
if (fileWatchConfiguration != null && fileWatchConfiguration.isEnabled()) {
|
||||
this.flowFilesManager = new LocalFlowFileWatcher(flowRepositoryInterface);
|
||||
this.flowFilesManager = new LocalFlowFileWatcher(flowRepositoryInterface, pluginDefaultService);
|
||||
List<Path> paths = fileWatchConfiguration.getPaths();
|
||||
this.setup(paths);
|
||||
|
||||
@@ -68,7 +76,7 @@ public class FileChangedEventListener {
|
||||
// Init existing flows not already in files
|
||||
flowListeners.listen(flows -> {
|
||||
if (!isStarted) {
|
||||
for (FlowInterface flow : flows) {
|
||||
for (FlowWithSource flow : flows) {
|
||||
if (this.flows.stream().noneMatch(flowWithPath -> flowWithPath.uidWithoutRevision().equals(flow.uidWithoutRevision()))) {
|
||||
flowToFile(flow, this.buildPath(flow));
|
||||
this.flows.add(FlowWithPath.of(flow, this.buildPath(flow).toString()));
|
||||
@@ -129,7 +137,7 @@ public class FileChangedEventListener {
|
||||
try {
|
||||
String content = Files.readString(filePath, Charset.defaultCharset());
|
||||
|
||||
Optional<FlowWithSource> flow = parseFlow(content, entry);
|
||||
Optional<Flow> flow = parseFlow(content, entry);
|
||||
if (flow.isPresent()) {
|
||||
if (kind == StandardWatchEventKinds.ENTRY_MODIFY) {
|
||||
// Check if we already have a file with the given path
|
||||
@@ -148,20 +156,12 @@ public class FileChangedEventListener {
|
||||
flows.add(FlowWithPath.of(flow.get(), filePath.toString()));
|
||||
}
|
||||
|
||||
flowFilesManager.createOrUpdateFlow(GenericFlow.fromYaml(getTenantIdFromPath(filePath), content));
|
||||
flowFilesManager.createOrUpdateFlow(flow.get(), content);
|
||||
log.info("Flow {} from file {} has been created or modified", flow.get().getId(), entry);
|
||||
}
|
||||
|
||||
} catch (NoSuchFileException e) {
|
||||
log.warn("File not found: {}, deleting it", entry, e);
|
||||
// the file might have been deleted while reading, so if it is not found we try to delete the flow
|
||||
flows.stream()
|
||||
.filter(flow -> flow.getPath().equals(filePath.toString()))
|
||||
.findFirst()
|
||||
.ifPresent(flowWithPath -> {
|
||||
flowFilesManager.deleteFlow(flowWithPath.getTenantId(), flowWithPath.getNamespace(), flowWithPath.getId());
|
||||
this.flows.removeIf(fwp -> fwp.uidWithoutRevision().equals(flowWithPath.uidWithoutRevision()));
|
||||
});
|
||||
log.error("File not found: {}", entry, e);
|
||||
} catch (IOException e) {
|
||||
log.error("Error reading file: {}", entry, e);
|
||||
}
|
||||
@@ -207,11 +207,11 @@ public class FileChangedEventListener {
|
||||
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
|
||||
if (file.toString().endsWith(".yml") || file.toString().endsWith(".yaml")) {
|
||||
String content = Files.readString(file, Charset.defaultCharset());
|
||||
Optional<FlowWithSource> flow = parseFlow(content, file);
|
||||
Optional<Flow> flow = parseFlow(content, file);
|
||||
|
||||
if (flow.isPresent() && flows.stream().noneMatch(flowWithPath -> flowWithPath.uidWithoutRevision().equals(flow.get().uidWithoutRevision()))) {
|
||||
flows.add(FlowWithPath.of(flow.get(), file.toString()));
|
||||
flowFilesManager.createOrUpdateFlow(GenericFlow.fromYaml(getTenantIdFromPath(file), content));
|
||||
flowFilesManager.createOrUpdateFlow(flow.get(), content);
|
||||
}
|
||||
}
|
||||
return FileVisitResult.CONTINUE;
|
||||
@@ -223,25 +223,27 @@ public class FileChangedEventListener {
|
||||
}
|
||||
}
|
||||
|
||||
private void flowToFile(FlowInterface flow, Path path) {
|
||||
private void flowToFile(FlowWithSource flow, Path path) {
|
||||
Path defaultPath = path != null ? path : this.buildPath(flow);
|
||||
|
||||
try {
|
||||
Files.writeString(defaultPath, flow.source());
|
||||
Files.writeString(defaultPath, flow.getSource());
|
||||
log.info("Flow {} has been written to file {}", flow.getId(), defaultPath);
|
||||
} catch (IOException e) {
|
||||
log.error("Error writing file: {}", defaultPath, e);
|
||||
}
|
||||
}
|
||||
|
||||
private Optional<FlowWithSource> parseFlow(String content, Path entry) {
|
||||
private Optional<Flow> parseFlow(String content, Path entry) {
|
||||
try {
|
||||
FlowWithSource flow = pluginDefaultService.parseFlowWithAllDefaults(getTenantIdFromPath(entry), content, false);
|
||||
modelValidator.validate(flow);
|
||||
Flow flow = yamlParser.parse(content, Flow.class);
|
||||
FlowWithSource withPluginDefault = pluginDefaultService.injectDefaults(FlowWithSource.of(flow, content));
|
||||
modelValidator.validate(withPluginDefault);
|
||||
return Optional.of(flow);
|
||||
} catch (ConstraintViolationException | FlowProcessingException e) {
|
||||
} catch (ConstraintViolationException e) {
|
||||
log.warn("Error while parsing flow: {}", entry, e);
|
||||
}
|
||||
|
||||
return Optional.empty();
|
||||
}
|
||||
|
||||
@@ -257,11 +259,7 @@ public class FileChangedEventListener {
|
||||
}
|
||||
}
|
||||
|
||||
private Path buildPath(FlowInterface flow) {
|
||||
private Path buildPath(Flow flow) {
|
||||
return fileWatchConfiguration.getPaths().getFirst().resolve(flow.uidWithoutRevision() + ".yml");
|
||||
}
|
||||
|
||||
private String getTenantIdFromPath(Path path) {
|
||||
return path.getFileName().toString().split("_")[0];
|
||||
}
|
||||
}
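A hedged illustration of the tenant-from-filename convention implied by buildPath and getTenantIdFromPath above, assuming flow uids of the form <tenant>_<namespace>_<id>; the file name is hypothetical:

    // java.nio.file.Path; "main_io.kestra.tests_hello-world.yml" -> tenant id "main"
    String tenant = Path.of("main_io.kestra.tests_hello-world.yml")
        .getFileName().toString().split("_")[0];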
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
package io.kestra.cli.services;
|
||||
|
||||
import io.kestra.core.models.flows.Flow;
|
||||
import io.kestra.core.models.flows.FlowWithSource;
|
||||
import io.kestra.core.models.flows.GenericFlow;
|
||||
|
||||
public interface FlowFilesManager {
|
||||
|
||||
FlowWithSource createOrUpdateFlow(GenericFlow flow);
|
||||
FlowWithSource createOrUpdateFlow(Flow flow, String content);
|
||||
|
||||
void deleteFlow(FlowWithSource toDelete);
|
||||
|
||||
|
||||
@@ -1,23 +1,27 @@
|
||||
package io.kestra.cli.services;
|
||||
|
||||
import io.kestra.core.models.flows.Flow;
|
||||
import io.kestra.core.models.flows.FlowWithSource;
|
||||
import io.kestra.core.models.flows.GenericFlow;
|
||||
import io.kestra.core.repositories.FlowRepositoryInterface;
|
||||
import io.kestra.core.services.PluginDefaultService;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
@Slf4j
|
||||
public class LocalFlowFileWatcher implements FlowFilesManager {
|
||||
private final FlowRepositoryInterface flowRepository;
|
||||
private final PluginDefaultService pluginDefaultService;
|
||||
|
||||
public LocalFlowFileWatcher(FlowRepositoryInterface flowRepository) {
|
||||
public LocalFlowFileWatcher(FlowRepositoryInterface flowRepository, PluginDefaultService pluginDefaultService) {
|
||||
this.flowRepository = flowRepository;
|
||||
this.pluginDefaultService = pluginDefaultService;
|
||||
}
|
||||
|
||||
@Override
|
||||
public FlowWithSource createOrUpdateFlow(final GenericFlow flow) {
|
||||
return flowRepository.findById(flow.getTenantId(), flow.getNamespace(), flow.getId())
|
||||
.map(previous -> flowRepository.update(flow, previous))
|
||||
.orElseGet(() -> flowRepository.create(flow));
|
||||
public FlowWithSource createOrUpdateFlow(Flow flow, String content) {
|
||||
FlowWithSource withDefault = pluginDefaultService.injectDefaults(FlowWithSource.of(flow, content));
|
||||
return flowRepository.findById(null, flow.getNamespace(), flow.getId())
|
||||
.map(previous -> flowRepository.update(flow, previous, content, withDefault))
|
||||
.orElseGet(() -> flowRepository.create(flow, content, withDefault));
|
||||
}
|
||||
|
||||
@Override
|
||||
|
||||
@@ -1,19 +0,0 @@
package io.kestra.cli.services;

import static io.kestra.core.tenant.TenantService.MAIN_TENANT;

import io.kestra.core.exceptions.KestraRuntimeException;
import jakarta.inject.Singleton;
import org.apache.commons.lang3.StringUtils;

@Singleton
public class TenantIdSelectorService {

    // For override purposes in Kestra EE
    public String getTenantId(String tenantId) {
        if (StringUtils.isNotBlank(tenantId) && !MAIN_TENANT.equals(tenantId)) {
            throw new KestraRuntimeException("Tenant id can only be 'main'");
        }
        return MAIN_TENANT;
    }
}
@@ -1,9 +1,6 @@
|
||||
micronaut:
|
||||
application:
|
||||
name: kestra
|
||||
# Disable Micronaut Open Telemetry
|
||||
otel:
|
||||
enabled: false
|
||||
router:
|
||||
static-resources:
|
||||
swagger:
|
||||
@@ -15,9 +12,6 @@ micronaut:
|
||||
static:
|
||||
paths: classpath:static
|
||||
mapping: /static/**
|
||||
root:
|
||||
paths: classpath:root
|
||||
mapping: /**
|
||||
server:
|
||||
max-request-size: 10GB
|
||||
multipart:
|
||||
@@ -27,14 +21,13 @@ micronaut:
|
||||
write-idle-timeout: 60m
|
||||
idle-timeout: 60m
|
||||
netty:
|
||||
max-zstd-encode-size: 67108864 # increased to 64MB from the default of 32MB
|
||||
max-chunk-size: 10MB
|
||||
max-header-size: 32768 # increased from the default of 8k
|
||||
responses:
|
||||
file:
|
||||
cache-seconds: 86400
|
||||
cache-control:
|
||||
public: true
|
||||
responses:
|
||||
file:
|
||||
cache-seconds: 86400
|
||||
cache-control:
|
||||
public: true
|
||||
|
||||
# Access log configuration, see https://docs.micronaut.io/latest/guide/index.html#accessLogger
|
||||
access-logger:
|
||||
@@ -142,8 +135,8 @@ kestra:
|
||||
jdbc:
|
||||
queues:
|
||||
min-poll-interval: 25ms
|
||||
max-poll-interval: 500ms
|
||||
poll-switch-interval: 60s
|
||||
max-poll-interval: 1000ms
|
||||
poll-switch-interval: 5s
|
||||
|
||||
cleaner:
|
||||
initial-delay: 1h
|
||||
@@ -172,7 +165,7 @@ kestra:
|
||||
values:
|
||||
recoverMissedSchedules: ALL
|
||||
variables:
|
||||
env-vars-prefix: ENV_
|
||||
env-vars-prefix: KESTRA_
|
||||
cache-enabled: true
|
||||
cache-size: 1000
|
||||
|
||||
@@ -184,6 +177,7 @@ kestra:
|
||||
|
||||
server:
|
||||
basic-auth:
|
||||
enabled: false
|
||||
# These URLs will not be authenticated, by default we open some of the Micronaut default endpoints but not all for security reasons
|
||||
open-urls:
|
||||
- "/ping"
|
||||
@@ -198,21 +192,16 @@ kestra:
|
||||
liveness:
|
||||
enabled: true
|
||||
# The expected time between liveness probes.
|
||||
interval: 10s
|
||||
interval: 5s
|
||||
# The timeout used to detect service failures.
|
||||
timeout: 1m
|
||||
timeout: 45s
|
||||
# The time to wait before executing a liveness probe.
|
||||
initialDelay: 1m
|
||||
initialDelay: 45s
|
||||
# The expected time between service heartbeats.
|
||||
heartbeatInterval: 3s
|
||||
service:
|
||||
purge:
|
||||
initial-delay: 1h
|
||||
fixed-delay: 1d
|
||||
retention: 30d
|
||||
anonymous-usage-report:
|
||||
enabled: true
|
||||
uri: https://api.kestra.io/v1/server-events/
|
||||
uri: https://api.kestra.io/v1/reports/usages
|
||||
initial-delay: 5m
|
||||
fixed-delay: 1h
|
||||
|
||||
@@ -228,4 +217,4 @@ otel:
|
||||
- /health
|
||||
- /env
|
||||
- /prometheus
|
||||
propagators: tracecontext, baggage
|
||||
propagators: tracecontext, baggage
|
||||
@@ -13,7 +13,8 @@ import picocli.CommandLine;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.*;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
|
||||
class AppTest {
|
||||
@@ -25,7 +26,7 @@ class AppTest {
|
||||
try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, Environment.TEST)) {
|
||||
PicocliRunner.call(App.class, ctx, "--help");
|
||||
|
||||
assertThat(out.toString()).contains("kestra");
|
||||
assertThat(out.toString(), containsString("kestra"));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -41,7 +42,7 @@ class AppTest {
|
||||
new CommandLine(App.class, new MicronautFactory(ctx)).execute(args);
|
||||
|
||||
assertTrue(ctx.getProperty("kestra.server-type", ServerType.class).isEmpty());
|
||||
assertThat(out.toString()).startsWith("Usage: kestra server " + serverType);
|
||||
assertThat(out.toString(), startsWith("Usage: kestra server " + serverType));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -55,9 +56,9 @@ class AppTest {
|
||||
try (ApplicationContext ctx = App.applicationContext(App.class, argsWithMissingParams)) {
|
||||
new CommandLine(App.class, new MicronautFactory(ctx)).execute(argsWithMissingParams);
|
||||
|
||||
assertThat(out.toString()).startsWith("Missing required parameters: ");
|
||||
assertThat(out.toString()).contains("Usage: kestra flow namespace update ");
|
||||
assertThat(out.toString()).doesNotContain("MissingParameterException: ");
|
||||
assertThat(out.toString(), startsWith("Missing required parameters: "));
|
||||
assertThat(out.toString(), containsString("Usage: kestra flow namespace update "));
|
||||
assertThat(out.toString(), not(containsString("MissingParameterException: ")));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
package io.kestra.core.validations;
|
||||
package io.kestra.cli;
|
||||
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import io.micronaut.context.exceptions.BeanInstantiationException;
|
||||
@@ -8,7 +8,8 @@ import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
|
||||
class ServerCommandValidatorTest {
|
||||
|
||||
@@ -39,8 +40,8 @@ class ServerCommandValidatorTest {
|
||||
.start()
|
||||
);
|
||||
final Throwable rootException = getRootException(exception);
|
||||
assertThat(rootException.getClass()).isEqualTo(ServerCommandValidator.ServerCommandException.class);
|
||||
assertThat(rootException.getMessage()).isEqualTo("Incomplete server configuration - missing required properties");
|
||||
assertThat(rootException.getClass(), is(ServerCommandValidator.ServerCommandException.class));
|
||||
assertThat(rootException.getMessage(), is("Incomplete server configuration - missing required properties"));
|
||||
}
|
||||
|
||||
private Throwable getRootException(Throwable exception) {
|
||||
@@ -4,14 +4,12 @@ import io.micronaut.configuration.picocli.PicocliRunner;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import io.micronaut.context.env.Environment;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.yaml.snakeyaml.Yaml;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.assertj.core.api.AssertionsForClassTypes.catchThrowable;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.core.StringContains.containsString;
|
||||
|
||||
class ConfigPropertiesCommandTest {
|
||||
@Test
|
||||
@@ -22,52 +20,8 @@ class ConfigPropertiesCommandTest {
|
||||
try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, Environment.TEST)) {
|
||||
PicocliRunner.call(ConfigPropertiesCommand.class, ctx);
|
||||
|
||||
assertThat(out.toString()).contains("activeEnvironments:");
|
||||
assertThat(out.toString()).contains("- test");
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void shouldOutputCustomEnvironment() {
|
||||
ByteArrayOutputStream out = new ByteArrayOutputStream();
|
||||
System.setOut(new PrintStream(out));
|
||||
|
||||
try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, "custom-env")) {
|
||||
PicocliRunner.call(ConfigPropertiesCommand.class, ctx);
|
||||
|
||||
assertThat(out.toString()).contains("activeEnvironments:");
|
||||
assertThat(out.toString()).contains("- custom-env");
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void shouldReturnZeroOnSuccess() throws Exception {
|
||||
ByteArrayOutputStream out = new ByteArrayOutputStream();
|
||||
System.setOut(new PrintStream(out));
|
||||
|
||||
try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, Environment.TEST)) {
|
||||
ConfigPropertiesCommand cmd = ctx.createBean(ConfigPropertiesCommand.class);
|
||||
int result = cmd.call();
|
||||
|
||||
assertThat(result).isZero();
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void shouldOutputValidYaml() {
|
||||
ByteArrayOutputStream out = new ByteArrayOutputStream();
|
||||
System.setOut(new PrintStream(out));
|
||||
|
||||
try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, Environment.TEST)) {
|
||||
PicocliRunner.call(ConfigPropertiesCommand.class, ctx);
|
||||
|
||||
String output = out.toString();
|
||||
Yaml yaml = new Yaml();
|
||||
Throwable thrown = catchThrowable(() -> {
|
||||
Map<?, ?> parsed = yaml.load(output);
|
||||
assertThat(parsed).isInstanceOf(Map.class);
|
||||
});
|
||||
assertThat(thrown).isNull();
|
||||
assertThat(out.toString(), containsString("activeEnvironments:"));
|
||||
assertThat(out.toString(), containsString("- test"));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -11,7 +11,9 @@ import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
import java.net.URL;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.core.Is.is;
|
||||
import static org.hamcrest.core.StringContains.containsString;
|
||||
|
||||
class FlowCreateOrUpdateCommandTest {
|
||||
@RetryingTest(5) // flaky on CI but cannot be reproduced even with 100 repetitions
|
||||
@@ -36,7 +38,7 @@ class FlowCreateOrUpdateCommandTest {
|
||||
};
|
||||
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
|
||||
|
||||
assertThat(out.toString()).contains("4 flow(s)");
|
||||
assertThat(out.toString(), containsString("4 flow(s)"));
|
||||
out.reset();
|
||||
|
||||
args = new String[]{
|
||||
@@ -51,7 +53,7 @@ class FlowCreateOrUpdateCommandTest {
|
||||
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
|
||||
|
||||
// 2 delete + 1 update
|
||||
assertThat(out.toString()).contains("4 flow(s)");
|
||||
assertThat(out.toString(), containsString("4 flow(s)"));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -78,7 +80,7 @@ class FlowCreateOrUpdateCommandTest {
|
||||
};
|
||||
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
|
||||
|
||||
assertThat(out.toString()).contains("4 flow(s)");
|
||||
assertThat(out.toString(), containsString("4 flow(s)"));
|
||||
out.reset();
|
||||
|
||||
// no "delete" arg should behave as no-delete
|
||||
@@ -91,7 +93,7 @@ class FlowCreateOrUpdateCommandTest {
|
||||
};
|
||||
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
|
||||
|
||||
assertThat(out.toString()).contains("1 flow(s)");
|
||||
assertThat(out.toString(), containsString("1 flow(s)"));
|
||||
out.reset();
|
||||
|
||||
args = new String[]{
|
||||
@@ -104,35 +106,7 @@ class FlowCreateOrUpdateCommandTest {
|
||||
};
|
||||
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
|
||||
|
||||
assertThat(out.toString()).contains("1 flow(s)");
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void should_fail_with_incorrect_tenant() {
|
||||
URL directory = FlowCreateOrUpdateCommandTest.class.getClassLoader().getResource("flows");
|
||||
|
||||
ByteArrayOutputStream err = new ByteArrayOutputStream();
|
||||
System.setErr(new PrintStream(err));
|
||||
|
||||
try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, Environment.TEST)) {
|
||||
|
||||
EmbeddedServer embeddedServer = ctx.getBean(EmbeddedServer.class);
|
||||
embeddedServer.start();
|
||||
|
||||
String[] args = {
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
"myuser:pass:word",
|
||||
"--tenant", "incorrect",
|
||||
directory.getPath(),
|
||||
|
||||
};
|
||||
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
|
||||
|
||||
assertThat(err.toString()).contains("Tenant id can only be 'main'");
|
||||
err.reset();
|
||||
assertThat(out.toString(), containsString("1 flow(s)"));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -157,8 +131,8 @@ class FlowCreateOrUpdateCommandTest {
|
||||
};
|
||||
Integer call = PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
|
||||
|
||||
assertThat(call).isZero();
|
||||
assertThat(out.toString()).contains("1 flow(s)");
|
||||
assertThat(call, is(0));
|
||||
assertThat(out.toString(), containsString("1 flow(s)"));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -9,7 +9,9 @@ import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
import java.net.URL;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.core.Is.is;
|
||||
import static org.hamcrest.core.StringContains.containsString;
|
||||
|
||||
class FlowDotCommandTest {
|
||||
@Test
|
||||
@@ -24,8 +26,8 @@ class FlowDotCommandTest {
|
||||
};
|
||||
Integer call = PicocliRunner.call(FlowDotCommand.class, ctx, args);
|
||||
|
||||
assertThat(call).isZero();
|
||||
assertThat(out.toString()).contains("\"root.date\"[shape=box];");
|
||||
assertThat(call, is(0));
|
||||
assertThat(out.toString(), containsString("\"root.date\"[shape=box];"));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -7,7 +7,8 @@ import org.junit.jupiter.api.Test;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.core.Is.is;
|
||||
|
||||
class FlowExpandCommandTest {
|
||||
@SuppressWarnings("deprecation")
|
||||
@@ -22,20 +23,22 @@ class FlowExpandCommandTest {
|
||||
};
|
||||
Integer call = PicocliRunner.call(FlowExpandCommand.class, ctx, args);
|
||||
|
||||
assertThat(call).isZero();
|
||||
assertThat(out.toString()).isEqualTo("id: include\n" +
|
||||
"namespace: io.kestra.cli\n" +
|
||||
"\n" +
|
||||
"# The list of tasks\n" +
|
||||
"tasks:\n" +
|
||||
"- id: t1\n" +
|
||||
" type: io.kestra.plugin.core.debug.Return\n" +
|
||||
" format: \"Lorem ipsum dolor sit amet\"\n" +
|
||||
"- id: t2\n" +
|
||||
" type: io.kestra.plugin.core.debug.Return\n" +
|
||||
" format: |\n" +
|
||||
" Lorem ipsum dolor sit amet\n" +
|
||||
" Lorem ipsum dolor sit amet\n");
|
||||
assertThat(call, is(0));
|
||||
assertThat(out.toString(), is(
|
||||
"id: include\n" +
|
||||
"namespace: io.kestra.cli\n" +
|
||||
"\n" +
|
||||
"# The list of tasks\n" +
|
||||
"tasks:\n" +
|
||||
"- id: t1\n" +
|
||||
" type: io.kestra.plugin.core.debug.Return\n" +
|
||||
" format: \"Lorem ipsum dolor sit amet\"\n" +
|
||||
"- id: t2\n" +
|
||||
" type: io.kestra.plugin.core.debug.Return\n" +
|
||||
" format: |\n" +
|
||||
" Lorem ipsum dolor sit amet\n" +
|
||||
" Lorem ipsum dolor sit amet\n"
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -14,7 +14,10 @@ import java.io.PrintStream;
|
||||
import java.net.URL;
|
||||
import java.util.zip.ZipFile;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
|
||||
import static org.hamcrest.core.Is.is;
|
||||
import static org.hamcrest.core.StringContains.containsString;
|
||||
|
||||
class FlowExportCommandTest {
|
||||
@Test
|
||||
@@ -29,8 +32,6 @@ class FlowExportCommandTest {
|
||||
|
||||
// we use the update command to add flows to extract
|
||||
String[] updateArgs = {
|
||||
"--plugins",
|
||||
"/tmp", // pass this arg because it can cause failure
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
@@ -39,12 +40,10 @@ class FlowExportCommandTest {
|
||||
directory.getPath(),
|
||||
};
|
||||
PicocliRunner.call(FlowNamespaceUpdateCommand.class, ctx, updateArgs);
|
||||
assertThat(out.toString()).contains("3 flow(s)");
|
||||
assertThat(out.toString(), containsString("3 flow(s)"));
|
||||
|
||||
// then we export them
|
||||
String[] exportArgs = {
|
||||
"--plugins",
|
||||
"/tmp", // pass this arg because it can cause failure
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
@@ -55,11 +54,11 @@ class FlowExportCommandTest {
|
||||
};
|
||||
PicocliRunner.call(FlowExportCommand.class, ctx, exportArgs);
|
||||
File file = new File("/tmp/flows.zip");
|
||||
assertThat(file.exists()).isTrue();
|
||||
assertThat(file.exists(), is(true));
|
||||
ZipFile zipFile = new ZipFile(file);
|
||||
|
||||
// When launching the test in a suite, there are 4 flows, but when launching individually there are only 3
|
||||
assertThat(zipFile.stream().count()).isGreaterThanOrEqualTo(3L);
|
||||
assertThat(zipFile.stream().count(), greaterThanOrEqualTo(3L));
|
||||
|
||||
file.delete();
|
||||
}
|
||||
|
||||
@@ -10,7 +10,9 @@ import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
import java.net.URL;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.core.Is.is;
|
||||
import static org.hamcrest.core.StringContains.containsString;
|
||||
|
||||
class FlowUpdatesCommandTest {
|
||||
@Test
|
||||
@@ -26,8 +28,6 @@ class FlowUpdatesCommandTest {
|
embeddedServer.start();

String[] args = {
"--plugins",
"/tmp", // pass this arg because it can cause failure
"--server",
embeddedServer.getURL().toString(),
"--user",
@@ -37,12 +37,10 @@ class FlowUpdatesCommandTest {
};
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);

assertThat(out.toString()).contains("successfully updated !");
assertThat(out.toString(), containsString("successfully updated !"));
out.reset();

args = new String[]{
"--plugins",
"/tmp", // pass this arg because it can cause failure
"--server",
embeddedServer.getURL().toString(),
"--user",
@@ -54,7 +52,7 @@ class FlowUpdatesCommandTest {
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);

// 2 delete + 1 update
assertThat(out.toString()).contains("successfully updated !");
assertThat(out.toString(), containsString("successfully updated !"));
}
}

@@ -72,8 +70,6 @@ class FlowUpdatesCommandTest {
embeddedServer.start();

String[] args = {
"--plugins",
"/tmp", // pass this arg because it can cause failure
"--server",
embeddedServer.getURL().toString(),
"--user",
@@ -83,13 +79,11 @@ class FlowUpdatesCommandTest {
};
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);

assertThat(out.toString()).contains("4 flow(s)");
assertThat(out.toString(), containsString("4 flow(s)"));
out.reset();

// no "delete" arg should behave as no-delete
args = new String[]{
"--plugins",
"/tmp", // pass this arg because it can cause failure
"--server",
embeddedServer.getURL().toString(),
"--user",
@@ -98,12 +92,10 @@ class FlowUpdatesCommandTest {
};
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);

assertThat(out.toString()).contains("1 flow(s)");
assertThat(out.toString(), containsString("1 flow(s)"));
out.reset();

args = new String[]{
"--plugins",
"/tmp", // pass this arg because it can cause failure
"--server",
embeddedServer.getURL().toString(),
"--user",
@@ -113,36 +105,7 @@ class FlowUpdatesCommandTest {
};
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);

assertThat(out.toString()).contains("1 flow(s)");
}
}

@Test
void invalidWithNamespace() {
URL directory = FlowUpdatesCommandTest.class.getClassLoader().getResource("flows");
ByteArrayOutputStream out = new ByteArrayOutputStream();
System.setErr(new PrintStream(out));

try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, Environment.TEST)) {

EmbeddedServer embeddedServer = ctx.getBean(EmbeddedServer.class);
embeddedServer.start();

String[] args = {
"--plugins",
"/tmp", // pass this arg because it can cause failure
"--server",
embeddedServer.getURL().toString(),
"--user",
"myuser:pass:word",
"--namespace",
"io.kestra.cli",
"--delete",
directory.getPath(),
};
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);

assertThat(out.toString()).contains("Invalid entity: flow.namespace: main_io.kestra.outsider_quattro_-1 - flow namespace is invalid");
assertThat(out.toString(), containsString("1 flow(s)"));
}
}

@@ -158,8 +121,6 @@ class FlowUpdatesCommandTest {
embeddedServer.start();

String[] args = {
"--plugins",
"/tmp", // pass this arg because it can cause failure
"--server",
embeddedServer.getURL().toString(),
"--user",
@@ -169,8 +130,8 @@ class FlowUpdatesCommandTest {
};
Integer call = PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);

assertThat(call).isZero();
assertThat(out.toString()).contains("1 flow(s)");
assertThat(call, is(0));
assertThat(out.toString(), containsString("1 flow(s)"));
}
}
}

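The hunks above swap between two equivalent assertion styles: each pair of lines performs the same check once with AssertJ's fluent API and once with Hamcrest matchers. A minimal, self-contained sketch of the two styles side by side (hypothetical values; only standard AssertJ and Hamcrest calls, nothing kestra-specific):

import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.is;

class AssertionStyleSketch {
    void compare() {
        String out = "1 flow(s) successfully updated !"; // hypothetical captured CLI output
        Integer call = 0;                                // hypothetical exit code

        // AssertJ fluent style: assertThat(actual) followed by chained checks
        org.assertj.core.api.Assertions.assertThat(call).isZero();
        org.assertj.core.api.Assertions.assertThat(out).contains("successfully updated !");

        // Equivalent Hamcrest matcher style: assertThat(actual, matcher)
        org.hamcrest.MatcherAssert.assertThat(call, is(0));
        org.hamcrest.MatcherAssert.assertThat(out, containsString("successfully updated !"));
    }
}
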
@@ -7,7 +7,9 @@ import org.junit.jupiter.api.Test;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;

import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.Is.is;
import static org.hamcrest.core.StringContains.containsString;

class FlowValidateCommandTest {
@Test
@@ -22,8 +24,8 @@ class FlowValidateCommandTest {
};
Integer call = PicocliRunner.call(FlowValidateCommand.class, ctx, args);

assertThat(call).isZero();
assertThat(out.toString()).contains("✓ - io.kestra.cli / include");
assertThat(call, is(0));
assertThat(out.toString(), containsString("✓ - io.kestra.cli / include"));
}
}

@@ -39,10 +41,10 @@ class FlowValidateCommandTest {
};
Integer call = PicocliRunner.call(FlowValidateCommand.class, ctx, args);

assertThat(call).isZero();
assertThat(out.toString()).contains("✓ - system / warning");
assertThat(out.toString()).contains("⚠ - tasks[0] is deprecated");
assertThat(out.toString()).contains("ℹ - io.kestra.core.tasks.log.Log is replaced by io.kestra.plugin.core.log.Log");
assertThat(call, is(0));
assertThat(out.toString(), containsString("✓ - system / warning"));
assertThat(out.toString(), containsString("⚠ - tasks[0] is deprecated"));
assertThat(out.toString(), containsString("ℹ - io.kestra.core.tasks.log.Log is replaced by io.kestra.plugin.core.log.Log"));
}
}
}
@@ -10,13 +10,15 @@ import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.net.URL;

import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.StringContains.containsString;

public class SingleFlowCommandsTest {

class SingleFlowCommandsTest {

@Test
void all() {
URL flow = SingleFlowCommandsTest.class.getClassLoader().getResource("crudFlow/date.yml");
URL flow = SingleFlowCommandsTest.class.getClassLoader().getResource("flows/quattro.yml");
ByteArrayOutputStream out = new ByteArrayOutputStream();
System.setOut(new PrintStream(out));

@@ -25,6 +27,19 @@ class SingleFlowCommandsTest {
EmbeddedServer embeddedServer = ctx.getBean(EmbeddedServer.class);
embeddedServer.start();

String[] deleteArgs = {
"--server",
embeddedServer.getURL().toString(),
"--user",
"myuser:pass:word",
"io.kestra.outsider",
"quattro"
};
PicocliRunner.call(FlowDeleteCommand.class, ctx, deleteArgs);

assertThat(out.toString(), containsString("Flow successfully deleted !"));
out.reset();

String[] createArgs = {
"--server",
embeddedServer.getURL().toString(),
@@ -34,36 +49,23 @@ class SingleFlowCommandsTest {
};
PicocliRunner.call(FlowCreateCommand.class, ctx, createArgs);

assertThat(out.toString()).contains("Flow successfully created !");
assertThat(out.toString(), containsString("Flow successfully created !"));

out.reset();

String[] updateArgs = {
out.reset();String[] updateArgs = {
"--server",
embeddedServer.getURL().toString(),
"--user",
"myuser:pass:word",
flow.getPath(),
"io.kestra.cli",
"date"
"io.kestra.outsider",
"quattro"
};
PicocliRunner.call(FlowUpdateCommand.class, ctx, updateArgs);

assertThat(out.toString()).contains("Flow successfully updated !");

assertThat(out.toString(), containsString("Flow successfully updated !"));
out.reset();

String[] deleteArgs = {
"--server",
embeddedServer.getURL().toString(),
"--user",
"myuser:pass:word",
"io.kestra.cli",
"date"
};
PicocliRunner.call(FlowDeleteCommand.class, ctx, deleteArgs);

assertThat(out.toString()).contains("Flow successfully deleted !");
}
}

}

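The tests above capture the command's stdout in a ByteArrayOutputStream and call out.reset() between steps so each assertion only sees the output of the step just run. A minimal, runnable sketch of that capture-and-reset pattern using only the JDK (the printed strings are hypothetical stand-ins for the CLI messages asserted above; run with -ea to enable the assert statements):

import java.io.ByteArrayOutputStream;
import java.io.PrintStream;

class OutputCaptureSketch {
    public static void main(String[] args) {
        PrintStream original = System.out;
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        System.setOut(new PrintStream(out));               // redirect stdout into the buffer
        try {
            System.out.println("Flow successfully created !");
            assert out.toString().contains("successfully created");

            out.reset();                                    // drop what was captured so far

            System.out.println("Flow successfully deleted !");
            assert out.toString().contains("successfully deleted");
            assert !out.toString().contains("successfully created"); // earlier output is gone
        } finally {
            System.setOut(original);                        // always restore stdout
        }
    }
}

out.reset() clears the buffer without replacing the PrintStream, so the same redirected stream keeps writing into the emptied buffer.
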
@@ -10,7 +10,9 @@ import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.net.URL;

import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.Is.is;
import static org.hamcrest.core.StringContains.containsString;

class TemplateValidateCommandTest {
@Test
@@ -26,9 +28,9 @@ class TemplateValidateCommandTest {
};
Integer call = PicocliRunner.call(FlowValidateCommand.class, ctx, args);

assertThat(call).isEqualTo(1);
assertThat(out.toString()).contains("Unable to parse flow");
assertThat(out.toString()).contains("must not be empty");
assertThat(call, is(1));
assertThat(out.toString(), containsString("Unable to parse flow"));
assertThat(out.toString(), containsString("must not be empty"));
}
}

@@ -44,8 +46,6 @@ class TemplateValidateCommandTest {
embeddedServer.start();

String[] args = {
"--plugins",
"/tmp", // pass this arg because it can cause failure
"--server",
embeddedServer.getURL().toString(),
"--user",
@@ -54,9 +54,9 @@ class TemplateValidateCommandTest {
};
Integer call = PicocliRunner.call(FlowValidateCommand.class, ctx, args);

assertThat(call).isEqualTo(1);
assertThat(out.toString()).contains("Unable to parse flow");
assertThat(out.toString()).contains("must not be empty");
assertThat(call, is(1));
assertThat(out.toString(), containsString("Unable to parse flow"));
assertThat(out.toString(), containsString("must not be empty"));
}
}
}
@@ -7,7 +7,9 @@ import org.junit.jupiter.api.Test;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;

import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.core.Is.is;

class FlowNamespaceCommandTest {
@Test
@@ -19,8 +21,8 @@ class FlowNamespaceCommandTest {
String[] args = {};
Integer call = PicocliRunner.call(FlowNamespaceCommand.class, ctx, args);

assertThat(call).isZero();
assertThat(out.toString()).contains("Usage: kestra flow namespace");
assertThat(call, is(0));
assertThat(out.toString(), containsString("Usage: kestra flow namespace"));
}
}
}

@@ -10,7 +10,10 @@ import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.net.URL;

import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.core.StringContains.containsString;
import static org.hamcrest.core.Is.is;

class FlowNamespaceUpdateCommandTest {
@Test
@@ -36,7 +39,7 @@ class FlowNamespaceUpdateCommandTest {
};
PicocliRunner.call(FlowNamespaceUpdateCommand.class, ctx, args);

assertThat(out.toString()).contains("namespace 'io.kestra.cli' successfully updated");
assertThat(out.toString(), containsString("namespace 'io.kestra.cli' successfully updated"));
out.reset();

args = new String[]{
@@ -52,7 +55,7 @@ class FlowNamespaceUpdateCommandTest {
PicocliRunner.call(FlowNamespaceUpdateCommand.class, ctx, args);

// 2 delete + 1 update
assertThat(out.toString()).contains("namespace 'io.kestra.cli' successfully updated");
assertThat(out.toString(), containsString("namespace 'io.kestra.cli' successfully updated"));
}
}

@@ -78,9 +81,9 @@ class FlowNamespaceUpdateCommandTest {
};
Integer call = PicocliRunner.call(FlowNamespaceUpdateCommand.class, ctx, args);

assertThat(call).isEqualTo(1);
assertThat(out.toString()).contains("Unable to parse flows");
assertThat(out.toString()).contains("must not be empty");
assertThat(call, is(1));
assertThat(out.toString(), containsString("Unable to parse flows"));
assertThat(out.toString(), containsString("must not be empty"));
}
}

@@ -108,7 +111,7 @@ class FlowNamespaceUpdateCommandTest {
};
PicocliRunner.call(FlowNamespaceUpdateCommand.class, ctx, args);

assertThat(out.toString()).contains("3 flow(s)");
assertThat(out.toString(), containsString("3 flow(s)"));
out.reset();

// no "delete" arg should behave as no-delete
@@ -122,7 +125,7 @@ class FlowNamespaceUpdateCommandTest {
};
PicocliRunner.call(FlowNamespaceUpdateCommand.class, ctx, args);

assertThat(out.toString()).contains("1 flow(s)");
assertThat(out.toString(), containsString("1 flow(s)"));
out.reset();

args = new String[]{
@@ -136,7 +139,7 @@ class FlowNamespaceUpdateCommandTest {
};
PicocliRunner.call(FlowNamespaceUpdateCommand.class, ctx, args);

assertThat(out.toString()).contains("1 flow(s)");
assertThat(out.toString(), containsString("1 flow(s)"));
}
}

@@ -162,8 +165,8 @@ class FlowNamespaceUpdateCommandTest {
};
Integer call = PicocliRunner.call(FlowNamespaceUpdateCommand.class, ctx, args);

assertThat(call).isZero();
assertThat(out.toString()).contains("1 flow(s)");
assertThat(call, is(0));
assertThat(out.toString(), containsString("1 flow(s)"));
}
}

@@ -192,8 +195,8 @@ class FlowNamespaceUpdateCommandTest {
};
PicocliRunner.call(FlowNamespaceUpdateCommand.class, ctx, args);

assertThat(out.toString()).contains("io.kestra.override");
assertThat(out.toString()).doesNotContain("io.kestra.cli");
assertThat(out.toString(), containsString("io.kestra.override"));
assertThat(out.toString(), not(containsString("io.kestra.cli")));

}
}

@@ -7,7 +7,9 @@ import org.junit.jupiter.api.Test;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;

import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.core.Is.is;

class NamespaceCommandTest {
@Test
@@ -19,8 +21,8 @@ class NamespaceCommandTest {
String[] args = {};
Integer call = PicocliRunner.call(NamespaceCommand.class, ctx, args);

assertThat(call).isZero();
assertThat(out.toString()).contains("Usage: kestra namespace");
assertThat(call, is(0));
assertThat(out.toString(), containsString("Usage: kestra namespace"));
}
}
}

@@ -7,7 +7,9 @@ import org.junit.jupiter.api.Test;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;

import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.core.Is.is;

class NamespaceFilesCommandTest {
@Test
@@ -19,8 +21,8 @@ class NamespaceFilesCommandTest {
String[] args = {};
Integer call = PicocliRunner.call(NamespaceFilesCommand.class, ctx, args);

assertThat(call).isZero();
assertThat(out.toString()).contains("Usage: kestra namespace files");
assertThat(call, is(0));
assertThat(out.toString(), containsString("Usage: kestra namespace files"));
}
}
}

@@ -14,8 +14,8 @@ import java.net.URISyntaxException;
import java.net.URL;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.core.StringContains.containsString;

class NamespaceFilesUpdateCommandTest {
@Test
@@ -31,8 +31,6 @@ class NamespaceFilesUpdateCommandTest {

String to = "/some/directory";
String[] args = {
"--plugins",
"/tmp", // pass this arg because it can cause failure
"--server",
embeddedServer.getURL().toString(),
"--user",
@@ -63,8 +61,6 @@ class NamespaceFilesUpdateCommandTest {
embeddedServer.start();

String[] args = {
"--plugins",
"/tmp", // pass this arg because it can cause failure
"--server",
embeddedServer.getURL().toString(),
"--user",
@@ -94,8 +90,6 @@ class NamespaceFilesUpdateCommandTest {
embeddedServer.start();

String[] args = {
"--plugins",
"/tmp", // pass this arg because it can cause failure
"--server",
embeddedServer.getURL().toString(),
"--user",

@@ -7,7 +7,9 @@ import org.junit.jupiter.api.Test;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;

import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.core.Is.is;

class KvCommandTest {
@Test
@@ -19,8 +21,8 @@ class KvCommandTest {
String[] args = {};
Integer call = PicocliRunner.call(KvCommand.class, ctx, args);

assertThat(call).isZero();
assertThat(out.toString()).contains("Usage: kestra namespace kv");
assertThat(call, is(0));
assertThat(out.toString(), containsString("Usage: kestra namespace kv"));
}
}
}

@@ -16,8 +16,8 @@ import java.io.IOException;
import java.nio.file.Files;
import java.util.Map;

import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;

class KvUpdateCommandTest {
@Test
@@ -28,8 +28,6 @@ class KvUpdateCommandTest {
embeddedServer.start();

String[] args = {
"--plugins",
"/tmp", // pass this arg because it can cause failure
"--server",
embeddedServer.getURL().toString(),
"--user",
@@ -41,10 +39,10 @@ class KvUpdateCommandTest {
PicocliRunner.call(KvUpdateCommand.class, ctx, args);

KVStoreService kvStoreService = ctx.getBean(KVStoreService.class);
KVStore kvStore = kvStoreService.get(MAIN_TENANT, "io.kestra.cli", null);
KVStore kvStore = kvStoreService.get(null, "io.kestra.cli", null);

assertThat(kvStore.getValue("string").get()).isEqualTo(new KVValue("stringValue"));
assertThat(((InternalKVStore) kvStore).getRawValue("string").get()).isEqualTo("\"stringValue\"");
assertThat(kvStore.getValue("string").get(), is(new KVValue("stringValue")));
assertThat(((InternalKVStore)kvStore).getRawValue("string").get(), is("\"stringValue\""));
}
}

@@ -56,8 +54,6 @@ class KvUpdateCommandTest {
embeddedServer.start();

String[] args = {
"--plugins",
"/tmp", // pass this arg because it can cause failure
"--server",
embeddedServer.getURL().toString(),
"--user",
@@ -69,10 +65,10 @@ class KvUpdateCommandTest {
PicocliRunner.call(KvUpdateCommand.class, ctx, args);

KVStoreService kvStoreService = ctx.getBean(KVStoreService.class);
KVStore kvStore = kvStoreService.get(MAIN_TENANT, "io.kestra.cli", null);
KVStore kvStore = kvStoreService.get(null, "io.kestra.cli", null);

assertThat(kvStore.getValue("int").get()).isEqualTo(new KVValue(1));
assertThat(((InternalKVStore) kvStore).getRawValue("int").get()).isEqualTo("1");
assertThat(kvStore.getValue("int").get(), is(new KVValue(1)));
assertThat(((InternalKVStore)kvStore).getRawValue("int").get(), is("1"));
}
}

@@ -84,8 +80,6 @@ class KvUpdateCommandTest {
embeddedServer.start();

String[] args = {
"--plugins",
"/tmp", // pass this arg because it can cause failure
"--server",
embeddedServer.getURL().toString(),
"--user",
@@ -99,10 +93,10 @@ class KvUpdateCommandTest {
PicocliRunner.call(KvUpdateCommand.class, ctx, args);

KVStoreService kvStoreService = ctx.getBean(KVStoreService.class);
KVStore kvStore = kvStoreService.get(MAIN_TENANT, "io.kestra.cli", null);
KVStore kvStore = kvStoreService.get(null, "io.kestra.cli", null);

assertThat(kvStore.getValue("intStr").get()).isEqualTo(new KVValue("1"));
assertThat(((InternalKVStore) kvStore).getRawValue("intStr").get()).isEqualTo("\"1\"");
assertThat(kvStore.getValue("intStr").get(), is(new KVValue("1")));
assertThat(((InternalKVStore)kvStore).getRawValue("intStr").get(), is("\"1\""));
}
}

@@ -114,8 +108,6 @@ class KvUpdateCommandTest {
embeddedServer.start();

String[] args = {
"--plugins",
"/tmp", // pass this arg because it can cause failure
"--server",
embeddedServer.getURL().toString(),
"--user",
@@ -127,10 +119,10 @@ class KvUpdateCommandTest {
PicocliRunner.call(KvUpdateCommand.class, ctx, args);

KVStoreService kvStoreService = ctx.getBean(KVStoreService.class);
KVStore kvStore = kvStoreService.get(MAIN_TENANT, "io.kestra.cli", null);
KVStore kvStore = kvStoreService.get(null, "io.kestra.cli", null);

assertThat(kvStore.getValue("object").get()).isEqualTo(new KVValue(Map.of("some", "json")));
assertThat(((InternalKVStore) kvStore).getRawValue("object").get()).isEqualTo("{some:\"json\"}");
assertThat(kvStore.getValue("object").get(), is(new KVValue(Map.of("some", "json"))));
assertThat(((InternalKVStore)kvStore).getRawValue("object").get(), is("{some:\"json\"}"));
}
}

@@ -142,8 +134,6 @@ class KvUpdateCommandTest {
embeddedServer.start();

String[] args = {
"--plugins",
"/tmp", // pass this arg because it can cause failure
"--server",
embeddedServer.getURL().toString(),
"--user",
@@ -157,10 +147,10 @@ class KvUpdateCommandTest {
PicocliRunner.call(KvUpdateCommand.class, ctx, args);

KVStoreService kvStoreService = ctx.getBean(KVStoreService.class);
KVStore kvStore = kvStoreService.get(MAIN_TENANT, "io.kestra.cli", null);
KVStore kvStore = kvStoreService.get(null, "io.kestra.cli", null);

assertThat(kvStore.getValue("objectStr").get()).isEqualTo(new KVValue("{\"some\":\"json\"}"));
assertThat(((InternalKVStore) kvStore).getRawValue("objectStr").get()).isEqualTo("\"{\\\"some\\\":\\\"json\\\"}\"");
assertThat(kvStore.getValue("objectStr").get(), is(new KVValue("{\"some\":\"json\"}")));
assertThat(((InternalKVStore)kvStore).getRawValue("objectStr").get(), is("\"{\\\"some\\\":\\\"json\\\"}\""));
}
}

@@ -177,8 +167,6 @@ class KvUpdateCommandTest {
Files.write(file.toPath(), "{\"some\":\"json\",\"from\":\"file\"}".getBytes());

String[] args = {
"--plugins",
"/tmp", // pass this arg because it can cause failure
"--server",
embeddedServer.getURL().toString(),
"--user",
@@ -191,10 +179,10 @@ class KvUpdateCommandTest {
PicocliRunner.call(KvUpdateCommand.class, ctx, args);

KVStoreService kvStoreService = ctx.getBean(KVStoreService.class);
KVStore kvStore = kvStoreService.get(MAIN_TENANT, "io.kestra.cli", null);
KVStore kvStore = kvStoreService.get(null, "io.kestra.cli", null);

assertThat(kvStore.getValue("objectFromFile").get()).isEqualTo(new KVValue(Map.of("some", "json", "from", "file")));
assertThat(((InternalKVStore) kvStore).getRawValue("objectFromFile").get()).isEqualTo("{some:\"json\",from:\"file\"}");
assertThat(kvStore.getValue("objectFromFile").get(), is(new KVValue(Map.of("some", "json", "from", "file"))));
assertThat(((InternalKVStore)kvStore).getRawValue("objectFromFile").get(), is("{some:\"json\",from:\"file\"}"));
}
}
}
Some files were not shown because too many files have changed in this diff.