Compare commits

1 commit

Loïc Mathieu · 181460e5d2 · fix(system): possible NPE on trigger when computing variables · 2025-06-23 17:39:03 +02:00

883 changed files with 36,837 additions and 57,079 deletions

View File

@@ -24,9 +24,12 @@ In the meantime, you can move onto the next step...
---
### Development:
- (Optional) By default, your dev server will target `localhost:8080`. If your backend is running elsewhere, you can create `.env.development.local` under `ui` folder with this content:
```
VITE_APP_API_URL={myApiUrl}
- Create a `.env.development.local` file in the `ui` folder and paste the following:
```bash
# This lets the frontend know what the backend URL is but you are free to change this to your actual server URL e.g. hosted version of Kestra.
VITE_APP_API_URL=http://localhost:8080
```
- Navigate into the `ui` folder and run `npm install` to install the dependencies for the frontend project.
@@ -71,6 +74,9 @@ kestra:
path: /tmp/kestra-wd/tmp
anonymous-usage-report:
enabled: false
server:
basic-auth:
enabled: false
datasources:
postgres:

View File

@@ -80,6 +80,7 @@ python3 -m pip install virtualenv
The frontend is made with [Vue.js](https://vuejs.org/) and located in the `/ui` folder.
- `npm install`
- create a file `ui/.env.development.local` with content `VITE_APP_API_URL=http://localhost:8080` (or your actual server url)
- `npm run dev` will start the development server with hot reload.
- The server starts by default on port 5173 and is reachable at `http://localhost:5173`
- You can run `npm run build` to build the front-end so that it is delivered from the backend (without running `npm run dev` above).
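
Putting these steps together, a minimal sketch of the frontend setup, assuming the backend already listens on `http://localhost:8080`:

```bash
# From the repository root: configure, install, and start the UI dev server.
cd ui
echo "VITE_APP_API_URL=http://localhost:8080" > .env.development.local  # point the UI at the backend
npm install   # install frontend dependencies
npm run dev   # dev server with hot reload on http://localhost:5173

# Alternatively, build the assets so the backend can serve them itself:
# npm run build
```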

View File

@@ -2,7 +2,7 @@ name: Auto-Translate UI keys and create PR
on:
schedule:
- cron: "0 9-21/3 * * *" # Every 3 hours from 9 AM to 9 PM
- cron: "0 9-21 * * *" # Every hour from 9 AM to 9 PM
workflow_dispatch:
inputs:
retranslate_modified_keys:
@@ -20,7 +20,7 @@ jobs:
runs-on: ubuntu-latest
timeout-minutes: 10
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v4
name: Checkout
with:
fetch-depth: 0
@@ -43,6 +43,9 @@ jobs:
with:
node-version: "20.x"
- name: Check keys matching
run: node ui/src/translations/check.js
- name: Set up Git
run: |
git config --global user.name "GitHub Action"
@@ -61,7 +64,4 @@ jobs:
fi
git commit -m "chore(core): localize to languages other than english" -m "Extended localization support by adding translations for multiple languages using English as the base. This enhances accessibility and usability for non-English-speaking users while keeping English as the source reference."
git push -u origin $BRANCH_NAME || (git push origin --delete $BRANCH_NAME && git push -u origin $BRANCH_NAME)
gh pr create --title "Translations from en.json" --body $'This PR was created automatically by a GitHub Action.\n\nSomeone from the @kestra-io/frontend team needs to review and merge.' --base ${{ github.ref_name }} --head $BRANCH_NAME
- name: Check keys matching
run: node ui/src/translations/check.js
gh pr create --title "Translations from en.json" --body "This PR was created automatically by a GitHub Action." --base develop --head $BRANCH_NAME --assignee anna-geller --reviewer anna-geller

View File

@@ -27,7 +27,7 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@v5
uses: actions/checkout@v4
with:
# We must fetch at least the immediate parents so that if this is
# a pull request then we can checkout the head.

.github/workflows/docker.yml (new file)
View File

@@ -0,0 +1,147 @@
name: Create Docker images on Release
on:
workflow_dispatch:
inputs:
retag-latest:
description: 'Retag latest Docker images'
required: true
type: string
default: "true"
options:
- "true"
- "false"
release-tag:
description: 'Kestra Release Tag'
required: false
type: string
plugin-version:
description: 'Plugin version'
required: false
type: string
default: "LATEST"
env:
PLUGIN_VERSION: ${{ github.event.inputs.plugin-version != null && github.event.inputs.plugin-version || 'LATEST' }}
jobs:
plugins:
name: List Plugins
runs-on: ubuntu-latest
outputs:
plugins: ${{ steps.plugins.outputs.plugins }}
steps:
# Checkout
- uses: actions/checkout@v4
# Get Plugins List
- name: Get Plugins List
uses: ./.github/actions/plugins-list
id: plugins
with:
plugin-version: ${{ env.PLUGIN_VERSION }}
docker:
name: Publish Docker
needs: [ plugins ]
runs-on: ubuntu-latest
strategy:
matrix:
image:
- name: "-no-plugins"
plugins: ""
packages: jattach
python-libs: ""
- name: ""
plugins: ${{needs.plugins.outputs.plugins}}
packages: python3 python-is-python3 python3-pip curl jattach
python-libs: kestra
steps:
- uses: actions/checkout@v4
# Vars
- name: Set image name
id: vars
run: |
if [[ "${{ inputs.release-tag }}" == "" ]]; then
TAG=${GITHUB_REF#refs/*/}
echo "tag=${TAG}" >> $GITHUB_OUTPUT
else
TAG="${{ inputs.release-tag }}"
echo "tag=${TAG}" >> $GITHUB_OUTPUT
fi
if [[ "${{ env.PLUGIN_VERSION }}" == *"-SNAPSHOT" ]]; then
echo "plugins=--repositories=https://s01.oss.sonatype.org/content/repositories/snapshots ${{ matrix.image.plugins }}" >> $GITHUB_OUTPUT;
else
echo "plugins=${{ matrix.image.plugins }}" >> $GITHUB_OUTPUT
fi
# Download release
- name: Download release
uses: robinraju/release-downloader@v1.12
with:
tag: ${{steps.vars.outputs.tag}}
fileName: 'kestra-*'
out-file-path: build/executable
- name: Copy exe to image
run: |
cp build/executable/* docker/app/kestra && chmod +x docker/app/kestra
# Docker setup
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Docker - Fix Qemu
shell: bash
run: |
docker run --rm --privileged multiarch/qemu-user-static --reset -p yes -c yes
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
# Docker Login
- name: Login to DockerHub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_PASSWORD }}
# Docker Build and push
- name: Push to Docker Hub
uses: docker/build-push-action@v6
with:
context: .
push: true
tags: ${{ format('kestra/kestra:{0}{1}', steps.vars.outputs.tag, matrix.image.name) }}
platforms: linux/amd64,linux/arm64
build-args: |
KESTRA_PLUGINS=${{ steps.vars.outputs.plugins }}
APT_PACKAGES=${{ matrix.image.packages }}
PYTHON_LIBRARIES=${{ matrix.image.python-libs }}
- name: Install regctl
if: github.event.inputs.retag-latest == 'true'
uses: regclient/actions/regctl-installer@main
- name: Retag to latest
if: github.event.inputs.retag-latest == 'true'
run: |
regctl image copy ${{ format('kestra/kestra:{0}{1}', steps.vars.outputs.tag, matrix.image.name) }} ${{ format('kestra/kestra:latest{0}', matrix.image.name) }}
end:
runs-on: ubuntu-latest
needs:
- docker
if: always()
env:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
steps:
# Slack
- name: Slack notification
uses: Gamesight/slack-workflow-status@master
if: ${{ always() && env.SLACK_WEBHOOK_URL != 0 }}
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
slack_webhook_url: ${{ secrets.SLACK_WEBHOOK_URL }}
name: GitHub Actions
icon_emoji: ':github-actions:'
channel: 'C02DQ1A7JLR' # _int_git channel

View File

@@ -19,7 +19,7 @@ on:
default: "no input"
jobs:
check:
timeout-minutes: 15
timeout-minutes: 10
runs-on: ubuntu-latest
env:
GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
@@ -32,19 +32,10 @@ jobs:
password: ${{ github.token }}
- name: Checkout kestra
uses: actions/checkout@v5
uses: actions/checkout@v4
with:
path: kestra
# Setup build
- uses: kestra-io/actions/.github/actions/setup-build@main
name: Setup - Build
id: build
with:
java-enabled: true
node-enabled: true
python-enabled: true
- name: Install Npm dependencies
run: |
cd kestra/ui
@@ -53,8 +44,8 @@ jobs:
- name: Run E2E Tests
run: |
cd kestra
sh build-and-start-e2e-tests.sh
cd kestra/ui
npm run test:e2e
- name: Upload Playwright Report as Github artifact
# 'With this report, you can analyze locally the results of the tests. see https://playwright.dev/docs/ci-intro#html-report'
@@ -62,7 +53,7 @@ jobs:
if: ${{ !cancelled() }}
with:
name: playwright-report
path: kestra/ui/playwright-report/
path: kestra/playwright-report/
retention-days: 7
# Allure check
# TODO I don't know what it should do
@@ -83,4 +74,4 @@ jobs:
# baseUrl: "https://internal.dev.kestra.io"
# prefix: ${{ format('{0}/{1}', github.repository, 'allure/java') }}
# copyLatest: true
# ignoreMissingResults: true
# ignoreMissingResults: true

View File

@@ -21,12 +21,12 @@ jobs:
runs-on: ubuntu-latest
steps:
# Checkout
- uses: actions/checkout@v5
- uses: actions/checkout@v4
with:
fetch-depth: 0
# Checkout GitHub Actions
- uses: actions/checkout@v5
- uses: actions/checkout@v4
with:
repository: kestra-io/actions
path: actions

View File

@@ -33,13 +33,13 @@ jobs:
exit 1;
fi
# Checkout
- uses: actions/checkout@v5
- uses: actions/checkout@v4
with:
fetch-depth: 0
path: kestra
# Checkout GitHub Actions
- uses: actions/checkout@v5
- uses: actions/checkout@v4
with:
repository: kestra-io/actions
path: actions

View File

@@ -4,8 +4,9 @@ on:
workflow_dispatch:
inputs:
plugin-version:
description: "plugins version"
required: false
description: "Kestra version"
default: 'LATEST'
required: true
type: string
push:
branches:
@@ -33,7 +34,7 @@ jobs:
if: "!startsWith(github.ref, 'refs/heads/releases')"
uses: ./.github/workflows/workflow-release.yml
with:
plugin-version: ${{ inputs.plugin-version != '' && inputs.plugin-version || (github.ref == 'refs/heads/develop' && 'LATEST-SNAPSHOT' || 'LATEST') }}
plugin-version: ${{ github.event.inputs.plugin-version != null && github.event.inputs.plugin-version || 'LATEST' }}
secrets:
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }}
@@ -42,8 +43,7 @@ jobs:
SONATYPE_GPG_KEYID: ${{ secrets.SONATYPE_GPG_KEYID }}
SONATYPE_GPG_PASSWORD: ${{ secrets.SONATYPE_GPG_PASSWORD }}
SONATYPE_GPG_FILE: ${{ secrets.SONATYPE_GPG_FILE }}
GH_PERSONAL_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}
SLACK_RELEASES_WEBHOOK_URL: ${{ secrets.SLACK_RELEASES_WEBHOOK_URL }}
end:
runs-on: ubuntu-latest
needs:

View File

@@ -56,10 +56,6 @@ jobs:
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
e2e-tests:
name: E2E - Tests
uses: ./.github/workflows/e2e.yml
end:
name: End
runs-on: ubuntu-latest

View File

@@ -17,7 +17,7 @@ jobs:
runs-on: ubuntu-latest
steps:
# Checkout
- uses: actions/checkout@v5
- uses: actions/checkout@v4
with:
fetch-depth: 0

View File

@@ -34,7 +34,7 @@ jobs:
fi
# Checkout
- uses: actions/checkout@v5
- uses: actions/checkout@v4
with:
fetch-depth: 0

View File

@@ -17,12 +17,12 @@ jobs:
runs-on: ubuntu-latest
steps:
# Checkout
- uses: actions/checkout@v5
- uses: actions/checkout@v4
with:
fetch-depth: 0
# Checkout GitHub Actions
- uses: actions/checkout@v5
- uses: actions/checkout@v4
with:
repository: kestra-io/actions
path: actions
@@ -66,12 +66,12 @@ jobs:
actions: read
steps:
# Checkout
- uses: actions/checkout@v5
- uses: actions/checkout@v4
with:
fetch-depth: 0
# Checkout GitHub Actions
- uses: actions/checkout@v5
- uses: actions/checkout@v4
with:
repository: kestra-io/actions
path: actions
@@ -87,7 +87,7 @@ jobs:
# Run Trivy image scan for Docker vulnerabilities, see https://github.com/aquasecurity/trivy-action
- name: Docker Vulnerabilities Check
uses: aquasecurity/trivy-action@0.32.0
uses: aquasecurity/trivy-action@0.31.0
with:
image-ref: kestra/kestra:develop
format: 'template'
@@ -111,12 +111,12 @@ jobs:
actions: read
steps:
# Checkout
- uses: actions/checkout@v5
- uses: actions/checkout@v4
with:
fetch-depth: 0
# Checkout GitHub Actions
- uses: actions/checkout@v5
- uses: actions/checkout@v4
with:
repository: kestra-io/actions
path: actions
@@ -132,7 +132,7 @@ jobs:
# Run Trivy image scan for Docker vulnerabilities, see https://github.com/aquasecurity/trivy-action
- name: Docker Vulnerabilities Check
uses: aquasecurity/trivy-action@0.32.0
uses: aquasecurity/trivy-action@0.31.0
with:
image-ref: kestra/kestra:latest
format: table

View File

@@ -29,7 +29,7 @@ jobs:
GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v4
name: Checkout - Current ref
with:
fetch-depth: 0

View File

@@ -1,7 +1,23 @@
name: Build Artifacts
on:
workflow_call: {}
workflow_call:
inputs:
plugin-version:
description: "Kestra version"
default: 'LATEST'
required: true
type: string
outputs:
docker-tag:
value: ${{ jobs.build.outputs.docker-tag }}
description: "The Docker image Tag for Kestra"
docker-artifact-name:
value: ${{ jobs.build.outputs.docker-artifact-name }}
description: "The GitHub artifact containing the Kestra docker image name."
plugins:
value: ${{ jobs.build.outputs.plugins }}
description: "The Kestra plugins list used for the build."
jobs:
build:
@@ -15,7 +31,7 @@ jobs:
PLUGIN_VERSION: ${{ github.event.inputs.plugin-version != null && github.event.inputs.plugin-version || 'LATEST' }}
steps:
- name: Checkout - Current ref
uses: actions/checkout@v5
uses: actions/checkout@v4
with:
fetch-depth: 0
@@ -52,7 +68,7 @@ jobs:
if [[ $TAG = "master" || $TAG == v* ]]; then
echo "plugins=$PLUGINS" >> $GITHUB_OUTPUT
else
echo "plugins=--repositories=https://central.sonatype.com/repository/maven-snapshots/ $PLUGINS" >> $GITHUB_OUTPUT
echo "plugins=--repositories=https://s01.oss.sonatype.org/content/repositories/snapshots $PLUGINS" >> $GITHUB_OUTPUT
fi
# Build
@@ -66,6 +82,55 @@ jobs:
run: |
cp build/executable/* docker/app/kestra && chmod +x docker/app/kestra
# Docker Tag
- name: Setup - Docker vars
id: vars
shell: bash
run: |
TAG=${GITHUB_REF#refs/*/}
if [[ $TAG = "master" ]]
then
TAG="latest";
elif [[ $TAG = "develop" ]]
then
TAG="develop";
elif [[ $TAG = v* ]]
then
TAG="${TAG}";
else
TAG="build-${{ github.run_id }}";
fi
echo "tag=${TAG}" >> $GITHUB_OUTPUT
echo "artifact=docker-kestra-${TAG}" >> $GITHUB_OUTPUT
# Docker setup
- name: Docker - Setup QEMU
uses: docker/setup-qemu-action@v3
- name: Docker - Fix Qemu
shell: bash
run: |
docker run --rm --privileged multiarch/qemu-user-static --reset -p yes -c yes
- name: Docker - Setup Buildx
uses: docker/setup-buildx-action@v3
# Docker Build
- name: Docker - Build & export image
uses: docker/build-push-action@v6
if: "!startsWith(github.ref, 'refs/tags/v')"
with:
context: .
push: false
file: Dockerfile
tags: |
kestra/kestra:${{ steps.vars.outputs.tag }}
build-args: |
KESTRA_PLUGINS=${{ steps.plugins.outputs.plugins }}
APT_PACKAGES=${{ env.DOCKER_APT_PACKAGES }}
PYTHON_LIBRARIES=${{ env.DOCKER_PYTHON_LIBRARIES }}
outputs: type=docker,dest=/tmp/${{ steps.vars.outputs.artifact }}.tar
# Upload artifacts
- name: Artifacts - Upload JAR
uses: actions/upload-artifact@v4
@@ -78,3 +143,10 @@ jobs:
with:
name: exe
path: build/executable/
- name: Artifacts - Upload Docker
uses: actions/upload-artifact@v4
if: "!startsWith(github.ref, 'refs/tags/v')"
with:
name: ${{ steps.vars.outputs.artifact }}
path: /tmp/${{ steps.vars.outputs.artifact }}.tar

View File

@@ -20,7 +20,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v5
uses: actions/checkout@v4
- name: Cache Node Modules
id: cache-node-modules
@@ -39,6 +39,7 @@ jobs:
key: playwright-${{ hashFiles('ui/package-lock.json') }}
- name: Npm - install
shell: bash
if: steps.cache-node-modules.outputs.cache-hit != 'true'
working-directory: ui
run: npm ci
@@ -51,20 +52,35 @@ jobs:
workdir: ui
- name: Npm - Run build
shell: bash
working-directory: ui
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
run: npm run build
- name: Run front-end unit tests
working-directory: ui
run: npm run test:unit -- --coverage
- name: Storybook - Install Playwright
shell: bash
working-directory: ui
if: steps.cache-playwright.outputs.cache-hit != 'true'
run: npx playwright install --with-deps
- name: Run storybook component tests
- name: Run front-end unit tests
shell: bash
working-directory: ui
run: npm run test:storybook -- --coverage
run: npm run test:cicd
- name: Codecov - Upload coverage reports
uses: codecov/codecov-action@v5
if: ${{ !cancelled() && github.event.pull_request.head.repo.full_name == github.repository }}
continue-on-error: true
with:
token: ${{ secrets.CODECOV_TOKEN }}
flags: frontend
- name: Codecov - Upload test results
uses: codecov/test-results-action@v1
if: ${{ !cancelled() }}
continue-on-error: true
with:
token: ${{ secrets.CODECOV_TOKEN && github.event.pull_request.head.repo.full_name == github.repository }}
flags: frontend

View File

@@ -1,17 +1,14 @@
name: Github - Release
on:
workflow_dispatch:
workflow_call:
secrets:
GH_PERSONAL_TOKEN:
description: "The Github personal token."
required: true
SLACK_RELEASES_WEBHOOK_URL:
description: "The Slack webhook URL."
required: true
push:
tags:
- '*'
jobs:
publish:
@@ -20,14 +17,14 @@ jobs:
steps:
# Check out
- name: Checkout - Repository
uses: actions/checkout@v5
uses: actions/checkout@v4
with:
fetch-depth: 0
submodules: true
# Checkout GitHub Actions
- name: Checkout - Actions
uses: actions/checkout@v5
uses: actions/checkout@v4
with:
repository: kestra-io/actions
sparse-checkout-cone-mode: true
@@ -38,31 +35,18 @@ jobs:
# Download Exec
# Must be done after checkout actions
- name: Artifacts - Download executable
uses: actions/download-artifact@v5
uses: actions/download-artifact@v4
if: startsWith(github.ref, 'refs/tags/v')
with:
name: exe
path: build/executable
- name: Check if current tag is latest
id: is_latest
run: |
latest_tag=$(git tag | grep -E '^v[0-9]+\.[0-9]+\.[0-9]+$' | sed 's/^v//' | sort -V | tail -n1)
current_tag="${GITHUB_REF_NAME#v}"
if [ "$current_tag" = "$latest_tag" ]; then
echo "latest=true" >> $GITHUB_OUTPUT
else
echo "latest=false" >> $GITHUB_OUTPUT
fi
env:
GITHUB_REF_NAME: ${{ github.ref_name }}
# GitHub Release
- name: Create GitHub release
uses: ./actions/.github/actions/github-release
if: ${{ startsWith(github.ref, 'refs/tags/v') }}
env:
MAKE_LATEST: ${{ steps.is_latest.outputs.latest }}
GITHUB_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}
SLACK_RELEASES_WEBHOOK_URL: ${{ secrets.SLACK_RELEASES_WEBHOOK_URL }}
@@ -78,11 +62,4 @@ jobs:
"new_version": "${{ github.ref_name }}",
"github_repository": "${{ github.repository }}",
"github_actor": "${{ github.actor }}"
}
- name: Merge Release Notes
if: ${{ startsWith(github.ref, 'refs/tags/v') }}
uses: ./actions/.github/actions/github-release-note-merge
env:
GITHUB_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}
RELEASE_TAG: ${{ github.ref_name }}
}

View File

@@ -1,37 +1,22 @@
name: Create Docker images on Release
name: Publish - Docker
on:
workflow_dispatch:
inputs:
retag-latest:
description: 'Retag latest Docker images'
required: true
type: choice
default: "false"
options:
- "true"
- "false"
release-tag:
description: 'Kestra Release Tag (by default, deduced with the ref)'
required: false
type: string
plugin-version:
description: 'Plugin version'
description: "Kestra version"
default: 'LATEST'
required: false
type: string
default: "LATEST"
force-download-artifact:
description: 'Force download artifact'
required: false
type: choice
type: string
default: "true"
options:
- "true"
- "false"
workflow_call:
inputs:
plugin-version:
description: "Plugin version"
description: "Kestra version"
default: 'LATEST'
required: false
type: string
@@ -48,93 +33,47 @@ on:
description: "The Dockerhub password."
required: true
env:
PLUGIN_VERSION: ${{ inputs.plugin-version != null && inputs.plugin-version || 'LATEST' }}
jobs:
plugins:
name: List Plugins
runs-on: ubuntu-latest
outputs:
plugins: ${{ steps.plugins.outputs.plugins }}
steps:
# Checkout
- uses: actions/checkout@v5
# Get Plugins List
- name: Get Plugins List
uses: ./.github/actions/plugins-list
id: plugins
with: # remap LATEST-SNAPSHOT to LATEST
plugin-version: ${{ env.PLUGIN_VERSION == 'LATEST-SNAPSHOT' && 'LATEST' || env.PLUGIN_VERSION }}
# ********************************************************************************************************************
# Build
# ********************************************************************************************************************
build-artifacts:
name: Build Artifacts
if: ${{ inputs.force-download-artifact == 'true' }}
if: ${{ github.event.inputs.force-download-artifact == 'true' }}
uses: ./.github/workflows/workflow-build-artifacts.yml
docker:
name: Publish Docker
needs: [ plugins, build-artifacts ]
if: always()
with:
plugin-version: ${{ github.event.inputs.plugin-version != null && github.event.inputs.plugin-version || 'LATEST' }}
# ********************************************************************************************************************
# Docker
# ********************************************************************************************************************
publish:
name: Publish - Docker
runs-on: ubuntu-latest
needs: build-artifacts
if: |
always() &&
(needs.build-artifacts.result == 'success' ||
github.event.inputs.force-download-artifact != 'true')
env:
PLUGIN_VERSION: ${{ github.event.inputs.plugin-version != null && github.event.inputs.plugin-version || 'LATEST' }}
strategy:
matrix:
image:
- name: "-no-plugins"
plugins: ""
- tag: -no-plugins
packages: jattach
python-libs: ""
- name: ""
plugins: ${{needs.plugins.outputs.plugins}}
packages: python3 python-is-python3 python3-pip curl jattach
python-libs: kestra
plugins: false
python-libraries: ""
- tag: ""
plugins: true
packages: python3 python3-venv python-is-python3 python3-pip nodejs npm curl zip unzip jattach
python-libraries: kestra
steps:
- uses: actions/checkout@v5
# Vars
- name: Set image name
id: vars
run: |
if [[ "${{ inputs.release-tag }}" == "" ]]; then
TAG=${GITHUB_REF#refs/*/}
echo "tag=${TAG}" >> $GITHUB_OUTPUT
else
TAG="${{ inputs.release-tag }}"
echo "tag=${TAG}" >> $GITHUB_OUTPUT
fi
if [[ $GITHUB_REF == refs/tags/* ]]; then
if [[ $TAG =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
# this will remove the patch version number
MINOR_SEMVER=${TAG%.*}
echo "minor_semver=${MINOR_SEMVER}" >> $GITHUB_OUTPUT
else
echo "Tag '$TAG' is not a valid semver (vMAJOR.MINOR.PATCH), skipping minor_semver"
fi
fi
if [[ "${{ env.PLUGIN_VERSION }}" == *"-SNAPSHOT" ]]; then
echo "plugins=--repositories=https://central.sonatype.com/repository/maven-snapshots/ ${{ matrix.image.plugins }}" >> $GITHUB_OUTPUT;
else
echo "plugins=${{ matrix.image.plugins }}" >> $GITHUB_OUTPUT
fi
# Download executable from artifact
- name: Artifacts - Download executable
uses: actions/download-artifact@v5
with:
name: exe
path: build/executable
- name: Copy exe to image
run: |
cp build/executable/* docker/app/kestra && chmod +x docker/app/kestra
- name: Checkout - Current ref
uses: actions/checkout@v4
# Docker setup
- name: Set up QEMU
- name: Docker - Setup QEMU
uses: docker/setup-qemu-action@v3
- name: Docker - Fix Qemu
@@ -142,59 +81,66 @@ jobs:
run: |
docker run --rm --privileged multiarch/qemu-user-static --reset -p yes -c yes
- name: Set up Docker Buildx
- name: Docker - Setup Docker Buildx
uses: docker/setup-buildx-action@v3
# Docker Login
- name: Login to DockerHub
- name: Docker - Login to DockerHub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_PASSWORD }}
# # Get Plugins List
- name: Plugins - Get List
uses: ./.github/actions/plugins-list
id: plugins-list
if: ${{ matrix.image.plugins}}
with:
plugin-version: ${{ env.PLUGIN_VERSION }}
# Vars
- name: Docker - Set variables
shell: bash
id: vars
run: |
TAG=${GITHUB_REF#refs/*/}
PLUGINS="${{ matrix.image.plugins == true && steps.plugins-list.outputs.plugins || '' }}"
if [[ $TAG == v* ]]; then
TAG="${TAG}";
echo "plugins=${{ matrix.image.plugins }}" >> $GITHUB_OUTPUT
elif [[ $TAG = "develop" ]]; then
TAG="develop";
echo "plugins=--repositories=https://s01.oss.sonatype.org/content/repositories/snapshots $PLUGINS" >> $GITHUB_OUTPUT
else
TAG="build-${{ github.run_id }}";
echo "plugins=--repositories=https://s01.oss.sonatype.org/content/repositories/snapshots $PLUGINS" >> $GITHUB_OUTPUT
fi
echo "tag=${TAG}${{ matrix.image.tag }}" >> $GITHUB_OUTPUT
# Build Docker Image
- name: Artifacts - Download executable
uses: actions/download-artifact@v4
with:
name: exe
path: build/executable
- name: Docker - Copy exe to image
shell: bash
run: |
cp build/executable/* docker/app/kestra && chmod +x docker/app/kestra
# Docker Build and push
- name: Push to Docker Hub
- name: Docker - Build image
uses: docker/build-push-action@v6
with:
context: .
push: true
tags: ${{ format('kestra/kestra:{0}{1}', steps.vars.outputs.tag, matrix.image.name) }}
tags: kestra/kestra:${{ steps.vars.outputs.tag }}
platforms: linux/amd64,linux/arm64
build-args: |
KESTRA_PLUGINS=${{ steps.vars.outputs.plugins }}
APT_PACKAGES=${{ matrix.image.packages }}
PYTHON_LIBRARIES=${{ matrix.image.python-libs }}
- name: Install regctl
if: startsWith(github.ref, 'refs/tags/v')
uses: regclient/actions/regctl-installer@main
- name: Retag to minor semver version
if: startsWith(github.ref, 'refs/tags/v') && steps.vars.outputs.minor_semver != ''
run: |
regctl image copy ${{ format('kestra/kestra:{0}{1}', steps.vars.outputs.tag, matrix.image.name) }} ${{ format('kestra/kestra:{0}{1}', steps.vars.outputs.minor_semver, matrix.image.name) }}
- name: Retag to latest
if: startsWith(github.ref, 'refs/tags/v') && inputs.retag-latest == 'true'
run: |
regctl image copy ${{ format('kestra/kestra:{0}{1}', steps.vars.outputs.tag, matrix.image.name) }} ${{ format('kestra/kestra:latest{0}', matrix.image.name) }}
end:
runs-on: ubuntu-latest
needs:
- docker
if: always()
env:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
steps:
# Slack
- name: Slack notification
uses: Gamesight/slack-workflow-status@master
if: ${{ always() && env.SLACK_WEBHOOK_URL != 0 }}
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
slack_webhook_url: ${{ secrets.SLACK_WEBHOOK_URL }}
name: GitHub Actions
icon_emoji: ':github-actions:'
channel: 'C02DQ1A7JLR' # _int_git channel
PYTHON_LIBRARIES=${{ matrix.image.python-libraries }}

View File

@@ -25,7 +25,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout - Current ref
uses: actions/checkout@v5
uses: actions/checkout@v4
# Setup build
- name: Setup - Build
@@ -39,8 +39,8 @@ jobs:
- name: Publish - Release package to Maven Central
shell: bash
env:
ORG_GRADLE_PROJECT_mavenCentralUsername: ${{ secrets.SONATYPE_USER }}
ORG_GRADLE_PROJECT_mavenCentralPassword: ${{ secrets.SONATYPE_PASSWORD }}
ORG_GRADLE_PROJECT_sonatypeUsername: ${{ secrets.SONATYPE_USER }}
ORG_GRADLE_PROJECT_sonatypePassword: ${{ secrets.SONATYPE_PASSWORD }}
SONATYPE_GPG_KEYID: ${{ secrets.SONATYPE_GPG_KEYID }}
SONATYPE_GPG_PASSWORD: ${{ secrets.SONATYPE_GPG_PASSWORD }}
SONATYPE_GPG_FILE: ${{ secrets.SONATYPE_GPG_FILE}}
@@ -50,7 +50,7 @@ jobs:
echo "signing.password=${SONATYPE_GPG_PASSWORD}" >> ~/.gradle/gradle.properties
echo "signing.secretKeyRingFile=${HOME}/.gradle/secring.gpg" >> ~/.gradle/gradle.properties
echo ${SONATYPE_GPG_FILE} | base64 -d > ~/.gradle/secring.gpg
./gradlew publishToMavenCentral
./gradlew publishToSonatype ${{ startsWith(github.ref, 'refs/tags/v') && 'closeAndReleaseSonatypeStagingRepository' || '' }}
# Gradle dependency
- name: Java - Gradle dependency graph

View File

@@ -1,16 +0,0 @@
name: Pull Request - Delete Docker
on:
pull_request:
types: [closed]
jobs:
publish:
name: Pull Request - Delete Docker
if: github.repository == github.event.pull_request.head.repo.full_name # prevent running on forks
runs-on: ubuntu-latest
steps:
- uses: dataaxiom/ghcr-cleanup-action@v1
with:
package: kestra-pr
delete-tags: ${{ github.event.pull_request.number }}

View File

@@ -1,78 +0,0 @@
name: Pull Request - Publish Docker
on:
pull_request:
branches:
- develop
jobs:
build-artifacts:
name: Build Artifacts
if: github.repository == github.event.pull_request.head.repo.full_name # prevent running on forks
uses: ./.github/workflows/workflow-build-artifacts.yml
publish:
name: Publish Docker
if: github.repository == github.event.pull_request.head.repo.full_name # prevent running on forks
runs-on: ubuntu-latest
needs: build-artifacts
env:
GITHUB_IMAGE_PATH: "ghcr.io/kestra-io/kestra-pr"
steps:
- name: Checkout - Current ref
uses: actions/checkout@v5
with:
fetch-depth: 0
# Docker setup
- name: Docker - Setup QEMU
uses: docker/setup-qemu-action@v3
- name: Docker - Setup Docker Buildx
uses: docker/setup-buildx-action@v3
# Docker Login
- name: Login to GHCR
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
# Build Docker Image
- name: Artifacts - Download executable
uses: actions/download-artifact@v5
with:
name: exe
path: build/executable
- name: Docker - Copy exe to image
shell: bash
run: |
cp build/executable/* docker/app/kestra && chmod +x docker/app/kestra
- name: Docker - Build image
uses: docker/build-push-action@v6
with:
context: .
file: ./Dockerfile.pr
push: true
tags: ${{ env.GITHUB_IMAGE_PATH }}:${{ github.event.pull_request.number }}
platforms: linux/amd64,linux/arm64
# Add comment on pull request
- name: Add comment to PR
uses: actions/github-script@v7
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
await github.rest.issues.createComment({
issue_number: context.issue.number,
owner: context.repo.owner,
repo: context.repo.repo,
body: `**🐋 Docker image**: \`${{ env.GITHUB_IMAGE_PATH }}:${{ github.event.pull_request.number }}\`\n` +
`\n` +
`\`\`\`bash\n` +
`docker run --pull=always --rm -it -p 8080:8080 --user=root -v /var/run/docker.sock:/var/run/docker.sock -v /tmp:/tmp ${{ env.GITHUB_IMAGE_PATH }}:${{ github.event.pull_request.number }} server local\n` +
`\`\`\``
})

View File

@@ -4,7 +4,7 @@ on:
workflow_dispatch:
inputs:
plugin-version:
description: "plugins version"
description: "Kestra version"
default: 'LATEST'
required: false
type: string
@@ -16,7 +16,7 @@ on:
workflow_call:
inputs:
plugin-version:
description: "plugins version"
description: "Kestra version"
default: 'LATEST'
required: false
type: string
@@ -42,25 +42,21 @@ on:
SONATYPE_GPG_FILE:
description: "The Sonatype GPG file."
required: true
GH_PERSONAL_TOKEN:
description: "GH personnal Token."
required: true
SLACK_RELEASES_WEBHOOK_URL:
description: "Slack webhook for releases channel."
required: true
jobs:
build-artifacts:
name: Build - Artifacts
uses: ./.github/workflows/workflow-build-artifacts.yml
with:
plugin-version: ${{ github.event.inputs.plugin-version != null && github.event.inputs.plugin-version || 'LATEST' }}
Docker:
name: Publish Docker
needs: build-artifacts
uses: ./.github/workflows/workflow-publish-docker.yml
if: github.ref == 'refs/heads/develop' || inputs.publish-docker == 'true'
if: startsWith(github.ref, 'refs/heads/develop') || github.event.inputs.publish-docker == 'true'
with:
force-download-artifact: 'false'
plugin-version: ${{ inputs.plugin-version != null && inputs.plugin-version || 'LATEST' }}
plugin-version: ${{ github.event.inputs.plugin-version != null && github.event.inputs.plugin-version || 'LATEST' }}
secrets:
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }}
@@ -81,5 +77,4 @@ jobs:
if: startsWith(github.ref, 'refs/tags/v')
uses: ./.github/workflows/workflow-github-release.yml
secrets:
GH_PERSONAL_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}
SLACK_RELEASES_WEBHOOK_URL: ${{ secrets.SLACK_RELEASES_WEBHOOK_URL }}
GH_PERSONAL_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}

View File

@@ -27,7 +27,7 @@ jobs:
ui: ${{ steps.changes.outputs.ui }}
backend: ${{ steps.changes.outputs.backend }}
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v4
if: "!startsWith(github.ref, 'refs/tags/v')"
- uses: dorny/paths-filter@v3
if: "!startsWith(github.ref, 'refs/tags/v')"

View File

@@ -3,12 +3,10 @@
# Format: <RepositoryName>:<GroupId>:<ArtifactId>:<Version>
#
# Uncomment the lines corresponding to the plugins to be installed:
#plugin-ai:io.kestra.plugin:plugin-ai:LATEST
#plugin-airbyte:io.kestra.plugin:plugin-airbyte:LATEST
#plugin-airflow:io.kestra.plugin:plugin-airflow:LATEST
#plugin-amqp:io.kestra.plugin:plugin-amqp:LATEST
#plugin-ansible:io.kestra.plugin:plugin-ansible:LATEST
#plugin-anthropic:io.kestra.plugin:plugin-anthropic:LATEST
#plugin-aws:io.kestra.plugin:plugin-aws:LATEST
#plugin-azure:io.kestra.plugin:plugin-azure:LATEST
#plugin-cassandra:io.kestra.plugin:plugin-cassandra:LATEST
@@ -19,7 +17,6 @@
#plugin-databricks:io.kestra.plugin:plugin-databricks:LATEST
#plugin-datahub:io.kestra.plugin:plugin-datahub:LATEST
#plugin-dataform:io.kestra.plugin:plugin-dataform:LATEST
#plugin-datagen:io.kestra.plugin:plugin-datagen:LATEST
#plugin-dbt:io.kestra.plugin:plugin-dbt:LATEST
#plugin-debezium:io.kestra.plugin:plugin-debezium-db2:LATEST
#plugin-debezium:io.kestra.plugin:plugin-debezium-mongodb:LATEST
@@ -27,13 +24,11 @@
#plugin-debezium:io.kestra.plugin:plugin-debezium-oracle:LATEST
#plugin-debezium:io.kestra.plugin:plugin-debezium-postgres:LATEST
#plugin-debezium:io.kestra.plugin:plugin-debezium-sqlserver:LATEST
#plugin-deepseek:io.kestra.plugin:plugin-deepseek:LATEST
#plugin-docker:io.kestra.plugin:plugin-docker:LATEST
#plugin-elasticsearch:io.kestra.plugin:plugin-elasticsearch:LATEST
#plugin-fivetran:io.kestra.plugin:plugin-fivetran:LATEST
#plugin-fs:io.kestra.plugin:plugin-fs:LATEST
#plugin-gcp:io.kestra.plugin:plugin-gcp:LATEST
#plugin-gemini:io.kestra.plugin:plugin-gemini:LATEST
#plugin-git:io.kestra.plugin:plugin-git:LATEST
#plugin-github:io.kestra.plugin:plugin-github:LATEST
#plugin-googleworkspace:io.kestra.plugin:plugin-googleworkspace:LATEST
@@ -68,38 +63,31 @@
#plugin-kafka:io.kestra.plugin:plugin-kafka:LATEST
#plugin-kestra:io.kestra.plugin:plugin-kestra:LATEST
#plugin-kubernetes:io.kestra.plugin:plugin-kubernetes:LATEST
#plugin-langchain4j:io.kestra.plugin:plugin-langchain4j:LATEST
#plugin-ldap:io.kestra.plugin:plugin-ldap:LATEST
#plugin-linear:io.kestra.plugin:plugin-linear:LATEST
#plugin-malloy:io.kestra.plugin:plugin-malloy:LATEST
#plugin-meilisearch:io.kestra.plugin:plugin-meilisearch:LATEST
#plugin-minio:io.kestra.plugin:plugin-minio:LATEST
#plugin-mistral:io.kestra.plugin:plugin-mistral:LATEST
#plugin-modal:io.kestra.plugin:plugin-modal:LATEST
#plugin-mongodb:io.kestra.plugin:plugin-mongodb:LATEST
#plugin-mqtt:io.kestra.plugin:plugin-mqtt:LATEST
#plugin-nats:io.kestra.plugin:plugin-nats:LATEST
#plugin-neo4j:io.kestra.plugin:plugin-neo4j:LATEST
#plugin-notifications:io.kestra.plugin:plugin-notifications:LATEST
#plugin-notion:io.kestra.plugin:plugin-notion:LATEST
#plugin-ollama:io.kestra.plugin:plugin-ollama:LATEST
#plugin-openai:io.kestra.plugin:plugin-openai:LATEST
#plugin-opensearch:io.kestra.plugin:plugin-opensearch:LATEST
#plugin-perplexity:io.kestra.plugin:plugin-perplexity:LATEST
#plugin-powerbi:io.kestra.plugin:plugin-powerbi:LATEST
#plugin-pulsar:io.kestra.plugin:plugin-pulsar:LATEST
#plugin-redis:io.kestra.plugin:plugin-redis:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-bun:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-deno:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-go:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-groovy:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-jbang:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-julia:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-jython:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-lua:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-nashorn:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-node:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-perl:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-php:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-powershell:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-python:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-r:LATEST

AGENTS.md (deleted)
View File

@@ -1,305 +0,0 @@
# Kestra AGENTS.md
This file provides guidance for AI coding agents working on the Kestra project. Kestra is an open-source data orchestration and scheduling platform built with Java (Micronaut) and Vue.js.
## Repository Layout
- **`core/`**: Core Kestra framework and task definitions
- **`cli/`**: Command-line interface and server implementation
- **`webserver/`**: REST API server implementation
- **`ui/`**: Vue.js frontend application
- **`jdbc-*`**: Database connector modules (H2, MySQL, PostgreSQL)
- **`script/`**: Script execution engine
- **`storage-local/`**: Local file storage implementation
- **`repository-memory/`**: In-memory repository implementation
- **`runner-memory/`**: In-memory execution runner
- **`processor/`**: Task processing engine
- **`model/`**: Data models and Data Transfer Objects
- **`platform/`**: Platform-specific implementations
- **`tests/`**: Integration test framework
- **`e2e-tests/`**: End-to-end testing suite
## Development Environment
### Prerequisites
- Java 21+
- Node.js 22+ and npm
- Python 3, pip, and python venv
- Docker & Docker Compose
- Gradle (wrapper included)
### Quick Setup with Devcontainer
The easiest way to get started is using the provided devcontainer:
1. Install VSCode Remote Development extension
2. Run `Dev Containers: Open Folder in Container...` from command palette
3. Select the Kestra root folder
4. Wait for Gradle build to complete
### Manual Setup
1. Clone the repository
2. Run `./gradlew build` to build the backend
3. Navigate to `ui/` and run `npm install`
4. Create configuration files as described below
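A condensed sketch of these manual steps (the repository URL is inferred from the project links at the end of this guide):
```bash
git clone https://github.com/kestra-io/kestra.git
cd kestra
./gradlew build          # build the backend
cd ui && npm install     # install frontend dependencies
# then create the configuration files described in the next section
```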
## Configuration Files
### Backend Configuration
Create `cli/src/main/resources/application-override.yml`:
**Local Mode (H2 database):**
```yaml
micronaut:
server:
cors:
enabled: true
configurations:
all:
allowedOrigins:
- http://localhost:5173
```
**Standalone Mode (PostgreSQL):**
```yaml
kestra:
repository:
type: postgres
storage:
type: local
local:
base-path: "/app/storage"
queue:
type: postgres
tasks:
tmp-dir:
path: /tmp/kestra-wd/tmp
anonymous-usage-report:
enabled: false
datasources:
postgres:
url: jdbc:postgresql://host.docker.internal:5432/kestra
driverClassName: org.postgresql.Driver
username: kestra
password: k3str4
flyway:
datasources:
postgres:
enabled: true
locations:
- classpath:migrations/postgres
ignore-migration-patterns: "*:missing,*:future"
out-of-order: true
micronaut:
server:
cors:
enabled: true
configurations:
all:
allowedOrigins:
- http://localhost:5173
```
### Frontend Configuration
Create `ui/.env.development.local` for environment variables.
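For example, mirroring the value used in the contributing guide (adjust the URL if your backend runs elsewhere):
```bash
# ui/.env.development.local
VITE_APP_API_URL=http://localhost:8080
```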
## Running the Application
### Backend
- **Local mode**: `./gradlew runLocal` (uses H2 database)
- **Standalone mode**: Use VSCode Run and Debug with main class `io.kestra.cli.App` and args `server standalone`
### Frontend
- Navigate to `ui/` directory
- Run `npm run dev` for development server (port 5173)
- Run `npm run build` for production build
## Building and Testing
### Backend
```bash
# Build the project
./gradlew build
# Run tests
./gradlew test
# Run specific module tests
./gradlew :core:test
# Clean build
./gradlew clean build
```
### Frontend
```bash
cd ui
npm install
npm run test
npm run lint
npm run build
```
### End-to-End Tests
```bash
# Build and start E2E tests
./build-and-start-e2e-tests.sh
# Or use the Makefile
make install
make install-plugins
make start-standalone-postgres
```
## Development Guidelines
### Java Backend
- Use Java 21 features
- Follow Micronaut framework patterns
- Add Swagger annotations for API documentation
- Use annotation processors (enable in IDE)
- Set `MICRONAUT_ENVIRONMENTS=local,override` for custom config
- Set `KESTRA_PLUGINS_PATH` for custom plugin loading
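As a sketch, using the defaults from the environment-variable table at the end of this guide:
```bash
export MICRONAUT_ENVIRONMENTS=local,override                  # pick up application-override.yml
export KESTRA_PLUGINS_PATH=/workspaces/kestra/local/plugins   # directory scanned for plugin JARs
./gradlew runLocal                                            # start the backend with the H2 database
```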
### Vue.js Frontend
- Vue 3 with Composition API
- TypeScript for type safety
- Vite for build tooling
- ESLint and Prettier for code quality
- Component-based architecture in `src/components/`
### Code Style
- Follow `.editorconfig` settings
- Use 4 spaces for Java, 2 spaces for YAML/JSON/CSS
- Enable format on save in VSCode
- Use Prettier for frontend code formatting
## Testing Strategy
### Backend Testing
- Unit tests in `src/test/java/`
- Integration tests in `tests/` module
- Use Micronaut test framework
- Test both local and standalone modes
### Frontend Testing
- Unit tests with Jest
- E2E tests with Playwright
- Component testing with Storybook
- Run `npm run test:unit` and `npm run test:e2e`
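For a single backend test class, Gradle's standard `--tests` filter can narrow the run; the class name below is only a placeholder:
```bash
# Run one test class from the core module (replace the class name with a real one).
./gradlew :core:test --tests "io.kestra.core.SomeTestClass"
```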
## Plugin Development
### Creating Plugins
- Follow the [Plugin Developer Guide](https://kestra.io/docs/plugin-developer-guide/)
- Place JAR files in `KESTRA_PLUGINS_PATH`
- Use the plugin template structure
- Test with both local and standalone modes
### Plugin Loading
- Set `KESTRA_PLUGINS_PATH` environment variable
- Use devcontainer mounts for local development
- Plugins are loaded at startup
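A hedged sketch of installing a plugin with the bundled CLI; the `KESTRA_BASEDIR` layout and the Maven-style coordinate follow the Makefile and `.plugins` entries shown later in this diff, so adapt both to your setup:
```bash
# Assumes KESTRA_BASEDIR points at an unpacked Kestra distribution (as in the Makefile)
# and that the coordinate below matches one of the entries in .plugins.
"$KESTRA_BASEDIR"/bin/kestra plugins install io.kestra.plugin:plugin-git:LATEST \
    --plugins "$KESTRA_PLUGINS_PATH"
# Plugins are only picked up at startup, so restart the server afterwards.
```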
## Common Issues and Solutions
### JavaScript Heap Out of Memory
Set `NODE_OPTIONS=--max-old-space-size=4096` environment variable.
### CORS Issues
Ensure backend CORS is configured for `http://localhost:5173` when using frontend dev server.
### Database Connection Issues
- Use `host.docker.internal` instead of `localhost` when connecting from devcontainer
- Verify PostgreSQL is running and accessible
- Check database credentials and permissions
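A quick connectivity check using the credentials from the standalone configuration above, assuming the `psql` client is available in the devcontainer:
```bash
# Should print a single row if PostgreSQL is reachable and the credentials are valid.
PGPASSWORD=k3str4 psql -h host.docker.internal -p 5432 -U kestra -d kestra -c 'SELECT 1;'
```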
### Gradle Build Issues
- Clear Gradle cache: `./gradlew clean`
- Check Java version compatibility
- Verify all dependencies are available
## Pull Request Guidelines
### Before Submitting
1. Run all tests: `./gradlew test` and `npm test`
2. Check code formatting: `./gradlew spotlessCheck`
3. Verify CORS configuration if changing API
4. Test both local and standalone modes
5. Update documentation for user-facing changes
### Commit Messages
- Follow conventional commit format
- Use present tense ("Add feature" not "Added feature")
- Reference issue numbers when applicable
- Keep commits focused and atomic
### Review Checklist
- [ ] All tests pass
- [ ] Code follows project style guidelines
- [ ] Documentation is updated
- [ ] No breaking changes without migration guide
- [ ] CORS properly configured if API changes
- [ ] Both local and standalone modes tested
## Useful Commands
```bash
# Quick development commands
./gradlew runLocal # Start local backend
./gradlew :ui:build # Build frontend
./gradlew clean build # Clean rebuild
npm run dev # Start frontend dev server
make install # Install Kestra locally
make start-standalone-postgres # Start with PostgreSQL
# Testing commands
./gradlew test # Run all backend tests
./gradlew :core:test # Run specific module tests
npm run test # Run frontend tests
npm run lint # Lint frontend code
```
## Getting Help
- Open a [GitHub issue](https://github.com/kestra-io/kestra/issues)
- Join the [Kestra Slack community](https://kestra.io/slack)
- Check the [main documentation](https://kestra.io/docs)
## Environment Variables
| Variable | Description | Default |
|----------|-------------|---------|
| `MICRONAUT_ENVIRONMENTS` | Custom config environments | `local,override` |
| `KESTRA_PLUGINS_PATH` | Path to custom plugins | `/workspaces/kestra/local/plugins` |
| `NODE_OPTIONS` | Node.js options | `--max-old-space-size=4096` |
| `JAVA_HOME` | Java installation path | `/usr/java/jdk-21` |
Remember: Always test your changes in both local and standalone modes, and ensure CORS is properly configured for frontend development.

View File

@@ -1,7 +0,0 @@
FROM kestra/kestra:develop
USER root
COPY --chown=kestra:kestra docker /
USER kestra

View File

@@ -77,7 +77,7 @@ install-plugins:
else \
${KESTRA_BASEDIR}/bin/kestra plugins install $$CURRENT_PLUGIN \
--plugins ${KESTRA_BASEDIR}/plugins \
--repositories=https://central.sonatype.com/repository/maven-snapshots || exit 1; \
--repositories=https://s01.oss.sonatype.org/content/repositories/snapshots || exit 1; \
fi \
done < $$PLUGIN_LIST
@@ -130,6 +130,9 @@ datasources:
username: kestra
password: k3str4
kestra:
server:
basic-auth:
enabled: false
encryption:
secret-key: 3ywuDa/Ec61VHkOX3RlI9gYq7CaD0mv0Pf3DHtAXA6U=
repository:

View File

@@ -65,6 +65,10 @@ Kestra is an open-source, event-driven orchestration platform that makes both **
## 🚀 Quick Start
### Try the Live Demo
Try Kestra with our [**Live Demo**](https://demo.kestra.io/ui/login?auto). No installation required!
### Get Started Locally in 5 Minutes
#### Launch Kestra in Docker
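As an aside, the pull-request workflow removed elsewhere in this diff shows the `docker run` invocation used to try a Kestra image locally; a sketch of the same command pointed at the public `kestra/kestra:latest` image (the tag choice is an assumption):
```bash
docker run --pull=always --rm -it -p 8080:8080 --user=root \
  -v /var/run/docker.sock:/var/run/docker.sock -v /tmp:/tmp \
  kestra/kestra:latest server local
```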

View File

@@ -1,47 +0,0 @@
#!/bin/bash
set -e
# E2E main script that can be run on a dev computer or in the CI
# it will build the backend of the current git repo and the frontend
# create a docker image out of it
# run tests on this image
LOCAL_IMAGE_VERSION="local-e2e-$(date +%s)"
echo "Running E2E"
echo "Start time: $(date '+%Y-%m-%d %H:%M:%S')"
start_time=$(date +%s)
echo ""
echo "Building the image for this current repository"
make clean
make build-docker VERSION=$LOCAL_IMAGE_VERSION
end_time=$(date +%s)
elapsed=$(( end_time - start_time ))
echo ""
echo "building elapsed time: ${elapsed} seconds"
echo ""
echo "Start time: $(date '+%Y-%m-%d %H:%M:%S')"
start_time2=$(date +%s)
echo "cd ./ui"
cd ./ui
echo "npm i"
npm i
echo 'sh ./run-e2e-tests.sh --kestra-docker-image-to-test "kestra/kestra:$LOCAL_IMAGE_VERSION"'
./run-e2e-tests.sh --kestra-docker-image-to-test "kestra/kestra:$LOCAL_IMAGE_VERSION"
end_time2=$(date +%s)
elapsed2=$(( end_time2 - start_time2 ))
echo ""
echo "Tests elapsed time: ${elapsed2} seconds"
echo ""
total_elapsed=$(( elapsed + elapsed2 ))
echo "Total elapsed time: ${total_elapsed} seconds"
echo ""
exit 0

View File

@@ -16,7 +16,7 @@ plugins {
id "java"
id 'java-library'
id "idea"
id "com.gradleup.shadow" version "8.3.9"
id "com.gradleup.shadow" version "8.3.6"
id "application"
// test
@@ -31,10 +31,12 @@ plugins {
id 'com.github.node-gradle.node' version '7.1.0'
// release
id "io.github.gradle-nexus.publish-plugin" version "2.0.0"
id 'net.researchgate.release' version '3.1.0'
id "com.gorylenko.gradle-git-properties" version "2.5.2"
id "com.gorylenko.gradle-git-properties" version "2.5.0"
id 'signing'
id "com.vanniktech.maven.publish" version "0.34.0"
id 'ru.vyarus.pom' version '3.0.0' apply false
id 'ru.vyarus.github-info' version '2.0.0' apply false
// OWASP dependency check
id "org.owasp.dependencycheck" version "12.1.3" apply false
@@ -71,11 +73,6 @@ dependencies {
* Dependencies
**********************************************************************************************************************/
allprojects {
tasks.withType(GenerateModuleMetadata).configureEach {
suppressedValidationErrors.add('enforced-platform')
}
if (it.name != 'platform') {
group = "io.kestra"
@@ -148,7 +145,6 @@ allprojects {
implementation group: 'com.fasterxml.jackson.module', name: 'jackson-module-parameter-names'
implementation group: 'com.fasterxml.jackson.datatype', name: 'jackson-datatype-guava'
implementation group: 'com.fasterxml.jackson.datatype', name: 'jackson-datatype-jsr310'
implementation group: 'com.fasterxml.uuid', name: 'java-uuid-generator'
// kestra
implementation group: 'com.devskiller.friendly-id', name: 'friendly-id'
@@ -225,14 +221,14 @@ subprojects {
}
testlogger {
theme = 'mocha-parallel'
showExceptions = true
showFullStackTraces = true
showCauses = true
slowThreshold = 2000
showStandardStreams = true
showPassedStandardStreams = false
showSkippedStandardStreams = true
theme 'mocha-parallel'
showExceptions true
showFullStackTraces true
showCauses true
slowThreshold 2000
showStandardStreams true
showPassedStandardStreams false
showSkippedStandardStreams true
}
}
}
@@ -410,7 +406,7 @@ jar {
shadowJar {
archiveClassifier.set(null)
mergeServiceFiles()
zip64 = true
zip64 true
}
distZip.dependsOn shadowJar
@@ -418,7 +414,6 @@ distTar.dependsOn shadowJar
startScripts.dependsOn shadowJar
startShadowScripts.dependsOn jar
shadowJar.dependsOn 'ui:assembleFrontend'
shadowJar.dependsOn jar
/**********************************************************************************************************************\
* Executable Jar
@@ -427,8 +422,8 @@ def executableDir = layout.buildDirectory.dir("executable")
def executable = layout.buildDirectory.file("executable/${project.name}-${project.version}").get().asFile
tasks.register('writeExecutableJar') {
group = "build"
description = "Write an executable jar from shadow jar"
group "build"
description "Write an executable jar from shadow jar"
dependsOn = [shadowJar]
final shadowJarFile = tasks.shadowJar.outputs.files.singleFile
@@ -454,8 +449,8 @@ tasks.register('writeExecutableJar') {
}
tasks.register('executableJar', Zip) {
group = "build"
description = "Zip the executable jar"
group "build"
description "Zip the executable jar"
dependsOn = [writeExecutableJar]
archiveFileName = "${project.name}-${project.version}.zip"
@@ -489,11 +484,24 @@ tasks.register('runStandalone', JavaExec) {
/**********************************************************************************************************************\
* Publish
**********************************************************************************************************************/
subprojects {subProject ->
nexusPublishing {
repositoryDescription = "${project.group}:${rootProject.name}:${project.version}"
useStaging = !project.version.endsWith("-SNAPSHOT")
repositories {
sonatype {
nexusUrl.set(uri("https://s01.oss.sonatype.org/service/local/"))
snapshotRepositoryUrl.set(uri("https://s01.oss.sonatype.org/content/repositories/snapshots/"))
}
}
}
if (subProject.name != 'jmh-benchmarks' && subProject.name != rootProject.name) {
subprojects {
if (it.name != 'jmh-benchmarks') {
apply plugin: "maven-publish"
apply plugin: 'signing'
apply plugin: "com.vanniktech.maven.publish"
apply plugin: 'ru.vyarus.pom'
apply plugin: 'ru.vyarus.github-info'
javadoc {
options {
@@ -527,126 +535,66 @@ subprojects {subProject ->
}
}
//These modules should not be published
def unpublishedModules = ["jdbc-mysql", "jdbc-postgres", "webserver"]
if (subProject.name in unpublishedModules){
return
github {
user 'kestra-io'
license 'Apache'
repository 'kestra'
site 'https://kestra.io'
}
mavenPublishing {
publishToMavenCentral(true)
signAllPublications()
maven.pom {
description = 'The modern, scalable orchestrator & scheduler open source platform'
coordinates(
"${rootProject.group}",
subProject.name == "cli" ? rootProject.name : subProject.name,
"${rootProject.version}"
)
pom {
name = project.name
description = "${project.group}:${project.name}:${rootProject.version}"
url = "https://github.com/kestra-io/${rootProject.name}"
licenses {
license {
name = "The Apache License, Version 2.0"
url = "http://www.apache.org/licenses/LICENSE-2.0.txt"
}
}
developers {
developer {
id = "tchiotludo"
name = "Ludovic Dehon"
email = "ldehon@kestra.io"
}
}
scm {
connection = 'scm:git:'
url = "https://github.com/kestra-io/${rootProject.name}"
developers {
developer {
id = "tchiotludo"
name = "Ludovic Dehon"
}
}
}
afterEvaluate {
publishing {
publications {
withType(MavenPublication).configureEach { publication ->
publishing {
publications {
sonatypePublication(MavenPublication) {
version project.version
if (subProject.name == "platform") {
// Clear all artifacts except the BOM
publication.artifacts.clear()
}
if (project.name.contains('cli')) {
groupId "io.kestra"
artifactId "kestra"
artifact shadowJar
artifact executableJar
} else if (project.name.contains('platform')){
groupId project.group
artifactId project.name
} else {
from components.java
groupId project.group
artifactId project.name
artifact sourcesJar
artifact javadocJar
artifact testsJar
}
}
}
}
if (subProject.name == 'cli') {
/* Make sure the special publication is wired *after* every plugin */
subProject.afterEvaluate {
/* 1. Remove the default java component so Gradle stops expecting
the standard cli-*.jar, sources, javadoc, etc. */
components.removeAll { it.name == "java" }
/* 2. Replace the publications artifacts with shadow + exec */
publishing.publications.withType(MavenPublication).configureEach { pub ->
pub.artifacts.clear()
// main shadow JAR built at root
pub.artifact(rootProject.tasks.named("shadowJar").get()) {
extension = "jar"
}
// executable ZIP built at root
pub.artifact(rootProject.tasks.named("executableJar").get().archiveFile) {
classifier = "exec"
extension = "zip"
}
pub.artifact(tasks.named("sourcesJar").get())
pub.artifact(tasks.named("javadocJar").get())
}
/* 3. Disable Gradle-module metadata for this publication to
avoid the “artifact removed from java component” error. */
tasks.withType(GenerateModuleMetadata).configureEach { it.enabled = false }
/* 4. Make every publish task in :cli wait for the two artifacts */
tasks.matching { it.name.startsWith("publish") }.configureEach {
dependsOn rootProject.tasks.named("shadowJar")
dependsOn rootProject.tasks.named("executableJar")
}
}
signing {
// only sign JARs that we publish to Sonatype
required { gradle.taskGraph.hasTask("publishSonatypePublicationPublicationToSonatypeRepository") }
sign publishing.publications.sonatypePublication
}
if (subProject.name != 'platform' && subProject.name != 'cli') {
// only if a test source set actually exists (avoids empty artifacts)
def hasTests = subProject.extensions.findByName('sourceSets')?.findByName('test') != null
if (hasTests) {
// wire the artifact onto every Maven publication of this subproject
publishing {
publications {
withType(MavenPublication).configureEach { pub ->
// keep the normal java component + sources/javadoc already configured
pub.artifact(subProject.tasks.named('testsJar').get())
}
}
}
// make sure publish tasks build the tests jar first
tasks.matching { it.name.startsWith('publish') }.configureEach {
dependsOn subProject.tasks.named('testsJar')
}
}
tasks.withType(GenerateModuleMetadata).configureEach {
// Suppression this validation error as we want to enforce the Kestra platform
suppressedValidationErrors.add('enforced-platform')
}
}
}
/**********************************************************************************************************************\
* Version
**********************************************************************************************************************/

View File

@@ -37,4 +37,4 @@ dependencies {
//test
testImplementation "org.wiremock:wiremock-jetty12"
}
}

View File

@@ -1,5 +1,7 @@
package io.kestra.cli;
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
import io.micronaut.core.annotation.Nullable;
import io.micronaut.http.HttpHeaders;
import io.micronaut.http.HttpRequest;
@@ -14,15 +16,16 @@ import io.micronaut.http.netty.body.NettyJsonHandler;
import io.micronaut.json.JsonMapper;
import jakarta.inject.Inject;
import jakarta.inject.Named;
import lombok.Builder;
import lombok.Value;
import lombok.extern.jackson.Jacksonized;
import picocli.CommandLine;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import lombok.Builder;
import lombok.Value;
import lombok.extern.jackson.Jacksonized;
import picocli.CommandLine;
public abstract class AbstractApiCommand extends AbstractCommand {
@CommandLine.Option(names = {"--server"}, description = "Kestra server url", defaultValue = "http://localhost:8080")
@@ -34,7 +37,7 @@ public abstract class AbstractApiCommand extends AbstractCommand {
@CommandLine.Option(names = {"--user"}, paramLabel = "<user:password>", description = "Server user and password")
protected String user;
@CommandLine.Option(names = {"--tenant"}, description = "Tenant identifier (EE only)")
@CommandLine.Option(names = {"--tenant"}, description = "Tenant identifier (EE only, when multi-tenancy is enabled)")
protected String tenantId;
@CommandLine.Option(names = {"--api-token"}, description = "API Token (EE only).")
@@ -84,12 +87,12 @@ public abstract class AbstractApiCommand extends AbstractCommand {
return request;
}
protected String apiUri(String path, String tenantId) {
protected String apiUri(String path) {
if (path == null || !path.startsWith("/")) {
throw new IllegalArgumentException("'path' must be non-null and start with '/'");
}
return "/api/v1/" + tenantId + path;
return tenantId == null ? "/api/v1/" + MAIN_TENANT + path : "/api/v1/" + tenantId + path;
}
@Builder

View File

@@ -40,7 +40,7 @@ import picocli.CommandLine.Option;
)
@Slf4j
@Introspected
public abstract class AbstractCommand implements Callable<Integer> {
abstract public class AbstractCommand implements Callable<Integer> {
@Inject
private ApplicationContext applicationContext;
@@ -93,7 +93,7 @@ public abstract class AbstractCommand implements Callable<Integer> {
this.startupHook.start(this);
}
if (pluginRegistryProvider != null && this.pluginsPath != null && loadExternalPlugins()) {
if (this.pluginsPath != null && loadExternalPlugins()) {
pluginRegistry = pluginRegistryProvider.get();
pluginRegistry.registerIfAbsent(pluginsPath);

View File

@@ -1,6 +1,5 @@
package io.kestra.cli;
import io.kestra.cli.services.TenantIdSelectorService;
import io.kestra.core.models.validations.ModelValidator;
import io.kestra.core.models.validations.ValidateConstraintViolation;
import io.kestra.core.serializers.YamlParser;
@@ -10,7 +9,6 @@ import io.micronaut.http.MediaType;
import io.micronaut.http.MutableHttpRequest;
import io.micronaut.http.client.exceptions.HttpClientResponseException;
import io.micronaut.http.client.netty.DefaultHttpClient;
import jakarta.inject.Inject;
import picocli.CommandLine;
import java.io.IOException;
@@ -33,9 +31,6 @@ public abstract class AbstractValidateCommand extends AbstractApiCommand {
@CommandLine.Parameters(index = "0", description = "the directory containing files to check")
protected Path directory;
@Inject
private TenantIdSelectorService tenantService;
/** {@inheritDoc} **/
@Override
protected boolean loadExternalPlugins() {
@@ -117,7 +112,7 @@ public abstract class AbstractValidateCommand extends AbstractApiCommand {
try(DefaultHttpClient client = client()) {
MutableHttpRequest<String> request = HttpRequest
.POST(apiUri("/flows/validate", tenantService.getTenantId(tenantId)), body).contentType(MediaType.APPLICATION_YAML);
.POST(apiUri("/flows/validate"), body).contentType(MediaType.APPLICATION_YAML);
List<ValidateConstraintViolation> validations = client.toBlocking().retrieve(
this.requestOptions(request),

View File

@@ -66,14 +66,8 @@ public class App implements Callable<Integer> {
ApplicationContext applicationContext = App.applicationContext(cls, args);
// Call Picocli command
int exitCode = 0;
try {
exitCode = new CommandLine(cls, new MicronautFactory(applicationContext)).execute(args);
} catch (CommandLine.InitializationException e){
System.err.println("Could not initialize picoli ComandLine, err: " + e.getMessage());
e.printStackTrace();
exitCode = 1;
}
int exitCode = new CommandLine(cls, new MicronautFactory(applicationContext)).execute(args);
applicationContext.close();
// exit code

View File

@@ -1,4 +1,4 @@
package io.kestra.core.validations;
package io.kestra.cli;
import io.micronaut.context.annotation.Context;
import io.micronaut.context.annotation.Requires;

View File

@@ -2,13 +2,11 @@ package io.kestra.cli.commands.flows;
import io.kestra.cli.AbstractApiCommand;
import io.kestra.cli.AbstractValidateCommand;
import io.kestra.cli.services.TenantIdSelectorService;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.MediaType;
import io.micronaut.http.MutableHttpRequest;
import io.micronaut.http.client.exceptions.HttpClientResponseException;
import io.micronaut.http.client.netty.DefaultHttpClient;
import jakarta.inject.Inject;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
@@ -25,9 +23,6 @@ public class FlowCreateCommand extends AbstractApiCommand {
@CommandLine.Parameters(index = "0", description = "The file containing the flow")
public Path flowFile;
@Inject
private TenantIdSelectorService tenantService;
@SuppressWarnings("deprecation")
@Override
public Integer call() throws Exception {
@@ -39,7 +34,7 @@ public class FlowCreateCommand extends AbstractApiCommand {
try(DefaultHttpClient client = client()) {
MutableHttpRequest<String> request = HttpRequest
.POST(apiUri("/flows", tenantService.getTenantId(tenantId)), body).contentType(MediaType.APPLICATION_YAML);
.POST(apiUri("/flows"), body).contentType(MediaType.APPLICATION_YAML);
client.toBlocking().retrieve(
this.requestOptions(request),

View File

@@ -2,12 +2,10 @@ package io.kestra.cli.commands.flows;
import io.kestra.cli.AbstractApiCommand;
import io.kestra.cli.AbstractValidateCommand;
import io.kestra.cli.services.TenantIdSelectorService;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.MutableHttpRequest;
import io.micronaut.http.client.exceptions.HttpClientResponseException;
import io.micronaut.http.client.netty.DefaultHttpClient;
import jakarta.inject.Inject;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
@@ -25,9 +23,6 @@ public class FlowDeleteCommand extends AbstractApiCommand {
@CommandLine.Parameters(index = "1", description = "The ID of the flow")
public String id;
@Inject
private TenantIdSelectorService tenantService;
@SuppressWarnings("deprecation")
@Override
public Integer call() throws Exception {
@@ -35,7 +30,7 @@ public class FlowDeleteCommand extends AbstractApiCommand {
try(DefaultHttpClient client = client()) {
MutableHttpRequest<String> request = HttpRequest
.DELETE(apiUri("/flows/" + namespace + "/" + id, tenantService.getTenantId(tenantId)));
.DELETE(apiUri("/flows/" + namespace + "/" + id ));
client.toBlocking().exchange(
this.requestOptions(request)

View File

@@ -2,7 +2,7 @@ package io.kestra.cli.commands.flows;
import io.kestra.cli.AbstractApiCommand;
import io.kestra.cli.AbstractValidateCommand;
import io.kestra.cli.services.TenantIdSelectorService;
import io.micronaut.context.ApplicationContext;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.HttpResponse;
import io.micronaut.http.MediaType;
@@ -25,8 +25,9 @@ import java.nio.file.Path;
public class FlowExportCommand extends AbstractApiCommand {
private static final String DEFAULT_FILE_NAME = "flows.zip";
// @FIXME: Keep it for a bug in Micronaut that requires @Inject on the top-level command so injection works on the abstract class
@Inject
private TenantIdSelectorService tenantService;
private ApplicationContext applicationContext;
@CommandLine.Option(names = {"--namespace"}, description = "The namespace of flows to export")
public String namespace;
@@ -40,7 +41,7 @@ public class FlowExportCommand extends AbstractApiCommand {
try(DefaultHttpClient client = client()) {
MutableHttpRequest<Object> request = HttpRequest
.GET(apiUri("/flows/export/by-query", tenantService.getTenantId(tenantId)) + (namespace != null ? "?namespace=" + namespace : ""))
.GET(apiUri("/flows/export/by-query") + (namespace != null ? "?namespace=" + namespace : ""))
.accept(MediaType.APPLICATION_OCTET_STREAM);
HttpResponse<byte[]> response = client.toBlocking().exchange(this.requestOptions(request), byte[].class);

View File

@@ -1,8 +1,7 @@
package io.kestra.cli.commands.flows;
import com.google.common.collect.ImmutableMap;
import io.kestra.cli.AbstractApiCommand;
import io.kestra.cli.services.TenantIdSelectorService;
import io.kestra.cli.AbstractCommand;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.repositories.LocalFlowRepositoryLoader;
@@ -31,7 +30,7 @@ import java.util.concurrent.TimeoutException;
description = "Test a flow"
)
@Slf4j
public class FlowTestCommand extends AbstractApiCommand {
public class FlowTestCommand extends AbstractCommand {
@Inject
private ApplicationContext applicationContext;
@@ -77,7 +76,6 @@ public class FlowTestCommand extends AbstractApiCommand {
FlowRepositoryInterface flowRepository = applicationContext.getBean(FlowRepositoryInterface.class);
FlowInputOutput flowInputOutput = applicationContext.getBean(FlowInputOutput.class);
RunnerUtils runnerUtils = applicationContext.getBean(RunnerUtils.class);
TenantIdSelectorService tenantService = applicationContext.getBean(TenantIdSelectorService.class);
Map<String, Object> inputs = new HashMap<>();
@@ -91,7 +89,7 @@ public class FlowTestCommand extends AbstractApiCommand {
try {
runner.run();
repositoryLoader.load(tenantService.getTenantId(tenantId), file.toFile());
repositoryLoader.load(file.toFile());
List<Flow> all = flowRepository.findAllForAllTenants();
if (all.size() != 1) {

View File

@@ -2,13 +2,11 @@ package io.kestra.cli.commands.flows;
import io.kestra.cli.AbstractApiCommand;
import io.kestra.cli.AbstractValidateCommand;
import io.kestra.cli.services.TenantIdSelectorService;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.MediaType;
import io.micronaut.http.MutableHttpRequest;
import io.micronaut.http.client.exceptions.HttpClientResponseException;
import io.micronaut.http.client.netty.DefaultHttpClient;
import jakarta.inject.Inject;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
@@ -31,9 +29,6 @@ public class FlowUpdateCommand extends AbstractApiCommand {
@CommandLine.Parameters(index = "2", description = "The ID of the flow")
public String id;
@Inject
private TenantIdSelectorService tenantService;
@SuppressWarnings("deprecation")
@Override
public Integer call() throws Exception {
@@ -45,7 +40,7 @@ public class FlowUpdateCommand extends AbstractApiCommand {
try(DefaultHttpClient client = client()) {
MutableHttpRequest<String> request = HttpRequest
.PUT(apiUri("/flows/" + namespace + "/" + id, tenantService.getTenantId(tenantId)), body).contentType(MediaType.APPLICATION_YAML);
.PUT(apiUri("/flows/" + namespace + "/" + id ), body).contentType(MediaType.APPLICATION_YAML);
client.toBlocking().retrieve(
this.requestOptions(request),

View File

@@ -2,7 +2,6 @@ package io.kestra.cli.commands.flows;
import io.kestra.cli.AbstractApiCommand;
import io.kestra.cli.AbstractValidateCommand;
import io.kestra.cli.services.TenantIdSelectorService;
import io.kestra.core.serializers.YamlParser;
import io.micronaut.core.type.Argument;
import io.micronaut.http.HttpRequest;
@@ -10,7 +9,6 @@ import io.micronaut.http.MediaType;
import io.micronaut.http.MutableHttpRequest;
import io.micronaut.http.client.exceptions.HttpClientResponseException;
import io.micronaut.http.client.netty.DefaultHttpClient;
import jakarta.inject.Inject;
import jakarta.validation.ConstraintViolationException;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
@@ -38,9 +36,6 @@ public class FlowUpdatesCommand extends AbstractApiCommand {
@CommandLine.Option(names = {"--namespace"}, description = "The parent namespace of the flows, if not set, every namespace are allowed.")
public String namespace;
@Inject
private TenantIdSelectorService tenantIdSelectorService;
@SuppressWarnings("deprecation")
@Override
public Integer call() throws Exception {
@@ -71,7 +66,7 @@ public class FlowUpdatesCommand extends AbstractApiCommand {
namespaceQuery = "&namespace=" + namespace;
}
MutableHttpRequest<String> request = HttpRequest
.POST(apiUri("/flows/bulk", tenantIdSelectorService.getTenantId(tenantId)) + "?allowNamespaceChild=true&delete=" + delete + namespaceQuery, body).contentType(MediaType.APPLICATION_YAML);
.POST(apiUri("/flows/bulk") + "?allowNamespaceChild=true&delete=" + delete + namespaceQuery, body).contentType(MediaType.APPLICATION_YAML);
List<UpdateResult> updated = client.toBlocking().retrieve(
this.requestOptions(request),

View File

@@ -1,7 +1,6 @@
package io.kestra.cli.commands.flows;
import io.kestra.cli.AbstractValidateCommand;
import io.kestra.cli.services.TenantIdSelectorService;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.validations.ModelValidator;
import io.kestra.core.services.FlowService;
@@ -23,9 +22,6 @@ public class FlowValidateCommand extends AbstractValidateCommand {
@Inject
private FlowService flowService;
@Inject
private TenantIdSelectorService tenantService;
@Override
public Integer call() throws Exception {
return this.call(
@@ -39,7 +35,7 @@ public class FlowValidateCommand extends AbstractValidateCommand {
FlowWithSource flow = (FlowWithSource) object;
List<String> warnings = new ArrayList<>();
warnings.addAll(flowService.deprecationPaths(flow).stream().map(deprecation -> deprecation + " is deprecated").toList());
warnings.addAll(flowService.warnings(flow, tenantService.getTenantId(tenantId)));
warnings.addAll(flowService.warnings(flow, this.tenantId));
return warnings;
},
(Object object) -> {

View File

@@ -3,7 +3,6 @@ package io.kestra.cli.commands.flows.namespaces;
import io.kestra.cli.AbstractValidateCommand;
import io.kestra.cli.commands.AbstractServiceNamespaceUpdateCommand;
import io.kestra.cli.commands.flows.IncludeHelperExpander;
import io.kestra.cli.services.TenantIdSelectorService;
import io.kestra.core.serializers.YamlParser;
import io.micronaut.core.type.Argument;
import io.micronaut.http.HttpRequest;
@@ -11,7 +10,6 @@ import io.micronaut.http.MediaType;
import io.micronaut.http.MutableHttpRequest;
import io.micronaut.http.client.exceptions.HttpClientResponseException;
import io.micronaut.http.client.netty.DefaultHttpClient;
import jakarta.inject.Inject;
import jakarta.validation.ConstraintViolationException;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
@@ -32,9 +30,6 @@ public class FlowNamespaceUpdateCommand extends AbstractServiceNamespaceUpdateCo
@CommandLine.Option(names = {"--override-namespaces"}, negatable = true, description = "Replace namespace of all flows by the one provided")
public boolean override = false;
@Inject
private TenantIdSelectorService tenantService;
@SuppressWarnings("deprecation")
@Override
public Integer call() throws Exception {
@@ -64,7 +59,7 @@ public class FlowNamespaceUpdateCommand extends AbstractServiceNamespaceUpdateCo
}
try(DefaultHttpClient client = client()) {
MutableHttpRequest<String> request = HttpRequest
.POST(apiUri("/flows/", tenantService.getTenantId(tenantId)) + namespace + "?delete=" + delete, body).contentType(MediaType.APPLICATION_YAML);
.POST(apiUri("/flows/") + namespace + "?delete=" + delete, body).contentType(MediaType.APPLICATION_YAML);
List<UpdateResult> updated = client.toBlocking().retrieve(
this.requestOptions(request),

View File

@@ -2,14 +2,12 @@ package io.kestra.cli.commands.namespaces.files;
import io.kestra.cli.AbstractApiCommand;
import io.kestra.cli.AbstractValidateCommand;
import io.kestra.cli.services.TenantIdSelectorService;
import io.kestra.core.utils.KestraIgnore;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.MediaType;
import io.micronaut.http.client.exceptions.HttpClientResponseException;
import io.micronaut.http.client.multipart.MultipartBody;
import io.micronaut.http.client.netty.DefaultHttpClient;
import jakarta.inject.Inject;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
@@ -36,9 +34,6 @@ public class NamespaceFilesUpdateCommand extends AbstractApiCommand {
@CommandLine.Option(names = {"--delete"}, negatable = true, description = "Whether missing should be deleted")
public boolean delete = false;
@Inject
private TenantIdSelectorService tenantService;
private static final String KESTRA_IGNORE_FILE = ".kestraignore";
@Override
@@ -49,7 +44,7 @@ public class NamespaceFilesUpdateCommand extends AbstractApiCommand {
try (var files = Files.walk(from); DefaultHttpClient client = client()) {
if (delete) {
client.toBlocking().exchange(this.requestOptions(HttpRequest.DELETE(apiUri("/namespaces/", tenantService.getTenantId(tenantId)) + namespace + "/files?path=" + to, null)));
client.toBlocking().exchange(this.requestOptions(HttpRequest.DELETE(apiUri("/namespaces/") + namespace + "/files?path=" + to, null)));
}
KestraIgnore kestraIgnore = new KestraIgnore(from);
@@ -67,7 +62,7 @@ public class NamespaceFilesUpdateCommand extends AbstractApiCommand {
client.toBlocking().exchange(
this.requestOptions(
HttpRequest.POST(
apiUri("/namespaces/", tenantService.getTenantId(tenantId)) + namespace + "/files?path=" + destination,
apiUri("/namespaces/") + namespace + "/files?path=" + destination,
body
).contentType(MediaType.MULTIPART_FORM_DATA)
)

View File

@@ -3,13 +3,11 @@ package io.kestra.cli.commands.namespaces.kv;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.kestra.cli.AbstractApiCommand;
import io.kestra.cli.services.TenantIdSelectorService;
import io.kestra.core.serializers.JacksonMapper;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.MediaType;
import io.micronaut.http.MutableHttpRequest;
import io.micronaut.http.client.netty.DefaultHttpClient;
import jakarta.inject.Inject;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
import picocli.CommandLine.Option;
@@ -44,9 +42,6 @@ public class KvUpdateCommand extends AbstractApiCommand {
@Option(names = {"-f", "--file-value"}, description = "The file from which to read the value to set. If this is provided, it will take precedence over any specified value.")
public Path fileValue;
@Inject
private TenantIdSelectorService tenantService;
@Override
public Integer call() throws Exception {
super.call();
@@ -61,7 +56,7 @@ public class KvUpdateCommand extends AbstractApiCommand {
Duration ttl = expiration == null ? null : Duration.parse(expiration);
MutableHttpRequest<String> request = HttpRequest
.PUT(apiUri("/namespaces/", tenantService.getTenantId(tenantId)) + namespace + "/kv/" + key, value)
.PUT(apiUri("/namespaces/") + namespace + "/kv/" + key, value)
.contentType(MediaType.APPLICATION_JSON_TYPE);
if (ttl != null) {

View File

@@ -18,8 +18,6 @@ import java.nio.file.Paths;
import java.util.Base64;
import java.util.List;
import static io.kestra.core.models.Plugin.isDeprecated;
@CommandLine.Command(
name = "doc",
description = "Generate documentation for all plugins currently installed"
@@ -40,9 +38,6 @@ public class PluginDocCommand extends AbstractCommand {
@CommandLine.Option(names = {"--schema"}, description = "Also write JSON Schema for each task")
private boolean schema = false;
@CommandLine.Option(names = {"--skip-deprecated"},description = "Skip deprecated plugins when generating documentations")
private boolean skipDeprecated = false;
@Override
public Integer call() throws Exception {
super.call();
@@ -50,11 +45,6 @@ public class PluginDocCommand extends AbstractCommand {
PluginRegistry registry = pluginRegistryProvider.get();
List<RegisteredPlugin> plugins = core ? registry.plugins() : registry.externalPlugins();
if (skipDeprecated) {
plugins = plugins.stream()
.filter(plugin -> !isDeprecated(plugin.getClass()))
.toList();
}
boolean hasFailures = false;
for (RegisteredPlugin registeredPlugin : plugins) {

View File

@@ -16,6 +16,6 @@ abstract public class AbstractServerCommand extends AbstractCommand implements S
}
protected static int defaultWorkerThread() {
return Runtime.getRuntime().availableProcessors() * 8;
return Runtime.getRuntime().availableProcessors() * 4;
}
}
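For scale, the two defaults shown in this hunk differ by a factor of two: on a machine where `Runtime.getRuntime().availableProcessors()` reports 16 logical cores, the ×4 variant yields 64 worker threads and the ×8 variant yields 128.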

View File

@@ -2,7 +2,6 @@ package io.kestra.cli.commands.servers;
import com.google.common.collect.ImmutableMap;
import io.kestra.cli.services.FileChangedEventListener;
import io.kestra.cli.services.TenantIdSelectorService;
import io.kestra.core.contexts.KestraContext;
import io.kestra.core.models.ServerType;
import io.kestra.core.repositories.LocalFlowRepositoryLoader;
@@ -45,10 +44,7 @@ public class StandAloneCommand extends AbstractServerCommand {
@CommandLine.Option(names = {"-f", "--flow-path"}, description = "the flow path containing flow to inject at startup (when running with a memory flow repository)")
private File flowPath;
@CommandLine.Option(names = "--tenant", description = "Tenant identifier, Required to load flows from path with the enterprise edition")
private String tenantId;
@CommandLine.Option(names = {"--worker-thread"}, description = "the number of worker threads, defaults to eight times the number of available processors. Set it to 0 to avoid starting a worker.")
@CommandLine.Option(names = {"--worker-thread"}, description = "the number of worker threads, defaults to four times the number of available processors. Set it to 0 to avoid starting a worker.")
private int workerThread = defaultWorkerThread();
@CommandLine.Option(names = {"--skip-executions"}, split=",", description = "a list of execution identifiers to skip, separated by a coma; for troubleshooting purpose only")
@@ -102,8 +98,7 @@ public class StandAloneCommand extends AbstractServerCommand {
if (flowPath != null) {
try {
LocalFlowRepositoryLoader localFlowRepositoryLoader = applicationContext.getBean(LocalFlowRepositoryLoader.class);
TenantIdSelectorService tenantIdSelectorService = applicationContext.getBean(TenantIdSelectorService.class);
localFlowRepositoryLoader.load(tenantIdSelectorService.getTenantId(this.tenantId), this.flowPath);
localFlowRepositoryLoader.load(null, this.flowPath);
} catch (IOException e) {
throw new CommandLine.ParameterException(this.spec.commandLine(), "Invalid flow path", e);
}

View File

@@ -22,7 +22,7 @@ public class WorkerCommand extends AbstractServerCommand {
@Inject
private ApplicationContext applicationContext;
@Option(names = {"-t", "--thread"}, description = "The max number of worker threads, defaults to eight times the number of available processors")
@Option(names = {"-t", "--thread"}, description = "The max number of worker threads, defaults to four times the number of available processors")
private int thread = defaultWorkerThread();
@Option(names = {"-g", "--worker-group"}, description = "The worker group key, must match the regex [a-zA-Z0-9_-]+ (EE only)")

View File

@@ -2,8 +2,8 @@ package io.kestra.cli.commands.templates;
import io.kestra.cli.AbstractApiCommand;
import io.kestra.cli.AbstractValidateCommand;
import io.kestra.cli.services.TenantIdSelectorService;
import io.kestra.core.models.templates.TemplateEnabled;
import io.micronaut.context.ApplicationContext;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.HttpResponse;
import io.micronaut.http.MediaType;
@@ -27,8 +27,9 @@ import java.nio.file.Path;
public class TemplateExportCommand extends AbstractApiCommand {
private static final String DEFAULT_FILE_NAME = "templates.zip";
// @FIXME: Keep it for a bug in Micronaut that requires @Inject on the top-level command so injection works on the abstract class
@Inject
private TenantIdSelectorService tenantService;
private ApplicationContext applicationContext;
@CommandLine.Option(names = {"--namespace"}, description = "The namespace of templates to export")
public String namespace;
@@ -42,7 +43,7 @@ public class TemplateExportCommand extends AbstractApiCommand {
try(DefaultHttpClient client = client()) {
MutableHttpRequest<Object> request = HttpRequest
.GET(apiUri("/templates/export/by-query", tenantService.getTenantId(tenantId)) + (namespace != null ? "?namespace=" + namespace : ""))
.GET(apiUri("/templates/export/by-query") + (namespace != null ? "?namespace=" + namespace : ""))
.accept(MediaType.APPLICATION_OCTET_STREAM);
HttpResponse<byte[]> response = client.toBlocking().exchange(this.requestOptions(request), byte[].class);

View File

@@ -2,7 +2,6 @@ package io.kestra.cli.commands.templates.namespaces;
import io.kestra.cli.AbstractValidateCommand;
import io.kestra.cli.commands.AbstractServiceNamespaceUpdateCommand;
import io.kestra.cli.services.TenantIdSelectorService;
import io.kestra.core.models.templates.Template;
import io.kestra.core.models.templates.TemplateEnabled;
import io.kestra.core.serializers.YamlParser;
@@ -11,7 +10,6 @@ import io.micronaut.http.HttpRequest;
import io.micronaut.http.MutableHttpRequest;
import io.micronaut.http.client.exceptions.HttpClientResponseException;
import io.micronaut.http.client.netty.DefaultHttpClient;
import jakarta.inject.Inject;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
@@ -29,9 +27,6 @@ import jakarta.validation.ConstraintViolationException;
@TemplateEnabled
public class TemplateNamespaceUpdateCommand extends AbstractServiceNamespaceUpdateCommand {
@Inject
private TenantIdSelectorService tenantService;
@Override
public Integer call() throws Exception {
super.call();
@@ -49,7 +44,7 @@ public class TemplateNamespaceUpdateCommand extends AbstractServiceNamespaceUpda
try (DefaultHttpClient client = client()) {
MutableHttpRequest<List<Template>> request = HttpRequest
.POST(apiUri("/templates/", tenantService.getTenantId(tenantId)) + namespace + "?delete=" + delete, templates);
.POST(apiUri("/templates/") + namespace + "?delete=" + delete, templates);
List<UpdateResult> updated = client.toBlocking().retrieve(
this.requestOptions(request),

View File

@@ -10,21 +10,24 @@ import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.services.FlowListenersInterface;
import io.kestra.core.services.PluginDefaultService;
import io.micronaut.context.annotation.Requires;
import io.micronaut.context.annotation.Value;
import io.micronaut.scheduling.io.watch.FileWatchConfiguration;
import jakarta.annotation.Nullable;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import jakarta.validation.ConstraintViolationException;
import java.util.concurrent.CopyOnWriteArrayList;
import lombok.extern.slf4j.Slf4j;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.file.*;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
@Singleton
@Slf4j
@Requires(property = "micronaut.io.watch.enabled", value = "true")
@@ -46,9 +49,13 @@ public class FileChangedEventListener {
@Inject
protected FlowListenersInterface flowListeners;
@Nullable
@Value("${micronaut.io.watch.tenantId}")
private String tenantId;
FlowFilesManager flowFilesManager;
private List<FlowWithPath> flows = new CopyOnWriteArrayList<>();
private List<FlowWithPath> flows = new ArrayList<>();
private boolean isStarted = false;
@@ -106,6 +113,8 @@ public class FileChangedEventListener {
}
public void startListening(List<Path> paths) throws IOException, InterruptedException {
String tenantId = this.tenantId != null ? this.tenantId : MAIN_TENANT;
for (Path path : paths) {
path.register(watchService, StandardWatchEventKinds.ENTRY_CREATE, StandardWatchEventKinds.ENTRY_DELETE, StandardWatchEventKinds.ENTRY_MODIFY);
}
@@ -148,20 +157,12 @@ public class FileChangedEventListener {
flows.add(FlowWithPath.of(flow.get(), filePath.toString()));
}
flowFilesManager.createOrUpdateFlow(GenericFlow.fromYaml(getTenantIdFromPath(filePath), content));
flowFilesManager.createOrUpdateFlow(GenericFlow.fromYaml(tenantId, content));
log.info("Flow {} from file {} has been created or modified", flow.get().getId(), entry);
}
} catch (NoSuchFileException e) {
log.warn("File not found: {}, deleting it", entry, e);
// the file might have been deleted while reading so if not found we try to delete the flow
flows.stream()
.filter(flow -> flow.getPath().equals(filePath.toString()))
.findFirst()
.ifPresent(flowWithPath -> {
flowFilesManager.deleteFlow(flowWithPath.getTenantId(), flowWithPath.getNamespace(), flowWithPath.getId());
this.flows.removeIf(fwp -> fwp.uidWithoutRevision().equals(flowWithPath.uidWithoutRevision()));
});
log.error("File not found: {}", entry, e);
} catch (IOException e) {
log.error("Error reading file: {}", entry, e);
}
@@ -192,6 +193,8 @@ public class FileChangedEventListener {
}
private void loadFlowsFromFolder(Path folder) {
String tenantId = this.tenantId != null ? this.tenantId : MAIN_TENANT;
try {
Files.walkFileTree(folder, new SimpleFileVisitor<Path>() {
@Override
@@ -211,7 +214,7 @@ public class FileChangedEventListener {
if (flow.isPresent() && flows.stream().noneMatch(flowWithPath -> flowWithPath.uidWithoutRevision().equals(flow.get().uidWithoutRevision()))) {
flows.add(FlowWithPath.of(flow.get(), file.toString()));
flowFilesManager.createOrUpdateFlow(GenericFlow.fromYaml(getTenantIdFromPath(file), content));
flowFilesManager.createOrUpdateFlow(GenericFlow.fromYaml(tenantId, content));
}
}
return FileVisitResult.CONTINUE;
@@ -235,8 +238,10 @@ public class FileChangedEventListener {
}
private Optional<FlowWithSource> parseFlow(String content, Path entry) {
String tenantId = this.tenantId != null ? this.tenantId : MAIN_TENANT;
try {
FlowWithSource flow = pluginDefaultService.parseFlowWithAllDefaults(getTenantIdFromPath(entry), content, false);
FlowWithSource flow = pluginDefaultService.parseFlowWithAllDefaults(tenantId, content, false);
modelValidator.validate(flow);
return Optional.of(flow);
} catch (ConstraintViolationException | FlowProcessingException e) {
@@ -260,8 +265,4 @@ public class FileChangedEventListener {
private Path buildPath(FlowInterface flow) {
return fileWatchConfiguration.getPaths().getFirst().resolve(flow.uidWithoutRevision() + ".yml");
}
private String getTenantIdFromPath(Path path) {
return path.getFileName().toString().split("_")[0];
}
}
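For readers comparing the two tenant resolutions in this hunk (file-name prefix vs. configured property), a small hedged sketch; the `"main"` fallback and the underscore-prefix convention are taken from the surrounding diff, while the class and method names are made up:

```java
import java.nio.file.Path;

// Illustration only, not the listener itself.
class TenantResolutionSketch {
    static final String MAIN_TENANT = "main"; // assumed default tenant id

    // Variant A: tenant id encoded as the file-name prefix before '_'
    static String tenantFromPath(Path path) {
        return path.getFileName().toString().split("_")[0];
    }

    // Variant B: tenant id injected from micronaut.io.watch.tenantId, defaulting to MAIN_TENANT
    static String tenantFromConfig(String configuredTenantId) {
        return configuredTenantId != null ? configuredTenantId : MAIN_TENANT;
    }

    public static void main(String[] args) {
        System.out.println(tenantFromPath(Path.of("acme_io.kestra.tests.watch_myflow.yml"))); // acme
        System.out.println(tenantFromConfig(null));                                           // main
    }
}
```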

View File

@@ -1,19 +0,0 @@
package io.kestra.cli.services;
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
import io.kestra.core.exceptions.KestraRuntimeException;
import jakarta.inject.Singleton;
import org.apache.commons.lang3.StringUtils;
@Singleton
public class TenantIdSelectorService {
//For override purpose in Kestra EE
public String getTenantId(String tenantId) {
if (StringUtils.isNotBlank(tenantId) && !MAIN_TENANT.equals(tenantId)){
throw new KestraRuntimeException("Tenant id can only be 'main'");
}
return MAIN_TENANT;
}
}
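For context on the deletion above: the removed service returned `MAIN_TENANT` for a null, blank, or `main` argument and threw `KestraRuntimeException("Tenant id can only be 'main'")` for any other non-blank value, which is the behavior exercised by the tests removed further down.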

View File

@@ -27,7 +27,6 @@ micronaut:
write-idle-timeout: 60m
idle-timeout: 60m
netty:
max-zstd-encode-size: 67108864 # increased to 64MB from the default of 32MB
max-chunk-size: 10MB
max-header-size: 32768 # increased from the default of 8k
responses:
@@ -184,6 +183,7 @@ kestra:
server:
basic-auth:
enabled: false
# These URLs will not be authenticated, by default we open some of the Micronaut default endpoints but not all for security reasons
open-urls:
- "/ping"
@@ -212,7 +212,7 @@ kestra:
retention: 30d
anonymous-usage-report:
enabled: true
uri: https://api.kestra.io/v1/server-events/
uri: https://api.kestra.io/v1/reports/usages
initial-delay: 5m
fixed-delay: 1h
@@ -228,4 +228,4 @@ otel:
- /health
- /env
- /prometheus
propagators: tracecontext, baggage
propagators: tracecontext, baggage

View File

@@ -1,4 +1,4 @@
package io.kestra.core.validations;
package io.kestra.cli;
import io.micronaut.context.ApplicationContext;
import io.micronaut.context.exceptions.BeanInstantiationException;

View File

@@ -108,34 +108,6 @@ class FlowCreateOrUpdateCommandTest {
}
}
@Test
void should_fail_with_incorrect_tenant() {
URL directory = FlowCreateOrUpdateCommandTest.class.getClassLoader().getResource("flows");
ByteArrayOutputStream err = new ByteArrayOutputStream();
System.setErr(new PrintStream(err));
try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, Environment.TEST)) {
EmbeddedServer embeddedServer = ctx.getBean(EmbeddedServer.class);
embeddedServer.start();
String[] args = {
"--server",
embeddedServer.getURL().toString(),
"--user",
"myuser:pass:word",
"--tenant", "incorrect",
directory.getPath(),
};
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
assertThat(err.toString()).contains("Tenant id can only be 'main'");
err.reset();
}
}
@Test
void helper() {
URL directory = FlowCreateOrUpdateCommandTest.class.getClassLoader().getResource("helper");

View File

@@ -1,33 +0,0 @@
package io.kestra.cli.commands.servers;
import static org.assertj.core.api.Assertions.assertThat;
import io.kestra.cli.App;
import io.micronaut.configuration.picocli.PicocliRunner;
import io.micronaut.context.ApplicationContext;
import io.micronaut.context.env.Environment;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import org.junit.jupiter.api.Test;
public class TenantIdSelectorServiceTest {
@Test
void should_fail_without_tenant_id() {
ByteArrayOutputStream err = new ByteArrayOutputStream();
System.setErr(new PrintStream(err));
try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, Environment.TEST)) {
String[] start = {
"server", "standalone",
"-f", "unused",
"--tenant", "wrong_tenant"
};
PicocliRunner.call(App.class, ctx, start);
assertThat(err.toString()).contains("Tenant id can only be 'main'");
err.reset();
}
}
}

View File

@@ -1,7 +1,6 @@
package io.kestra.cli.services;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.utils.Await;
import io.micronaut.test.extensions.junit5.annotation.MicronautTest;
@@ -72,9 +71,7 @@ class FileChangedEventListenerTest {
type: io.kestra.plugin.core.log.Log
message: Hello World! 🚀
""";
GenericFlow genericFlow = GenericFlow.fromYaml(MAIN_TENANT, flow);
Files.write(Path.of(FILE_WATCH + "/" + genericFlow.uidWithoutRevision() + ".yaml"), flow.getBytes());
Files.write(Path.of(FILE_WATCH + "/myflow.yaml"), flow.getBytes());
Await.until(
() -> flowRepository.findById(MAIN_TENANT, "io.kestra.tests.watch", "myflow").isPresent(),
Duration.ofMillis(100),
@@ -86,7 +83,7 @@ class FileChangedEventListenerTest {
assertThat(myflow.getTasks().getFirst().getType()).isEqualTo("io.kestra.plugin.core.log.Log");
// delete the flow
Files.delete(Path.of(FILE_WATCH + "/" + genericFlow.uidWithoutRevision() + ".yaml"));
Files.delete(Path.of(FILE_WATCH + "/myflow.yaml"));
Await.until(
() -> flowRepository.findById(MAIN_TENANT, "io.kestra.tests.watch", "myflow").isEmpty(),
Duration.ofMillis(100),
@@ -113,8 +110,7 @@ class FileChangedEventListenerTest {
values:
message: Hello World!
""";
GenericFlow genericFlow = GenericFlow.fromYaml(MAIN_TENANT, pluginDefault);
Files.write(Path.of(FILE_WATCH + "/" + genericFlow.uidWithoutRevision() + ".yaml"), pluginDefault.getBytes());
Files.write(Path.of(FILE_WATCH + "/plugin-default.yaml"), pluginDefault.getBytes());
Await.until(
() -> flowRepository.findById(MAIN_TENANT, "io.kestra.tests.watch", "pluginDefault").isPresent(),
Duration.ofMillis(100),
@@ -126,7 +122,7 @@ class FileChangedEventListenerTest {
assertThat(pluginDefaultFlow.getTasks().getFirst().getType()).isEqualTo("io.kestra.plugin.core.log.Log");
// delete both files
Files.delete(Path.of(FILE_WATCH + "/" + genericFlow.uidWithoutRevision() + ".yaml"));
Files.delete(Path.of(FILE_WATCH + "/plugin-default.yaml"));
Await.until(
() -> flowRepository.findById(MAIN_TENANT, "io.kestra.tests.watch", "pluginDefault").isEmpty(),
Duration.ofMillis(100),

View File

@@ -17,7 +17,7 @@ kestra:
central:
url: https://repo.maven.apache.org/maven2/
sonatype:
url: https://central.sonatype.com/repository/maven-snapshots/
url: https://s01.oss.sonatype.org/content/repositories/snapshots/
server:
liveness:
enabled: false

View File

@@ -56,23 +56,21 @@ component_management:
name: Tests
paths:
- tests/**
- component_id: ui
name: Ui
paths:
- ui/**
- component_id: webserver
name: Webserver
paths:
- webserver/**
ignore:
- ui/**
# we are not yet mature enough to have UI code coverage
flag_management:
default_rules:
carryforward: true
statuses:
- type: project
target: 70%
threshold: 10%
target: 80%
threshold: 1%
- type: patch
target: 75%
threshold: 10%
target: 90%

View File

@@ -37,7 +37,6 @@ dependencies {
implementation 'nl.basjes.gitignore:gitignore-reader'
implementation group: 'dev.failsafe', name: 'failsafe'
implementation 'com.github.ben-manes.caffeine:caffeine'
implementation 'com.github.ksuid:ksuid:1.1.3'
api 'org.apache.httpcomponents.client5:httpclient5'
// plugins
@@ -75,9 +74,7 @@ dependencies {
testImplementation "io.micronaut:micronaut-http-server-netty"
testImplementation "io.micronaut:micronaut-management"
testImplementation "org.testcontainers:testcontainers:1.21.3"
testImplementation "org.testcontainers:junit-jupiter:1.21.3"
testImplementation "org.testcontainers:testcontainers:1.21.1"
testImplementation "org.testcontainers:junit-jupiter:1.21.1"
testImplementation "org.bouncycastle:bcpkix-jdk18on:1.81"
testImplementation "org.wiremock:wiremock-jetty12"
}

View File

@@ -1,26 +0,0 @@
package io.kestra.core.debug;
import jakarta.annotation.Nullable;
import jakarta.validation.constraints.NotNull;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
@AllArgsConstructor
@NoArgsConstructor
@Getter
public class Breakpoint {
@NotNull
private String id;
@Nullable
private String value;
public static Breakpoint of(String breakpoint) {
if (breakpoint.indexOf('.') > 0) {
return new Breakpoint(breakpoint.substring(0, breakpoint.indexOf('.')), breakpoint.substring(breakpoint.indexOf('.') + 1));
} else {
return new Breakpoint(breakpoint, null);
}
}
}
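A quick worked example of the removed `Breakpoint.of` parsing (the inputs are made up): it splits on the first dot, so `Breakpoint.of("mytask.2")` yields id `mytask` and value `2`, `Breakpoint.of("a.b.c")` yields id `a` and value `b.c`, and `Breakpoint.of("mytask")` yields id `mytask` with a null value.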

View File

@@ -6,17 +6,14 @@ import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.ToString;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
@Getter
@EqualsAndHashCode
@ToString
public class ClassPluginDocumentation<T> extends AbstractClassDocumentation<T> {
private static final Map<PluginDocIdentifier, ClassPluginDocumentation<?>> CACHE = new ConcurrentHashMap<>();
private String icon;
private String group;
protected String docLicense;
@@ -81,12 +78,8 @@ public class ClassPluginDocumentation<T> extends AbstractClassDocumentation<T> {
}
}
public static <T> ClassPluginDocumentation<T> of(JsonSchemaGenerator jsonSchemaGenerator, PluginClassAndMetadata<T> plugin, String version, boolean allProperties) {
//noinspection unchecked
return (ClassPluginDocumentation<T>) CACHE.computeIfAbsent(
new PluginDocIdentifier(plugin.type(), version, allProperties),
(key) -> new ClassPluginDocumentation<>(jsonSchemaGenerator, plugin, allProperties)
);
public static <T> ClassPluginDocumentation<T> of(JsonSchemaGenerator jsonSchemaGenerator, PluginClassAndMetadata<T> plugin, boolean allProperties) {
return new ClassPluginDocumentation<>(jsonSchemaGenerator, plugin, allProperties);
}
@AllArgsConstructor
@@ -97,11 +90,5 @@ public class ClassPluginDocumentation<T> extends AbstractClassDocumentation<T> {
String unit;
String description;
}
private record PluginDocIdentifier(String pluginClassAndVersion, boolean allProperties) {
public PluginDocIdentifier(Class<?> pluginClass, String version, boolean allProperties) {
this(pluginClass.getName() + ":" + version, allProperties);
}
}
}

View File

@@ -227,7 +227,7 @@ public class DocumentationGenerator {
baseCls,
null
);
return ClassPluginDocumentation.of(jsonSchemaGenerator, metadata, registeredPlugin.version(), true);
return ClassPluginDocumentation.of(jsonSchemaGenerator, metadata, true);
})
.map(pluginDocumentation -> {
try {

View File

@@ -24,7 +24,6 @@ public class JsonSchemaCache {
private final JsonSchemaGenerator jsonSchemaGenerator;
private final ConcurrentMap<CacheKey, Map<String, Object>> schemaCache = new ConcurrentHashMap<>();
private final ConcurrentMap<SchemaType, Map<String, Object>> propertiesCache = new ConcurrentHashMap<>();
private final Map<SchemaType, Class<?>> classesBySchemaType = new HashMap<>();
@@ -45,7 +44,7 @@ public class JsonSchemaCache {
public Map<String, Object> getSchemaForType(final SchemaType type,
final boolean arrayOf) {
return schemaCache.computeIfAbsent(new CacheKey(type, arrayOf), key -> {
return schemaCache.computeIfAbsent(new CacheKey(type, arrayOf), (key) -> {
Class<?> cls = Optional.ofNullable(classesBySchemaType.get(type))
.orElseThrow(() -> new IllegalArgumentException("Cannot found schema for type '" + type + "'"));
@@ -53,16 +52,6 @@ public class JsonSchemaCache {
});
}
public Map<String, Object> getPropertiesForType(final SchemaType type) {
return propertiesCache.computeIfAbsent(type, key -> {
Class<?> cls = Optional.ofNullable(classesBySchemaType.get(type))
.orElseThrow(() -> new IllegalArgumentException("Cannot found properties for type '" + type + "'"));
return jsonSchemaGenerator.properties(null, cls);
});
}
// must be public as it's used in EE
public void registerClassForType(final SchemaType type, final Class<?> clazz) {
classesBySchemaType.put(type, clazz);
}

View File

@@ -53,8 +53,6 @@ import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
import static io.kestra.core.docs.AbstractClassDocumentation.flattenWithoutType;
import static io.kestra.core.docs.AbstractClassDocumentation.required;
import static io.kestra.core.serializers.JacksonMapper.MAP_TYPE_REFERENCE;
@Singleton
@@ -90,20 +88,12 @@ public class JsonSchemaGenerator {
}
public <T> Map<String, Object> schemas(Class<? extends T> cls, boolean arrayOf) {
return this.schemas(cls, arrayOf, Collections.emptyList());
}
public <T> Map<String, Object> schemas(Class<? extends T> cls, boolean arrayOf, List<String> allowedPluginTypes) {
return this.schemas(cls, arrayOf, allowedPluginTypes, false);
}
public <T> Map<String, Object> schemas(Class<? extends T> cls, boolean arrayOf, List<String> allowedPluginTypes, boolean withOutputs) {
SchemaGeneratorConfigBuilder builder = new SchemaGeneratorConfigBuilder(
SchemaVersion.DRAFT_7,
OptionPreset.PLAIN_JSON
);
this.build(builder, true, allowedPluginTypes, withOutputs);
this.build(builder, true);
SchemaGeneratorConfig schemaGeneratorConfig = builder.build();
@@ -128,13 +118,12 @@ public class JsonSchemaGenerator {
if (jsonNode instanceof ObjectNode clazzSchema && clazzSchema.get("required") instanceof ArrayNode requiredPropsNode && clazzSchema.get("properties") instanceof ObjectNode properties) {
List<String> requiredFieldValues = StreamSupport.stream(requiredPropsNode.spliterator(), false)
.map(JsonNode::asText)
.collect(Collectors.toList());
.toList();
properties.fields().forEachRemaining(e -> {
int indexInRequiredArray = requiredFieldValues.indexOf(e.getKey());
if (indexInRequiredArray != -1 && e.getValue() instanceof ObjectNode valueNode && valueNode.has("default")) {
requiredPropsNode.remove(indexInRequiredArray);
requiredFieldValues.remove(indexInRequiredArray);
}
});
@@ -251,14 +240,6 @@ public class JsonSchemaGenerator {
}
protected void build(SchemaGeneratorConfigBuilder builder, boolean draft7) {
this.build(builder, draft7, Collections.emptyList());
}
protected void build(SchemaGeneratorConfigBuilder builder, boolean draft7, List<String> allowedPluginTypes) {
this.build(builder, draft7, allowedPluginTypes, false);
}
protected void build(SchemaGeneratorConfigBuilder builder, boolean draft7, List<String> allowedPluginTypes, boolean withOutputs) {
// builder.withObjectMapper(builder.getObjectMapper().configure(SerializationFeature.WRITE_DURATIONS_AS_TIMESTAMPS, false));
builder
.with(new JakartaValidationModule(
@@ -440,13 +421,6 @@ public class JsonSchemaGenerator {
if (pluginAnnotation.beta()) {
collectedTypeAttributes.put("$beta", true);
}
if (withOutputs) {
Map<String, Object> outputsSchema = this.outputs(null, scope.getType().getErasedType());
collectedTypeAttributes.set("outputs", context.getGeneratorConfig().createObjectNode().pojoNode(
flattenWithoutType(AbstractClassDocumentation.properties(outputsSchema), required(outputsSchema))
));
}
}
// handle deprecated tasks
@@ -482,7 +456,7 @@ public class JsonSchemaGenerator {
.withSubtypeResolver((declaredType, context) -> {
TypeContext typeContext = context.getTypeContext();
return this.subtypeResolver(declaredType, typeContext, allowedPluginTypes);
return this.subtypeResolver(declaredType, typeContext);
});
// description as Markdown
@@ -559,7 +533,7 @@ public class JsonSchemaGenerator {
return null;
}
return this.subtypeResolver(declaredType, typeContext, allowedPluginTypes);
return this.subtypeResolver(declaredType, typeContext);
});
}
@@ -642,12 +616,11 @@ public class JsonSchemaGenerator {
return false;
}
protected List<ResolvedType> subtypeResolver(ResolvedType declaredType, TypeContext typeContext, List<String> allowedPluginTypes) {
protected List<ResolvedType> subtypeResolver(ResolvedType declaredType, TypeContext typeContext) {
if (declaredType.getErasedType() == Task.class) {
return getRegisteredPlugins()
.stream()
.flatMap(registeredPlugin -> registeredPlugin.getTasks().stream())
.filter(p -> allowedPluginTypes.isEmpty() || allowedPluginTypes.contains(p.getName()))
.filter(Predicate.not(io.kestra.core.models.Plugin::isInternal))
.flatMap(clz -> safelyResolveSubtype(declaredType, clz, typeContext).stream())
.toList();
@@ -655,7 +628,6 @@ public class JsonSchemaGenerator {
return getRegisteredPlugins()
.stream()
.flatMap(registeredPlugin -> registeredPlugin.getTriggers().stream())
.filter(p -> allowedPluginTypes.isEmpty() || allowedPluginTypes.contains(p.getName()))
.filter(Predicate.not(io.kestra.core.models.Plugin::isInternal))
.flatMap(clz -> safelyResolveSubtype(declaredType, clz, typeContext).stream())
.toList();
@@ -663,7 +635,6 @@ public class JsonSchemaGenerator {
return getRegisteredPlugins()
.stream()
.flatMap(registeredPlugin -> registeredPlugin.getConditions().stream())
.filter(p -> allowedPluginTypes.isEmpty() || allowedPluginTypes.contains(p.getName()))
.filter(Predicate.not(io.kestra.core.models.Plugin::isInternal))
.flatMap(clz -> safelyResolveSubtype(declaredType, clz, typeContext).stream())
.toList();
@@ -672,7 +643,6 @@ public class JsonSchemaGenerator {
.stream()
.flatMap(registeredPlugin -> registeredPlugin.getConditions().stream())
.filter(ScheduleCondition.class::isAssignableFrom)
.filter(p -> allowedPluginTypes.isEmpty() || allowedPluginTypes.contains(p.getName()))
.filter(Predicate.not(io.kestra.core.models.Plugin::isInternal))
.flatMap(clz -> safelyResolveSubtype(declaredType, clz, typeContext).stream())
.toList();
@@ -680,7 +650,6 @@ public class JsonSchemaGenerator {
return getRegisteredPlugins()
.stream()
.flatMap(registeredPlugin -> registeredPlugin.getTaskRunners().stream())
.filter(p -> allowedPluginTypes.isEmpty() || allowedPluginTypes.contains(p.getName()))
.filter(Predicate.not(io.kestra.core.models.Plugin::isInternal))
.map(typeContext::resolve)
.toList();
@@ -688,7 +657,6 @@ public class JsonSchemaGenerator {
return getRegisteredPlugins()
.stream()
.flatMap(registeredPlugin -> registeredPlugin.getLogExporters().stream())
.filter(p -> allowedPluginTypes.isEmpty() || allowedPluginTypes.contains(p.getName()))
.filter(Predicate.not(io.kestra.core.models.Plugin::isInternal))
.map(typeContext::resolve)
.toList();
@@ -698,7 +666,6 @@ public class JsonSchemaGenerator {
.flatMap(registeredPlugin -> registeredPlugin.getAdditionalPlugins().stream())
// for additional plugins, we have one subtype by type of additional plugins (for ex: embedding store for Langchain4J), so we need to filter on the correct subtype
.filter(cls -> declaredType.getErasedType().isAssignableFrom(cls))
.filter(p -> allowedPluginTypes.isEmpty() || allowedPluginTypes.contains(p.getName()))
.filter(cls -> cls != declaredType.getErasedType())
.filter(Predicate.not(io.kestra.core.models.Plugin::isInternal))
.map(typeContext::resolve)
@@ -707,7 +674,6 @@ public class JsonSchemaGenerator {
return getRegisteredPlugins()
.stream()
.flatMap(registeredPlugin -> registeredPlugin.getCharts().stream())
.filter(p -> allowedPluginTypes.isEmpty() || allowedPluginTypes.contains(p.getName()))
.filter(Predicate.not(io.kestra.core.models.Plugin::isInternal))
.<ResolvedType>mapMulti((clz, consumer) -> {
if (DataChart.class.isAssignableFrom(clz)) {
@@ -774,16 +740,12 @@ public class JsonSchemaGenerator {
}
protected <T> Map<String, Object> generate(Class<? extends T> cls, @Nullable Class<T> base) {
return this.generate(cls, base, Collections.emptyList());
}
protected <T> Map<String, Object> generate(Class<? extends T> cls, @Nullable Class<T> base, List<String> allowedPluginTypes) {
SchemaGeneratorConfigBuilder builder = new SchemaGeneratorConfigBuilder(
SchemaVersion.DRAFT_2019_09,
OptionPreset.PLAIN_JSON
);
this.build(builder, false, allowedPluginTypes);
this.build(builder, false);
// we don't return base properties unless specified with @PluginProperty and hidden is false
builder
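Since the same allow-list predicate recurs in every resolver branch above, a minimal sketch of its semantics — an empty list means no restriction, a non-empty list restricts to the named types. The sketch works on plain type names rather than the registry's classes:

```java
import java.util.List;
import java.util.function.Predicate;

class AllowListSketch {
    // Empty allow-list means "no restriction": every plugin type name passes the filter.
    static Predicate<String> allowed(List<String> allowedPluginTypes) {
        return name -> allowedPluginTypes.isEmpty() || allowedPluginTypes.contains(name);
    }

    public static void main(String[] args) {
        String log = "io.kestra.plugin.core.log.Log";
        System.out.println(allowed(List.of()).test(log));                                      // true
        System.out.println(allowed(List.of(log)).test(log));                                   // true
        System.out.println(allowed(List.of("io.kestra.plugin.core.http.Request")).test(log));  // false
    }
}
```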

View File

@@ -23,25 +23,29 @@ public class Plugin {
private String group;
private String version;
private Map<String, String> manifest;
private List<String> tasks;
private List<String> triggers;
private List<String> conditions;
private List<String> controllers;
private List<String> storages;
private List<String> secrets;
private List<String> taskRunners;
private List<String> guides;
private List<String> aliases;
private List<PluginElementMetadata> tasks;
private List<PluginElementMetadata> triggers;
private List<PluginElementMetadata> conditions;
private List<PluginElementMetadata> controllers;
private List<PluginElementMetadata> storages;
private List<PluginElementMetadata> secrets;
private List<PluginElementMetadata> taskRunners;
private List<PluginElementMetadata> apps;
private List<PluginElementMetadata> appBlocks;
private List<PluginElementMetadata> charts;
private List<PluginElementMetadata> dataFilters;
private List<PluginElementMetadata> logExporters;
private List<PluginElementMetadata> additionalPlugins;
private List<String> apps;
private List<String> appBlocks;
private List<String> charts;
private List<String> dataFilters;
private List<String> logExporters;
private List<String> additionalPlugins;
private List<PluginSubGroup.PluginCategory> categories;
private String subGroup;
public static Plugin of(RegisteredPlugin registeredPlugin, @Nullable String subgroup) {
return Plugin.of(registeredPlugin, subgroup, true);
}
public static Plugin of(RegisteredPlugin registeredPlugin, @Nullable String subgroup, boolean includeDeprecated) {
Plugin plugin = new Plugin();
plugin.name = registeredPlugin.name();
PluginSubGroup subGroupInfos = null;
@@ -86,18 +90,18 @@ public class Plugin {
plugin.subGroup = subgroup;
Predicate<Class<?>> packagePredicate = c -> subgroup == null || c.getPackageName().equals(subgroup);
plugin.tasks = filterAndGetTypeWithMetadata(registeredPlugin.getTasks(), packagePredicate);
plugin.triggers = filterAndGetTypeWithMetadata(registeredPlugin.getTriggers(), packagePredicate);
plugin.conditions = filterAndGetTypeWithMetadata(registeredPlugin.getConditions(), packagePredicate);
plugin.storages = filterAndGetTypeWithMetadata(registeredPlugin.getStorages(), packagePredicate);
plugin.secrets = filterAndGetTypeWithMetadata(registeredPlugin.getSecrets(), packagePredicate);
plugin.taskRunners = filterAndGetTypeWithMetadata(registeredPlugin.getTaskRunners(), packagePredicate);
plugin.apps = filterAndGetTypeWithMetadata(registeredPlugin.getApps(), packagePredicate);
plugin.appBlocks = filterAndGetTypeWithMetadata(registeredPlugin.getAppBlocks(), packagePredicate);
plugin.charts = filterAndGetTypeWithMetadata(registeredPlugin.getCharts(), packagePredicate);
plugin.dataFilters = filterAndGetTypeWithMetadata(registeredPlugin.getDataFilters(), packagePredicate);
plugin.logExporters = filterAndGetTypeWithMetadata(registeredPlugin.getLogExporters(), packagePredicate);
plugin.additionalPlugins = filterAndGetTypeWithMetadata(registeredPlugin.getAdditionalPlugins(), packagePredicate);
plugin.tasks = filterAndGetClassName(registeredPlugin.getTasks(), includeDeprecated, packagePredicate);
plugin.triggers = filterAndGetClassName(registeredPlugin.getTriggers(), includeDeprecated, packagePredicate);
plugin.conditions = filterAndGetClassName(registeredPlugin.getConditions(), includeDeprecated, packagePredicate);
plugin.storages = filterAndGetClassName(registeredPlugin.getStorages(), includeDeprecated, packagePredicate);
plugin.secrets = filterAndGetClassName(registeredPlugin.getSecrets(), includeDeprecated, packagePredicate);
plugin.taskRunners = filterAndGetClassName(registeredPlugin.getTaskRunners(), includeDeprecated, packagePredicate);
plugin.apps = filterAndGetClassName(registeredPlugin.getApps(), includeDeprecated, packagePredicate);
plugin.appBlocks = filterAndGetClassName(registeredPlugin.getAppBlocks(), includeDeprecated, packagePredicate);
plugin.charts = filterAndGetClassName(registeredPlugin.getCharts(), includeDeprecated, packagePredicate);
plugin.dataFilters = filterAndGetClassName(registeredPlugin.getDataFilters(), includeDeprecated, packagePredicate);
plugin.logExporters = filterAndGetClassName(registeredPlugin.getLogExporters(), includeDeprecated, packagePredicate);
plugin.additionalPlugins = filterAndGetClassName(registeredPlugin.getAdditionalPlugins(), includeDeprecated, packagePredicate);
return plugin;
}
@@ -107,18 +111,17 @@ public class Plugin {
* Those classes are only filtered from the documentation to ensure backward compatibility.
*
* @param list The list of classes.
* @param includeDeprecated whether to include deprecated plugins or not
* @return a filtered streams.
*/
private static List<PluginElementMetadata> filterAndGetTypeWithMetadata(final List<? extends Class<?>> list, Predicate<Class<?>> clazzFilter) {
private static List<String> filterAndGetClassName(final List<? extends Class<?>> list, boolean includeDeprecated, Predicate<Class<?>> clazzFilter) {
return list
.stream()
.filter(not(io.kestra.core.models.Plugin::isInternal))
.filter(p -> includeDeprecated || !io.kestra.core.models.Plugin.isDeprecated(p))
.filter(clazzFilter)
.filter(c -> !c.getName().startsWith("org.kestra."))
.map(c -> new PluginElementMetadata(c.getName(), io.kestra.core.models.Plugin.isDeprecated(c) ? true : null))
.map(Class::getName)
.filter(c -> !c.startsWith("org.kestra."))
.toList();
}
public record PluginElementMetadata(String cls, Boolean deprecated) {
}
}
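A hedged sketch of the metadata mapping on one side of this hunk: each plugin class becomes a `(className, deprecated)` pair, with `deprecated` left null rather than false so it can be omitted when serialized. `isDeprecated` below is a simplified stand-in for `io.kestra.core.models.Plugin.isDeprecated`:

```java
import java.util.List;

class PluginMetadataSketch {
    record PluginElementMetadata(String cls, Boolean deprecated) {}

    // Simplified stand-in: only looks at the @Deprecated annotation on the class.
    static boolean isDeprecated(Class<?> cls) {
        return cls.isAnnotationPresent(Deprecated.class);
    }

    static List<PluginElementMetadata> toMetadata(List<? extends Class<?>> classes) {
        return classes.stream()
            .map(c -> new PluginElementMetadata(c.getName(), isDeprecated(c) ? true : null))
            .toList();
    }

    public static void main(String[] args) {
        // Neither class is deprecated, so both entries carry deprecated=null.
        System.out.println(toMetadata(List.of(String.class, Thread.class)));
    }
}
```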

View File

@@ -1,25 +0,0 @@
package io.kestra.core.exceptions;
/**
* General exception that can be thrown when an AI service replies with an error.
* When propagated in the context of a REST API call, this exception should
* result in an HTTP 422 UNPROCESSABLE_ENTITY response.
*/
public class AiException extends KestraRuntimeException {
/**
* Creates a new {@link AiException} instance.
*/
public AiException() {
super();
}
/**
* Creates a new {@link AiException} instance.
*
* @param aiErrorMessage the AI error message.
*/
public AiException(final String aiErrorMessage) {
super(aiErrorMessage);
}
}

View File

@@ -1,35 +0,0 @@
package io.kestra.core.exceptions;
import java.io.Serial;
import java.util.List;
import lombok.Getter;
/**
* General exception that can be thrown when a resource fails validation.
*/
public class ValidationErrorException extends KestraRuntimeException {
@Serial
private static final long serialVersionUID = 1L;
private static final String VALIDATION_ERROR_MESSAGE = "Resource fails validation";
@Getter
private transient final List<String> invalids;
/**
* Creates a new {@link ValidationErrorException} instance.
*
* @param invalids the invalid filters.
*/
public ValidationErrorException(final List<String> invalids) {
super(VALIDATION_ERROR_MESSAGE);
this.invalids = invalids;
}
public String formatedInvalidObjects(){
if (invalids == null || invalids.isEmpty()){
return VALIDATION_ERROR_MESSAGE;
}
return String.join(", ", invalids);
}
}

View File

@@ -8,7 +8,6 @@ import io.kestra.core.http.client.apache.*;
import io.kestra.core.http.client.configurations.HttpConfiguration;
import io.kestra.core.runners.RunContext;
import io.kestra.core.serializers.JacksonMapper;
import io.micronaut.http.MediaType;
import jakarta.annotation.Nullable;
import lombok.Builder;
import lombok.extern.slf4j.Slf4j;
@@ -280,12 +279,10 @@ public class HttpClient implements Closeable {
private <T> T bodyHandler(Class<?> cls, HttpEntity entity) throws IOException, ParseException {
if (entity == null) {
return null;
} else if (String.class.isAssignableFrom(cls)) {
} else if (cls.isAssignableFrom(String.class)) {
return (T) EntityUtils.toString(entity);
} else if (Byte[].class.isAssignableFrom(cls)) {
} else if (cls.isAssignableFrom(Byte[].class)) {
return (T) ArrayUtils.toObject(EntityUtils.toByteArray(entity));
} else if (MediaType.APPLICATION_YAML.equals(entity.getContentType()) || "application/yaml".equals(entity.getContentType())) {
return (T) JacksonMapper.ofYaml().readValue(entity.getContent(), cls);
} else {
return (T) JacksonMapper.ofJson(false).readValue(entity.getContent(), cls);
}
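Because the two sides of this hunk swap the receiver of `isAssignableFrom`, a short reminder of its direction: `A.isAssignableFrom(B)` is true when a `B` instance can be assigned to an `A` reference, i.e. `B` is `A` or a subtype of it. The snippet below only illustrates that rule, it is not the client code:

```java
class AssignabilitySketch {
    public static void main(String[] args) {
        // A CharSequence reference can hold a String value...
        System.out.println(CharSequence.class.isAssignableFrom(String.class)); // true
        // ...but a String reference cannot hold an arbitrary CharSequence.
        System.out.println(String.class.isAssignableFrom(CharSequence.class)); // false
        // The two orderings only agree when the requested class is String itself.
        System.out.println(String.class.isAssignableFrom(String.class));       // true
    }
}
```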

View File

@@ -1,10 +1,11 @@
package io.kestra.core.models;
import io.kestra.core.utils.MapUtils;
import jakarta.annotation.Nullable;
import jakarta.validation.constraints.NotNull;
import java.util.*;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
public record Label(@NotNull String key, @NotNull String value) {
@@ -28,36 +29,11 @@ public record Label(@NotNull String key, @NotNull String value) {
* @return the nested {@link Map}.
*/
public static Map<String, Object> toNestedMap(List<Label> labels) {
return MapUtils.flattenToNestedMap(toMap(labels));
}
/**
* Static helper method for converting a list of labels to a flat map.
* Key order is kept.
*
* @param labels The list of {@link Label} to be converted.
* @return the flat {@link Map}.
*/
public static Map<String, String> toMap(@Nullable List<Label> labels) {
if (labels == null || labels.isEmpty()) return Collections.emptyMap();
return labels.stream()
Map<String, Object> asMap = labels.stream()
.filter(label -> label.value() != null && label.key() != null)
// using an accumulator in case labels with the same key exists: the second is kept
.collect(Collectors.toMap(Label::key, Label::value, (first, second) -> second, LinkedHashMap::new));
}
/**
* Static helper method for deduplicating a list of labels by their key.
* Value of the last key occurrence is kept.
*
* @param labels The list of {@link Label} to be deduplicated.
* @return the deduplicated {@link List}.
*/
public static List<Label> deduplicate(@Nullable List<Label> labels) {
if (labels == null || labels.isEmpty()) return Collections.emptyList();
return toMap(labels).entrySet().stream()
.map(entry -> new Label(entry.getKey(), entry.getValue()))
.collect(Collectors.toCollection(ArrayList::new));
// using an accumulator in case labels with the same key exists: the first is kept
.collect(Collectors.toMap(Label::key, Label::value, (first, second) -> first));
return MapUtils.flattenToNestedMap(asMap);
}
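The two merge functions in this hunk resolve duplicate keys in opposite directions: `(first, second) -> second` keeps the last occurrence, `(first, second) -> first` keeps the first. A tiny worked example with made-up labels:

```java
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

class LabelMergeSketch {
    record Label(String key, String value) {}

    public static void main(String[] args) {
        List<Label> labels = List.of(new Label("env", "dev"), new Label("env", "prod"));

        Map<String, String> keepLast = labels.stream()
            .collect(Collectors.toMap(Label::key, Label::value, (first, second) -> second, LinkedHashMap::new));
        Map<String, String> keepFirst = labels.stream()
            .collect(Collectors.toMap(Label::key, Label::value, (first, second) -> first));

        System.out.println(keepLast);  // {env=prod}
        System.out.println(keepFirst); // {env=dev}
    }
}
```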
/**

View File

@@ -6,9 +6,9 @@ import com.fasterxml.jackson.annotation.JsonValue;
import io.kestra.core.exceptions.InvalidQueryFiltersException;
import io.kestra.core.models.dashboards.filters.*;
import io.kestra.core.utils.Enums;
import java.util.ArrayList;
import lombok.Builder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
@@ -49,27 +49,42 @@ public record QueryFilter(
PREFIX
}
@SuppressWarnings("unchecked")
private List<Object> asValues(Object value) {
return value instanceof String valueStr ? Arrays.asList(valueStr.split(",")) : (List<Object>) value;
}
@SuppressWarnings("unchecked")
public <T extends Enum<T>> AbstractFilter<T> toDashboardFilterBuilder(T field, Object value) {
return switch (this.operation) {
case EQUALS -> EqualTo.<T>builder().field(field).value(value).build();
case NOT_EQUALS -> NotEqualTo.<T>builder().field(field).value(value).build();
case GREATER_THAN -> GreaterThan.<T>builder().field(field).value(value).build();
case LESS_THAN -> LessThan.<T>builder().field(field).value(value).build();
case GREATER_THAN_OR_EQUAL_TO -> GreaterThanOrEqualTo.<T>builder().field(field).value(value).build();
case LESS_THAN_OR_EQUAL_TO -> LessThanOrEqualTo.<T>builder().field(field).value(value).build();
case IN -> In.<T>builder().field(field).values(asValues(value)).build();
case NOT_IN -> NotIn.<T>builder().field(field).values(asValues(value)).build();
case STARTS_WITH -> StartsWith.<T>builder().field(field).value(value.toString()).build();
case ENDS_WITH -> EndsWith.<T>builder().field(field).value(value.toString()).build();
case CONTAINS -> Contains.<T>builder().field(field).value(value.toString()).build();
case REGEX -> Regex.<T>builder().field(field).value(value.toString()).build();
case PREFIX -> Regex.<T>builder().field(field).value("^" + value.toString().replace(".", "\\.") + "(?:\\..+)?$").build();
};
switch (this.operation) {
case EQUALS:
return EqualTo.<T>builder().field(field).value(value).build();
case NOT_EQUALS:
return NotEqualTo.<T>builder().field(field).value(value).build();
case GREATER_THAN:
return GreaterThan.<T>builder().field(field).value(value).build();
case LESS_THAN:
return LessThan.<T>builder().field(field).value(value).build();
case GREATER_THAN_OR_EQUAL_TO:
return GreaterThanOrEqualTo.<T>builder().field(field).value(value).build();
case LESS_THAN_OR_EQUAL_TO:
return LessThanOrEqualTo.<T>builder().field(field).value(value).build();
case IN:
return In.<T>builder().field(field).values(asValues(value)).build();
case NOT_IN:
return NotIn.<T>builder().field(field).values(asValues(value)).build();
case STARTS_WITH:
return StartsWith.<T>builder().field(field).value(value.toString()).build();
case ENDS_WITH:
return EndsWith.<T>builder().field(field).value(value.toString()).build();
case CONTAINS:
return Contains.<T>builder().field(field).value(value.toString()).build();
case REGEX:
return Regex.<T>builder().field(field).value(value.toString()).build();
case PREFIX:
return Regex.<T>builder().field(field).value("^" + value.toString().replace(".", "\\.") + "(?:\\..+)?$").build();
default:
throw new IllegalArgumentException("Unsupported operation: " + this.operation);
}
}
public enum Field {
@@ -139,12 +154,6 @@ public record QueryFilter(
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.CONTAINS, Op.STARTS_WITH, Op.ENDS_WITH, Op.IN, Op.NOT_IN);
}
},
EXECUTION_ID("executionId") {
@Override
public List<Op> supportedOp() {
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.CONTAINS, Op.STARTS_WITH, Op.ENDS_WITH, Op.IN, Op.NOT_IN);
}
},
CHILD_FILTER("childFilter") {
@Override
public List<Op> supportedOp() {
@@ -219,7 +228,7 @@ public record QueryFilter(
@Override
public List<Field> supportedField() {
return List.of(Field.QUERY, Field.SCOPE, Field.NAMESPACE, Field.START_DATE,
Field.END_DATE, Field.FLOW_ID, Field.TRIGGER_ID, Field.MIN_LEVEL, Field.EXECUTION_ID
Field.END_DATE, Field.FLOW_ID, Field.TRIGGER_ID, Field.MIN_LEVEL
);
}
},
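The `PREFIX` operation above is implemented as a regular expression: dots in the value are escaped, and an optional `(?:\..+)?` suffix accepts the value itself or any dot-separated descendant. A small standalone sketch of that construction:

```java
import java.util.regex.Pattern;

public class PrefixRegexSketch {
    public static void main(String[] args) {
        String value = "io.kestra";
        // Escape literal dots, then allow either an exact match or ".<anything>" after it.
        Pattern p = Pattern.compile("^" + value.replace(".", "\\.") + "(?:\\..+)?$");

        System.out.println(p.matcher("io.kestra").matches());      // true: exact match
        System.out.println(p.matcher("io.kestra.core").matches()); // true: dot-separated child
        System.out.println(p.matcher("io.kestranope").matches());  // false: not a child, just a longer name
    }
}
```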

View File

@@ -62,7 +62,6 @@ public record ServiceUsage(
List<DailyServiceStatistics> statistics = Arrays
.stream(ServiceType.values())
.filter(it -> !it.equals(ServiceType.INVALID))
.map(type -> of(from, to, repository, type, interval))
.toList();
return new ServiceUsage(statistics);

View File

@@ -0,0 +1,74 @@
package io.kestra.core.models.collectors;
import io.kestra.core.models.ServerType;
import io.micronaut.core.annotation.Introspected;
import jakarta.annotation.Nullable;
import jakarta.validation.Valid;
import jakarta.validation.constraints.NotNull;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.experimental.SuperBuilder;
import lombok.extern.jackson.Jacksonized;
import java.time.Instant;
import java.time.ZoneId;
import java.util.List;
import java.util.Map;
import java.util.Set;
@SuperBuilder(toBuilder = true)
@Getter
@Jacksonized
@Introspected
@AllArgsConstructor
public class Usage {
@NotNull
private final String uuid;
@NotNull
private final String startUuid;
@NotNull
private final String instanceUuid;
@NotNull
private final ServerType serverType;
@NotNull
private final String version;
@NotNull
private final ZoneId zoneId;
@Nullable
private final String uri;
@Nullable
private final Set<String> environments;
@NotNull
private final Instant startTime;
@Valid
private final HostUsage host;
@Valid
private final ConfigurationUsage configurations;
@Valid
private final List<PluginUsage> plugins;
@Valid
private final FlowUsage flows;
@Valid
private final ExecutionUsage executions;
@Valid
@Nullable
private ServiceUsage services;
@Valid
@Nullable
private List<PluginMetric> pluginMetrics;
}

View File

@@ -9,7 +9,6 @@ import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Streams;
import io.kestra.core.debug.Breakpoint;
import io.kestra.core.exceptions.InternalException;
import io.kestra.core.models.DeletedInterface;
import io.kestra.core.models.Label;
@@ -25,7 +24,6 @@ import io.kestra.core.serializers.ListOrMapOfLabelSerializer;
import io.kestra.core.services.LabelService;
import io.kestra.core.test.flow.TaskFixture;
import io.kestra.core.utils.IdUtils;
import io.kestra.core.utils.ListUtils;
import io.kestra.core.utils.MapUtils;
import io.swagger.v3.oas.annotations.Hidden;
import jakarta.annotation.Nullable;
@@ -122,9 +120,6 @@ public class Execution implements DeletedInterface, TenantInterface {
@Nullable
ExecutionKind kind;
@Nullable
List<Breakpoint> breakpoints;
/**
* Factory method for constructing a new {@link Execution} object for the given {@link Flow}.
*
@@ -132,12 +127,12 @@ public class Execution implements DeletedInterface, TenantInterface {
* @param labels The Flow labels.
* @return a new {@link Execution}.
*/
public static Execution newExecution(final FlowInterface flow, final List<Label> labels) {
public static Execution newExecution(final Flow flow, final List<Label> labels) {
return newExecution(flow, null, labels, Optional.empty());
}
public List<Label> getLabels() {
return ListUtils.emptyOnNull(this.labels);
return Optional.ofNullable(this.labels).orElse(new ArrayList<>());
}
/**
@@ -182,22 +177,8 @@ public class Execution implements DeletedInterface, TenantInterface {
}
/**
* Customization of Lombok-generated builder.
*/
public static class ExecutionBuilder {
/**
* Enforce unique values of {@link Label} when using the builder.
*
* @param labels The labels.
* @return Deduplicated labels.
*/
public ExecutionBuilder labels(List<Label> labels) {
this.labels = Label.deduplicate(labels);
return this;
}
void prebuild() {
this.originalId = this.id;
this.metadata = ExecutionMetadata.builder()
@@ -240,12 +221,12 @@ public class Execution implements DeletedInterface, TenantInterface {
this.scheduleDate,
this.traceParent,
this.fixtures,
this.kind,
this.breakpoints
this.kind
);
}
public Execution withLabels(List<Label> labels) {
return new Execution(
this.tenantId,
this.id,
@@ -255,7 +236,7 @@ public class Execution implements DeletedInterface, TenantInterface {
this.taskRunList,
this.inputs,
this.outputs,
Label.deduplicate(labels),
labels,
this.variables,
this.state,
this.parentId,
@@ -266,8 +247,7 @@ public class Execution implements DeletedInterface, TenantInterface {
this.scheduleDate,
this.traceParent,
this.fixtures,
this.kind,
this.breakpoints
this.kind
);
}
@@ -306,34 +286,7 @@ public class Execution implements DeletedInterface, TenantInterface {
this.scheduleDate,
this.traceParent,
this.fixtures,
this.kind,
this.breakpoints
);
}
public Execution withBreakpoints(List<Breakpoint> newBreakpoints) {
return new Execution(
this.tenantId,
this.id,
this.namespace,
this.flowId,
this.flowRevision,
this.taskRunList,
this.inputs,
this.outputs,
this.labels,
this.variables,
this.state,
this.parentId,
this.originalId,
this.trigger,
this.deleted,
this.metadata,
this.scheduleDate,
this.traceParent,
this.fixtures,
this.kind,
newBreakpoints
this.kind
);
}
@@ -359,8 +312,7 @@ public class Execution implements DeletedInterface, TenantInterface {
this.scheduleDate,
this.traceParent,
this.fixtures,
this.kind,
this.breakpoints
this.kind
);
}
@@ -414,7 +366,7 @@ public class Execution implements DeletedInterface, TenantInterface {
*
* @param resolvedTasks normal tasks
* @param resolvedErrors errors tasks
* @param resolvedFinally finally tasks
* @param resolvedErrors finally tasks
* @return the flow we need to follow
*/
public List<ResolvedTask> findTaskDependingFlowState(
@@ -441,28 +393,6 @@ public class Execution implements DeletedInterface, TenantInterface {
@Nullable List<ResolvedTask> resolvedErrors,
@Nullable List<ResolvedTask> resolvedFinally,
TaskRun parentTaskRun
) {
return findTaskDependingFlowState(resolvedTasks, resolvedErrors, resolvedFinally, parentTaskRun, null);
}
/**
* Determine if the current execution is on error &amp; normal tasks
* <p>
* if the current have errors, return tasks from errors if not, return the normal tasks
*
* @param resolvedTasks normal tasks
* @param resolvedErrors errors tasks
* @param resolvedFinally finally tasks
* @param parentTaskRun the parent task
* @param terminalState the parent task terminal state
* @return the flow we need to follow
*/
public List<ResolvedTask> findTaskDependingFlowState(
List<ResolvedTask> resolvedTasks,
@Nullable List<ResolvedTask> resolvedErrors,
@Nullable List<ResolvedTask> resolvedFinally,
TaskRun parentTaskRun,
@Nullable State.Type terminalState
) {
resolvedTasks = removeDisabled(resolvedTasks);
resolvedErrors = removeDisabled(resolvedErrors);
@@ -476,15 +406,10 @@ public class Execution implements DeletedInterface, TenantInterface {
return resolvedFinally == null ? Collections.emptyList() : resolvedFinally;
}
// check if the parent task should fail, and there is error tasks so we start them
if (errorsFlow.isEmpty() && terminalState == State.Type.FAILED) {
return resolvedErrors == null ? resolvedFinally == null ? Collections.emptyList() : resolvedFinally : resolvedErrors;
}
// Check if flow has failed tasks
// Check if flow has failed task
if (!errorsFlow.isEmpty() || this.hasFailed(resolvedTasks, parentTaskRun)) {
// Check if among the failed task, they will be retried
if (!this.hasFailedNoRetry(resolvedTasks, parentTaskRun) && terminalState != State.Type.FAILED) {
if (!this.hasFailedNoRetry(resolvedTasks, parentTaskRun)) {
return Collections.emptyList();
}
@@ -693,11 +618,6 @@ public class Execution implements DeletedInterface, TenantInterface {
public State.Type guessFinalState(List<ResolvedTask> currentTasks, TaskRun parentTaskRun,
boolean allowFailure, boolean allowWarning) {
return guessFinalState(currentTasks, parentTaskRun, allowFailure, allowWarning, State.Type.SUCCESS);
}
public State.Type guessFinalState(List<ResolvedTask> currentTasks, TaskRun parentTaskRun,
boolean allowFailure, boolean allowWarning, State.Type terminalState) {
List<TaskRun> taskRuns = this.findTaskRunByTasks(currentTasks, parentTaskRun);
var state = this
.findLastByState(taskRuns, State.Type.KILLED)
@@ -714,7 +634,7 @@ public class Execution implements DeletedInterface, TenantInterface {
.findLastByState(taskRuns, State.Type.PAUSED)
.map(taskRun -> taskRun.getState().getCurrent())
)
.orElse(terminalState);
.orElse(State.Type.SUCCESS);
if (state == State.Type.FAILED && allowFailure) {
if (allowWarning) {
@@ -904,7 +824,7 @@ public class Execution implements DeletedInterface, TenantInterface {
* @param e the current exception
* @return the {@link ILoggingEvent} waited to generate {@link LogEntry}
*/
public static ILoggingEvent loggingEventFromException(Throwable e) {
public static ILoggingEvent loggingEventFromException(Exception e) {
LoggingEvent loggingEvent = new LoggingEvent();
loggingEvent.setLevel(ch.qos.logback.classic.Level.ERROR);
loggingEvent.setThrowableProxy(new ThrowableProxy(e));
@@ -1072,16 +992,6 @@ public class Execution implements DeletedInterface, TenantInterface {
return result;
}
/**
* Find all children of this {@link TaskRun}.
*/
public List<TaskRun> findChildren(TaskRun parentTaskRun) {
return taskRunList.stream()
.filter(taskRun -> parentTaskRun.getId().equals(taskRun.getParentTaskRunId()))
.toList();
}
public List<String> findParentsValues(TaskRun taskRun, boolean withCurrent) {
return (withCurrent ?
Stream.concat(findParents(taskRun).stream(), Stream.of(taskRun)) :
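The `findChildren` method visible in the `Execution` hunk above is a plain stream filter on `parentTaskRunId`. A minimal sketch of the same pattern, using a simplified stand-in `TaskRun` record rather than the real model:

```java
import java.util.List;

public class FindChildrenSketch {
    record TaskRun(String id, String parentTaskRunId) {}

    // Keep only the task runs whose parentTaskRunId points at the given parent.
    static List<TaskRun> findChildren(List<TaskRun> taskRunList, TaskRun parent) {
        return taskRunList.stream()
            .filter(taskRun -> parent.id().equals(taskRun.parentTaskRunId()))
            .toList();
    }

    public static void main(String[] args) {
        List<TaskRun> runs = List.of(
            new TaskRun("a", null),
            new TaskRun("b", "a"),
            new TaskRun("c", "a"),
            new TaskRun("d", "b"));

        // Prints the two direct children of "a": TaskRun[id=b, ...] and TaskRun[id=c, ...]
        System.out.println(findChildren(runs, runs.get(0)));
    }
}
```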

View File

@@ -3,9 +3,8 @@ package io.kestra.core.models.executions;
/**
* Describe the kind of execution:
* - TEST: created by a test
* - PLAYGROUND: created by a playground
* - NORMAL: anything else, for backward compatibility NORMAL is not persisted but null is used instead
*/
public enum ExecutionKind {
NORMAL, TEST, PLAYGROUND
NORMAL, TEST
}

View File

@@ -7,7 +7,6 @@ import io.kestra.core.models.tasks.ResolvedTask;
import io.kestra.core.models.tasks.retrys.AbstractRetry;
import io.kestra.core.utils.IdUtils;
import io.swagger.v3.oas.annotations.Hidden;
import jakarta.annotation.Nullable;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Pattern;
import lombok.*;
@@ -63,11 +62,6 @@ public class TaskRun implements TenantInterface {
@With
Boolean dynamic;
// Set it to true to force execution even if the execution is killed
@Nullable
@With
Boolean forceExecution;
@Deprecated
public void setItems(String items) {
// no-op for backward compatibility
@@ -87,8 +81,7 @@ public class TaskRun implements TenantInterface {
this.outputs,
this.state.withState(state),
this.iteration,
this.dynamic,
this.forceExecution
this.dynamic
);
}
@@ -106,8 +99,7 @@ public class TaskRun implements TenantInterface {
this.outputs,
newState,
this.iteration,
this.dynamic,
this.forceExecution
this.dynamic
);
}
@@ -129,8 +121,7 @@ public class TaskRun implements TenantInterface {
this.outputs,
this.state.withState(State.Type.FAILED),
this.iteration,
this.dynamic,
this.forceExecution
this.dynamic
);
}
@@ -254,7 +245,7 @@ public class TaskRun implements TenantInterface {
* @return The next retry date, null if maxAttempt || maxDuration is reached
*/
public Instant nextRetryDate(AbstractRetry retry, Execution execution) {
if (retry.getMaxAttempts() != null && execution.getMetadata().getAttemptNumber() >= retry.getMaxAttempts()) {
if (retry.getMaxAttempt() != null && execution.getMetadata().getAttemptNumber() >= retry.getMaxAttempt()) {
return null;
}
@@ -274,7 +265,7 @@ public class TaskRun implements TenantInterface {
* @return The next retry date, null if maxAttempt || maxDuration is reached
*/
public Instant nextRetryDate(AbstractRetry retry) {
if (this.attempts == null || this.attempts.isEmpty() || (retry.getMaxAttempts() != null && this.attemptNumber() >= retry.getMaxAttempts())) {
if (this.attempts == null || this.attempts.isEmpty() || (retry.getMaxAttempt() != null && this.attemptNumber() >= retry.getMaxAttempt())) {
return null;
}
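The `nextRetryDate` guard in the hunk above returns `null` once the configured maximum number of attempts is reached. A hypothetical, heavily simplified sketch of that contract; the real `AbstractRetry` subclasses compute the actual delay rather than a fixed interval:

```java
import java.time.Duration;
import java.time.Instant;

public class NextRetrySketch {
    // Hypothetical simplification: a fixed interval stands in for the real backoff strategies.
    static Instant nextRetryDate(Integer maxAttempts, int attemptNumber, Duration interval, Instant lastAttempt) {
        if (maxAttempts != null && attemptNumber >= maxAttempts) {
            return null; // retry budget exhausted, mirroring the guard above
        }
        return lastAttempt.plus(interval);
    }

    public static void main(String[] args) {
        Instant last = Instant.parse("2025-01-01T00:00:00Z");
        System.out.println(nextRetryDate(3, 1, Duration.ofSeconds(30), last)); // 2025-01-01T00:00:30Z
        System.out.println(nextRetryDate(3, 3, Duration.ofSeconds(30), last)); // null
    }
}
```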

View File

@@ -3,8 +3,6 @@ package io.kestra.core.models.flows;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import io.kestra.core.models.Label;
import io.kestra.core.models.annotations.PluginProperty;
import io.kestra.core.models.tasks.WorkerGroup;
import io.kestra.core.serializers.ListOrMapOfLabelDeserializer;
import io.kestra.core.serializers.ListOrMapOfLabelSerializer;
import io.swagger.v3.oas.annotations.Hidden;
@@ -38,8 +36,6 @@ public abstract class AbstractFlow implements FlowInterface {
@Min(value = 1)
Integer revision;
String description;
@Valid
List<Input<?>> inputs;
@@ -64,9 +60,6 @@ public abstract class AbstractFlow implements FlowInterface {
@Schema(implementation = Object.class, oneOf = {List.class, Map.class})
List<Label> labels;
@Schema(additionalProperties = Schema.AdditionalPropertiesValue.TRUE)
Map<String, Object> variables;
@Valid
private WorkerGroup workerGroup;
}

View File

@@ -19,6 +19,7 @@ import io.kestra.core.models.tasks.retrys.AbstractRetry;
import io.kestra.core.models.triggers.AbstractTrigger;
import io.kestra.core.models.validations.ManualConstraintViolation;
import io.kestra.core.serializers.JacksonMapper;
import io.kestra.core.services.FlowService;
import io.kestra.core.utils.ListUtils;
import io.kestra.core.validations.FlowValidation;
import io.micronaut.core.annotation.Introspected;
@@ -29,6 +30,8 @@ import jakarta.validation.Valid;
import jakarta.validation.constraints.NotEmpty;
import lombok.*;
import lombok.experimental.SuperBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
import java.util.stream.Collectors;
@@ -61,11 +64,12 @@ public class Flow extends AbstractFlow implements HasUID {
}
});
String description;
Map<String, Object> variables;
@Valid
@NotEmpty
@Schema(additionalProperties = Schema.AdditionalPropertiesValue.TRUE)
List<Task> tasks;
@Valid
@@ -122,7 +126,7 @@ public class Flow extends AbstractFlow implements HasUID {
AbstractRetry retry;
@Valid
@PluginProperty
@PluginProperty(beta = true)
List<SLA> sla;
public Stream<String> allTypes() {
@@ -183,32 +187,19 @@ public class Flow extends AbstractFlow implements HasUID {
.toList();
}
public List<Task> allErrorsWithChildren() {
public List<Task> allErrorsWithChilds() {
var allErrors = allTasksWithChilds().stream()
.filter(task -> task.isFlowable() && ((FlowableTask<?>) task).getErrors() != null)
.flatMap(task -> ((FlowableTask<?>) task).getErrors().stream())
.collect(Collectors.toCollection(ArrayList::new));
if (!ListUtils.isEmpty(this.getErrors())) {
if (this.getErrors() != null && !this.getErrors().isEmpty()) {
allErrors.addAll(this.getErrors());
}
return allErrors;
}
public List<Task> allFinallyWithChildren() {
var allFinally = allTasksWithChilds().stream()
.filter(task -> task.isFlowable() && ((FlowableTask<?>) task).getFinally() != null)
.flatMap(task -> ((FlowableTask<?>) task).getFinally().stream())
.collect(Collectors.toCollection(ArrayList::new));
if (!ListUtils.isEmpty(this.getFinally())) {
allFinally.addAll(this.getFinally());
}
return allFinally;
}
public Task findParentTasksByTaskId(String taskId) {
return allTasksWithChilds()
.stream()
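The error-collection logic above flat-maps the error branches of every flowable task and then appends the flow-level errors. A minimal sketch of that pattern, with a simplified stand-in `Task` record in place of the real flowable model:

```java
import java.util.*;
import java.util.stream.Collectors;

public class AllErrorsSketch {
    record Task(String id, List<Task> errors) {}

    // Gather every task-level error branch, then append the flow-level error tasks.
    static List<Task> allErrors(List<Task> allTasks, List<Task> flowErrors) {
        var result = allTasks.stream()
            .filter(task -> task.errors() != null)
            .flatMap(task -> task.errors().stream())
            .collect(Collectors.toCollection(ArrayList::new));
        if (flowErrors != null && !flowErrors.isEmpty()) {
            result.addAll(flowErrors);
        }
        return result;
    }

    public static void main(String[] args) {
        Task taskError = new Task("notify", null);
        List<Task> tasks = List.of(new Task("extract", List.of(taskError)), new Task("load", null));
        allErrors(tasks, List.of(new Task("alert", null))).forEach(t -> System.out.println(t.id())); // notify, alert
    }
}
```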

View File

@@ -4,7 +4,6 @@ import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.triggers.Trigger;
import io.kestra.core.utils.IdUtils;
import lombok.AllArgsConstructor;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import java.util.Optional;
@@ -58,7 +57,6 @@ public interface FlowId {
@Getter
@AllArgsConstructor
@EqualsAndHashCode
class Default implements FlowId {
private final String tenantId;
private final String namespace;

View File

@@ -11,7 +11,6 @@ import io.kestra.core.models.HasUID;
import io.kestra.core.models.Label;
import io.kestra.core.models.TenantInterface;
import io.kestra.core.models.flows.sla.SLA;
import io.kestra.core.models.tasks.WorkerGroup;
import io.kestra.core.serializers.JacksonMapper;
import java.util.AbstractMap;
@@ -31,8 +30,6 @@ public interface FlowInterface extends FlowId, DeletedInterface, TenantInterface
Pattern YAML_REVISION_MATCHER = Pattern.compile("(?m)^revision: \\d+\n?");
String getDescription();
boolean isDisabled();
boolean isDeleted();
@@ -45,8 +42,6 @@ public interface FlowInterface extends FlowId, DeletedInterface, TenantInterface
Map<String, Object> getVariables();
WorkerGroup getWorkerGroup();
default Concurrency getConcurrency() {
return null;
}

View File

@@ -4,8 +4,6 @@ import com.fasterxml.jackson.annotation.JsonSetter;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import io.kestra.core.models.flows.input.*;
import io.kestra.core.models.property.Property;
import io.kestra.core.runners.RunContext;
import io.micronaut.core.annotation.Introspected;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.validation.ConstraintViolationException;
@@ -18,8 +16,6 @@ import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.experimental.SuperBuilder;
import java.util.function.Function;
@SuppressWarnings("deprecation")
@SuperBuilder
@Getter
@@ -82,7 +78,7 @@ public abstract class Input<T> implements Data {
@Schema(
title = "The default value to use if no value is specified."
)
Property<T> defaults;
T defaults;
@Schema(
title = "The display name of the input."

View File

@@ -43,11 +43,4 @@ public class Output implements Data {
Type type;
String displayName;
/**
* Specifies whether the output is required or not.
* <p>
* By default, an output is always required.
*/
Boolean required;
}

View File

@@ -116,7 +116,7 @@ public class State {
}
public Instant maxDate() {
if (this.histories.isEmpty()) {
if (this.histories.size() == 0) {
return Instant.now();
}
@@ -124,7 +124,7 @@ public class State {
}
public Instant minDate() {
if (this.histories.isEmpty()) {
if (this.histories.size() == 0) {
return Instant.now();
}
@@ -168,16 +168,6 @@ public class State {
return this.current.isPaused();
}
@JsonIgnore
public boolean isBreakpoint() {
return this.current.isBreakpoint();
}
@JsonIgnore
public boolean isQueued() {
return this.current.isQueued();
}
@JsonIgnore
public boolean isRetrying() {
return this.current.isRetrying();
@@ -211,14 +201,6 @@ public class State {
return this.histories.get(this.histories.size() - 2).state.isPaused();
}
/**
* Return true if the execution has failed, then was restarted.
* This is to disambiguate between a RESTARTED after PAUSED and RESTARTED after FAILED state.
*/
public boolean failedThenRestarted() {
return this.current == Type.RESTARTED && this.histories.get(this.histories.size() - 2).state.isFailed();
}
@Introspected
public enum Type {
CREATED,
@@ -234,8 +216,7 @@ public class State {
QUEUED,
RETRYING,
RETRIED,
SKIPPED,
BREAKPOINT;
SKIPPED;
public boolean isTerminated() {
return this == Type.FAILED || this == Type.WARNING || this == Type.SUCCESS || this == Type.KILLED || this == Type.CANCELLED || this == Type.RETRIED || this == Type.SKIPPED;
@@ -261,10 +242,6 @@ public class State {
return this == Type.PAUSED;
}
public boolean isBreakpoint() {
return this == Type.BREAKPOINT;
}
public boolean isRetrying() {
return this == Type.RETRYING || this == Type.RETRIED;
}
@@ -277,10 +254,6 @@ public class State {
return this == Type.KILLED;
}
public boolean isQueued(){
return this == Type.QUEUED;
}
/**
* @return states that are terminal to an execution
*/

View File

@@ -20,8 +20,8 @@ public class FileInput extends Input<URI> {
private static final String DEFAULT_EXTENSION = ".upl";
@Deprecated(since = "0.24", forRemoval = true)
public String extension;
@Builder.Default
public String extension = DEFAULT_EXTENSION;
@Override
public void validate(URI input) throws ConstraintViolationException {
@@ -32,7 +32,6 @@ public class FileInput extends Input<URI> {
String res = inputs.stream()
.filter(in -> in instanceof FileInput)
.filter(in -> in.getId().equals(fileName))
.filter(flowInput -> ((FileInput) flowInput).getExtension() != null)
.map(flowInput -> ((FileInput) flowInput).getExtension())
.findFirst()
.orElse(FileInput.DEFAULT_EXTENSION);
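The `FileInput` hunk above resolves an upload's extension by looking up the matching input and falling back to a default (`.upl`). A minimal sketch of that lookup, with a simplified stand-in record and the null filter applied after the mapping step:

```java
import java.util.List;

public class ExtensionLookupSketch {
    record FileInput(String id, String extension) {}

    // Resolve the extension declared on the matching file input, falling back to a default.
    static String findExtension(List<FileInput> inputs, String fileName, String defaultExtension) {
        return inputs.stream()
            .filter(in -> in.id().equals(fileName))
            .map(FileInput::extension)
            .filter(ext -> ext != null)
            .findFirst()
            .orElse(defaultExtension);
    }

    public static void main(String[] args) {
        List<FileInput> inputs = List.of(new FileInput("report", ".csv"), new FileInput("raw", null));
        System.out.println(findExtension(inputs, "report", ".upl")); // .csv
        System.out.println(findExtension(inputs, "raw", ".upl"));    // .upl (fallback)
    }
}
```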

View File

@@ -6,21 +6,19 @@ import jakarta.validation.ConstraintViolationException;
import jakarta.validation.constraints.NotNull;
/**
* Represents an input along with its associated value and validation state.
* Represents a
*
* @param input The {@link Input} definition of the flow.
* @param value The provided value for the input.
* @param enabled {@code true} if the input is enabled; {@code false} otherwise.
* @param isDefault {@code true} if the provided value is the default; {@code false} otherwise.
* @param exception The validation exception, if the input value is invalid; {@code null} otherwise.
* @param input The flow's {@link Input}.
* @param value The flow's input value/data.
* @param enabled Specify whether the input is enabled.
* @param exception The input validation exception.
*/
public record InputAndValue(
Input<?> input,
Object value,
boolean enabled,
boolean isDefault,
ConstraintViolationException exception) {
/**
* Creates a new {@link InputAndValue} instance.
*
@@ -28,6 +26,6 @@ public record InputAndValue(
* @param value The value.
*/
public InputAndValue(@NotNull Input<?> input, @Nullable Object value) {
this(input, value, true, false, null);
this(input, value, true, null);
}
}

View File

@@ -108,7 +108,7 @@ public class MultiselectInput extends Input<List<String>> implements ItemTypeInt
private List<String> renderExpressionValues(final Function<String, Object> renderer) {
Object result;
try {
result = renderer.apply(expression.trim());
result = renderer.apply(expression);
} catch (Exception e) {
throw ManualConstraintViolation.toConstraintViolationException(
"Cannot render 'expression'. Cause: " + e.getMessage(),

View File

@@ -86,7 +86,7 @@ public class SelectInput extends Input<String> implements RenderableInput {
private List<String> renderExpressionValues(final Function<String, Object> renderer) {
Object result;
try {
result = renderer.apply(expression.trim());
result = renderer.apply(expression);
} catch (Exception e) {
throw ManualConstraintViolation.toConstraintViolationException(
"Cannot render 'expression'. Cause: " + e.getMessage(),

View File

@@ -136,7 +136,7 @@ public class Data {
Structured data items can be defined in the following ways:
- A single item as a map (a document).
- A list of items as a list of maps (a list of documents).
- A URI, supported schemes are `kestra` for internal storage files, `file` for host local files, and `nsfile` for namespace files.
- A URI, supported schemes are `kestra` for internal storage files, and `file` for host local files.
- A JSON String that will then be serialized either as a single item or a list of items.""";
@Schema(

View File

@@ -68,19 +68,6 @@ public class Property<T> {
String getExpression() {
return expression;
}
/**
* Returns a new {@link Property} with no cached rendered value,
* so that the next render will evaluate its original Pebble expression.
* <p>
* The returned property will still cache its rendered result.
* To re-evaluate on a subsequent render, call {@code skipCache()} again.
*
* @return a new {@link Property} without a pre-rendered value
*/
public Property<T> skipCache() {
return Property.ofExpression(expression);
}
/**
* Build a new Property object with a value already set.<br>
@@ -145,8 +132,8 @@ public class Property<T> {
*
* @see io.kestra.core.runners.RunContextProperty#as(Class)
*/
public static <T> T as(Property<T> property, PropertyContext context, Class<T> clazz) throws IllegalVariableEvaluationException {
return as(property, context, clazz, Map.of());
public static <T> T as(Property<T> property, RunContext runContext, Class<T> clazz) throws IllegalVariableEvaluationException {
return as(property, runContext, clazz, Map.of());
}
/**
@@ -156,9 +143,9 @@ public class Property<T> {
*
* @see io.kestra.core.runners.RunContextProperty#as(Class, Map)
*/
public static <T> T as(Property<T> property, PropertyContext context, Class<T> clazz, Map<String, Object> variables) throws IllegalVariableEvaluationException {
public static <T> T as(Property<T> property, RunContext runContext, Class<T> clazz, Map<String, Object> variables) throws IllegalVariableEvaluationException {
if (property.value == null) {
String rendered = context.render(property.expression, variables);
String rendered = runContext.render(property.expression, variables);
property.value = MAPPER.convertValue(rendered, clazz);
}
@@ -172,8 +159,8 @@ public class Property<T> {
*
* @see io.kestra.core.runners.RunContextProperty#asList(Class)
*/
public static <T, I> T asList(Property<T> property, PropertyContext context, Class<I> itemClazz) throws IllegalVariableEvaluationException {
return asList(property, context, itemClazz, Map.of());
public static <T, I> T asList(Property<T> property, RunContext runContext, Class<I> itemClazz) throws IllegalVariableEvaluationException {
return asList(property, runContext, itemClazz, Map.of());
}
/**
@@ -184,7 +171,7 @@ public class Property<T> {
* @see io.kestra.core.runners.RunContextProperty#asList(Class, Map)
*/
@SuppressWarnings("unchecked")
public static <T, I> T asList(Property<T> property, PropertyContext context, Class<I> itemClazz, Map<String, Object> variables) throws IllegalVariableEvaluationException {
public static <T, I> T asList(Property<T> property, RunContext runContext, Class<I> itemClazz, Map<String, Object> variables) throws IllegalVariableEvaluationException {
if (property.value == null) {
JavaType type = MAPPER.getTypeFactory().constructCollectionLikeType(List.class, itemClazz);
try {
@@ -192,7 +179,7 @@ public class Property<T> {
// We need to detect if the expression is already a list or if it's a pebble expression (for eg. referencing a variable containing a list).
// Doing that allows us to, if it's an expression, first render then read it as a list.
if (trimmedExpression.startsWith("{{") && trimmedExpression.endsWith("}}")) {
property.value = MAPPER.readValue(context.render(property.expression, variables), type);
property.value = MAPPER.readValue(runContext.render(property.expression, variables), type);
}
// Otherwise, if it's already a list, we read it as a list first then render it from run context which handle list rendering by rendering each item of the list
else {
@@ -200,9 +187,9 @@ public class Property<T> {
property.value = (T) asRawList.stream()
.map(throwFunction(item -> {
if (item instanceof String str) {
return MAPPER.convertValue(context.render(str, variables), itemClazz);
return MAPPER.convertValue(runContext.render(str, variables), itemClazz);
} else if (item instanceof Map map) {
return MAPPER.convertValue(context.render(map, variables), itemClazz);
return MAPPER.convertValue(runContext.render(map, variables), itemClazz);
}
return item;
}))
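The `asList` comments above describe the key decision: if the trimmed expression is a single Pebble expression, render it first and read the result as a list; otherwise parse the raw string as a list and render each item. A simplified sketch of that branching, where plain Jackson stands in for Kestra's `JacksonMapper` and the render step is stubbed:

```java
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.List;

public class AsListSketch {
    private static final ObjectMapper MAPPER = new ObjectMapper();

    static List<?> asList(String expression) throws Exception {
        String trimmed = expression.trim();
        if (trimmed.startsWith("{{") && trimmed.endsWith("}}")) {
            // Pebble expression: render first, then read the rendered JSON as a list.
            return MAPPER.readValue(render(trimmed), List.class);
        }
        // Already a list literal: read it directly (item-by-item rendering omitted here).
        return MAPPER.readValue(expression, List.class);
    }

    static String render(String expression) {
        return "[\"a\",\"b\"]"; // stub standing in for runContext.render(...)
    }

    public static void main(String[] args) throws Exception {
        System.out.println(asList("{{ outputs.previous.values }}")); // [a, b]
        System.out.println(asList("[\"x\", \"y\"]"));                // [x, y]
    }
}
```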

View File

@@ -1,38 +0,0 @@
package io.kestra.core.models.property;
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.runners.VariableRenderer;
import java.util.Map;
/**
* Contextual object for rendering properties.
*
* @see Property
*/
public interface PropertyContext {
String render(String inline, Map<String, Object> variables) throws IllegalVariableEvaluationException;
Map<String, Object> render(Map<String, Object> inline, Map<String, Object> variables) throws IllegalVariableEvaluationException;
/**
* Static helper method for creating a new {@link PropertyContext} from a given {@link VariableRenderer}.
*
* @param renderer the {@link VariableRenderer}.
* @return a new {@link PropertyContext}.
*/
static PropertyContext create(final VariableRenderer renderer) {
return new PropertyContext() {
@Override
public String render(String inline, Map<String, Object> variables) throws IllegalVariableEvaluationException {
return renderer.render(inline, variables);
}
@Override
public Map<String, Object> render(Map<String, Object> inline, Map<String, Object> variables) throws IllegalVariableEvaluationException {
return renderer.render(inline, variables);
}
};
}
}

View File

@@ -1,14 +1,15 @@
package io.kestra.core.models.property;
import io.kestra.core.runners.LocalPath;
import io.kestra.core.runners.DefaultRunContext;
import io.kestra.core.runners.RunContext;
import io.kestra.core.storages.Namespace;
import io.kestra.core.storages.StorageContext;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.nio.file.Path;
import java.util.Collections;
import java.util.List;
/**
@@ -16,7 +17,8 @@ import java.util.List;
* It supports reading from the following schemes: {@link #SUPPORTED_SCHEMES}.
*/
public class URIFetcher {
private static final List<String> SUPPORTED_SCHEMES = List.of(StorageContext.KESTRA_SCHEME, LocalPath.FILE_SCHEME, Namespace.NAMESPACE_FILE_SCHEME);
private static final String FILE_SCHEME = "file";
private static final List<String> SUPPORTED_SCHEMES = List.of(StorageContext.KESTRA_SCHEME, FILE_SCHEME);
private final URI uri;
@@ -66,14 +68,6 @@ public class URIFetcher {
return SUPPORTED_SCHEMES.stream().anyMatch(scheme -> uri.startsWith(scheme + "://"));
}
/**
* Whether the URI is supported by the Fetcher.
* A supported URI is a URI which scheme is one of the {@link #SUPPORTED_SCHEMES}.
*/
public static boolean supports(URI uri) {
return uri.getScheme() != null && SUPPORTED_SCHEMES.contains(uri.getScheme());
}
/**
* Fetch the resource pointed by this SmartURI
*
@@ -88,11 +82,23 @@ public class URIFetcher {
// we need to first check the protocol, then create one reader by protocol
return switch (uri.getScheme()) {
case StorageContext.KESTRA_SCHEME -> runContext.storage().getFile(uri);
case LocalPath.FILE_SCHEME -> runContext.localPath().get(uri);
case Namespace.NAMESPACE_FILE_SCHEME -> {
var namespace = uri.getAuthority() == null ? runContext.storage().namespace() : runContext.storage().namespace(uri.getAuthority());
var nsFileUri = namespace.get(Path.of(uri.getPath())).uri();
yield runContext.storage().getFile(nsFileUri);
case FILE_SCHEME -> {
Path path = Path.of(uri).toRealPath(); // toRealPath() will protect about path traversal issues
Path workingDirectory = runContext.workingDir().path();
if (!path.startsWith(workingDirectory)) {
// we need to check that it's on an allowed path
List<String> globalAllowedPaths = ((DefaultRunContext) runContext).getApplicationContext().getProperty("kestra.plugins.allowed-paths", List.class, Collections.emptyList());
if (globalAllowedPaths.stream().noneMatch(path::startsWith)) {
// if not globally allowed, we check it's allowed for this specific plugin
List<String> pluginAllowedPaths = (List<String>) runContext.pluginConfiguration("allowed-paths").orElse(Collections.emptyList());
if (pluginAllowedPaths.stream().noneMatch(path::startsWith)) {
throw new SecurityException("The path " + path + " is not authorized. " +
"Only files inside the working directory are allowed by default, other path must be allowed either globally inside the Kestra configuration using the `kestra.plugins.allowed-paths` property, " +
"or by plugin using the `allowed-paths` plugin configuration.");
}
}
}
yield new FileInputStream(path.toFile());
}
default -> throw new IllegalArgumentException("Scheme not supported: " + uri.getScheme());
};
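The `file` scheme branch above guards against path traversal: the requested path is resolved with `toRealPath()` and accepted only if it sits under the working directory or under an explicitly allowed prefix. A minimal standalone sketch of that check, with the configuration lookup omitted:

```java
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;

public class AllowedPathSketch {
    static Path checkAllowed(Path requested, Path workingDirectory, List<Path> allowedPaths) throws IOException {
        // toRealPath() resolves symlinks and ".." segments, and throws if the file does not exist.
        Path real = requested.toRealPath();
        if (real.startsWith(workingDirectory) || allowedPaths.stream().anyMatch(real::startsWith)) {
            return real;
        }
        throw new SecurityException("The path " + real + " is not authorized.");
    }

    public static void main(String[] args) throws IOException {
        Path wd = Files.createTempDirectory("wd").toRealPath();
        Path inside = Files.createTempFile(wd, "data", ".txt");
        System.out.println(checkAllowed(inside, wd, List.of())); // accepted: inside the working dir
        // checkAllowed(Path.of("/etc/hosts"), wd, List.of()) would throw SecurityException
    }
}
```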

View File

@@ -1,20 +0,0 @@
package io.kestra.core.models.tasks;
import io.micronaut.core.annotation.Introspected;
import jakarta.validation.constraints.NotNull;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import java.time.Duration;
@Getter
@NoArgsConstructor
@AllArgsConstructor
@Introspected
public class Cache {
@NotNull
private Boolean enabled;
private Duration ttl;
}

View File

@@ -7,12 +7,7 @@ import java.util.Map;
public interface InputFilesInterface {
@Schema(
title = "The files to create on the working. It can be a map or a JSON object.",
description = """
Each file can be defined:
- Inline with its content
- As a URI, supported schemes are `kestra` for internal storage files, `file` for host local files, and `nsfile` for namespace files.
""",
title = "The files to create on the local filesystem. It can be a map or a JSON object.",
oneOf = {Map.class, String.class}
)
@PluginProperty(dynamic = true)

View File

@@ -49,10 +49,4 @@ public class NamespaceFiles {
)
@Builder.Default
private Property<FileExistComportment> ifExists = Property.ofValue(FileExistComportment.OVERWRITE);
@Schema(
title = "Whether to mount file into the root of the working directory, or create a folder per namespace"
)
@Builder.Default
private Property<Boolean> folderPerNamespace = Property.ofValue(false);
}

View File

@@ -11,7 +11,6 @@ import io.kestra.core.models.tasks.retrys.AbstractRetry;
import io.kestra.core.runners.RunContext;
import io.kestra.plugin.core.flow.WorkingDirectory;
import jakarta.validation.Valid;
import jakarta.validation.constraints.Size;
import lombok.Builder;
import lombok.Getter;
import lombok.NoArgsConstructor;
@@ -29,7 +28,6 @@ import static io.kestra.core.utils.Rethrow.throwFunction;
@JsonInclude(JsonInclude.Include.NON_DEFAULT)
@Plugin
abstract public class Task implements TaskInterface {
@Size(max = 256, message = "Task id must be at most 256 characters")
protected String id;
protected String type;
@@ -74,10 +72,6 @@ abstract public class Task implements TaskInterface {
@PluginProperty(hidden = true, group = PluginProperty.CORE_GROUP)
private boolean allowWarning = false;
@PluginProperty(hidden = true, group = PluginProperty.CORE_GROUP)
@Valid
private Cache taskCache;
public Optional<Task> findById(String id) {
if (this.getId().equals(id)) {
return Optional.of(this);

View File

@@ -1,5 +1,6 @@
package io.kestra.core.models.tasks;
import io.kestra.core.validations.WorkerGroupValidation;
import io.micronaut.core.annotation.Introspected;
import lombok.AllArgsConstructor;
import lombok.Getter;
@@ -9,6 +10,7 @@ import lombok.NoArgsConstructor;
@NoArgsConstructor
@AllArgsConstructor
@Introspected
@WorkerGroupValidation
public class WorkerGroup {
private String key;

View File

@@ -29,18 +29,8 @@ public abstract class AbstractRetry {
private Duration maxDuration;
@Deprecated(forRemoval = true)
public Integer getMaxAttempt() {
return maxAttempts;
}
@Deprecated(forRemoval = true)
public void setMaxAttempt(@Min(1) Integer maxAttempt) {
this.maxAttempts = maxAttempt;
}
@Min(1)
private Integer maxAttempts;
private Integer maxAttempt;
@Builder.Default
private Boolean warningOnRetry = false;
@@ -56,8 +46,8 @@ public abstract class AbstractRetry {
builder.withMaxDuration(maxDuration);
}
if (this.maxAttempts != null) {
builder.withMaxAttempts(this.maxAttempts);
if (this.maxAttempt != null) {
builder.withMaxAttempts(this.maxAttempt);
}
return builder;
}

View File

@@ -100,7 +100,7 @@ abstract public class PluginUtilsService {
@SuppressWarnings("unchecked")
public static Map<String, String> transformInputFiles(RunContext runContext, Map<String, Object> additionalVars, @NotNull Object inputFiles) throws IllegalVariableEvaluationException, JsonProcessingException {
if (inputFiles instanceof Map) {
Map<String, String> castedInputFiles = (Map<String, String>) inputFiles;
Map<String, String> castedInputFiles = (Map<String, String>) ((Map<?, ?>) inputFiles);
Map<String, String> nullFilteredInputFiles = new HashMap<>();
castedInputFiles.forEach((key, val) -> {
if (val != null) {
@@ -110,6 +110,7 @@ abstract public class PluginUtilsService {
return runContext.renderMap(nullFilteredInputFiles, additionalVars);
} else if (inputFiles instanceof String inputFileString) {
return JacksonMapper.ofJson(false).readValue(
runContext.render(inputFileString, additionalVars),
MAP_TYPE_REFERENCE
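`transformInputFiles` above accepts input files either as an already-parsed map or as a JSON string. A simplified sketch of that dual-shape handling, using plain Jackson in place of Kestra's `JacksonMapper` and omitting the Pebble rendering step:

```java
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.Map;

public class InputFilesSketch {
    private static final ObjectMapper MAPPER = new ObjectMapper();

    @SuppressWarnings("unchecked")
    static Map<String, String> transform(Object inputFiles) throws Exception {
        if (inputFiles instanceof Map) {
            // Already a map: use it as-is (rendering of each value omitted here).
            return (Map<String, String>) inputFiles;
        } else if (inputFiles instanceof String json) {
            // JSON string: deserialize it into a map of file name to content.
            return MAPPER.readValue(json, new TypeReference<Map<String, String>>() {});
        }
        throw new IllegalArgumentException("Unsupported type: " + inputFiles.getClass());
    }

    public static void main(String[] args) throws Exception {
        System.out.println(transform(Map.of("main.py", "print('hi')")));
        System.out.println(transform("{\"main.py\": \"print('hi')\"}"));
    }
}
```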

Some files were not shown because too many files have changed in this diff.