Compare commits

..

1 Commits

Author SHA1 Message Date
Loïc Mathieu
485f9a3669 feat(jdbc): Improve internal queue cleaning
Instead of cleaning queues via the JdbcCleaner, or via queues.deleteByIds(), directly clean some queues after processing.
We only do this for queues that are known to have a single consumer; for these queues, instead of updating the offsets after consumption, we remove the records directly.
2025-04-07 17:52:05 +02:00
1213 changed files with 26884 additions and 40330 deletions

View File

@@ -1,6 +1,5 @@
FROM ubuntu:24.04
ARG BUILDPLATFORM
ARG DEBIAN_FRONTEND=noninteractive
USER root
@@ -32,23 +31,9 @@ ENV SHELL=/bin/zsh
# --------------------------------------
# Java
# --------------------------------------
ARG OS_ARCHITECTURE
RUN mkdir -p /usr/java
RUN echo "Building on platform: $BUILDPLATFORM"
RUN case "$BUILDPLATFORM" in \
"linux/amd64") OS_ARCHITECTURE="x64_linux" ;; \
"linux/arm64") OS_ARCHITECTURE="aarch64_linux" ;; \
"darwin/amd64") OS_ARCHITECTURE="x64_mac" ;; \
"darwin/arm64") OS_ARCHITECTURE="aarch64_mac" ;; \
*) echo "Unsupported BUILDPLATFORM: $BUILDPLATFORM" && exit 1 ;; \
esac && \
wget "https://github.com/adoptium/temurin21-binaries/releases/download/jdk-21.0.7%2B6/OpenJDK21U-jdk_${OS_ARCHITECTURE}_hotspot_21.0.7_6.tar.gz" && \
mv OpenJDK21U-jdk_${OS_ARCHITECTURE}_hotspot_21.0.7_6.tar.gz openjdk-21.0.7.tar.gz
RUN tar -xzvf openjdk-21.0.7.tar.gz && \
mv jdk-21.0.7+6 jdk-21 && \
mv jdk-21 /usr/java/
ENV JAVA_HOME=/usr/java/jdk-21
RUN wget https://download.oracle.com/java/21/latest/jdk-21_linux-x64_bin.deb
RUN dpkg -i ./jdk-21_linux-x64_bin.deb
ENV JAVA_HOME=/usr/java/jdk-21-oracle-x64
ENV PATH="$PATH:$JAVA_HOME/bin"
# Will load a custom configuration file for Micronaut
ENV MICRONAUT_ENVIRONMENTS=local,override

View File

@@ -39,7 +39,7 @@
"yoavbls.pretty-ts-errors",
"github.vscode-github-actions",
"vscjava.vscode-java-pack",
"docker.docker"
"ms-azuretools.vscode-docker"
]
}
}

View File

@@ -37,10 +37,6 @@ The following dependencies are required to build Kestra locally:
- Docker & Docker Compose
- an IDE (Intellij IDEA, Eclipse or VS Code)
Thanks to the Kestra community, if using VSCode, you can also start development on either the frontend or backend with a bootstrapped docker container without the need to manually set up the environment.
Check out the [README](../.devcontainer/README.md) for set-up instructions and the associated [Dockerfile](../.devcontainer/Dockerfile) in the repository to get started.
To start contributing:
- [Fork](https://docs.github.com/en/github/getting-started-with-github/fork-a-repo) the repository
- Clone the fork on your workstation:
@@ -50,7 +46,7 @@ git clone git@github.com:{YOUR_USERNAME}/kestra.git
cd kestra
```
#### Develop on the backend
#### Develop backend
The backend is made with [Micronaut](https://micronaut.io).
Open the cloned repository in your favorite IDE. In most of decent IDEs, Gradle build will be detected and all dependencies will be downloaded.
@@ -76,7 +72,7 @@ python3 -m pip install virtualenv
```
#### Develop on the frontend
#### Develop frontend
The frontend is made with [Vue.js](https://vuejs.org/) and located on the `/ui` folder.
- `npm install`

View File

@@ -1,31 +1,26 @@
# See GitHub's docs for more information on this file:
# https://docs.github.com/en/free-pro-team@latest/github/administering-a-repository/configuration-options-for-dependency-updates
version: 2
updates:
# Maintain dependencies for GitHub Actions
- package-ecosystem: "github-actions"
directory: "/"
schedule:
# Check for updates to GitHub Actions every week
interval: "weekly"
day: "wednesday"
time: "08:00"
timezone: "Europe/Paris"
open-pull-requests-limit: 50
labels:
- "dependency-upgrade"
open-pull-requests-limit: 50
# Maintain dependencies for Gradle modules
- package-ecosystem: "gradle"
directory: "/"
schedule:
# Check for updates to Gradle modules every week
interval: "weekly"
day: "wednesday"
time: "08:00"
timezone: "Europe/Paris"
open-pull-requests-limit: 50
labels:
- "dependency-upgrade"
open-pull-requests-limit: 50
# Maintain dependencies for NPM modules
- package-ecosystem: "npm"
@@ -36,15 +31,8 @@ updates:
time: "08:00"
timezone: "Europe/Paris"
open-pull-requests-limit: 50
labels:
- "dependency-upgrade"
labels: ["dependency-upgrade"]
ignore:
# Ignore updates of version 1.x, as we're using the beta of 2.x (still in beta)
# Ignore updates of version 1.x, as we're using beta of 2.x
- dependency-name: "vue-virtual-scroller"
versions:
- "1.x"
# Ignore updates to monaco-yaml, version is pinned to 5.3.1 due to patch-package script additions
- dependency-name: "monaco-yaml"
versions:
- ">=5.3.2"
versions: ["1.x"]

View File

@@ -62,7 +62,7 @@ jobs:
- name: Build with Gradle
if: ${{ matrix.language == 'java' }}
run: ./gradlew testClasses -x :ui:assembleFrontend
run: ./gradlew testClasses -x :ui:installFrontend -x :ui:assembleFrontend
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)

View File

@@ -51,7 +51,7 @@ jobs:
python-libs: ""
- name: ""
plugins: ${{needs.plugins.outputs.plugins}}
packages: python3 python-is-python3 python3-pip curl jattach
packages: python3 python3-venv python-is-python3 python3-pip nodejs npm curl zip unzip jattach
python-libs: kestra
steps:
- uses: actions/checkout@v4

View File

@@ -1,77 +1,158 @@
name: 'E2E tests revival'
description: 'New E2E tests implementation started by Roman. Based on playwright in npm UI project, tests Kestra OSS develop docker image. These tests are written from zero, lets make them unflaky from the start!'
name: 'Reusable Workflow for Running End-to-End Tests'
on:
schedule:
- cron: "0 * * * *" # Every hour
workflow_call:
inputs:
noInputYet:
description: 'not input yet.'
required: false
tags:
description: "Tags used for filtering tests to include for QA."
type: string
default: "no input"
workflow_dispatch:
inputs:
noInputYet:
description: 'not input yet.'
required: false
required: true
docker-artifact-name:
description: "The GitHub artifact containing the Kestra docker image."
type: string
default: "no input"
required: false
docker-image-tag:
description: "The Docker image Tag for Kestra"
default: 'kestra/kestra:develop'
type: string
required: true
backend:
description: "The Kestra backend type to be used for E2E tests."
type: string
required: true
default: "postgres"
secrets:
GITHUB_AUTH_TOKEN:
description: "The GitHub Token."
required: true
GOOGLE_SERVICE_ACCOUNT:
description: "The Google Service Account."
required: false
jobs:
check:
timeout-minutes: 10
timeout-minutes: 60
runs-on: ubuntu-latest
env:
GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
E2E_TEST_DOCKER_DIR: ./kestra/e2e-tests/docker
KESTRA_BASE_URL: http://127.27.27.27:8080/ui/
steps:
- name: Login to DockerHub
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ github.token }}
# Checkout kestra
- name: Checkout kestra
uses: actions/checkout@v4
with:
path: kestra
- name: Install Npm dependencies
# Setup build
- uses: kestra-io/actions/.github/actions/setup-build@main
id: build
with:
java-enabled: true
node-enabled: true
python-enabled: true
# Get Docker Image
- name: Download Kestra Image
if: inputs.docker-artifact-name != ''
uses: actions/download-artifact@v4
with:
name: ${{ inputs.docker-artifact-name }}
path: /tmp
- name: Load Kestra Image
if: inputs.docker-artifact-name != ''
run: |
cd kestra/ui
npm i
npx playwright install --with-deps chromium
docker load --input /tmp/${{ inputs.docker-artifact-name }}.tar
# Docker Compose
- name: Login to DockerHub
uses: docker/login-action@v3
if: inputs.docker-artifact-name == ''
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ github.token }}
# Build configuration
- name: Create additional application configuration
run: |
touch ${{ env.E2E_TEST_DOCKER_DIR }}/data/application-secrets.yml
- name: Setup additional application configuration
if: env.APPLICATION_SECRETS != null
env:
APPLICATION_SECRETS: ${{ secrets.APPLICATION_SECRETS }}
run: |
echo $APPLICATION_SECRETS | base64 -d > ${{ env.E2E_TEST_DOCKER_DIR }}/data/application-secrets.yml
# Deploy Docker Compose Stack
- name: Run Kestra (${{ inputs.backend }})
env:
KESTRA_DOCKER_IMAGE: ${{ inputs.docker-image-tag }}
run: |
cd ${{ env.E2E_TEST_DOCKER_DIR }}
echo "KESTRA_DOCKER_IMAGE=$KESTRA_DOCKER_IMAGE" >> .env
docker compose -f docker-compose-${{ inputs.backend }}.yml up -d
- name: Install Playwright Deps
run: |
cd kestra
./gradlew playwright --args="install-deps"
# Run E2E Tests
- name: Wait For Kestra UI
run: |
# Start time
START_TIME=$(date +%s)
# Timeout duration in seconds (5 minutes)
TIMEOUT_DURATION=$((5 * 60))
while [ $(curl -s -L -o /dev/null -w %{http_code} $KESTRA_BASE_URL) != 200 ]; do
echo -e $(date) "\tKestra server HTTP state: " $(curl -k -L -s -o /dev/null -w %{http_code} $KESTRA_BASE_URL) " (waiting for 200)";
# Check the elapsed time
CURRENT_TIME=$(date +%s)
ELAPSED_TIME=$((CURRENT_TIME - START_TIME))
# Break the loop if the elapsed time exceeds the timeout duration
if [ $ELAPSED_TIME -ge $TIMEOUT_DURATION ]; then
echo "Timeout reached: Exiting after 5 minutes."
exit 1;
fi
sleep 2;
done;
echo "Kestra is running: $KESTRA_BASE_URL 🚀";
continue-on-error: true
- name: Run E2E Tests (${{ inputs.tags }})
if: inputs.tags != ''
run: |
cd kestra
./gradlew e2eTestsCheck -P tags=${{ inputs.tags }}
- name: Run E2E Tests
if: inputs.tags == ''
run: |
cd kestra/ui
npm run test:e2e
cd kestra
./gradlew e2eTestsCheck
- name: Upload Playwright Report as Github artifact
# 'With this report, you can analyze locally the results of the tests. see https://playwright.dev/docs/ci-intro#html-report'
uses: actions/upload-artifact@v4
if: ${{ !cancelled() }}
# Allure check
- name: Auth to Google Cloud
id: auth
if: ${{ !cancelled() && env.GOOGLE_SERVICE_ACCOUNT != 0 }}
uses: 'google-github-actions/auth@v2'
with:
name: playwright-report
path: kestra/playwright-report/
retention-days: 7
# Allure check
# TODO I don't know what it should do
# - uses: rlespinasse/github-slug-action@v5
# name: Allure - Generate slug variables
#
# - name: Allure - Publish report
# uses: andrcuns/allure-publish-action@v2.9.0
# if: always() && env.GOOGLE_SERVICE_ACCOUNT != ''
# continue-on-error: true
# env:
# GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_AUTH_TOKEN }}
# JAVA_HOME: /usr/lib/jvm/default-jvm/
# with:
# storageType: gcs
# resultsGlob: "**/build/allure-results"
# bucket: internal-kestra-host
# baseUrl: "https://internal.dev.kestra.io"
# prefix: ${{ format('{0}/{1}', github.repository, 'allure/java') }}
# copyLatest: true
# ignoreMissingResults: true
credentials_json: '${{ secrets.GOOGLE_SERVICE_ACCOUNT }}'
- uses: rlespinasse/github-slug-action@v5
- name: Publish allure report
uses: andrcuns/allure-publish-action@v2.9.0
if: ${{ !cancelled() && env.GOOGLE_SERVICE_ACCOUNT != 0 }}
env:
GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_AUTH_TOKEN }}
JAVA_HOME: /usr/lib/jvm/default-jvm/
with:
storageType: gcs
resultsGlob: build/allure-results
bucket: internal-kestra-host
baseUrl: "https://internal.dev.kestra.io"
prefix: ${{ format('{0}/{1}/{2}', github.repository, env.GITHUB_HEAD_REF_SLUG != '' && env.GITHUB_HEAD_REF_SLUG || github.ref_name, 'allure/playwright') }}
copyLatest: true
ignoreMissingResults: true

View File

@@ -62,6 +62,6 @@ jobs:
echo "No changes to commit. Exiting with success."
exit 0
fi
git commit -m "chore(core): localize to languages other than english" -m "Extended localization support by adding translations for multiple languages using English as the base. This enhances accessibility and usability for non-English-speaking users while keeping English as the source reference."
git commit -m "chore(translations): localize to languages other than English"
git push -u origin $BRANCH_NAME || (git push origin --delete $BRANCH_NAME && git push -u origin $BRANCH_NAME)
gh pr create --title "Translations from en.json" --body "This PR was created automatically by a GitHub Action." --base develop --head $BRANCH_NAME --assignee anna-geller --reviewer anna-geller

View File

@@ -27,7 +27,7 @@ jobs:
echo "Invalid release version. Must match regex: ^[0-9]+(\.[0-9]+)\.0-rc[01](-SNAPSHOT)?$"
exit 1
fi
if ! [[ "$NEXT_VERSION" =~ ^[0-9]+(\.[0-9]+)\.0-SNAPSHOT$ ]]; then
echo "Invalid next version. Must match regex: ^[0-9]+(\.[0-9]+)\.0-SNAPSHOT$"
exit 1;
@@ -36,7 +36,6 @@ jobs:
- uses: actions/checkout@v4
with:
fetch-depth: 0
path: kestra
# Checkout GitHub Actions
- uses: actions/checkout@v4
@@ -63,20 +62,18 @@ jobs:
- name: Run Gradle Release
env:
GITHUB_PAT: ${{ secrets.GH_PERSONAL_TOKEN }}
run: |
run: |
# Extract the major and minor versions
BASE_VERSION=$(echo "$RELEASE_VERSION" | sed -E 's/^([0-9]+\.[0-9]+)\..*/\1/')
PUSH_RELEASE_BRANCH="releases/v${BASE_VERSION}.x"
cd kestra
# Create and push release branch
git checkout -b "$PUSH_RELEASE_BRANCH";
git push -u origin "$PUSH_RELEASE_BRANCH";
# Run gradle release
git checkout develop;
if [[ "$RELEASE_VERSION" == *"-SNAPSHOT" ]]; then
# -SNAPSHOT qualifier may be used to test release-candidates
./gradlew release -Prelease.useAutomaticVersion=true \

View File

@@ -52,14 +52,15 @@ jobs:
env:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
steps:
- name: Trigger EE Workflow
uses: peter-evans/repository-dispatch@v3
if: github.ref == 'refs/heads/develop' && needs.release.result == 'success'
# Update
- name: Github - Update internal
uses: benc-uk/workflow-dispatch@v1
if: github.ref == 'refs/heads/develop' && needs.docker.result == 'success'
with:
workflow: oss-build.yml
repo: kestra-io/infra
ref: master
token: ${{ secrets.GH_PERSONAL_TOKEN }}
repository: kestra-io/kestra-ee
event-type: "oss-updated"
# Slack
- name: Slack - Notification

View File

@@ -22,11 +22,11 @@ jobs:
echo "Invalid release version. Must match regex: ^[0-9]+(\.[0-9]+)(\.[0-9]+)-(rc[0-9])?(-SNAPSHOT)?$"
exit 1
fi
# Extract the major and minor versions
BASE_VERSION=$(echo "$RELEASE_VERSION" | sed -E 's/^([0-9]+\.[0-9]+)\..*/\1/')
RELEASE_BRANCH="refs/heads/releases/v${BASE_VERSION}.x"
CURRENT_BRANCH="$GITHUB_REF"
if ! [[ "$CURRENT_BRANCH" == "$RELEASE_BRANCH" ]]; then
echo "Invalid release branch. Expected $RELEASE_BRANCH, was $CURRENT_BRANCH"
@@ -54,4 +54,4 @@ jobs:
git commit -m"chore(version): update to version '$RELEASE_VERSION'"
git push
git tag -a "v$RELEASE_VERSION" -m"v$RELEASE_VERSION"
git push --tags
git push origin "v$RELEASE_VERSION"

View File

@@ -8,9 +8,6 @@ on:
env:
JAVA_VERSION: '21'
permissions:
contents: read
jobs:
dependency-check:
name: Dependency Check
@@ -60,10 +57,6 @@ jobs:
develop-image-check:
name: Image Check (develop)
runs-on: ubuntu-latest
permissions:
contents: read
security-events: write
actions: read
steps:
# Checkout
- uses: actions/checkout@v4
@@ -87,28 +80,16 @@ jobs:
# Run Trivy image scan for Docker vulnerabilities, see https://github.com/aquasecurity/trivy-action
- name: Docker Vulnerabilities Check
uses: aquasecurity/trivy-action@0.31.0
uses: aquasecurity/trivy-action@0.30.0
with:
image-ref: kestra/kestra:develop
format: 'template'
template: '@/contrib/sarif.tpl'
severity: 'CRITICAL,HIGH'
output: 'trivy-results.sarif'
format: table
skip-dirs: /app/plugins
- name: Upload Trivy scan results to GitHub Security tab
uses: github/codeql-action/upload-sarif@v3
with:
sarif_file: 'trivy-results.sarif'
category: docker-
scanners: vuln
latest-image-check:
name: Image Check (latest)
runs-on: ubuntu-latest
permissions:
contents: read
security-events: write
actions: read
steps:
# Checkout
- uses: actions/checkout@v4
@@ -132,16 +113,9 @@ jobs:
# Run Trivy image scan for Docker vulnerabilities, see https://github.com/aquasecurity/trivy-action
- name: Docker Vulnerabilities Check
uses: aquasecurity/trivy-action@0.31.0
uses: aquasecurity/trivy-action@0.30.0
with:
image-ref: kestra/kestra:latest
format: table
skip-dirs: /app/plugins
scanners: vuln
severity: 'CRITICAL,HIGH'
output: 'trivy-results.sarif'
- name: Upload Trivy scan results to GitHub Security tab
uses: github/codeql-action/upload-sarif@v3
with:
sarif_file: 'trivy-results.sarif'
scanners: vuln

View File

@@ -31,8 +31,6 @@ jobs:
steps:
- uses: actions/checkout@v4
name: Checkout - Current ref
with:
fetch-depth: 0
# Setup build
- uses: kestra-io/actions/.github/actions/setup-build@main

View File

@@ -19,28 +19,14 @@ jobs:
name: Frontend - Tests
runs-on: ubuntu-latest
steps:
- name: Checkout
- id: checkout
name: Checkout - Current ref
uses: actions/checkout@v4
- name: Cache Node Modules
id: cache-node-modules
uses: actions/cache@v4
with:
path: |
ui/node_modules
key: modules-${{ hashFiles('ui/package-lock.json') }}
- name: Cache Playwright Binaries
id: cache-playwright
uses: actions/cache@v4
with:
path: |
~/.cache/ms-playwright
key: playwright-${{ hashFiles('ui/package-lock.json') }}
ref: ${{ github.head_ref }}
- name: Npm - install
shell: bash
if: steps.cache-node-modules.outputs.cache-hit != 'true'
working-directory: ui
run: npm ci
@@ -58,17 +44,29 @@ jobs:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
run: npm run build
- name: Storybook - Install Playwright
shell: bash
working-directory: ui
if: steps.cache-playwright.outputs.cache-hit != 'true'
run: npx playwright install --with-deps
- name: Run front-end unit tests
shell: bash
working-directory: ui
run: npm run test:cicd
- name: Storybook - Install Playwright
shell: bash
working-directory: ui
run: npx playwright install --with-deps
- name: Storybook - Build
shell: bash
working-directory: ui
run: npm run build-storybook --quiet
- name: Storybook - Run tests
shell: bash
working-directory: ui
run: |
npx concurrently -k -s first -n "SB,TEST" -c "magenta,blue" \
"npx http-server storybook-static --port 6006 --silent" \
"npx wait-on tcp:127.0.0.1:6006 && npm run test:storybook"
- name: Codecov - Upload coverage reports
uses: codecov/codecov-action@v5
if: ${{ !cancelled() && github.event.pull_request.head.repo.full_name == github.repository }}

View File

@@ -6,21 +6,19 @@ on:
GH_PERSONAL_TOKEN:
description: "The Github personal token."
required: true
push:
tags:
- '*'
jobs:
publish:
name: Github - Release
runs-on: ubuntu-latest
steps:
# Check out
- name: Checkout - Repository
uses: actions/checkout@v4
# Download Exec
- name: Artifacts - Download executable
uses: actions/download-artifact@v4
if: startsWith(github.ref, 'refs/tags/v')
with:
fetch-depth: 0
submodules: true
name: exe
path: build/executable
# Checkout GitHub Actions
- name: Checkout - Actions
@@ -32,27 +30,18 @@ jobs:
sparse-checkout: |
.github/actions
# Download Exec
# Must be done after checkout actions
- name: Artifacts - Download executable
uses: actions/download-artifact@v4
if: startsWith(github.ref, 'refs/tags/v')
with:
name: exe
path: build/executable
# GitHub Release
- name: Create GitHub release
uses: ./actions/.github/actions/github-release
if: ${{ startsWith(github.ref, 'refs/tags/v') }}
env:
GITHUB_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
SLACK_RELEASES_WEBHOOK_URL: ${{ secrets.SLACK_RELEASES_WEBHOOK_URL }}
# Trigger gha workflow to bump helm chart version
- name: GitHub - Trigger the Helm chart version bump
uses: peter-evans/repository-dispatch@v3
if: steps.create_github_release.conclusion == 'success'
with:
token: ${{ secrets.GH_PERSONAL_TOKEN }}
repository: kestra-io/helm-charts

1
.gitignore vendored
View File

@@ -59,4 +59,3 @@ core/src/main/resources/gradle.properties
*storybook.log
storybook-static
/jmh-benchmarks/src/main/resources/gradle.properties

View File

@@ -33,11 +33,9 @@
#plugin-github:io.kestra.plugin:plugin-github:LATEST
#plugin-googleworkspace:io.kestra.plugin:plugin-googleworkspace:LATEST
#plugin-graalvm:io.kestra.plugin:plugin-graalvm:LATEST
#plugin-graphql:io.kestra.plugin:plugin-graphql:LATEST
#plugin-hightouch:io.kestra.plugin:plugin-hightouch:LATEST
#plugin-hubspot:io.kestra.plugin:plugin-hubspot:LATEST
#plugin-huggingface:io.kestra.plugin:plugin-huggingface:LATEST
#plugin-influxdb:io.kestra.plugin:plugin-influxdb:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-as400:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-clickhouse:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-db2:LATEST
@@ -58,12 +56,9 @@
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-arrow-flight:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-sqlite:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-sybase:LATEST
#plugin-jenkins:io.kestra.plugin:plugin-jenkins:LATEST
#plugin-jira:io.kestra.plugin:plugin-jira:LATEST
#plugin-kafka:io.kestra.plugin:plugin-kafka:LATEST
#plugin-kestra:io.kestra.plugin:plugin-kestra:LATEST
#plugin-kubernetes:io.kestra.plugin:plugin-kubernetes:LATEST
#plugin-langchain4j:io.kestra.plugin:plugin-langchain4j:LATEST
#plugin-ldap:io.kestra.plugin:plugin-ldap:LATEST
#plugin-linear:io.kestra.plugin:plugin-linear:LATEST
#plugin-malloy:io.kestra.plugin:plugin-malloy:LATEST
@@ -75,13 +70,11 @@
#plugin-nats:io.kestra.plugin:plugin-nats:LATEST
#plugin-neo4j:io.kestra.plugin:plugin-neo4j:LATEST
#plugin-notifications:io.kestra.plugin:plugin-notifications:LATEST
#plugin-ollama:io.kestra.plugin:plugin-ollama:LATEST
#plugin-openai:io.kestra.plugin:plugin-openai:LATEST
#plugin-opensearch:io.kestra.plugin:plugin-opensearch:LATEST
#plugin-powerbi:io.kestra.plugin:plugin-powerbi:LATEST
#plugin-pulsar:io.kestra.plugin:plugin-pulsar:LATEST
#plugin-redis:io.kestra.plugin:plugin-redis:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-go:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-groovy:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-jbang:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-julia:LATEST
@@ -95,7 +88,6 @@
#plugin-scripts:io.kestra.plugin:plugin-script-shell:LATEST
#plugin-serdes:io.kestra.plugin:plugin-serdes:LATEST
#plugin-servicenow:io.kestra.plugin:plugin-servicenow:LATEST
#plugin-sifflet:io.kestra.plugin:plugin-sifflet:LATEST
#plugin-singer:io.kestra.plugin:plugin-singer:LATEST
#plugin-soda:io.kestra.plugin:plugin-soda:LATEST
#plugin-solace:io.kestra.plugin:plugin-solace:LATEST

View File

@@ -16,9 +16,8 @@ RUN apt-get update -y && \
if [ -n "${APT_PACKAGES}" ]; then apt-get install -y --no-install-recommends ${APT_PACKAGES}; fi && \
apt-get clean && \
rm -rf /var/lib/apt/lists/* /var/tmp/* /tmp/* && \
curl -LsSf https://astral.sh/uv/0.6.17/install.sh | sh && mv /root/.local/bin/uv /bin && mv /root/.local/bin/uvx /bin && \
if [ -n "${KESTRA_PLUGINS}" ]; then /app/kestra plugins install ${KESTRA_PLUGINS} && rm -rf /tmp/*; fi && \
if [ -n "${PYTHON_LIBRARIES}" ]; then uv pip install --system ${PYTHON_LIBRARIES}; fi && \
if [ -n "${PYTHON_LIBRARIES}" ]; then pip install ${PYTHON_LIBRARIES}; fi && \
chown -R kestra:kestra /app
USER kestra

View File

@@ -181,8 +181,8 @@ clone-plugins:
@echo "Using PLUGIN_GIT_DIR: $(PLUGIN_GIT_DIR)"
@mkdir -p "$(PLUGIN_GIT_DIR)"
@echo "Fetching repository list from GitHub..."
@REPOS=$$(gh repo list kestra-io -L 1000 --json name | jq -r .[].name | sort | grep "^plugin-"); \
for repo in $$REPOS; do \
@REPOS=$(gh repo list kestra-io -L 1000 --json name | jq -r .[].name | sort | grep "^plugin-") \
for repo in $$REPOS; do \
if [[ $$repo == plugin-* ]]; then \
if [ -d "$(PLUGIN_GIT_DIR)/$$repo" ]; then \
echo "Skipping: $$repo (Already cloned)"; \
@@ -194,22 +194,6 @@ clone-plugins:
done
@echo "Done!"
# Pull every plugin on its main or master branch
pull-plugins:
@echo "🔍 Pulling repositories in '$(PLUGIN_GIT_DIR)'..."
@for repo in "$(PLUGIN_GIT_DIR)"/*; do \
if [ -d "$$repo/.git" ]; then \
branch=$$(git -C "$$repo" rev-parse --abbrev-ref HEAD); \
if [[ "$$branch" == "master" || "$$branch" == "main" ]]; then \
echo "🔄 Pulling: $$(basename "$$repo") (branch: $$branch)"; \
git -C "$$repo" pull; \
else \
echo "❌ Skipping: $$(basename "$$repo") (Not on master or main branch, currently on $$branch)"; \
fi; \
fi; \
done
@echo "✅ Done pulling!"
# Update all plugins jar
build-plugins:
@echo "🔍 Scanning repositories in '$(PLUGIN_GIT_DIR)'..."

View File

@@ -21,7 +21,7 @@ plugins {
// test
id "com.adarshr.test-logger" version "4.0.0"
id "org.sonarqube" version "6.2.0.5505"
id "org.sonarqube" version "6.1.0.5360"
id 'jacoco-report-aggregation'
// helper
@@ -39,7 +39,7 @@ plugins {
id 'ru.vyarus.github-info' version '2.0.0' apply false
// OWASP dependency check
id "org.owasp.dependencycheck" version "12.1.3" apply false
id "org.owasp.dependencycheck" version "12.1.1" apply false
}
idea {
@@ -165,7 +165,7 @@ allprojects {
* Test
**********************************************************************************************************************/
subprojects {
if (it.name != 'platform' && it.name != 'jmh-benchmarks') {
if (it.name != 'platform') {
apply plugin: "com.adarshr.test-logger"
java {
@@ -196,9 +196,6 @@ subprojects {
testImplementation 'org.hamcrest:hamcrest'
testImplementation 'org.hamcrest:hamcrest-library'
testImplementation 'org.exparity:hamcrest-date'
//assertj
testImplementation 'org.assertj:assertj-core'
}
test {
@@ -216,8 +213,8 @@ subprojects {
environment 'SECRET_WEBHOOK_KEY', "secretKey".bytes.encodeBase64().toString()
environment 'SECRET_NON_B64_SECRET', "some secret value"
environment 'SECRET_PASSWORD', "cGFzc3dvcmQ="
environment 'ENV_TEST1', "true"
environment 'ENV_TEST2', "Pass by env"
environment 'KESTRA_TEST1', "true"
environment 'KESTRA_TEST2', "Pass by env"
}
testlogger {
@@ -268,7 +265,7 @@ subprojects {
* Allure Reports
**********************************************************************************************************************/
subprojects {
if (it.name != 'platform' && it.name != 'jmh-benchmarks') {
if (it.name != 'platform') {
dependencies {
testImplementation platform("io.qameta.allure:allure-bom")
testImplementation "io.qameta.allure:allure-junit5"
@@ -282,7 +279,7 @@ subprojects {
}
dependencies {
agent "org.aspectj:aspectjweaver:1.9.24"
agent "org.aspectj:aspectjweaver:1.9.23"
}
test {
@@ -295,7 +292,7 @@ subprojects {
* Jacoco
**********************************************************************************************************************/
subprojects {
if (it.name != 'platform' && it.name != 'jmh-benchmarks') {
if (it.name != 'platform') {
apply plugin: 'jacoco'
test {
@@ -472,15 +469,6 @@ tasks.register('runLocal', JavaExec) {
args 'server', 'local', '--plugins', 'local/plugins'
}
tasks.register('runStandalone', JavaExec) {
group = "application"
description = "Run Kestra as server local"
classpath = project(":cli").sourceSets.main.runtimeClasspath
mainClass = mainClassName
environment 'MICRONAUT_ENVIRONMENTS', 'override'
args 'server', 'standalone', '--plugins', 'local/plugins'
}
/**********************************************************************************************************************\
* Publish
**********************************************************************************************************************/
@@ -496,101 +484,98 @@ nexusPublishing {
}
subprojects {
apply plugin: "maven-publish"
apply plugin: 'signing'
apply plugin: 'ru.vyarus.pom'
apply plugin: 'ru.vyarus.github-info'
if (it.name != 'jmh-benchmarks') {
apply plugin: "maven-publish"
apply plugin: 'signing'
apply plugin: 'ru.vyarus.pom'
apply plugin: 'ru.vyarus.github-info'
javadoc {
options {
locale = 'en_US'
encoding = 'UTF-8'
addStringOption("Xdoclint:none", "-quiet")
}
}
javadoc {
options {
locale = 'en_US'
encoding = 'UTF-8'
addStringOption("Xdoclint:none", "-quiet")
tasks.register('sourcesJar', Jar) {
dependsOn = [':core:copyGradleProperties']
dependsOn = [':ui:assembleFrontend']
archiveClassifier.set('sources')
from sourceSets.main.allSource
}
sourcesJar.dependsOn ':core:copyGradleProperties'
sourcesJar.dependsOn ':ui:assembleFrontend'
tasks.register('javadocJar', Jar) {
archiveClassifier.set('javadoc')
from javadoc
}
tasks.register('testsJar', Jar) {
group = 'build'
description = 'Build the tests jar'
archiveClassifier.set('tests')
if (sourceSets.matching { it.name == 'test'}) {
from sourceSets.named('test').get().output
}
}
github {
user 'kestra-io'
license 'Apache'
repository 'kestra'
site 'https://kestra.io'
}
maven.pom {
description = 'The modern, scalable orchestrator & scheduler open source platform'
developers {
developer {
id = "tchiotludo"
name = "Ludovic Dehon"
}
}
}
tasks.register('sourcesJar', Jar) {
dependsOn = [':core:copyGradleProperties']
dependsOn = [':ui:assembleFrontend']
archiveClassifier.set('sources')
from sourceSets.main.allSource
}
sourcesJar.dependsOn ':core:copyGradleProperties'
sourcesJar.dependsOn ':ui:assembleFrontend'
publishing {
publications {
sonatypePublication(MavenPublication) {
version project.version
tasks.register('javadocJar', Jar) {
archiveClassifier.set('javadoc')
from javadoc
}
if (project.name.contains('cli')) {
groupId "io.kestra"
artifactId "kestra"
tasks.register('testsJar', Jar) {
group = 'build'
description = 'Build the tests jar'
artifact shadowJar
artifact executableJar
} else if (project.name.contains('platform')){
groupId project.group
artifactId project.name
} else {
from components.java
archiveClassifier.set('tests')
if (sourceSets.matching { it.name == 'test'}) {
from sourceSets.named('test').get().output
}
}
groupId project.group
artifactId project.name
github {
user 'kestra-io'
license 'Apache'
repository 'kestra'
site 'https://kestra.io'
}
maven.pom {
description = 'The modern, scalable orchestrator & scheduler open source platform'
developers {
developer {
id = "tchiotludo"
name = "Ludovic Dehon"
artifact sourcesJar
artifact javadocJar
artifact testsJar
}
}
}
}
publishing {
publications {
sonatypePublication(MavenPublication) {
version project.version
signing {
// only sign JARs that we publish to Sonatype
required { gradle.taskGraph.hasTask("publishSonatypePublicationPublicationToSonatypeRepository") }
sign publishing.publications.sonatypePublication
}
if (project.name.contains('cli')) {
groupId "io.kestra"
artifactId "kestra"
artifact shadowJar
artifact executableJar
} else if (project.name.contains('platform')){
groupId project.group
artifactId project.name
} else {
from components.java
groupId project.group
artifactId project.name
artifact sourcesJar
artifact javadocJar
artifact testsJar
}
}
}
}
signing {
// only sign JARs that we publish to Sonatype
required { gradle.taskGraph.hasTask("publishSonatypePublicationPublicationToSonatypeRepository") }
sign publishing.publications.sonatypePublication
}
tasks.withType(GenerateModuleMetadata).configureEach {
// Suppress this validation error as we want to enforce the Kestra platform
suppressedValidationErrors.add('enforced-platform')
}
tasks.withType(GenerateModuleMetadata).configureEach {
// Suppress this validation error as we want to enforce the Kestra platform
suppressedValidationErrors.add('enforced-platform')
}
}

View File

@@ -36,5 +36,5 @@ dependencies {
implementation project(":webserver")
//test
testImplementation "org.wiremock:wiremock-jetty12"
testImplementation "org.wiremock:wiremock"
}

View File

@@ -1,7 +1,5 @@
package io.kestra.cli;
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
import io.micronaut.core.annotation.Nullable;
import io.micronaut.http.HttpHeaders;
import io.micronaut.http.HttpRequest;
@@ -92,7 +90,7 @@ public abstract class AbstractApiCommand extends AbstractCommand {
throw new IllegalArgumentException("'path' must be non-null and start with '/'");
}
return tenantId == null ? "/api/v1/" + MAIN_TENANT + path : "/api/v1/" + tenantId + path;
return tenantId == null ? "/api/v1" + path : "/api/v1/" + tenantId + path;
}
@Builder

View File

@@ -2,7 +2,6 @@ package io.kestra.cli;
import io.kestra.cli.commands.configs.sys.ConfigCommand;
import io.kestra.cli.commands.flows.FlowCommand;
import io.kestra.cli.commands.migrations.MigrationCommand;
import io.kestra.cli.commands.namespaces.NamespaceCommand;
import io.kestra.cli.commands.plugins.PluginCommand;
import io.kestra.cli.commands.servers.ServerCommand;
@@ -43,7 +42,6 @@ import java.util.concurrent.Callable;
SysCommand.class,
ConfigCommand.class,
NamespaceCommand.class,
MigrationCommand.class,
}
)
@Introspected

View File

@@ -1,29 +0,0 @@
package io.kestra.cli.commands.migrations;
import io.kestra.cli.AbstractCommand;
import io.kestra.cli.App;
import io.micronaut.configuration.picocli.PicocliRunner;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
@CommandLine.Command(
name = "migrate",
description = "handle migrations",
mixinStandardHelpOptions = true,
subcommands = {
TenantMigrationCommand.class,
}
)
@Slf4j
public class MigrationCommand extends AbstractCommand {
@SneakyThrows
@Override
public Integer call() throws Exception {
super.call();
PicocliRunner.call(App.class, "migrate", "--help");
return 0;
}
}

View File

@@ -1,49 +0,0 @@
package io.kestra.cli.commands.migrations;
import io.kestra.cli.AbstractCommand;
import io.kestra.core.repositories.TenantMigrationInterface;
import io.micronaut.context.ApplicationContext;
import jakarta.inject.Inject;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
import picocli.CommandLine.Option;
@CommandLine.Command(
name = "default-tenant",
description = "migrate every elements from no tenant to the main tenant"
)
@Slf4j
public class TenantMigrationCommand extends AbstractCommand {
@Inject
private ApplicationContext applicationContext;
@Option(names = "--tenant-id", description = "tenant identifier")
String tenantId;
@Option(names = "--tenant-name", description = "tenant name")
String tenantName;
@Option(names = "--dry-run", description = "Preview only, do not update")
boolean dryRun;
@Override
public Integer call() throws Exception {
super.call();
if (dryRun) {
System.out.println("🧪 Dry-run mode enabled. No changes will be applied.");
}
TenantMigrationService migrationService = this.applicationContext.getBean(TenantMigrationService.class);
try {
migrationService.migrateTenant(tenantId, tenantName, dryRun);
System.out.println("✅ Tenant migration complete.");
} catch (Exception e) {
System.err.println("❌ Tenant migration failed: " + e.getMessage());
e.printStackTrace();
return 1;
}
return 0;
}
}

View File

@@ -1,56 +0,0 @@
package io.kestra.cli.commands.migrations;
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
import com.github.javaparser.utils.Log;
import io.kestra.core.exceptions.KestraRuntimeException;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.queues.QueueException;
import io.kestra.core.queues.QueueFactoryInterface;
import io.kestra.core.queues.QueueInterface;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.repositories.TenantMigrationInterface;
import jakarta.inject.Inject;
import jakarta.inject.Named;
import jakarta.inject.Singleton;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
@Singleton
@Slf4j
public class TenantMigrationService {
@Inject
private TenantMigrationInterface tenantMigrationInterface;
@Inject
private FlowRepositoryInterface flowRepository;
@Inject
@Named(QueueFactoryInterface.FLOW_NAMED)
private QueueInterface<FlowInterface> flowQueue;
public void migrateTenant(String tenantId, String tenantName, boolean dryRun) {
if (StringUtils.isNotBlank(tenantId) && !MAIN_TENANT.equals(tenantId)){
throw new KestraRuntimeException("Tenant configuration is an enterprise feature. It can only be main in OSS");
}
Log.info("🔁 Starting tenant migration...");
tenantMigrationInterface.migrateTenant(MAIN_TENANT, dryRun);
migrateQueue(dryRun);
}
protected void migrateQueue(boolean dryRun) {
if (!dryRun){
log.info("🔁 Starting restoring queue...");
flowRepository.findAllWithSourceForAllTenants().forEach(flow -> {
try {
flowQueue.emit(flow);
} catch (QueueException e) {
log.warn("Unable to send the flow {} to the queue", flow.uid(), e);
}
});
}
}
}

View File

@@ -1,7 +1,6 @@
package io.kestra.cli.commands.plugins;
import io.kestra.core.contexts.MavenPluginRepositoryConfig;
import io.kestra.core.exceptions.KestraRuntimeException;
import io.kestra.core.plugins.LocalPluginManager;
import io.kestra.core.plugins.MavenPluginDownloader;
import io.kestra.core.plugins.PluginArtifact;
@@ -52,7 +51,7 @@ public class PluginInstallCommand extends AbstractCommand {
Provider<MavenPluginDownloader> mavenPluginRepositoryProvider;
@Inject
Provider<PluginCatalogService> pluginCatalogService;
@Client("api") HttpClient httpClient;
@Override
public Integer call() throws Exception {
@@ -86,7 +85,7 @@ public class PluginInstallCommand extends AbstractCommand {
}
if (all) {
PluginCatalogService service = pluginCatalogService.get();
PluginCatalogService service = new PluginCatalogService(httpClient, false, true);
dependencies = service.get().stream().map(Objects::toString).toList();
}
@@ -104,21 +103,12 @@ public class PluginInstallCommand extends AbstractCommand {
}
try (final PluginManager pluginManager = getPluginManager()) {
List<PluginArtifact> installed;
if (all) {
installed = new ArrayList<>(pluginArtifacts.size());
for (PluginArtifact pluginArtifact : pluginArtifacts) {
try {
installed.add(pluginManager.install(pluginArtifact, repositoryConfigs, false, pluginsPath));
} catch (KestraRuntimeException e) {
String cause = e.getCause() != null ? e.getCause().getMessage() : e.getMessage();
stdErr("Failed to install plugin {0}. Cause: {1}", pluginArtifact, cause);
}
}
} else {
installed = pluginManager.install(pluginArtifacts, repositoryConfigs, false, pluginsPath);
}
List<PluginArtifact> installed = pluginManager.install(
pluginArtifacts,
repositoryConfigs,
false,
pluginsPath
);
List<URI> uris = installed.stream().map(PluginArtifact::uri).toList();
stdOut("Successfully installed plugins {0} into {1}", dependencies, uris);

View File

@@ -98,7 +98,7 @@ public class StandAloneCommand extends AbstractServerCommand {
if (flowPath != null) {
try {
LocalFlowRepositoryLoader localFlowRepositoryLoader = applicationContext.getBean(LocalFlowRepositoryLoader.class);
localFlowRepositoryLoader.load(null, this.flowPath);
localFlowRepositoryLoader.load(this.flowPath);
} catch (IOException e) {
throw new CommandLine.ParameterException(this.spec.commandLine(), "Invalid flow path", e);
}

View File

@@ -1,6 +1,6 @@
package io.kestra.cli.services;
import io.kestra.core.exceptions.FlowProcessingException;
import io.kestra.core.exceptions.DeserializationException;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithPath;
import io.kestra.core.models.flows.FlowWithSource;
@@ -12,8 +12,8 @@ import io.kestra.core.services.PluginDefaultService;
import io.micronaut.context.annotation.Requires;
import io.micronaut.context.annotation.Value;
import io.micronaut.scheduling.io.watch.FileWatchConfiguration;
import jakarta.annotation.Nullable;
import jakarta.inject.Inject;
import jakarta.annotation.Nullable;
import jakarta.inject.Singleton;
import jakarta.validation.ConstraintViolationException;
import lombok.extern.slf4j.Slf4j;
@@ -26,8 +26,6 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
@Singleton
@Slf4j
@Requires(property = "micronaut.io.watch.enabled", value = "true")
@@ -113,8 +111,6 @@ public class FileChangedEventListener {
}
public void startListening(List<Path> paths) throws IOException, InterruptedException {
String tenantId = this.tenantId != null ? this.tenantId : MAIN_TENANT;
for (Path path : paths) {
path.register(watchService, StandardWatchEventKinds.ENTRY_CREATE, StandardWatchEventKinds.ENTRY_DELETE, StandardWatchEventKinds.ENTRY_MODIFY);
}
@@ -193,8 +189,6 @@ public class FileChangedEventListener {
}
private void loadFlowsFromFolder(Path folder) {
String tenantId = this.tenantId != null ? this.tenantId : MAIN_TENANT;
try {
Files.walkFileTree(folder, new SimpleFileVisitor<Path>() {
@Override
@@ -238,13 +232,11 @@ public class FileChangedEventListener {
}
private Optional<FlowWithSource> parseFlow(String content, Path entry) {
String tenantId = this.tenantId != null ? this.tenantId : MAIN_TENANT;
try {
FlowWithSource flow = pluginDefaultService.parseFlowWithAllDefaults(tenantId, content, false);
modelValidator.validate(flow);
return Optional.of(flow);
} catch (ConstraintViolationException | FlowProcessingException e) {
} catch (DeserializationException | ConstraintViolationException e) {
log.warn("Error while parsing flow: {}", entry, e);
}
return Optional.empty();

View File

@@ -15,7 +15,7 @@ public class LocalFlowFileWatcher implements FlowFilesManager {
@Override
public FlowWithSource createOrUpdateFlow(final GenericFlow flow) {
return flowRepository.findById(flow.getTenantId(), flow.getNamespace(), flow.getId())
return flowRepository.findById(null, flow.getNamespace(), flow.getId())
.map(previous -> flowRepository.update(flow, previous))
.orElseGet(() -> flowRepository.create(flow));
}

View File

@@ -15,9 +15,6 @@ micronaut:
static:
paths: classpath:static
mapping: /static/**
root:
paths: classpath:root
mapping: /**
server:
max-request-size: 10GB
multipart:
@@ -29,11 +26,11 @@ micronaut:
netty:
max-chunk-size: 10MB
max-header-size: 32768 # increased from the default of 8k
responses:
file:
cache-seconds: 86400
cache-control:
public: true
responses:
file:
cache-seconds: 86400
cache-control:
public: true
# Access log configuration, see https://docs.micronaut.io/latest/guide/index.html#accessLogger
access-logger:
@@ -141,8 +138,8 @@ kestra:
jdbc:
queues:
min-poll-interval: 25ms
max-poll-interval: 500ms
poll-switch-interval: 60s
max-poll-interval: 1000ms
poll-switch-interval: 5s
cleaner:
initial-delay: 1h
@@ -171,7 +168,7 @@ kestra:
values:
recoverMissedSchedules: ALL
variables:
env-vars-prefix: ENV_
env-vars-prefix: KESTRA_
cache-enabled: true
cache-size: 1000
@@ -198,18 +195,13 @@ kestra:
liveness:
enabled: true
# The expected time between liveness probe.
interval: 10s
interval: 5s
# The timeout used to detect service failures.
timeout: 1m
timeout: 45s
# The time to wait before executing a liveness probe.
initialDelay: 1m
initialDelay: 45s
# The expected time between service heartbeats.
heartbeatInterval: 3s
service:
purge:
initial-delay: 1h
fixed-delay: 1d
retention: 30d
anonymous-usage-report:
enabled: true
uri: https://api.kestra.io/v1/reports/usages

View File

@@ -13,7 +13,8 @@ import picocli.CommandLine;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.*;
import static org.junit.jupiter.api.Assertions.assertTrue;
class AppTest {
@@ -25,7 +26,7 @@ class AppTest {
try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, Environment.TEST)) {
PicocliRunner.call(App.class, ctx, "--help");
assertThat(out.toString()).contains("kestra");
assertThat(out.toString(), containsString("kestra"));
}
}
@@ -41,7 +42,7 @@ class AppTest {
new CommandLine(App.class, new MicronautFactory(ctx)).execute(args);
assertTrue(ctx.getProperty("kestra.server-type", ServerType.class).isEmpty());
assertThat(out.toString()).startsWith("Usage: kestra server " + serverType);
assertThat(out.toString(), startsWith("Usage: kestra server " + serverType));
}
}
@@ -55,9 +56,9 @@ class AppTest {
try (ApplicationContext ctx = App.applicationContext(App.class, argsWithMissingParams)) {
new CommandLine(App.class, new MicronautFactory(ctx)).execute(argsWithMissingParams);
assertThat(out.toString()).startsWith("Missing required parameters: ");
assertThat(out.toString()).contains("Usage: kestra flow namespace update ");
assertThat(out.toString()).doesNotContain("MissingParameterException: ");
assertThat(out.toString(), startsWith("Missing required parameters: "));
assertThat(out.toString(), containsString("Usage: kestra flow namespace update "));
assertThat(out.toString(), not(containsString("MissingParameterException: ")));
}
}
}

View File

@@ -8,7 +8,8 @@ import org.junit.jupiter.api.Test;
import java.util.Map;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
class ServerCommandValidatorTest {
@@ -39,8 +40,8 @@ class ServerCommandValidatorTest {
.start()
);
final Throwable rootException = getRootException(exception);
assertThat(rootException.getClass()).isEqualTo(ServerCommandValidator.ServerCommandException.class);
assertThat(rootException.getMessage()).isEqualTo("Incomplete server configuration - missing required properties");
assertThat(rootException.getClass(), is(ServerCommandValidator.ServerCommandException.class));
assertThat(rootException.getMessage(), is("Incomplete server configuration - missing required properties"));
}
private Throwable getRootException(Throwable exception) {

View File

@@ -4,14 +4,12 @@ import io.micronaut.configuration.picocli.PicocliRunner;
import io.micronaut.context.ApplicationContext;
import io.micronaut.context.env.Environment;
import org.junit.jupiter.api.Test;
import org.yaml.snakeyaml.Yaml;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.util.Map;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.AssertionsForClassTypes.catchThrowable;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.StringContains.containsString;
class ConfigPropertiesCommandTest {
@Test
@@ -22,52 +20,8 @@ class ConfigPropertiesCommandTest {
try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, Environment.TEST)) {
PicocliRunner.call(ConfigPropertiesCommand.class, ctx);
assertThat(out.toString()).contains("activeEnvironments:");
assertThat(out.toString()).contains("- test");
}
}
@Test
void shouldOutputCustomEnvironment() {
ByteArrayOutputStream out = new ByteArrayOutputStream();
System.setOut(new PrintStream(out));
try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, "custom-env")) {
PicocliRunner.call(ConfigPropertiesCommand.class, ctx);
assertThat(out.toString()).contains("activeEnvironments:");
assertThat(out.toString()).contains("- custom-env");
}
}
@Test
void shouldReturnZeroOnSuccess() throws Exception {
ByteArrayOutputStream out = new ByteArrayOutputStream();
System.setOut(new PrintStream(out));
try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, Environment.TEST)) {
ConfigPropertiesCommand cmd = ctx.createBean(ConfigPropertiesCommand.class);
int result = cmd.call();
assertThat(result).isZero();
}
}
@Test
void shouldOutputValidYaml() {
ByteArrayOutputStream out = new ByteArrayOutputStream();
System.setOut(new PrintStream(out));
try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, Environment.TEST)) {
PicocliRunner.call(ConfigPropertiesCommand.class, ctx);
String output = out.toString();
Yaml yaml = new Yaml();
Throwable thrown = catchThrowable(() -> {
Map<?, ?> parsed = yaml.load(output);
assertThat(parsed).isInstanceOf(Map.class);
});
assertThat(thrown).isNull();
assertThat(out.toString(), containsString("activeEnvironments:"));
assertThat(out.toString(), containsString("- test"));
}
}
}

View File

@@ -11,7 +11,9 @@ import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.net.URL;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.Is.is;
import static org.hamcrest.core.StringContains.containsString;
class FlowCreateOrUpdateCommandTest {
@RetryingTest(5) // flaky on CI but cannot be reproduced even with 100 repetitions
@@ -36,7 +38,7 @@ class FlowCreateOrUpdateCommandTest {
};
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
assertThat(out.toString()).contains("4 flow(s)");
assertThat(out.toString(), containsString("4 flow(s)"));
out.reset();
args = new String[]{
@@ -51,7 +53,7 @@ class FlowCreateOrUpdateCommandTest {
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
// 2 delete + 1 update
assertThat(out.toString()).contains("4 flow(s)");
assertThat(out.toString(), containsString("4 flow(s)"));
}
}
@@ -78,7 +80,7 @@ class FlowCreateOrUpdateCommandTest {
};
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
assertThat(out.toString()).contains("4 flow(s)");
assertThat(out.toString(), containsString("4 flow(s)"));
out.reset();
// no "delete" arg should behave as no-delete
@@ -91,7 +93,7 @@ class FlowCreateOrUpdateCommandTest {
};
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
assertThat(out.toString()).contains("1 flow(s)");
assertThat(out.toString(), containsString("1 flow(s)"));
out.reset();
args = new String[]{
@@ -104,7 +106,7 @@ class FlowCreateOrUpdateCommandTest {
};
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
assertThat(out.toString()).contains("1 flow(s)");
assertThat(out.toString(), containsString("1 flow(s)"));
}
}
@@ -129,8 +131,8 @@ class FlowCreateOrUpdateCommandTest {
};
Integer call = PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
assertThat(call).isZero();
assertThat(out.toString()).contains("1 flow(s)");
assertThat(call, is(0));
assertThat(out.toString(), containsString("1 flow(s)"));
}
}
}

View File

@@ -9,7 +9,9 @@ import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.net.URL;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.Is.is;
import static org.hamcrest.core.StringContains.containsString;
class FlowDotCommandTest {
@Test
@@ -24,8 +26,8 @@ class FlowDotCommandTest {
};
Integer call = PicocliRunner.call(FlowDotCommand.class, ctx, args);
assertThat(call).isZero();
assertThat(out.toString()).contains("\"root.date\"[shape=box];");
assertThat(call, is(0));
assertThat(out.toString(), containsString("\"root.date\"[shape=box];"));
}
}
}

View File

@@ -7,7 +7,8 @@ import org.junit.jupiter.api.Test;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.Is.is;
class FlowExpandCommandTest {
@SuppressWarnings("deprecation")
@@ -22,20 +23,22 @@ class FlowExpandCommandTest {
};
Integer call = PicocliRunner.call(FlowExpandCommand.class, ctx, args);
assertThat(call).isZero();
assertThat(out.toString()).isEqualTo("id: include\n" +
"namespace: io.kestra.cli\n" +
"\n" +
"# The list of tasks\n" +
"tasks:\n" +
"- id: t1\n" +
" type: io.kestra.plugin.core.debug.Return\n" +
" format: \"Lorem ipsum dolor sit amet\"\n" +
"- id: t2\n" +
" type: io.kestra.plugin.core.debug.Return\n" +
" format: |\n" +
" Lorem ipsum dolor sit amet\n" +
" Lorem ipsum dolor sit amet\n");
assertThat(call, is(0));
assertThat(out.toString(), is(
"id: include\n" +
"namespace: io.kestra.cli\n" +
"\n" +
"# The list of tasks\n" +
"tasks:\n" +
"- id: t1\n" +
" type: io.kestra.plugin.core.debug.Return\n" +
" format: \"Lorem ipsum dolor sit amet\"\n" +
"- id: t2\n" +
" type: io.kestra.plugin.core.debug.Return\n" +
" format: |\n" +
" Lorem ipsum dolor sit amet\n" +
" Lorem ipsum dolor sit amet\n"
));
}
}
}

View File

@@ -14,7 +14,10 @@ import java.io.PrintStream;
import java.net.URL;
import java.util.zip.ZipFile;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.core.Is.is;
import static org.hamcrest.core.StringContains.containsString;
class FlowExportCommandTest {
@Test
@@ -39,7 +42,7 @@ class FlowExportCommandTest {
directory.getPath(),
};
PicocliRunner.call(FlowNamespaceUpdateCommand.class, ctx, updateArgs);
assertThat(out.toString()).contains("3 flow(s)");
assertThat(out.toString(), containsString("3 flow(s)"));
// then we export them
String[] exportArgs = {
@@ -55,11 +58,11 @@ class FlowExportCommandTest {
};
PicocliRunner.call(FlowExportCommand.class, ctx, exportArgs);
File file = new File("/tmp/flows.zip");
assertThat(file.exists()).isTrue();
assertThat(file.exists(), is(true));
ZipFile zipFile = new ZipFile(file);
// When launching the test in a suite, there is 4 flows but when lauching individualy there is only 3
assertThat(zipFile.stream().count()).isGreaterThanOrEqualTo(3L);
assertThat(zipFile.stream().count(), greaterThanOrEqualTo(3L));
file.delete();
}

View File

@@ -10,7 +10,9 @@ import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.net.URL;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.Is.is;
import static org.hamcrest.core.StringContains.containsString;
class FlowUpdatesCommandTest {
@Test
@@ -37,7 +39,7 @@ class FlowUpdatesCommandTest {
};
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
assertThat(out.toString()).contains("successfully updated !");
assertThat(out.toString(), containsString("successfully updated !"));
out.reset();
args = new String[]{
@@ -54,7 +56,7 @@ class FlowUpdatesCommandTest {
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
// 2 delete + 1 update
assertThat(out.toString()).contains("successfully updated !");
assertThat(out.toString(), containsString("successfully updated !"));
}
}
@@ -83,7 +85,7 @@ class FlowUpdatesCommandTest {
};
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
assertThat(out.toString()).contains("4 flow(s)");
assertThat(out.toString(), containsString("4 flow(s)"));
out.reset();
// no "delete" arg should behave as no-delete
@@ -98,7 +100,7 @@ class FlowUpdatesCommandTest {
};
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
assertThat(out.toString()).contains("1 flow(s)");
assertThat(out.toString(), containsString("1 flow(s)"));
out.reset();
args = new String[]{
@@ -113,7 +115,7 @@ class FlowUpdatesCommandTest {
};
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
assertThat(out.toString()).contains("1 flow(s)");
assertThat(out.toString(), containsString("1 flow(s)"));
}
}
@@ -142,7 +144,7 @@ class FlowUpdatesCommandTest {
};
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
assertThat(out.toString()).contains("Invalid entity: flow.namespace: main_io.kestra.outsider_quattro_-1 - flow namespace is invalid");
assertThat(out.toString(), containsString("Invalid entity: flow.namespace: io.kestra.outsider_quattro_-1 - flow namespace is invalid"));
}
}
@@ -169,8 +171,8 @@ class FlowUpdatesCommandTest {
};
Integer call = PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
assertThat(call).isZero();
assertThat(out.toString()).contains("1 flow(s)");
assertThat(call, is(0));
assertThat(out.toString(), containsString("1 flow(s)"));
}
}
}

View File

@@ -7,7 +7,9 @@ import org.junit.jupiter.api.Test;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.Is.is;
import static org.hamcrest.core.StringContains.containsString;
class FlowValidateCommandTest {
@Test
@@ -22,8 +24,8 @@ class FlowValidateCommandTest {
};
Integer call = PicocliRunner.call(FlowValidateCommand.class, ctx, args);
assertThat(call).isZero();
assertThat(out.toString()).contains("✓ - io.kestra.cli / include");
assertThat(call, is(0));
assertThat(out.toString(), containsString("✓ - io.kestra.cli / include"));
}
}
@@ -39,10 +41,10 @@ class FlowValidateCommandTest {
};
Integer call = PicocliRunner.call(FlowValidateCommand.class, ctx, args);
assertThat(call).isZero();
assertThat(out.toString()).contains("✓ - system / warning");
assertThat(out.toString()).contains("⚠ - tasks[0] is deprecated");
assertThat(out.toString()).contains(" - io.kestra.core.tasks.log.Log is replaced by io.kestra.plugin.core.log.Log");
assertThat(call, is(0));
assertThat(out.toString(), containsString("✓ - system / warning"));
assertThat(out.toString(), containsString("⚠ - tasks[0] is deprecated"));
assertThat(out.toString(), containsString(" - io.kestra.core.tasks.log.Log is replaced by io.kestra.plugin.core.log.Log"));
}
}
}

View File

@@ -10,13 +10,15 @@ import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.net.URL;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.StringContains.containsString;
public class SingleFlowCommandsTest {
class SingleFlowCommandsTest {
@Test
void all() {
URL flow = SingleFlowCommandsTest.class.getClassLoader().getResource("crudFlow/date.yml");
URL flow = SingleFlowCommandsTest.class.getClassLoader().getResource("flows/quattro.yml");
ByteArrayOutputStream out = new ByteArrayOutputStream();
System.setOut(new PrintStream(out));
@@ -25,6 +27,19 @@ class SingleFlowCommandsTest {
EmbeddedServer embeddedServer = ctx.getBean(EmbeddedServer.class);
embeddedServer.start();
String[] deleteArgs = {
"--server",
embeddedServer.getURL().toString(),
"--user",
"myuser:pass:word",
"io.kestra.outsider",
"quattro"
};
PicocliRunner.call(FlowDeleteCommand.class, ctx, deleteArgs);
assertThat(out.toString(), containsString("Flow successfully deleted !"));
out.reset();
String[] createArgs = {
"--server",
embeddedServer.getURL().toString(),
@@ -34,36 +49,23 @@ class SingleFlowCommandsTest {
};
PicocliRunner.call(FlowCreateCommand.class, ctx, createArgs);
assertThat(out.toString()).contains("Flow successfully created !");
assertThat(out.toString(), containsString("Flow successfully created !"));
out.reset();
String[] updateArgs = {
out.reset();String[] updateArgs = {
"--server",
embeddedServer.getURL().toString(),
"--user",
"myuser:pass:word",
flow.getPath(),
"io.kestra.cli",
"date"
"io.kestra.outsider",
"quattro"
};
PicocliRunner.call(FlowUpdateCommand.class, ctx, updateArgs);
assertThat(out.toString()).contains("Flow successfully updated !");
assertThat(out.toString(), containsString("Flow successfully updated !"));
out.reset();
String[] deleteArgs = {
"--server",
embeddedServer.getURL().toString(),
"--user",
"myuser:pass:word",
"io.kestra.cli",
"date"
};
PicocliRunner.call(FlowDeleteCommand.class, ctx, deleteArgs);
assertThat(out.toString()).contains("Flow successfully deleted !");
}
}
}

View File

@@ -10,7 +10,9 @@ import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.net.URL;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.Is.is;
import static org.hamcrest.core.StringContains.containsString;
class TemplateValidateCommandTest {
@Test
@@ -26,9 +28,9 @@ class TemplateValidateCommandTest {
};
Integer call = PicocliRunner.call(FlowValidateCommand.class, ctx, args);
assertThat(call).isEqualTo(1);
assertThat(out.toString()).contains("Unable to parse flow");
assertThat(out.toString()).contains("must not be empty");
assertThat(call, is(1));
assertThat(out.toString(), containsString("Unable to parse flow"));
assertThat(out.toString(), containsString("must not be empty"));
}
}
@@ -54,9 +56,9 @@ class TemplateValidateCommandTest {
};
Integer call = PicocliRunner.call(FlowValidateCommand.class, ctx, args);
assertThat(call).isEqualTo(1);
assertThat(out.toString()).contains("Unable to parse flow");
assertThat(out.toString()).contains("must not be empty");
assertThat(call, is(1));
assertThat(out.toString(), containsString("Unable to parse flow"));
assertThat(out.toString(), containsString("must not be empty"));
}
}
}

View File

@@ -7,7 +7,9 @@ import org.junit.jupiter.api.Test;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.core.Is.is;
class FlowNamespaceCommandTest {
@Test
@@ -19,8 +21,8 @@ class FlowNamespaceCommandTest {
String[] args = {};
Integer call = PicocliRunner.call(FlowNamespaceCommand.class, ctx, args);
assertThat(call).isZero();
assertThat(out.toString()).contains("Usage: kestra flow namespace");
assertThat(call, is(0));
assertThat(out.toString(), containsString("Usage: kestra flow namespace"));
}
}
}

View File

@@ -10,7 +10,10 @@ import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.net.URL;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.core.StringContains.containsString;
import static org.hamcrest.core.Is.is;
class FlowNamespaceUpdateCommandTest {
@Test
@@ -36,7 +39,7 @@ class FlowNamespaceUpdateCommandTest {
};
PicocliRunner.call(FlowNamespaceUpdateCommand.class, ctx, args);
assertThat(out.toString()).contains("namespace 'io.kestra.cli' successfully updated");
assertThat(out.toString(), containsString("namespace 'io.kestra.cli' successfully updated"));
out.reset();
args = new String[]{
@@ -52,7 +55,7 @@ class FlowNamespaceUpdateCommandTest {
PicocliRunner.call(FlowNamespaceUpdateCommand.class, ctx, args);
// 2 delete + 1 update
assertThat(out.toString()).contains("namespace 'io.kestra.cli' successfully updated");
assertThat(out.toString(), containsString("namespace 'io.kestra.cli' successfully updated"));
}
}
@@ -78,9 +81,9 @@ class FlowNamespaceUpdateCommandTest {
};
Integer call = PicocliRunner.call(FlowNamespaceUpdateCommand.class, ctx, args);
assertThat(call).isEqualTo(1);
assertThat(out.toString()).contains("Unable to parse flows");
assertThat(out.toString()).contains("must not be empty");
assertThat(call, is(1));
assertThat(out.toString(), containsString("Unable to parse flows"));
assertThat(out.toString(), containsString("must not be empty"));
}
}
@@ -108,7 +111,7 @@ class FlowNamespaceUpdateCommandTest {
};
PicocliRunner.call(FlowNamespaceUpdateCommand.class, ctx, args);
assertThat(out.toString()).contains("3 flow(s)");
assertThat(out.toString(), containsString("3 flow(s)"));
out.reset();
// no "delete" arg should behave as no-delete
@@ -122,7 +125,7 @@ class FlowNamespaceUpdateCommandTest {
};
PicocliRunner.call(FlowNamespaceUpdateCommand.class, ctx, args);
assertThat(out.toString()).contains("1 flow(s)");
assertThat(out.toString(), containsString("1 flow(s)"));
out.reset();
args = new String[]{
@@ -136,7 +139,7 @@ class FlowNamespaceUpdateCommandTest {
};
PicocliRunner.call(FlowNamespaceUpdateCommand.class, ctx, args);
assertThat(out.toString()).contains("1 flow(s)");
assertThat(out.toString(), containsString("1 flow(s)"));
}
}
@@ -162,8 +165,8 @@ class FlowNamespaceUpdateCommandTest {
};
Integer call = PicocliRunner.call(FlowNamespaceUpdateCommand.class, ctx, args);
assertThat(call).isZero();
assertThat(out.toString()).contains("1 flow(s)");
assertThat(call, is(0));
assertThat(out.toString(), containsString("1 flow(s)"));
}
}
@@ -192,8 +195,8 @@ class FlowNamespaceUpdateCommandTest {
};
PicocliRunner.call(FlowNamespaceUpdateCommand.class, ctx, args);
assertThat(out.toString()).contains("io.kestra.override");
assertThat(out.toString()).doesNotContain("io.kestra.cli");
assertThat(out.toString(), containsString("io.kestra.override"));
assertThat(out.toString(), not(containsString("io.kestra.cli")));
}
}

View File

@@ -7,7 +7,9 @@ import org.junit.jupiter.api.Test;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.core.Is.is;
class NamespaceCommandTest {
@Test
@@ -19,8 +21,8 @@ class NamespaceCommandTest {
String[] args = {};
Integer call = PicocliRunner.call(NamespaceCommand.class, ctx, args);
assertThat(call).isZero();
assertThat(out.toString()).contains("Usage: kestra namespace");
assertThat(call, is(0));
assertThat(out.toString(), containsString("Usage: kestra namespace"));
}
}
}

View File

@@ -7,7 +7,9 @@ import org.junit.jupiter.api.Test;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.core.Is.is;
class NamespaceFilesCommandTest {
@Test
@@ -19,8 +21,8 @@ class NamespaceFilesCommandTest {
String[] args = {};
Integer call = PicocliRunner.call(NamespaceFilesCommand.class, ctx, args);
assertThat(call).isZero();
assertThat(out.toString()).contains("Usage: kestra namespace files");
assertThat(call, is(0));
assertThat(out.toString(), containsString("Usage: kestra namespace files"));
}
}
}

View File

@@ -14,8 +14,8 @@ import java.net.URISyntaxException;
import java.net.URL;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.core.StringContains.containsString;
class NamespaceFilesUpdateCommandTest {
@Test

View File

@@ -7,7 +7,9 @@ import org.junit.jupiter.api.Test;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.core.Is.is;
class KvCommandTest {
@Test
@@ -19,8 +21,8 @@ class KvCommandTest {
String[] args = {};
Integer call = PicocliRunner.call(KvCommand.class, ctx, args);
assertThat(call).isZero();
assertThat(out.toString()).contains("Usage: kestra namespace kv");
assertThat(call, is(0));
assertThat(out.toString(), containsString("Usage: kestra namespace kv"));
}
}
}

View File

@@ -16,8 +16,8 @@ import java.io.IOException;
import java.nio.file.Files;
import java.util.Map;
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
class KvUpdateCommandTest {
@Test
@@ -41,10 +41,10 @@ class KvUpdateCommandTest {
PicocliRunner.call(KvUpdateCommand.class, ctx, args);
KVStoreService kvStoreService = ctx.getBean(KVStoreService.class);
KVStore kvStore = kvStoreService.get(MAIN_TENANT, "io.kestra.cli", null);
KVStore kvStore = kvStoreService.get(null, "io.kestra.cli", null);
assertThat(kvStore.getValue("string").get()).isEqualTo(new KVValue("stringValue"));
assertThat(((InternalKVStore) kvStore).getRawValue("string").get()).isEqualTo("\"stringValue\"");
assertThat(kvStore.getValue("string").get(), is(new KVValue("stringValue")));
assertThat(((InternalKVStore)kvStore).getRawValue("string").get(), is("\"stringValue\""));
}
}
@@ -69,10 +69,10 @@ class KvUpdateCommandTest {
PicocliRunner.call(KvUpdateCommand.class, ctx, args);
KVStoreService kvStoreService = ctx.getBean(KVStoreService.class);
KVStore kvStore = kvStoreService.get(MAIN_TENANT, "io.kestra.cli", null);
KVStore kvStore = kvStoreService.get(null, "io.kestra.cli", null);
assertThat(kvStore.getValue("int").get()).isEqualTo(new KVValue(1));
assertThat(((InternalKVStore) kvStore).getRawValue("int").get()).isEqualTo("1");
assertThat(kvStore.getValue("int").get(), is(new KVValue(1)));
assertThat(((InternalKVStore)kvStore).getRawValue("int").get(), is("1"));
}
}
@@ -99,10 +99,10 @@ class KvUpdateCommandTest {
PicocliRunner.call(KvUpdateCommand.class, ctx, args);
KVStoreService kvStoreService = ctx.getBean(KVStoreService.class);
KVStore kvStore = kvStoreService.get(MAIN_TENANT, "io.kestra.cli", null);
KVStore kvStore = kvStoreService.get(null, "io.kestra.cli", null);
assertThat(kvStore.getValue("intStr").get()).isEqualTo(new KVValue("1"));
assertThat(((InternalKVStore) kvStore).getRawValue("intStr").get()).isEqualTo("\"1\"");
assertThat(kvStore.getValue("intStr").get(), is(new KVValue("1")));
assertThat(((InternalKVStore)kvStore).getRawValue("intStr").get(), is("\"1\""));
}
}
@@ -127,10 +127,10 @@ class KvUpdateCommandTest {
PicocliRunner.call(KvUpdateCommand.class, ctx, args);
KVStoreService kvStoreService = ctx.getBean(KVStoreService.class);
KVStore kvStore = kvStoreService.get(MAIN_TENANT, "io.kestra.cli", null);
KVStore kvStore = kvStoreService.get(null, "io.kestra.cli", null);
assertThat(kvStore.getValue("object").get()).isEqualTo(new KVValue(Map.of("some", "json")));
assertThat(((InternalKVStore) kvStore).getRawValue("object").get()).isEqualTo("{some:\"json\"}");
assertThat(kvStore.getValue("object").get(), is(new KVValue(Map.of("some", "json"))));
assertThat(((InternalKVStore)kvStore).getRawValue("object").get(), is("{some:\"json\"}"));
}
}
@@ -157,10 +157,10 @@ class KvUpdateCommandTest {
PicocliRunner.call(KvUpdateCommand.class, ctx, args);
KVStoreService kvStoreService = ctx.getBean(KVStoreService.class);
KVStore kvStore = kvStoreService.get(MAIN_TENANT, "io.kestra.cli", null);
KVStore kvStore = kvStoreService.get(null, "io.kestra.cli", null);
assertThat(kvStore.getValue("objectStr").get()).isEqualTo(new KVValue("{\"some\":\"json\"}"));
assertThat(((InternalKVStore) kvStore).getRawValue("objectStr").get()).isEqualTo("\"{\\\"some\\\":\\\"json\\\"}\"");
assertThat(kvStore.getValue("objectStr").get(), is(new KVValue("{\"some\":\"json\"}")));
assertThat(((InternalKVStore)kvStore).getRawValue("objectStr").get(), is("\"{\\\"some\\\":\\\"json\\\"}\""));
}
}
@@ -191,10 +191,10 @@ class KvUpdateCommandTest {
PicocliRunner.call(KvUpdateCommand.class, ctx, args);
KVStoreService kvStoreService = ctx.getBean(KVStoreService.class);
KVStore kvStore = kvStoreService.get(MAIN_TENANT, "io.kestra.cli", null);
KVStore kvStore = kvStoreService.get(null, "io.kestra.cli", null);
assertThat(kvStore.getValue("objectFromFile").get()).isEqualTo(new KVValue(Map.of("some", "json", "from", "file")));
assertThat(((InternalKVStore) kvStore).getRawValue("objectFromFile").get()).isEqualTo("{some:\"json\",from:\"file\"}");
assertThat(kvStore.getValue("objectFromFile").get(), is(new KVValue(Map.of("some", "json", "from", "file"))));
assertThat(((InternalKVStore)kvStore).getRawValue("objectFromFile").get(), is("{some:\"json\",from:\"file\"}"));
}
}
}

View File

@@ -8,7 +8,8 @@ import org.junit.jupiter.api.Test;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.StringContains.containsString;
class PluginCommandTest {
@@ -20,35 +21,7 @@ class PluginCommandTest {
try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, Environment.TEST)) {
PicocliRunner.call(PluginCommand.class, ctx);
assertThat(out.toString()).contains("Usage: kestra plugins");
assertThat(out.toString(), containsString("Usage: kestra plugins"));
}
}
// Additional Coverage:
@Test
void shouldListSubcommandsInHelp() throws Exception {
ByteArrayOutputStream out = new ByteArrayOutputStream();
PrintStream originalOut = System.out;
System.setOut(new PrintStream(out));
try {
PluginCommand cmd = new PluginCommand();
cmd.call();
String output = out.toString();
assertThat(output).contains("install");
assertThat(output).contains("uninstall");
assertThat(output).contains("list");
assertThat(output).contains("doc");
assertThat(output).contains("search");
} finally {
System.setOut(originalOut);
}
}
// Passes
@Test
void shouldNotLoadExternalPlugins() {
PluginCommand cmd = new PluginCommand();
assertThat(cmd.loadExternalPlugins()).isFalse();
}
}
}

View File

@@ -17,11 +17,12 @@ import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.*;
class PluginDocCommandTest {
public static final String PLUGIN_TEMPLATE_TEST = "plugin-template-test-0.24.0-SNAPSHOT.jar";
public static final String PLUGIN_TEMPLATE_TEST = "plugin-template-test-0.18.0-SNAPSHOT.jar";
@Test
void run() throws IOException, URISyntaxException {
@@ -43,16 +44,16 @@ class PluginDocCommandTest {
List<Path> files = Files.list(docPath).toList();
assertThat(files.size()).isEqualTo(1);
assertThat(files.getFirst().getFileName().toString()).isEqualTo("plugin-template-test");
assertThat(files.size(), is(1));
assertThat(files.getFirst().getFileName().toString(), is("plugin-template-test"));
var directory = files.getFirst().toFile();
assertThat(directory.isDirectory()).isTrue();
assertThat(directory.listFiles().length).isEqualTo(3);
assertThat(directory.isDirectory(), is(true));
assertThat(directory.listFiles().length, is(3));
var readme = directory.toPath().resolve("index.md");
var readmeContent = new String(Files.readAllBytes(readme));
assertThat(readmeContent).contains("""
assertThat(readmeContent, containsString("""
---
title: Template test
description: "Plugin template for Kestra"
@@ -60,17 +61,18 @@ class PluginDocCommandTest {
---
# Template test
""");
"""));
assertThat(readmeContent).contains("""
assertThat(readmeContent, containsString("""
Plugin template for Kestra
This is a more complex description of the plugin.
This is in markdown and will be inline inside the plugin page.
""");
"""));
assertThat(readmeContent).contains("""
assertThat(readmeContent, containsString(
"""
/> Subgroup title
Subgroup description
@@ -87,20 +89,20 @@ class PluginDocCommandTest {
\s
* [Reporting](./guides/reporting.md)
\s
""");
"""));
// check @PluginProperty from an interface
var task = directory.toPath().resolve("tasks/io.kestra.plugin.templates.ExampleTask.md");
String taskDoc = new String(Files.readAllBytes(task));
assertThat(taskDoc).contains("""
assertThat(taskDoc, containsString("""
### `example`
* **Type:** ==string==
* **Dynamic:** ✔️
* **Required:** ❌
**Example interface**
""");
assertThat(taskDoc).contains("""
"""));
assertThat(taskDoc, containsString("""
### `from`
* **Type:**
* ==string==
@@ -108,12 +110,12 @@ class PluginDocCommandTest {
* [==Example==](#io.kestra.core.models.annotations.example)
* **Dynamic:** ✔️
* **Required:** ✔️
""");
"""));
var authenticationGuide = directory.toPath().resolve("guides/authentication.md");
assertThat(new String(Files.readAllBytes(authenticationGuide))).contains("This is how to authenticate for this plugin:");
assertThat(new String(Files.readAllBytes(authenticationGuide)), containsString("This is how to authenticate for this plugin:"));
var reportingGuide = directory.toPath().resolve("guides/reporting.md");
assertThat(new String(Files.readAllBytes(reportingGuide))).contains("This is the reporting of the plugin:");
assertThat(new String(Files.readAllBytes(reportingGuide)), containsString("This is the reporting of the plugin:"));
}
}
}

View File

@@ -10,7 +10,8 @@ import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.*;
class PluginInstallCommandTest {
@@ -25,8 +26,8 @@ class PluginInstallCommandTest {
List<Path> files = Files.list(pluginsPath).toList();
assertThat(files.size()).isEqualTo(1);
assertThat(files.getFirst().getFileName().toString()).isEqualTo("io_kestra_plugin__plugin-notifications__0_6_0.jar");
assertThat(files.size(), is(1));
assertThat(files.getFirst().getFileName().toString(), is("io_kestra_plugin__plugin-notifications__0_6_0.jar"));
}
}
@@ -41,9 +42,9 @@ class PluginInstallCommandTest {
List<Path> files = Files.list(pluginsPath).toList();
assertThat(files.size()).isEqualTo(1);
assertThat(files.getFirst().getFileName().toString()).startsWith("io_kestra_plugin__plugin-notifications__");
assertThat(files.getFirst().getFileName().toString()).doesNotContain("LATEST");
assertThat(files.size(), is(1));
assertThat(files.getFirst().getFileName().toString(), startsWith("io_kestra_plugin__plugin-notifications__"));
assertThat(files.getFirst().getFileName().toString(), not(containsString("LATEST")));
}
}
@@ -59,8 +60,8 @@ class PluginInstallCommandTest {
List<Path> files = Files.list(pluginsPath).toList();
assertThat(files.size()).isEqualTo(1);
assertThat(files.getFirst().getFileName().toString()).isEqualTo("io_kestra_storage__storage-s3__0_12_1.jar");
assertThat(files.size(), is(1));
assertThat(files.getFirst().getFileName().toString(), is("io_kestra_storage__storage-s3__0_12_1.jar"));
}
}
}

View File

@@ -16,11 +16,12 @@ import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Objects;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.StringContains.containsString;
class PluginListCommandTest {
private static final String PLUGIN_TEMPLATE_TEST = "plugin-template-test-0.24.0-SNAPSHOT.jar";
private static final String PLUGIN_TEMPLATE_TEST = "plugin-template-test-0.18.0-SNAPSHOT.jar";
@Test
void shouldListPluginsInstalledLocally() throws IOException, URISyntaxException {
@@ -40,7 +41,7 @@ class PluginListCommandTest {
String[] args = {"--plugins", pluginsPath.toAbsolutePath().toString()};
PicocliRunner.call(PluginListCommand.class, ctx, args);
assertThat(out.toString()).contains("io.kestra.plugin.templates.Example");
assertThat(out.toString(), containsString("io.kestra.plugin.templates.Example"));
}
}
}

View File

@@ -13,7 +13,8 @@ import java.io.PrintStream;
import java.util.Map;
import static com.github.tomakehurst.wiremock.client.WireMock.*;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.*;
@WireMockTest(httpPort = 28181)
class PluginSearchCommandTest {
@@ -60,9 +61,9 @@ class PluginSearchCommandTest {
PicocliRunner.call(PluginSearchCommand.class, ctx, args);
String output = outputStreamCaptor.toString().trim();
assertThat(output).contains("Found 1 plugins matching 'notifications'");
assertThat(output).contains("plugin-notifications");
assertThat(output).doesNotContain("plugin-scripts");
assertThat(output, containsString("Found 1 plugins matching 'notifications'"));
assertThat(output, containsString("plugin-notifications"));
assertThat(output, not(containsString("plugin-scripts")));
}
}
@@ -96,9 +97,9 @@ class PluginSearchCommandTest {
PicocliRunner.call(PluginSearchCommand.class, ctx, args);
String output = outputStreamCaptor.toString().trim();
assertThat(output).contains("Found 2 plugins");
assertThat(output).contains("plugin-notifications");
assertThat(output).contains("plugin-scripts");
assertThat(output, containsString("Found 2 plugins"));
assertThat(output, containsString("plugin-notifications"));
assertThat(output, containsString("plugin-scripts"));
}
}
}

View File

@@ -11,7 +11,9 @@ import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.net.URL;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.Is.is;
import static org.hamcrest.core.StringContains.containsString;
class ReindexCommandTest {
@Test
@@ -34,7 +36,7 @@ class ReindexCommandTest {
directory.getPath(),
};
PicocliRunner.call(FlowNamespaceUpdateCommand.class, ctx, updateArgs);
assertThat(out.toString()).contains("3 flow(s)");
assertThat(out.toString(), containsString("3 flow(s)"));
// then we reindex them
String[] reindexArgs = {
@@ -42,9 +44,9 @@ class ReindexCommandTest {
"flow",
};
Integer call = PicocliRunner.call(ReindexCommand.class, ctx, reindexArgs);
assertThat(call).isZero();
assertThat(call, is(0));
// in local it reindex 3 flows and in CI 4 for an unknown reason
assertThat(out.toString()).contains("Successfully reindex");
assertThat(out.toString(), containsString("Successfully reindex"));
}
}
}

View File

@@ -7,7 +7,9 @@ import org.junit.jupiter.api.Test;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.core.Is.is;
class DatabaseCommandTest {
@Test
@@ -19,8 +21,8 @@ class DatabaseCommandTest {
String[] args = {};
Integer call = PicocliRunner.call(DatabaseCommand.class, ctx, args);
assertThat(call).isZero();
assertThat(out.toString()).contains("Usage: kestra sys database");
assertThat(call, is(0));
assertThat(out.toString(), containsString("Usage: kestra sys database"));
}
}
}

View File

@@ -8,7 +8,9 @@ import org.junit.jupiter.api.Test;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.core.Is.is;
class StateStoreCommandTest {
@Test
@@ -20,8 +22,8 @@ class StateStoreCommandTest {
String[] args = {};
Integer call = PicocliRunner.call(StateStoreCommand.class, ctx, args);
assertThat(call).isZero();
assertThat(out.toString()).contains("Usage: kestra sys state-store");
assertThat(call, is(0));
assertThat(out.toString(), containsString("Usage: kestra sys state-store"));
}
}
}

View File

@@ -25,7 +25,8 @@ import java.net.URI;
import java.util.List;
import java.util.Map;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.Is.is;
class StateStoreMigrateCommandTest {
@Test
@@ -53,7 +54,10 @@ class StateStoreMigrateCommandTest {
oldStateStoreUri,
new ByteArrayInputStream("my-value".getBytes())
);
assertThat(storage.exists(tenantId, flow.getNamespace(), oldStateStoreUri)).isTrue();
assertThat(
storage.exists(tenantId, flow.getNamespace(), oldStateStoreUri),
is(true)
);
RunContext runContext = ctx.getBean(RunContextFactory.class).of(flow, Map.of("flow", Map.of(
"tenantId", tenantId,
@@ -66,10 +70,13 @@ class StateStoreMigrateCommandTest {
String[] args = {};
Integer call = PicocliRunner.call(StateStoreMigrateCommand.class, ctx, args);
assertThat(new String(stateStore.getState(true, "my-state", "sub-name", "my-taskrun-value").readAllBytes())).isEqualTo("my-value");
assertThat(storage.exists(tenantId, flow.getNamespace(), oldStateStoreUri)).isFalse();
assertThat(new String(stateStore.getState(true, "my-state", "sub-name", "my-taskrun-value").readAllBytes()), is("my-value"));
assertThat(
storage.exists(tenantId, flow.getNamespace(), oldStateStoreUri),
is(false)
);
assertThat(call).isZero();
assertThat(call, is(0));
}
}
}

View File

@@ -15,7 +15,9 @@ import java.net.URL;
import java.util.Map;
import java.util.zip.ZipFile;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.StringContains.containsString;
import static org.hamcrest.core.Is.is;
class TemplateExportCommandTest {
@Test
@@ -40,7 +42,7 @@ class TemplateExportCommandTest {
};
PicocliRunner.call(TemplateNamespaceUpdateCommand.class, ctx, args);
assertThat(out.toString()).contains("3 template(s)");
assertThat(out.toString(), containsString("3 template(s)"));
// then we export them
String[] exportArgs = {
@@ -54,9 +56,9 @@ class TemplateExportCommandTest {
};
PicocliRunner.call(TemplateExportCommand.class, ctx, exportArgs);
File file = new File("/tmp/templates.zip");
assertThat(file.exists()).isTrue();
assertThat(file.exists(), is(true));
ZipFile zipFile = new ZipFile(file);
assertThat(zipFile.stream().count()).isEqualTo(3L);
assertThat(zipFile.stream().count(), is(3L));
file.delete();
}

View File

@@ -11,9 +11,11 @@ import java.io.PrintStream;
import java.net.URL;
import java.util.Map;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.Is.is;
import static org.hamcrest.core.StringContains.containsString;
class TemplateValidateCommandTest {
public class TemplateValidateCommandTest {
@Test
void runLocal() {
URL directory = TemplateValidateCommandTest.class.getClassLoader().getResource("invalidsTemplates/template.yml");
@@ -27,9 +29,9 @@ class TemplateValidateCommandTest {
};
Integer call = PicocliRunner.call(TemplateValidateCommand.class, ctx, args);
assertThat(call).isEqualTo(1);
assertThat(out.toString()).contains("Unable to parse template");
assertThat(out.toString()).contains("must not be empty");
assertThat(call, is(1));
assertThat(out.toString(), containsString("Unable to parse template"));
assertThat(out.toString(), containsString("must not be empty"));
}
}
@@ -53,9 +55,9 @@ class TemplateValidateCommandTest {
};
Integer call = PicocliRunner.call(TemplateValidateCommand.class, ctx, args);
assertThat(call).isEqualTo(1);
assertThat(out.toString()).contains("Unable to parse template");
assertThat(out.toString()).contains("must not be empty");
assertThat(call, is(1));
assertThat(out.toString(), containsString("Unable to parse template"));
assertThat(out.toString(), containsString("must not be empty"));
}
}
}

View File

@@ -7,7 +7,9 @@ import org.junit.jupiter.api.Test;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.core.Is.is;
class TemplateNamespaceCommandTest {
@Test
@@ -19,8 +21,8 @@ class TemplateNamespaceCommandTest {
String[] args = {};
Integer call = PicocliRunner.call(TemplateNamespaceCommand.class, ctx, args);
assertThat(call).isZero();
assertThat(out.toString()).contains("Usage: kestra template namespace");
assertThat(call, is(0));
assertThat(out.toString(), containsString("Usage: kestra template namespace"));
}
}
}

View File

@@ -11,7 +11,8 @@ import java.io.PrintStream;
import java.net.URL;
import java.util.Map;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.StringContains.containsString;
class TemplateNamespaceUpdateCommandTest {
@Test
@@ -36,7 +37,7 @@ class TemplateNamespaceUpdateCommandTest {
};
PicocliRunner.call(TemplateNamespaceUpdateCommand.class, ctx, args);
assertThat(out.toString()).contains("3 template(s)");
assertThat(out.toString(), containsString("3 template(s)"));
}
}
@@ -63,8 +64,8 @@ class TemplateNamespaceUpdateCommandTest {
Integer call = PicocliRunner.call(TemplateNamespaceUpdateCommand.class, ctx, args);
// assertThat(call, is(1));
assertThat(out.toString()).contains("Unable to parse templates");
assertThat(out.toString()).contains("must not be empty");
assertThat(out.toString(), containsString("Unable to parse templates"));
assertThat(out.toString(), containsString("must not be empty"));
}
}
@@ -92,7 +93,7 @@ class TemplateNamespaceUpdateCommandTest {
};
PicocliRunner.call(TemplateNamespaceUpdateCommand.class, ctx, args);
assertThat(out.toString()).contains("3 template(s)");
assertThat(out.toString(), containsString("3 template(s)"));
String[] newArgs = {
"--server",
@@ -106,7 +107,7 @@ class TemplateNamespaceUpdateCommandTest {
};
PicocliRunner.call(TemplateNamespaceUpdateCommand.class, ctx, newArgs);
assertThat(out.toString()).contains("1 template(s)");
assertThat(out.toString(), containsString("1 template(s)"));
}
}
}

View File

@@ -10,7 +10,8 @@ import java.io.IOException;
import java.nio.file.Files;
import java.util.Map;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
class DeleteConfigurationApplicationListenersTest {
@@ -27,7 +28,7 @@ class DeleteConfigurationApplicationListenersTest {
);
try (ApplicationContext ctx = ApplicationContext.run(mapPropertySource, Environment.CLI, Environment.TEST)) {
assertThat(tempFile.exists()).isFalse();
assertThat(tempFile.exists(), is(false));
}
}
}

View File

@@ -18,9 +18,9 @@ import java.util.concurrent.Executors;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
import static io.kestra.core.utils.Rethrow.throwRunnable;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.*;
@MicronautTest(environments = {"test", "file-watch"}, transactional = false)
class FileChangedEventListenerTest {
@@ -44,7 +44,6 @@ class FileChangedEventListenerTest {
@AfterAll
static void tearDown() throws IOException {
if (Files.exists(Path.of(FILE_WATCH))) {
FileUtils.cleanDirectory(Path.of(FILE_WATCH).toFile());
FileUtils.deleteDirectory(Path.of(FILE_WATCH).toFile());
}
}
@@ -59,7 +58,7 @@ class FileChangedEventListenerTest {
@RetryingTest(5) // Flaky on CI but always pass locally
void test() throws IOException, TimeoutException {
// remove the flow if it already exists
flowRepository.findByIdWithSource(MAIN_TENANT, "io.kestra.tests.watch", "myflow").ifPresent(flow -> flowRepository.delete(flow));
flowRepository.findByIdWithSource(null, "io.kestra.tests.watch", "myflow").ifPresent(flow -> flowRepository.delete(flow));
// create a basic flow
String flow = """
@@ -73,19 +72,19 @@ class FileChangedEventListenerTest {
""";
Files.write(Path.of(FILE_WATCH + "/myflow.yaml"), flow.getBytes());
Await.until(
() -> flowRepository.findById(MAIN_TENANT, "io.kestra.tests.watch", "myflow").isPresent(),
() -> flowRepository.findById(null, "io.kestra.tests.watch", "myflow").isPresent(),
Duration.ofMillis(100),
Duration.ofSeconds(10)
);
Flow myflow = flowRepository.findById(MAIN_TENANT, "io.kestra.tests.watch", "myflow").orElseThrow();
assertThat(myflow.getTasks()).hasSize(1);
assertThat(myflow.getTasks().getFirst().getId()).isEqualTo("hello");
assertThat(myflow.getTasks().getFirst().getType()).isEqualTo("io.kestra.plugin.core.log.Log");
Flow myflow = flowRepository.findById(null, "io.kestra.tests.watch", "myflow").orElseThrow();
assertThat(myflow.getTasks(), hasSize(1));
assertThat(myflow.getTasks().getFirst().getId(), is("hello"));
assertThat(myflow.getTasks().getFirst().getType(), is("io.kestra.plugin.core.log.Log"));
// delete the flow
Files.delete(Path.of(FILE_WATCH + "/myflow.yaml"));
Await.until(
() -> flowRepository.findById(MAIN_TENANT, "io.kestra.tests.watch", "myflow").isEmpty(),
() -> flowRepository.findById(null, "io.kestra.tests.watch", "myflow").isEmpty(),
Duration.ofMillis(100),
Duration.ofSeconds(10)
);
@@ -94,7 +93,7 @@ class FileChangedEventListenerTest {
@RetryingTest(5) // Flaky on CI but always pass locally
void testWithPluginDefault() throws IOException, TimeoutException {
// remove the flow if it already exists
flowRepository.findByIdWithSource(MAIN_TENANT, "io.kestra.tests.watch", "pluginDefault").ifPresent(flow -> flowRepository.delete(flow));
flowRepository.findByIdWithSource(null, "io.kestra.tests.watch", "pluginDefault").ifPresent(flow -> flowRepository.delete(flow));
// create a flow with plugin default
String pluginDefault = """
@@ -112,19 +111,19 @@ class FileChangedEventListenerTest {
""";
Files.write(Path.of(FILE_WATCH + "/plugin-default.yaml"), pluginDefault.getBytes());
Await.until(
() -> flowRepository.findById(MAIN_TENANT, "io.kestra.tests.watch", "pluginDefault").isPresent(),
() -> flowRepository.findById(null, "io.kestra.tests.watch", "pluginDefault").isPresent(),
Duration.ofMillis(100),
Duration.ofSeconds(10)
);
Flow pluginDefaultFlow = flowRepository.findById(MAIN_TENANT, "io.kestra.tests.watch", "pluginDefault").orElseThrow();
assertThat(pluginDefaultFlow.getTasks()).hasSize(1);
assertThat(pluginDefaultFlow.getTasks().getFirst().getId()).isEqualTo("helloWithDefault");
assertThat(pluginDefaultFlow.getTasks().getFirst().getType()).isEqualTo("io.kestra.plugin.core.log.Log");
Flow pluginDefaultFlow = flowRepository.findById(null, "io.kestra.tests.watch", "pluginDefault").orElseThrow();
assertThat(pluginDefaultFlow.getTasks(), hasSize(1));
assertThat(pluginDefaultFlow.getTasks().getFirst().getId(), is("helloWithDefault"));
assertThat(pluginDefaultFlow.getTasks().getFirst().getType(), is("io.kestra.plugin.core.log.Log"));
// delete both files
Files.delete(Path.of(FILE_WATCH + "/plugin-default.yaml"));
Await.until(
() -> flowRepository.findById(MAIN_TENANT, "io.kestra.tests.watch", "pluginDefault").isEmpty(),
() -> flowRepository.findById(null, "io.kestra.tests.watch", "pluginDefault").isEmpty(),
Duration.ofMillis(100),
Duration.ofSeconds(10)
);

View File

@@ -2,7 +2,6 @@ micronaut:
io:
watch:
enabled: true
tenantId: main
paths:
- build/file-watch

View File

@@ -1,7 +0,0 @@
id: date
namespace: io.kestra.cli
tasks:
- id: date
type: io.kestra.plugin.core.debug.Return
format: "{{taskrun.startDate}}"

View File

@@ -36,7 +36,6 @@ dependencies {
implementation group: 'de.focus-shift', name: 'jollyday-jaxb'
implementation 'nl.basjes.gitignore:gitignore-reader'
implementation group: 'dev.failsafe', name: 'failsafe'
implementation 'com.github.ben-manes.caffeine:caffeine'
api 'org.apache.httpcomponents.client5:httpclient5'
// plugins
@@ -74,7 +73,7 @@ dependencies {
testImplementation "io.micronaut:micronaut-http-server-netty"
testImplementation "io.micronaut:micronaut-management"
testImplementation "org.testcontainers:testcontainers:1.21.1"
testImplementation "org.testcontainers:junit-jupiter:1.21.1"
testImplementation "org.bouncycastle:bcpkix-jdk18on:1.81"
testImplementation "org.testcontainers:testcontainers:1.20.6"
testImplementation "org.testcontainers:junit-jupiter:1.20.6"
testImplementation "org.bouncycastle:bcpkix-jdk18on:1.80"
}

View File

@@ -1,92 +0,0 @@
package io.kestra.core.cache;
import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Policy;
import com.github.benmanes.caffeine.cache.stats.CacheStats;
import org.jspecify.annotations.NonNull;
import org.jspecify.annotations.Nullable;
import java.util.Collections;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.function.Function;
/**
* A No-Op implementation of a Caffeine Cache.
* Useful to disable caching but still use a cache to avoid if/else chains
*/
public class NoopCache<K, V> implements Cache<K, V> {
private static final ConcurrentMap<?, ?> EMPTY_MAP = new ConcurrentHashMap<>(0);
@Override
public @Nullable V getIfPresent(K key) {
return null;
}
@Override
public V get(K key, Function<? super K, ? extends V> mappingFunction) {
return mappingFunction.apply(key);
}
@Override
public Map<K, @NonNull V> getAllPresent(Iterable<? extends K> keys) {
return Collections.emptyMap();
}
@Override
public Map<K, @NonNull V> getAll(Iterable<? extends K> keys, Function<? super Set<? extends K>, ? extends Map<? extends K, ? extends @NonNull V>> mappingFunction) {
return Collections.emptyMap();
}
@Override
public void put(K key, @NonNull V value) {
// just do nothing
}
@Override
public void putAll(Map<? extends K, ? extends @NonNull V> map) {
// just do nothing
}
@Override
public void invalidate(K key) {
// just do nothing
}
@Override
public void invalidateAll(Iterable<? extends K> keys) {
// just do nothing
}
@Override
public void invalidateAll() {
// just do nothing
}
@Override
public long estimatedSize() {
return 0;
}
@Override
public CacheStats stats() {
return CacheStats.empty();
}
@Override
public ConcurrentMap<K, @NonNull V> asMap() {
return (ConcurrentMap<K, V>) EMPTY_MAP;
}
@Override
public void cleanUp() {
// just do nothing
}
@Override
public Policy<K, @NonNull V> policy() {
throw new UnsupportedOperationException();
}
}

View File

@@ -2,7 +2,6 @@ package io.kestra.core.contexts;
import io.kestra.core.exceptions.KestraRuntimeException;
import io.kestra.core.plugins.DefaultPluginRegistry;
import io.kestra.core.plugins.PluginCatalogService;
import io.kestra.core.plugins.PluginRegistry;
import io.kestra.core.storages.StorageInterface;
import io.kestra.core.storages.StorageInterfaceFactory;
@@ -14,8 +13,6 @@ import io.micronaut.context.annotation.Value;
import io.micronaut.core.annotation.Nullable;
import io.micronaut.core.convert.format.MapFormat;
import io.micronaut.core.naming.conventions.StringConvention;
import io.micronaut.http.client.HttpClient;
import io.micronaut.http.client.annotation.Client;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import jakarta.validation.Validator;
@@ -38,11 +35,6 @@ public class KestraBeansFactory {
@Value("${kestra.storage.type}")
protected Optional<String> storageType;
@Singleton
public PluginCatalogService pluginCatalogService(@Client("api") HttpClient httpClient) {
return new PluginCatalogService(httpClient, false, true);
}
@Requires(missingBeans = PluginRegistry.class)
@Singleton
public PluginRegistry pluginRegistry() {

View File

@@ -2,13 +2,11 @@ package io.kestra.core.contexts;
import io.kestra.core.models.ServerType;
import io.kestra.core.plugins.PluginRegistry;
import io.kestra.core.storages.StorageInterface;
import io.kestra.core.utils.VersionProvider;
import io.micronaut.context.ApplicationContext;
import io.micronaut.context.annotation.Context;
import io.micronaut.context.annotation.Requires;
import io.micronaut.context.env.Environment;
import io.micronaut.context.env.PropertySource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -82,8 +80,6 @@ public abstract class KestraContext {
*/
public abstract PluginRegistry getPluginRegistry();
public abstract StorageInterface getStorageInterface();
/**
* Shutdowns the Kestra application.
*/
@@ -150,7 +146,7 @@ public abstract class KestraContext {
.ifPresent(val -> configs.put(KESTRA_WORKER_GROUP_KEY, val));
if (!configs.isEmpty()) {
environment.addPropertySource(PropertySource.of("kestra-runtime", configs));
environment.addPropertySource("kestra-runtime", configs);
}
}
@@ -176,11 +172,5 @@ public abstract class KestraContext {
// Lazy init of the PluginRegistry.
return this.applicationContext.getBean(PluginRegistry.class);
}
@Override
public StorageInterface getStorageInterface() {
// Lazy init of the PluginRegistry.
return this.applicationContext.getBean(StorageInterface.class);
}
}
}

View File

@@ -1,6 +1,9 @@
package io.kestra.core.docs;
import com.google.common.base.CaseFormat;
import io.kestra.core.models.Plugin;
import io.kestra.core.models.tasks.retrys.AbstractRetry;
import io.kestra.core.models.tasks.runners.TaskRunner;
import lombok.AllArgsConstructor;
import lombok.EqualsAndHashCode;
import lombok.Getter;
@@ -56,7 +59,7 @@ public abstract class AbstractClassDocumentation<T> {
.filter(entry -> (baseCls == null) || !entry.getKey().startsWith("io.kestra.core.models.flows.input."))
.map(entry -> {
Map<String, Object> value = (Map<String, Object>) entry.getValue();
value.put("properties", flatten(properties(value), required(value), null));
value.put("properties", flatten(properties(value), required(value), isTypeToKeep(entry.getKey())));
return new AbstractMap.SimpleEntry<>(
entry.getKey(),
@@ -89,13 +92,20 @@ public abstract class AbstractClassDocumentation<T> {
}
if (this.propertiesSchema.containsKey("properties")) {
this.inputs = flattenWithoutType(properties(this.propertiesSchema), required(this.propertiesSchema));
this.inputs = flatten(properties(this.propertiesSchema), required(this.propertiesSchema));
}
}
protected static Map<String, Object> flattenWithoutType(Map<String, Object> map, List<String> required) {
protected static Map<String, Object> flatten(Map<String, Object> map, List<String> required) {
map.remove("type");
return flatten(map, required, null);
return flatten(map, required, (String) null);
}
/**
 * Flattens a JSON-schema properties map, optionally preserving the {@code type} entry.
 *
 * @param map      the schema map to flatten; mutated in place ({@code type} may be removed).
 * @param required the names of required properties.
 * @param keepType whether the {@code type} property must be kept; {@code null} is
 *                 treated as {@code false}.
 * @return the flattened properties map.
 */
protected static Map<String, Object> flatten(Map<String, Object> map, List<String> required, Boolean keepType) {
    // Boolean.TRUE.equals() avoids an NPE from auto-unboxing a null Boolean.
    if (!Boolean.TRUE.equals(keepType)) {
        map.remove("type");
    }
    return flatten(map, required, (String) null);
}
@SuppressWarnings("unchecked")
@@ -131,6 +141,23 @@ public abstract class AbstractClassDocumentation<T> {
return result;
}
// Some types (e.g. retries and task runners) expose a user-facing `type` property
// that does not identify the task itself, so we want to keep it in the doc.
private Boolean isTypeToKeep(String key) {
    try {
        // Resolve the class once instead of calling Class.forName() twice.
        Class<?> cls = Class.forName(key);
        return AbstractRetry.class.isAssignableFrom(cls) || TaskRunner.class.isAssignableFrom(cls);
    } catch (ClassNotFoundException e) {
        // Not every schema key is a class name; log at debug and treat as "do not keep".
        log.debug(e.getMessage(), e);
        return false;
    }
}
protected static String flattenKey(String current, String parent) {
return (parent != null ? parent + "." : "") + current;
}

View File

@@ -1,14 +1,9 @@
package io.kestra.core.docs;
import io.kestra.core.plugins.PluginClassAndMetadata;
import lombok.AllArgsConstructor;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.ToString;
import lombok.*;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.*;
@Getter
@EqualsAndHashCode
@@ -55,7 +50,7 @@ public class ClassPluginDocumentation<T> extends AbstractClassDocumentation<T> {
}
if (this.outputsSchema.containsKey("properties")) {
this.outputs = flattenWithoutType(properties(this.outputsSchema), required(this.outputsSchema));
this.outputs = flatten(properties(this.outputsSchema), required(this.outputsSchema));
}
// metrics

View File

@@ -7,7 +7,6 @@ import io.kestra.core.models.tasks.logs.LogExporter;
import io.kestra.core.models.tasks.runners.TaskRunner;
import io.kestra.core.models.tasks.Task;
import io.kestra.core.models.triggers.AbstractTrigger;
import io.kestra.core.plugins.AdditionalPlugin;
import io.kestra.core.plugins.PluginClassAndMetadata;
import io.kestra.core.plugins.RegisteredPlugin;
import io.kestra.core.runners.pebble.Extension;
@@ -76,7 +75,6 @@ public class DocumentationGenerator {
//noinspection unchecked
result.addAll(this.generate(registeredPlugin, registeredPlugin.getTaskRunners(), (Class) TaskRunner.class, "task-runners"));
result.addAll(this.generate(registeredPlugin, registeredPlugin.getLogExporters(), (Class) LogExporter.class, "log-exporters"));
result.addAll(this.generate(registeredPlugin, registeredPlugin.getAdditionalPlugins(), AdditionalPlugin.class, "additional-plugins"));
result.addAll(guides(registeredPlugin));

View File

@@ -2,11 +2,8 @@ package io.kestra.core.docs;
import com.fasterxml.classmate.ResolvedType;
import com.fasterxml.classmate.members.HierarchicType;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.fasterxml.jackson.databind.node.TextNode;
@@ -24,10 +21,8 @@ import io.kestra.core.models.annotations.PluginProperty;
import io.kestra.core.models.conditions.Condition;
import io.kestra.core.models.conditions.ScheduleCondition;
import io.kestra.core.models.dashboards.DataFilter;
import io.kestra.core.models.dashboards.DataFilterKPI;
import io.kestra.core.models.dashboards.charts.Chart;
import io.kestra.core.models.dashboards.charts.DataChart;
import io.kestra.core.models.dashboards.charts.DataChartKPI;
import io.kestra.core.models.property.Data;
import io.kestra.core.models.property.Property;
import io.kestra.core.models.tasks.Output;
@@ -36,7 +31,6 @@ import io.kestra.core.models.tasks.common.EncryptedString;
import io.kestra.core.models.tasks.logs.LogExporter;
import io.kestra.core.models.tasks.runners.TaskRunner;
import io.kestra.core.models.triggers.AbstractTrigger;
import io.kestra.core.plugins.AdditionalPlugin;
import io.kestra.core.plugins.PluginRegistry;
import io.kestra.core.plugins.RegisteredPlugin;
import io.kestra.core.serializers.JacksonMapper;
@@ -53,18 +47,9 @@ import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
import static io.kestra.core.serializers.JacksonMapper.MAP_TYPE_REFERENCE;
@Singleton
public class JsonSchemaGenerator {
private static final List<Class<?>> TYPES_RESOLVED_AS_STRING = List.of(Duration.class, LocalTime.class, LocalDate.class, LocalDateTime.class, ZonedDateTime.class, OffsetDateTime.class, OffsetTime.class);
private static final List<Class<?>> SUBTYPE_RESOLUTION_EXCLUSION_FOR_PLUGIN_SCHEMA = List.of(Task.class, AbstractTrigger.class);
private static final ObjectMapper MAPPER = JacksonMapper.ofJson().copy()
.configure(SerializationFeature.WRITE_DURATIONS_AS_TIMESTAMPS, false);
private static final ObjectMapper YAML_MAPPER = JacksonMapper.ofYaml().copy()
.configure(SerializationFeature.WRITE_DURATIONS_AS_TIMESTAMPS, false);
private final PluginRegistry pluginRegistry;
@@ -107,7 +92,7 @@ public class JsonSchemaGenerator {
pullDocumentationAndDefaultFromAnyOf(objectNode);
removeRequiredOnPropsWithDefaults(objectNode);
return MAPPER.convertValue(objectNode, MAP_TYPE_REFERENCE);
return JacksonMapper.toMap(objectNode);
} catch (IllegalArgumentException e) {
throw new IllegalArgumentException("Unable to generate jsonschema for '" + cls.getName() + "'", e);
}
@@ -132,15 +117,6 @@ public class JsonSchemaGenerator {
}
}
});
// do the same for all definitions
if (objectNode.get("definitions") instanceof ObjectNode definitions) {
definitions.forEach(jsonNode -> {
if (jsonNode instanceof ObjectNode definition) {
removeRequiredOnPropsWithDefaults(definition);
}
});
}
}
// This hack exists because, for Property, we generate an anyOf for properties that are not strings.
@@ -200,7 +176,7 @@ public class JsonSchemaGenerator {
try {
sb.append("Default value is : `")
.append(YAML_MAPPER.writeValueAsString(collectedTypeAttributes.get("default")).trim())
.append(JacksonMapper.ofYaml().writeValueAsString(collectedTypeAttributes.get("default")).trim())
.append("`");
} catch (JsonProcessingException ignored) {
@@ -240,7 +216,6 @@ public class JsonSchemaGenerator {
}
protected void build(SchemaGeneratorConfigBuilder builder, boolean draft7) {
// builder.withObjectMapper(builder.getObjectMapper().configure(SerializationFeature.WRITE_DURATIONS_AS_TIMESTAMPS, false));
builder
.with(new JakartaValidationModule(
JakartaValidationOption.NOT_NULLABLE_METHOD_IS_REQUIRED,
@@ -251,12 +226,15 @@ public class JsonSchemaGenerator {
.with(Option.DEFINITIONS_FOR_ALL_OBJECTS)
.with(Option.DEFINITION_FOR_MAIN_SCHEMA)
.with(Option.PLAIN_DEFINITION_KEYS)
.with(Option.ALLOF_CLEANUP_AT_THE_END);;
.with(Option.ALLOF_CLEANUP_AT_THE_END);
if (!draft7) {
builder.with(new JacksonModule(JacksonOption.IGNORE_TYPE_INFO_TRANSFORM));
builder
.with(new JacksonModule(JacksonOption.IGNORE_TYPE_INFO_TRANSFORM))
.with(Option.MAP_VALUES_AS_ADDITIONAL_PROPERTIES);
} else {
builder.with(new JacksonModule());
builder
.with(new JacksonModule());
}
// default value
@@ -356,9 +334,6 @@ public class JsonSchemaGenerator {
if (pluginPropertyAnnotation.internalStorageURI()) {
memberAttributes.put("$internalStorageURI", true);
}
if (!pluginPropertyAnnotation.group().isEmpty()) {
memberAttributes.put("$group", pluginPropertyAnnotation.group());
}
}
Schema schema = member.getAnnotationConsideringFieldAndGetter(Schema.class);
@@ -450,8 +425,8 @@ public class JsonSchemaGenerator {
return Object.class;
});
// Subtype resolver for all plugins
if (builder.build().getSchemaVersion() != SchemaVersion.DRAFT_2019_09) {
// Subtype resolver for all plugins
builder.forTypesInGeneral()
.withSubtypeResolver((declaredType, context) -> {
TypeContext typeContext = context.getTypeContext();
@@ -524,87 +499,21 @@ public class JsonSchemaGenerator {
collectedTypeAttributes.remove("$examples");
}
});
} else {
builder.forTypesInGeneral()
.withSubtypeResolver((declaredType, context) -> {
TypeContext typeContext = context.getTypeContext();
if (SUBTYPE_RESOLUTION_EXCLUSION_FOR_PLUGIN_SCHEMA.contains(declaredType.getErasedType())) {
return null;
// Ensure that `type` is defined as a constant in JSON Schema.
// The `const` property is used by editors for auto-completion based on that schema.
builder.forTypesInGeneral().withTypeAttributeOverride((collectedTypeAttributes, scope, context) -> {
final Class<?> pluginType = scope.getType().getErasedType();
if (pluginType.getAnnotation(Plugin.class) != null) {
ObjectNode properties = (ObjectNode) collectedTypeAttributes.get("properties");
if (properties != null) {
properties.set("type", context.getGeneratorConfig().createObjectNode()
.put("const", pluginType.getName())
);
}
return this.subtypeResolver(declaredType, typeContext);
});
}
// Ensure that `type` is defined as a constant in JSON Schema.
// The `const` property is used by editors for auto-completion based on that schema.
builder.forTypesInGeneral().withTypeAttributeOverride((collectedTypeAttributes, scope, context) -> {
final Class<?> pluginType = scope.getType().getErasedType();
if (pluginType.getAnnotation(Plugin.class) != null) {
ObjectNode properties = (ObjectNode) collectedTypeAttributes.get("properties");
if (properties != null) {
properties.set("type", context.getGeneratorConfig().createObjectNode()
.put("const", pluginType.getName())
);
}
}
});
typeDefiningPropertiesToConst(builder);
}
/**
 * Properties that select an implementation among several candidates (via JsonTypeInfo.property) are plain Strings with a default value. We rewrite them as a "const": "defaultValue" instead.
*/
private void typeDefiningPropertiesToConst(SchemaGeneratorConfigBuilder builder) {
builder.forTypesInGeneral().withTypeAttributeOverride((collectedTypeAttributes, scope, context) -> {
final Class<?> targetType = scope.getType().getErasedType();
JsonTypeInfo jsonTypeInfo = Optional.ofNullable(targetType.getSuperclass()).map(c -> c.getAnnotation(JsonTypeInfo.class)).orElse(null);
if (jsonTypeInfo == null) {
return;
}
String property = jsonTypeInfo.property();
if (property == null) {
return;
}
ObjectNode properties = (ObjectNode) collectedTypeAttributes.get("properties");
if (properties == null) {
return;
}
String defaultValue = Optional.ofNullable(properties.get(property))
.flatMap(p -> {
Optional<String> defaultOpt = p.optional("default").map(JsonNode::asText);
if (defaultOpt.isPresent()) {
return defaultOpt;
}
return p.optional("allOf").flatMap(node -> {
if (node.isArray()) {
Iterable<JsonNode> iterable = node::values;
return StreamSupport.stream(
iterable.spliterator(),
false
).filter(subNode -> subNode.has("default"))
.findFirst()
.map(subNode -> subNode.get("default").asText());
}
return Optional.empty();
});
})
.orElse(null);
if (defaultValue == null) {
return;
}
properties.set(property, context.getGeneratorConfig().createObjectNode()
.put("const", defaultValue)
);
});
});
}
}
private boolean isAssignableFromResolvedAsString(Class<?> declaredType) {
@@ -660,16 +569,6 @@ public class JsonSchemaGenerator {
.filter(Predicate.not(io.kestra.core.models.Plugin::isInternal))
.map(typeContext::resolve)
.toList();
} else if (AdditionalPlugin.class.isAssignableFrom(declaredType.getErasedType())) { // base type for addition plugin is not AdditionalPlugin but a subtype of AdditionalPlugin.
return getRegisteredPlugins()
.stream()
.flatMap(registeredPlugin -> registeredPlugin.getAdditionalPlugins().stream())
// for additional plugins, we have one subtype by type of additional plugins (for ex: embedding store for Langchain4J), so we need to filter on the correct subtype
.filter(cls -> declaredType.getErasedType().isAssignableFrom(cls))
.filter(cls -> cls != declaredType.getErasedType())
.filter(Predicate.not(io.kestra.core.models.Plugin::isInternal))
.map(typeContext::resolve)
.toList();
} else if (declaredType.getErasedType() == Chart.class) {
return getRegisteredPlugins()
.stream()
@@ -685,25 +584,10 @@ public class JsonSchemaGenerator {
TypeVariable<? extends Class<? extends Chart<?>>> dataFilterType = clz.getTypeParameters()[1];
ParameterizedType chartAwareColumnDescriptor = ((ParameterizedType) ((WildcardType) ((ParameterizedType) dataFilterType.getBounds()[0]).getActualTypeArguments()[1]).getUpperBounds()[0]);
dataFilters.forEach(dataFilter -> {
Type fieldsEnum = ((ParameterizedType) dataFilter.getGenericSuperclass()).getActualTypeArguments()[0];
consumer.accept(typeContext.resolve(clz, fieldsEnum, typeContext.resolve(dataFilter, typeContext.resolve(chartAwareColumnDescriptor, fieldsEnum))));
});
} else if (DataChartKPI.class.isAssignableFrom(clz)) {
List<Class<? extends DataFilterKPI<?, ?>>> dataFilterKPIs = getRegisteredPlugins()
.stream()
.flatMap(registeredPlugin -> registeredPlugin.getDataFiltersKPI().stream())
.filter(Predicate.not(io.kestra.core.models.Plugin::isInternal))
.toList();
TypeVariable<? extends Class<? extends Chart<?>>> dataFilterType = clz.getTypeParameters()[1];
ParameterizedType chartAwareColumnDescriptor = ((ParameterizedType) ((WildcardType) ((ParameterizedType) dataFilterType.getBounds()[0]).getActualTypeArguments()[1]).getUpperBounds()[0]);
dataFilterKPIs.forEach(dataFilterKPI -> {
Type fieldsEnum = ((ParameterizedType) dataFilterKPI.getGenericSuperclass()).getActualTypeArguments()[0];
consumer.accept(typeContext.resolve(clz, fieldsEnum, typeContext.resolve(dataFilterKPI, typeContext.resolve(chartAwareColumnDescriptor, fieldsEnum))));
});
} else {
consumer.accept(typeContext.resolve(clz));
}
@@ -747,13 +631,10 @@ public class JsonSchemaGenerator {
this.build(builder, false);
// we don't return base properties unless specified with @PluginProperty and hidden is false
// we don't return base properties unless specified with @PluginProperty
builder
.forFields()
.withIgnoreCheck(fieldScope -> base != null &&
(fieldScope.getAnnotation(PluginProperty.class) == null || fieldScope.getAnnotation(PluginProperty.class).hidden()) &&
fieldScope.getDeclaringType().getTypeName().equals(base.getName())
);
.withIgnoreCheck(fieldScope -> base != null && fieldScope.getAnnotation(PluginProperty.class) == null && fieldScope.getDeclaringType().getTypeName().equals(base.getName()));
SchemaGeneratorConfig schemaGeneratorConfig = builder.build();
@@ -764,7 +645,7 @@ public class JsonSchemaGenerator {
pullDocumentationAndDefaultFromAnyOf(objectNode);
removeRequiredOnPropsWithDefaults(objectNode);
return MAPPER.convertValue(extractMainRef(objectNode), MAP_TYPE_REFERENCE);
return JacksonMapper.toMap(extractMainRef(objectNode));
} catch (IllegalArgumentException e) {
throw new IllegalArgumentException("Unable to generate jsonschema for '" + cls.getName() + "'", e);
}

View File

@@ -37,7 +37,6 @@ public class Plugin {
private List<String> charts;
private List<String> dataFilters;
private List<String> logExporters;
private List<String> additionalPlugins;
private List<PluginSubGroup.PluginCategory> categories;
private String subGroup;
@@ -90,18 +89,17 @@ public class Plugin {
plugin.subGroup = subgroup;
Predicate<Class<?>> packagePredicate = c -> subgroup == null || c.getPackageName().equals(subgroup);
plugin.tasks = filterAndGetClassName(registeredPlugin.getTasks(), includeDeprecated, packagePredicate);
plugin.triggers = filterAndGetClassName(registeredPlugin.getTriggers(), includeDeprecated, packagePredicate);
plugin.conditions = filterAndGetClassName(registeredPlugin.getConditions(), includeDeprecated, packagePredicate);
plugin.storages = filterAndGetClassName(registeredPlugin.getStorages(), includeDeprecated, packagePredicate);
plugin.secrets = filterAndGetClassName(registeredPlugin.getSecrets(), includeDeprecated, packagePredicate);
plugin.taskRunners = filterAndGetClassName(registeredPlugin.getTaskRunners(), includeDeprecated, packagePredicate);
plugin.apps = filterAndGetClassName(registeredPlugin.getApps(), includeDeprecated, packagePredicate);
plugin.appBlocks = filterAndGetClassName(registeredPlugin.getAppBlocks(), includeDeprecated, packagePredicate);
plugin.charts = filterAndGetClassName(registeredPlugin.getCharts(), includeDeprecated, packagePredicate);
plugin.dataFilters = filterAndGetClassName(registeredPlugin.getDataFilters(), includeDeprecated, packagePredicate);
plugin.logExporters = filterAndGetClassName(registeredPlugin.getLogExporters(), includeDeprecated, packagePredicate);
plugin.additionalPlugins = filterAndGetClassName(registeredPlugin.getAdditionalPlugins(), includeDeprecated, packagePredicate);
plugin.tasks = filterAndGetClassName(registeredPlugin.getTasks(), includeDeprecated, packagePredicate).stream().toList();
plugin.triggers = filterAndGetClassName(registeredPlugin.getTriggers(), includeDeprecated, packagePredicate).stream().toList();
plugin.conditions = filterAndGetClassName(registeredPlugin.getConditions(), includeDeprecated, packagePredicate).stream().toList();
plugin.storages = filterAndGetClassName(registeredPlugin.getStorages(), includeDeprecated, packagePredicate).stream().toList();
plugin.secrets = filterAndGetClassName(registeredPlugin.getSecrets(), includeDeprecated, packagePredicate).stream().toList();
plugin.taskRunners = filterAndGetClassName(registeredPlugin.getTaskRunners(), includeDeprecated, packagePredicate).stream().toList();
plugin.apps = filterAndGetClassName(registeredPlugin.getApps(), includeDeprecated, packagePredicate).stream().toList();
plugin.appBlocks = filterAndGetClassName(registeredPlugin.getAppBlocks(), includeDeprecated, packagePredicate).stream().toList();
plugin.charts = filterAndGetClassName(registeredPlugin.getCharts(), includeDeprecated, packagePredicate).stream().toList();
plugin.dataFilters = filterAndGetClassName(registeredPlugin.getDataFilters(), includeDeprecated, packagePredicate).stream().toList();
plugin.logExporters = filterAndGetClassName(registeredPlugin.getLogExporters(), includeDeprecated, packagePredicate).stream().toList();
return plugin;
}

View File

@@ -9,9 +9,6 @@ import java.util.Map;
@NoArgsConstructor
@AllArgsConstructor
@Data
@io.swagger.v3.oas.annotations.media.Schema(
name = "PluginSchema"
)
public class Schema {
private Map<String, Object> properties;
private Map<String, Object> outputs;

View File

@@ -11,7 +11,6 @@ public enum SchemaType {
TRIGGER,
PLUGINDEFAULT,
APPS,
TESTSUITES,
DASHBOARD;
@JsonCreator

View File

@@ -1,30 +0,0 @@
package io.kestra.core.exceptions;
/**
* General exception that can be thrown when a Kestra resource or entity conflicts with an existing one.
* <p>
* Typically used in REST API contexts to signal situations such as:
* attempting to create a resource that already exists, or updating a resource
* in a way that causes a conflict.
* <p>
* When propagated in the context of a REST API call, this exception should
* result in an HTTP 409 Conflict response.
*/
public class ConflictException extends KestraRuntimeException {

    // Declared for consistency with the sibling exceptions in this package
    // (e.g. KestraException, InvalidQueryFiltersException) and to keep the
    // serialized form stable.
    @java.io.Serial
    private static final long serialVersionUID = 1L;

    /**
     * Creates a new {@link ConflictException} instance with no detail message.
     */
    public ConflictException() {
        super();
    }

    /**
     * Creates a new {@link ConflictException} instance.
     *
     * @param message the error message.
     */
    public ConflictException(final String message) {
        super(message);
    }
}

View File

@@ -1,24 +0,0 @@
package io.kestra.core.exceptions;
import java.io.Serial;
/**
* Exception class for all problems encountered when processing (parsing, injecting defaults, validating) a flow.
*/
public class FlowProcessingException extends KestraException {
    @Serial
    private static final long serialVersionUID = 1L;

    /**
     * Creates a new {@link FlowProcessingException} with a detail message.
     *
     * @param message the error message.
     */
    public FlowProcessingException(String message) {
        super(message);
    }

    /**
     * Creates a new {@link FlowProcessingException} with a detail message and cause.
     *
     * @param message the error message.
     * @param cause   the underlying cause.
     */
    public FlowProcessingException(String message, Throwable cause) {
        super(message, cause);
    }

    /**
     * Creates a new {@link FlowProcessingException} wrapping a cause.
     *
     * @param cause the underlying cause.
     */
    public FlowProcessingException(Throwable cause) {
        super(cause);
    }
}

View File

@@ -1,43 +0,0 @@
package io.kestra.core.exceptions;
import java.io.Serial;
import java.util.List;
/**
 * General exception that can be thrown when a queried Kestra entity field is invalid or does not exist.
 */
public class InvalidQueryFiltersException extends KestraRuntimeException {

    @Serial
    private static final long serialVersionUID = 1L;

    private static final String INVALID_QUERY_FILTER_MESSAGE = "Provided query filters are invalid";

    // transient: exceptions are Serializable but the filter list is only used
    // to build the message and need not survive serialization.
    private final transient List<String> invalids;

    /**
     * Creates a new {@link InvalidQueryFiltersException} instance.
     *
     * @param invalids the invalid filters.
     */
    public InvalidQueryFiltersException(final List<String> invalids) {
        super(INVALID_QUERY_FILTER_MESSAGE);
        this.invalids = invalids;
    }

    /**
     * Creates a new {@link InvalidQueryFiltersException} instance.
     *
     * @param invalid the invalid filter.
     */
    public InvalidQueryFiltersException(final String invalid) {
        super(INVALID_QUERY_FILTER_MESSAGE);
        this.invalids = List.of(invalid);
    }

    /**
     * Formats the invalid filters as a comma-separated string, falling back to the
     * generic message when none were recorded (e.g. after deserialization).
     *
     * @return a human-readable description of the invalid filters.
     */
    public String formatedInvalidObjects() {
        if (invalids == null || invalids.isEmpty()) {
            return INVALID_QUERY_FILTER_MESSAGE;
        }
        return String.join(", ", invalids);
    }
}

View File

@@ -1,27 +0,0 @@
package io.kestra.core.exceptions;
import java.io.Serial;
/**
 * The top-level checked exception for Kestra; domain-specific checked exceptions extend it.
 */
public class KestraException extends Exception {
    @Serial
    private static final long serialVersionUID = 1L;

    // Creates an exception with no detail message.
    public KestraException() {
    }

    // Creates an exception with a detail message.
    public KestraException(String message) {
        super(message);
    }

    // Creates an exception with a detail message and a cause.
    public KestraException(String message, Throwable cause) {
        super(message, cause);
    }

    // Creates an exception wrapping a cause.
    public KestraException(Throwable cause) {
        super(cause);
    }
}

View File

@@ -1,23 +0,0 @@
package io.kestra.core.exceptions;
/**
 * General exception that can be thrown when a Kestra resource or entity is not found.
 */
public class NotFoundException extends KestraRuntimeException {

    // Declared for consistency with the sibling exceptions in this package
    // (e.g. KestraException, InvalidQueryFiltersException) and to keep the
    // serialized form stable.
    @java.io.Serial
    private static final long serialVersionUID = 1L;

    /**
     * Creates a new {@link NotFoundException} instance with no detail message.
     */
    public NotFoundException() {
        super();
    }

    /**
     * Creates a new {@link NotFoundException} instance.
     *
     * @param message the error message.
     */
    public NotFoundException(final String message) {
        super(message);
    }
}

View File

@@ -155,14 +155,6 @@ public class HttpClient implements Closeable {
builder.addResponseInterceptorLast(new FailedResponseInterceptor());
}
if (this.configuration.getAllowedResponseCodes() != null) {
List<Integer> list = runContext.render(this.configuration.getAllowedResponseCodes()).asList(Integer.class);
if (!list.isEmpty()) {
builder.addResponseInterceptorLast(new FailedResponseInterceptor(list));
}
}
builder.addResponseInterceptorLast(new RunContextResponseInterceptor(this.runContext));
// builder object
@@ -284,7 +276,7 @@ public class HttpClient implements Closeable {
} else if (cls.isAssignableFrom(Byte[].class)) {
return (T) ArrayUtils.toObject(EntityUtils.toByteArray(entity));
} else {
return (T) JacksonMapper.ofJson(false).readValue(entity.getContent(), cls);
return (T) JacksonMapper.ofJson().readValue(entity.getContent(), cls);
}
}

View File

@@ -3,6 +3,7 @@ package io.kestra.core.http.client.apache;
import io.kestra.core.http.HttpResponse;
import io.kestra.core.http.HttpService;
import io.kestra.core.http.client.HttpClientResponseException;
import lombok.AllArgsConstructor;
import org.apache.hc.core5.http.EntityDetails;
import org.apache.hc.core5.http.HttpEntityContainer;
import org.apache.hc.core5.http.HttpException;
@@ -11,43 +12,22 @@ import org.apache.hc.core5.http.protocol.HttpContext;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.List;
@AllArgsConstructor
public class FailedResponseInterceptor implements HttpResponseInterceptor {
private final boolean allErrors;
private List<Integer> statusCodes;
public FailedResponseInterceptor() {
this.allErrors = true;
}
public FailedResponseInterceptor(List<Integer> statusCodes) {
this.statusCodes = statusCodes;
this.allErrors = false;
}
@Override
public void process(org.apache.hc.core5.http.HttpResponse response, EntityDetails entity, HttpContext context) throws HttpException, IOException {
if (this.allErrors && response.getCode() >= 400) {
this.raiseError(response, context);
if (response.getCode() >= 400) {
String error = "Failed http request with response code '" + response.getCode() + "'";
if (response instanceof HttpEntityContainer httpEntity && httpEntity.getEntity() != null) {
HttpService.HttpEntityCopy copy = HttpService.copy(httpEntity.getEntity());
httpEntity.setEntity(copy);
error += " and body:\n" + new String(copy.getBody(), StandardCharsets.UTF_8);
}
throw new HttpClientResponseException(error, HttpResponse.from(response, context));
}
if (this.statusCodes != null && !this.statusCodes.contains(response.getCode())) {
this.raiseError(response, context);
}
}
private void raiseError(org.apache.hc.core5.http.HttpResponse response, HttpContext context) throws IOException, HttpClientResponseException {
String error = "Failed http request with response code '" + response.getCode() + "'";
if (response instanceof HttpEntityContainer httpEntity && httpEntity.getEntity() != null) {
HttpService.HttpEntityCopy copy = HttpService.copy(httpEntity.getEntity());
httpEntity.setEntity(copy);
error += " and body:\n" + new String(copy.getBody(), StandardCharsets.UTF_8);
}
throw new HttpClientResponseException(error, HttpResponse.from(response, context));
}
}

View File

@@ -13,7 +13,6 @@ import java.net.Proxy;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.time.Duration;
import java.util.List;
@Builder(toBuilder = true)
@Getter
@@ -36,20 +35,16 @@ public class HttpConfiguration {
@Schema(title = "Whether redirects should be followed automatically.")
@Builder.Default
private Property<Boolean> followRedirects = Property.ofValue(true);
private Property<Boolean> followRedirects = Property.of(true);
@Setter
@Schema(title = "If true, allow a failed response code (response code >= 400)")
@Builder.Default
private Property<Boolean> allowFailed = Property.ofValue(false);
@Setter
@Schema(title = "List of response code allowed for this request")
private Property<List<Integer>> allowedResponseCodes;
private Property<Boolean> allowFailed = Property.of(false);
@Schema(title = "The default charset for the request.")
@Builder.Default
private final Property<Charset> defaultCharset = Property.ofValue(StandardCharsets.UTF_8);
private final Property<Charset> defaultCharset = Property.of(StandardCharsets.UTF_8);
@Schema(title = "The enabled log.")
@PluginProperty
@@ -126,7 +121,7 @@ public class HttpConfiguration {
}
this.timeout = this.timeout.toBuilder()
.connectTimeout(Property.ofValue(connectTimeout))
.connectTimeout(Property.of(connectTimeout))
.build();
return this;
@@ -140,7 +135,7 @@ public class HttpConfiguration {
}
this.timeout = this.timeout.toBuilder()
.readIdleTimeout(Property.ofValue(readTimeout))
.readIdleTimeout(Property.of(readTimeout))
.build();
return this;
@@ -155,7 +150,7 @@ public class HttpConfiguration {
}
this.proxy = this.proxy.toBuilder()
.type(Property.ofValue(proxyType))
.type(Property.of(proxyType))
.build();
return this;
@@ -169,7 +164,7 @@ public class HttpConfiguration {
}
this.proxy = this.proxy.toBuilder()
.address(Property.ofValue(proxyAddress))
.address(Property.of(proxyAddress))
.build();
return this;
@@ -183,7 +178,7 @@ public class HttpConfiguration {
}
this.proxy = this.proxy.toBuilder()
.port(Property.ofValue(proxyPort))
.port(Property.of(proxyPort))
.build();
return this;
@@ -197,7 +192,7 @@ public class HttpConfiguration {
}
this.proxy = this.proxy.toBuilder()
.username(Property.ofValue(proxyUsername))
.username(Property.of(proxyUsername))
.build();
return this;
@@ -211,7 +206,7 @@ public class HttpConfiguration {
}
this.proxy = this.proxy.toBuilder()
.password(Property.ofValue(proxyPassword))
.password(Property.of(proxyPassword))
.build();
return this;
@@ -227,7 +222,7 @@ public class HttpConfiguration {
}
this.auth = ((BasicAuthConfiguration) this.auth).toBuilder()
.username(Property.ofValue(basicAuthUser))
.username(Property.of(basicAuthUser))
.build();
return this;
@@ -242,7 +237,7 @@ public class HttpConfiguration {
}
this.auth = ((BasicAuthConfiguration) this.auth).toBuilder()
.password(Property.ofValue(basicAuthPassword))
.password(Property.of(basicAuthPassword))
.build();
return this;

View File

@@ -14,7 +14,7 @@ import java.net.Proxy;
public class ProxyConfiguration {
@Schema(title = "The type of proxy to use.")
@Builder.Default
private final Property<java.net.Proxy.Type> type = Property.ofValue(Proxy.Type.DIRECT);
private final Property<java.net.Proxy.Type> type = Property.of(Proxy.Type.DIRECT);
@Schema(title = "The address of the proxy server.")
private final Property<String> address;

View File

@@ -15,5 +15,5 @@ public class TimeoutConfiguration {
@Schema(title = "The time allowed for a read connection to remain idle before closing it.")
@Builder.Default
Property<Duration> readIdleTimeout = Property.ofValue(Duration.ofMinutes(5));
Property<Duration> readIdleTimeout = Property.of(Duration.ofMinutes(5));
}

View File

@@ -12,7 +12,6 @@ public class KestraLogFilter extends EventEvaluatorBase<ILoggingEvent> {
// we use startWith and do all checks successfully instead of using a more elegant construct like Stream...
return message.startsWith("outOfOrder mode is active. Migration of schema") ||
message.startsWith("Version mismatch : Database version is older than what dialect POSTGRES supports") ||
message.startsWith("Failed to bind as java.util.concurrent.Executors$AutoShutdownDelegatedExecutorService is unsupported.") ||
message.startsWith("The cache 'default' is not recording statistics.");
message.startsWith("Failed to bind as java.util.concurrent.Executors$AutoShutdownDelegatedExecutorService is unsupported.");
}
}

View File

@@ -1,14 +1,11 @@
package io.kestra.core.metrics;
import io.kestra.core.models.ServerType;
import io.micrometer.core.instrument.simple.SimpleMeterRegistry;
import io.micronaut.configuration.metrics.aggregator.MeterRegistryConfigurer;
import io.micronaut.context.annotation.Requires;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import io.micronaut.context.annotation.Value;
import io.micronaut.core.annotation.Nullable;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
@@ -18,26 +15,20 @@ public class GlobalTagsConfigurer implements MeterRegistryConfigurer<SimpleMeter
@Inject
MetricConfig metricConfig;
@Nullable
@Value("${kestra.server-type}")
ServerType serverType;
@Override
public void configure(SimpleMeterRegistry meterRegistry) {
String[] tags = Stream
.concat(
metricConfig.getTags() != null ? metricConfig.getTags()
.entrySet()
.stream()
.flatMap(e -> Stream.of(e.getKey(), e.getValue())) : Stream.empty(),
serverType != null ? Stream.of("server_type", serverType.name()) : Stream.empty()
)
.toList()
.toArray(String[]::new);
meterRegistry
.config()
.commonTags(tags);
if (metricConfig.getTags() != null) {
meterRegistry
.config()
.commonTags(
metricConfig.getTags()
.entrySet()
.stream()
.flatMap(e -> Stream.of(e.getKey(), e.getValue()))
.toList()
.toArray(String[]::new)
);
}
}
@Override

View File

@@ -1,15 +1,16 @@
package io.kestra.core.metrics;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.ExecutionKilled;
import io.kestra.core.models.tasks.Task;
import io.kestra.core.models.triggers.AbstractTrigger;
import io.kestra.core.models.triggers.TriggerContext;
import io.kestra.core.runners.*;
import io.kestra.core.runners.SubflowExecutionResult;
import io.kestra.core.runners.WorkerTask;
import io.kestra.core.runners.WorkerTaskResult;
import io.kestra.core.runners.WorkerTrigger;
import io.kestra.core.schedulers.SchedulerExecutionWithTrigger;
import io.micrometer.core.instrument.*;
import io.micrometer.core.instrument.binder.MeterBinder;
import io.micrometer.core.instrument.search.Search;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import lombok.extern.slf4j.Slf4j;
@@ -19,119 +20,49 @@ import org.apache.commons.lang3.ArrayUtils;
@Slf4j
public class MetricRegistry {
public static final String METRIC_WORKER_JOB_PENDING_COUNT = "worker.job.pending";
public static final String METRIC_WORKER_JOB_PENDING_COUNT_DESCRIPTION = "The number of jobs (tasks or triggers) pending to be run by the Worker";
public static final String METRIC_WORKER_JOB_RUNNING_COUNT = "worker.job.running";
public static final String METRIC_WORKER_JOB_RUNNING_COUNT_DESCRIPTION = "The number of jobs (tasks or triggers) currently running inside the Worker";
public static final String METRIC_WORKER_JOB_THREAD_COUNT = "worker.job.thread";
public static final String METRIC_WORKER_JOB_THREAD_COUNT_DESCRIPTION = "The number of worker threads";
public static final String METRIC_WORKER_RUNNING_COUNT = "worker.running.count";
public static final String METRIC_WORKER_RUNNING_COUNT_DESCRIPTION = "The number of tasks currently running inside the Worker";
public static final String METRIC_WORKER_QUEUED_DURATION = "worker.queued.duration";
public static final String METRIC_WORKER_QUEUED_DURATION_DESCRIPTION = "Task queued duration inside the Worker";
public static final String METRIC_WORKER_STARTED_COUNT = "worker.started.count";
public static final String METRIC_WORKER_STARTED_COUNT_DESCRIPTION = "The total number of tasks started by the Worker";
public static final String METRIC_WORKER_TIMEOUT_COUNT = "worker.timeout.count";
public static final String METRIC_WORKER_TIMEOUT_COUNT_DESCRIPTION = "The total number of tasks that timeout inside the Worker";
public static final String METRIC_WORKER_ENDED_COUNT = "worker.ended.count";
public static final String METRIC_WORKER_ENDED_COUNT_DESCRIPTION = "The total number of tasks ended by the Worker";
public static final String METRIC_WORKER_ENDED_DURATION = "worker.ended.duration";
public static final String METRIC_WORKER_ENDED_DURATION_DESCRIPTION = "Task run duration inside the Worker";
public static final String METRIC_WORKER_TRIGGER_DURATION = "worker.trigger.duration";
public static final String METRIC_WORKER_TRIGGER_DURATION_DESCRIPTION = "Trigger evaluation duration inside the Worker";
public static final String METRIC_WORKER_TRIGGER_RUNNING_COUNT = "worker.trigger.running.count";
public static final String METRIC_WORKER_TRIGGER_RUNNING_COUNT_DESCRIPTION = "The number of triggers currently evaluating inside the Worker";
public static final String METRIC_WORKER_TRIGGER_STARTED_COUNT = "worker.trigger.started.count";
public static final String METRIC_WORKER_TRIGGER_STARTED_COUNT_DESCRIPTION = "The total number of trigger evaluations started by the Worker";
public static final String METRIC_WORKER_TRIGGER_ENDED_COUNT = "worker.trigger.ended.count";
public static final String METRIC_WORKER_TRIGGER_ENDED_COUNT_DESCRIPTION = "The total number of trigger evaluations ended by the Worker";
public static final String METRIC_WORKER_TRIGGER_ERROR_COUNT = "worker.trigger.error.count";
public static final String METRIC_WORKER_TRIGGER_ERROR_COUNT_DESCRIPTION = "The total number of trigger evaluations that failed inside the Worker";
public static final String METRIC_WORKER_TRIGGER_EXECUTION_COUNT = "worker.trigger.execution.count";
public static final String METRIC_WORKER_TRIGGER_EXECUTION_COUNT_DESCRIPTION = "The total number of triggers evaluated by the Worker";
public static final String METRIC_WORKER_KILLED_COUNT = "worker.killed.count";
public static final String METRIC_WORKER_KILLED_COUNT_DESCRIPTION = "The total number of executions killed events received the Executor";
public static final String METRIC_EXECUTOR_THREAD_COUNT = "executor.thread.count";
public static final String METRIC_EXECUTOR_THREAD_COUNT_DESCRIPTION = "The number of executor threads";
public static final String METRIC_EXECUTOR_TASKRUN_CREATED_COUNT = "executor.taskrun.created.count";
public static final String METRIC_EXECUTOR_TASKRUN_CREATED_COUNT_DESCRIPTION = "The total number of tasks created by the Executor";
public static final String METRIC_EXECUTOR_TASKRUN_ENDED_COUNT = "executor.taskrun.ended.count";
public static final String METRIC_EXECUTOR_TASKRUN_ENDED_COUNT_DESCRIPTION = "The total number of tasks ended by the Executor";
public static final String METRIC_EXECUTOR_TASKRUN_ENDED_DURATION = "executor.taskrun.ended.duration";
public static final String METRIC_EXECUTOR_TASKRUN_ENDED_DURATION_DESCRIPTION = "Task duration inside the Executor";
public static final String METRIC_EXECUTOR_FLOWABLE_EXECUTION_COUNT = "executor.flowable.execution.count";
public static final String METRIC_EXECUTOR_FLOWABLE_EXECUTION_COUNT_DESCRIPTION = "The total number of flowable tasks executed by the Executor";
public static final String METRIC_EXECUTOR_EXECUTION_STARTED_COUNT = "executor.execution.started.count";
public static final String METRIC_EXECUTOR_EXECUTION_STARTED_COUNT_DESCRIPTION = "The total number of executions started by the Executor";
public static final String METRIC_EXECUTOR_EXECUTION_END_COUNT = "executor.execution.end.count";
public static final String METRIC_EXECUTOR_EXECUTION_END_COUNT_DESCRIPTION = "The total number of executions ended by the Executor";
public static final String METRIC_EXECUTOR_EXECUTION_DURATION = "executor.execution.duration";
public static final String METRIC_EXECUTOR_EXECUTION_DURATION_DESCRIPTION = "Execution duration inside the Executor";
public static final String METRIC_EXECUTOR_EXECUTION_MESSAGE_PROCESS_DURATION = "executor.execution.message.process";
public static final String METRIC_EXECUTOR_EXECUTION_MESSAGE_PROCESS_DURATION_DESCRIPTION = "Duration of a single execution message processed by the Executor";
public static final String METRIC_EXECUTOR_KILLED_COUNT = "executor.killed.count";
public static final String METRIC_EXECUTOR_KILLED_COUNT_DESCRIPTION = "The total number of executions killed events received the Executor";
public static final String METRIC_EXECUTOR_SLA_EXPIRED_COUNT = "executor.sla.expired.count";
public static final String METRIC_EXECUTOR_SLA_EXPIRED_COUNT_DESCRIPTION = "The total number of expired SLA (i.e. executions with SLA of type MAX_DURATION that took longer than the SLA) evaluated by the Executor";
public static final String METRIC_EXECUTOR_SLA_VIOLATION_COUNT = "executor.sla.violation.count";
public static final String METRIC_EXECUTOR_SLA_VIOLATION_COUNT_DESCRIPTION = "The total number of expired SLA (i.e. executions with SLA of type MAX_DURATION that took longer than the SLA) evaluated by the Executor";
public static final String METRIC_EXECUTOR_EXECUTION_DELAY_CREATED_COUNT = "executor.execution.delay.created.count";
public static final String METRIC_EXECUTOR_EXECUTION_DELAY_CREATED_COUNT_DESCRIPTION = "The total number of execution delays created by the Executor";
public static final String METRIC_EXECUTOR_EXECUTION_DELAY_ENDED_COUNT = "executor.execution.delay.ended.count";
public static final String METRIC_EXECUTOR_EXECUTION_DELAY_ENDED_COUNT_DESCRIPTION = "The total number of execution delays ended (resumed) by the Executor";
public static final String METRIC_EXECUTOR_WORKER_JOB_RESUBMIT_COUNT = "executor.worker.job.resubmit.count";
public static final String METRIC_EXECUTOR_WORKER_JOB_RESUBMIT_COUNT_DESCRIPTION = "The total number of worker jobs resubmitted to the Worker by the Executor";
public static final String METRIC_EXECUTOR_EXECUTION_QUEUED_COUNT = "executor.execution.queued.count";
public static final String METRIC_EXECUTOR_EXECUTION_QUEUED_COUNT_DESCRIPTION = "The total number of executions queued by the Executor";
public static final String METRIC_EXECUTOR_EXECUTION_POPPED_COUNT = "executor.execution.popped.count";
public static final String METRIC_EXECUTOR_EXECUTION_POPPED_COUNT_DESCRIPTION = "The total number of executions popped by the Executor";
public static final String EXECUTOR_TASKRUN_NEXT_COUNT = "executor.taskrun.next.count";
public static final String EXECUTOR_TASKRUN_ENDED_COUNT = "executor.taskrun.ended.count";
public static final String EXECUTOR_TASKRUN_ENDED_DURATION = "executor.taskrun.ended.duration";
public static final String EXECUTOR_WORKERTASKRESULT_COUNT = "executor.workertaskresult.count";
public static final String EXECUTOR_EXECUTION_STARTED_COUNT = "executor.execution.started.count";
public static final String EXECUTOR_EXECUTION_END_COUNT = "executor.execution.end.count";
public static final String EXECUTOR_EXECUTION_DURATION = "executor.execution.duration";
public static final String METRIC_INDEXER_REQUEST_COUNT = "indexer.request.count";
public static final String METRIC_INDEXER_REQUEST_COUNT_DESCRIPTION = "Total number of batches of records received by the Indexer";
public static final String METRIC_INDEXER_REQUEST_DURATION = "indexer.request.duration";
public static final String METRIC_INDEXER_REQUEST_DURATION_DESCRIPTION = "Batch of records duration inside the Indexer";
public static final String METRIC_INDEXER_REQUEST_RETRY_COUNT = "indexer.request.retry.count";
public static final String METRIC_INDEXER_REQUEST_RETRY_COUNT_DESCRIPTION = "Total number of batches of records retried by the Indexer";
public static final String METRIC_INDEXER_SERVER_DURATION = "indexer.server.duration";
public static final String METRIC_INDEXER_SERVER_DURATION_DESCRIPTION = "Batch of records indexation duration";
public static final String METRIC_INDEXER_MESSAGE_FAILED_COUNT = "indexer.message.failed.count";
public static final String METRIC_INDEXER_MESSAGE_FAILED_COUNT_DESCRIPTION = "Total number of records which failed to be indexed by the Indexer";
public static final String METRIC_INDEXER_MESSAGE_IN_COUNT = "indexer.message.in.count";
public static final String METRIC_INDEXER_MESSAGE_IN_COUNT_DESCRIPTION = "Total number of records received by the Indexer";
public static final String METRIC_INDEXER_MESSAGE_OUT_COUNT = "indexer.message.out.count";
public static final String METRIC_INDEXER_MESSAGE_OUT_COUNT_DESCRIPTION = "Total number of records indexed by the Indexer";
public static final String METRIC_SCHEDULER_LOOP_COUNT = "scheduler.loop.count";
public static final String METRIC_SCHEDULER_LOOP_COUNT_DESCRIPTION = "Total number of evaluation loops executed by the Scheduler";
public static final String METRIC_SCHEDULER_TRIGGER_EVALUATION_DURATION = "scheduler.trigger.evaluation.duration";
public static final String METRIC_SCHEDULER_TRIGGER_EVALUATION_DURATION_DESCRIPTION = "Trigger evaluation duration for trigger executed inside the Scheduler (Schedulable triggers)";
public static final String METRIC_SCHEDULER_TRIGGER_COUNT = "scheduler.trigger.count";
public static final String METRIC_SCHEDULER_TRIGGER_COUNT_DESCRIPTION = "Total number of executions triggered by the Scheduler";
public static final String METRIC_SCHEDULER_TRIGGER_DELAY_DURATION = "scheduler.trigger.delay.duration";
public static final String METRIC_SCHEDULER_TRIGGER_DELAY_DURATION_DESCRIPTION = "Trigger delay duration inside the Scheduler";
public static final String METRIC_SCHEDULER_EVALUATE_COUNT = "scheduler.evaluate.count";
public static final String METRIC_SCHEDULER_EVALUATE_COUNT_DESCRIPTION = "Total number of triggers evaluated by the Scheduler";
public static final String METRIC_SCHEDULER_EXECUTION_LOCK_DURATION = "scheduler.execution.lock.duration";
public static final String METRIC_SCHEDULER_EXECUTION_LOCK_DURATION_DESCRIPTION = "Trigger lock duration waiting for an execution to be terminated";
public static final String METRIC_SCHEDULER_EXECUTION_MISSING_DURATION = "scheduler.execution.missing.duration";
public static final String METRIC_SCHEDULER_EXECUTION_MISSING_DURATION_DESCRIPTION = "Missing execution duration inside the Scheduler. A missing execution is an execution that was triggered by the Scheduler but not yet started by the Executor";
public static final String METRIC_SCHEDULER_EVALUATION_LOOP_DURATION = "scheduler.evaluation.loop.duration";
public static final String METRIC_SCHEDULER_EVALUATION_LOOP_DURATION_DESCRIPTION = "Trigger evaluation loop duration inside the Scheduler";
public static final String SCHEDULER_LOOP_COUNT = "scheduler.loop.count";
public static final String SCHEDULER_TRIGGER_COUNT = "scheduler.trigger.count";
public static final String SCHEDULER_TRIGGER_DELAY_DURATION = "scheduler.trigger.delay.duration";
public static final String SCHEDULER_EVALUATE_COUNT = "scheduler.evaluate.count";
public static final String SCHEDULER_EXECUTION_RUNNING_DURATION = "scheduler.execution.running.duration";
public static final String SCHEDULER_EXECUTION_MISSING_DURATION = "scheduler.execution.missing.duration";
public static final String METRIC_STREAMS_STATE_COUNT = "stream.state.count";
public static final String METRIC_STREAMS_STATE_COUNT_DESCRIPTION = "Number of Kafka Stream applications by state";
public static final String STREAMS_STATE_COUNT = "stream.state.count";
public static final String METRIC_JDBC_QUERY_DURATION = "jdbc.query.duration";
public static final String METRIC_JDBC_QUERY_DURATION_DESCRIPTION = "Duration of database queries";
public static final String JDBC_QUERY_DURATION = "jdbc.query.duration";
public static final String METRIC_QUEUE_BIG_MESSAGE_COUNT = "queue.big_message.count";
public static final String METRIC_QUEUE_BIG_MESSAGE_COUNT_DESCRIPTION = "Total number of big messages";
public static final String METRIC_QUEUE_PRODUCE_COUNT = "queue.produce.count";
public static final String METRIC_QUEUE_PRODUCE_COUNT_DESCRIPTION = "Total number of produced messages";
public static final String METRIC_QUEUE_RECEIVE_DURATION = "queue.receive.duration";
public static final String METRIC_QUEUE_RECEIVE_DURATION_DESCRIPTION = "Queue duration to receive and consume a batch of messages";
public static final String METRIC_QUEUE_POLL_SIZE = "queue.poll.size";
public static final String METRIC_QUEUE_POLL_SIZE_DESCRIPTION = "Size of a poll to the queue (message batch size)";
public static final String QUEUE_BIG_MESSAGE_COUNT = "queue.big_message.count";
public static final String TAG_TASK_TYPE = "task_type";
public static final String TAG_TRIGGER_TYPE = "trigger_type";
@@ -142,10 +73,6 @@ public class MetricRegistry {
public static final String TAG_WORKER_GROUP = "worker_group";
public static final String TAG_TENANT_ID = "tenant_id";
public static final String TAG_CLASS_NAME = "class_name";
public static final String TAG_EXECUTION_KILLED_TYPE = "execution_killed_type";
public static final String TAG_QUEUE_CONSUMER = "consumer";
public static final String TAG_QUEUE_CONSUMER_GROUP = "consumer_group";
public static final String TAG_QUEUE_TYPE = "queue_type";
@Inject
private MeterRegistry meterRegistry;
@@ -157,72 +84,47 @@ public class MetricRegistry {
* Tracks a monotonically increasing value.
*
* @param name The base metric name
* @param description The metric description
* @param tags MUST be an even number of arguments representing key/value pairs of tags.
* @return A new or existing counter.
*/
public Counter counter(String name, String description, String... tags) {
return Counter.builder(metricName(name))
.description(description)
.tags(tags)
.register(this.meterRegistry);
public Counter counter(String name, String... tags) {
return this.meterRegistry.counter(metricName(name), tags);
}
/**
* Register a gauge that reports the value of the {@link Number}.
*
* @param name Name of the gauge being registered.
* @param description The metric description
* @param number Thread-safe implementation of {@link Number} used to access the value.
* @param tags Sequence of dimensions for breaking down the name.
* @param <T> The type of the number from which the gauge value is extracted.
* @return The number that was passed in so the registration can be done as part of an assignment
* statement.
*/
public <T extends Number> T gauge(String name, String description, T number, String... tags) {
Gauge.builder(metricName(name), () -> number)
.description(description)
.tags(tags)
.register(this.meterRegistry);
return number;
public <T extends Number> T gauge(String name, T number, String... tags) {
return this.meterRegistry.gauge(metricName(name), Tags.of(tags), number);
}
/**
* Measures the time taken for short tasks and the count of these tasks.
*
* @param name The base metric name
* @param description The metric description
* @param tags MUST be an even number of arguments representing key/value pairs of tags.
* @return A new or existing timer.
*/
public Timer timer(String name, String description, String... tags) {
return Timer.builder(metricName(name))
.description(description)
.tags(tags)
.register(this.meterRegistry);
public Timer timer(String name, String... tags) {
return this.meterRegistry.timer(metricName(name), tags);
}
/**
* Measures the distribution of samples.
*
* @param name The base metric name
* @param description The metric description
* @param tags MUST be an even number of arguments representing key/value pairs of tags.
* @return A new or existing distribution summary.
*/
public DistributionSummary summary(String name, String description, String... tags) {
return DistributionSummary.builder(metricName(name))
.description(description)
.tags(tags)
.register(this.meterRegistry);
}
/**
* Search for an existing Meter in the meter registry
* @param name The base metric name
*/
public Search find(String name) {
return this.meterRegistry.find(metricName(name));
public DistributionSummary summary(String name, String... tags) {
return this.meterRegistry.summary(metricName(name), tags);
}
/**
@@ -358,7 +260,7 @@ public class MetricRegistry {
* Return tags for current {@link AbstractTrigger}
*
* @param trigger the current Trigger
* @return tags to apply to metrics
* @return tags to applied to metrics
*/
public String[] tags(AbstractTrigger trigger) {
return new String[]{
@@ -408,19 +310,6 @@ public class MetricRegistry {
);
}
/**
* Return tags for current {@link ExecutionKilled}
*
* @param executionKilled the current Trigger
* @return tags to apply to metrics
*/
public String[] tags(ExecutionKilled executionKilled) {
var baseTags = new String[]{
TAG_EXECUTION_KILLED_TYPE, executionKilled.getType(),
};
return executionKilled.getTenantId() == null ? baseTags : ArrayUtils.addAll(baseTags, TAG_TENANT_ID, executionKilled.getTenantId());
}
/**
* Return globals tags

View File

@@ -20,7 +20,6 @@ public record Label(@NotNull String key, @NotNull String value) {
public static final String REPLAY = SYSTEM_PREFIX + "replay";
public static final String REPLAYED = SYSTEM_PREFIX + "replayed";
public static final String SIMULATED_EXECUTION = SYSTEM_PREFIX + "simulatedExecution";
public static final String TEST = SYSTEM_PREFIX + "test";
/**
* Static helper method for converting a list of labels to a nested map.

View File

@@ -3,10 +3,8 @@ package io.kestra.core.models;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonValue;
import io.kestra.core.exceptions.InvalidQueryFiltersException;
import io.kestra.core.models.dashboards.filters.*;
import io.kestra.core.utils.Enums;
import java.util.ArrayList;
import lombok.Builder;
import java.util.Arrays;
@@ -45,13 +43,9 @@ public record QueryFilter(
STARTS_WITH,
ENDS_WITH,
CONTAINS,
REGEX,
PREFIX
REGEX;
}
private List<Object> asValues(Object value) {
return value instanceof String valueStr ? Arrays.asList(valueStr.split(",")) : (List<Object>) value;
}
@SuppressWarnings("unchecked")
public <T extends Enum<T>> AbstractFilter<T> toDashboardFilterBuilder(T field, Object value) {
@@ -69,9 +63,9 @@ public record QueryFilter(
case LESS_THAN_OR_EQUAL_TO:
return LessThanOrEqualTo.<T>builder().field(field).value(value).build();
case IN:
return In.<T>builder().field(field).values(asValues(value)).build();
return In.<T>builder().field(field).values((List<Object>) value).build();
case NOT_IN:
return NotIn.<T>builder().field(field).values(asValues(value)).build();
return NotIn.<T>builder().field(field).values((List<Object>) value).build();
case STARTS_WITH:
return StartsWith.<T>builder().field(field).value(value.toString()).build();
case ENDS_WITH:
@@ -80,8 +74,6 @@ public record QueryFilter(
return Contains.<T>builder().field(field).value(value.toString()).build();
case REGEX:
return Regex.<T>builder().field(field).value(value.toString()).build();
case PREFIX:
return Regex.<T>builder().field(field).value("^" + value.toString().replace(".", "\\.") + "(?:\\..+)?$").build();
default:
throw new IllegalArgumentException("Unsupported operation: " + this.operation);
}
@@ -91,7 +83,7 @@ public record QueryFilter(
QUERY("q") {
@Override
public List<Op> supportedOp() {
return List.of(Op.EQUALS, Op.NOT_EQUALS);
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.REGEX);
}
},
SCOPE("scope") {
@@ -103,7 +95,7 @@ public record QueryFilter(
NAMESPACE("namespace") {
@Override
public List<Op> supportedOp() {
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.CONTAINS, Op.STARTS_WITH, Op.ENDS_WITH, Op.REGEX, Op.IN, Op.NOT_IN, Op.PREFIX);
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.CONTAINS, Op.STARTS_WITH, Op.ENDS_WITH, Op.REGEX, Op.IN);
}
},
LABELS("labels") {
@@ -115,19 +107,19 @@ public record QueryFilter(
FLOW_ID("flowId") {
@Override
public List<Op> supportedOp() {
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.CONTAINS, Op.STARTS_WITH, Op.ENDS_WITH, Op.REGEX);
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.CONTAINS, Op.IN, Op.NOT_IN);
}
},
START_DATE("startDate") {
@Override
public List<Op> supportedOp() {
return List.of(Op.GREATER_THAN_OR_EQUAL_TO, Op.GREATER_THAN, Op.LESS_THAN_OR_EQUAL_TO, Op.LESS_THAN, Op.EQUALS, Op.NOT_EQUALS);
return List.of(Op.GREATER_THAN, Op.LESS_THAN, Op.EQUALS, Op.NOT_EQUALS);
}
},
END_DATE("endDate") {
@Override
public List<Op> supportedOp() {
return List.of(Op.GREATER_THAN_OR_EQUAL_TO, Op.GREATER_THAN, Op.LESS_THAN_OR_EQUAL_TO, Op.LESS_THAN, Op.EQUALS, Op.NOT_EQUALS);
return List.of(Op.GREATER_THAN, Op.LESS_THAN, Op.EQUALS, Op.NOT_EQUALS);
}
},
STATE("state") {
@@ -139,7 +131,8 @@ public record QueryFilter(
TIME_RANGE("timeRange") {
@Override
public List<Op> supportedOp() {
return List.of(Op.EQUALS);
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.CONTAINS, Op.STARTS_WITH,
Op.ENDS_WITH, Op.IN, Op.NOT_IN, Op.REGEX);
}
},
TRIGGER_EXECUTION_ID("triggerExecutionId") {
@@ -218,7 +211,7 @@ public record QueryFilter(
@Override
public List<Field> supportedField() {
return List.of(
Field.QUERY, Field.SCOPE, Field.FLOW_ID, Field.START_DATE, Field.END_DATE,
Field.QUERY, Field.SCOPE, Field.FLOW_ID, Field.START_DATE, Field.END_DATE, Field.TIME_RANGE,
Field.STATE, Field.LABELS, Field.TRIGGER_EXECUTION_ID, Field.CHILD_FILTER,
Field.NAMESPACE
);
@@ -227,8 +220,8 @@ public record QueryFilter(
LOG {
@Override
public List<Field> supportedField() {
return List.of(Field.QUERY, Field.SCOPE, Field.NAMESPACE, Field.START_DATE,
Field.END_DATE, Field.FLOW_ID, Field.TRIGGER_ID, Field.MIN_LEVEL
return List.of(Field.NAMESPACE, Field.START_DATE, Field.END_DATE,
Field.FLOW_ID, Field.TRIGGER_ID, Field.MIN_LEVEL
);
}
},
@@ -249,8 +242,7 @@ public record QueryFilter(
TRIGGER {
@Override
public List<Field> supportedField() {
return List.of(Field.QUERY, Field.SCOPE, Field.NAMESPACE, Field.WORKER_ID, Field.FLOW_ID,
Field.START_DATE, Field.END_DATE, Field.TRIGGER_ID
return List.of(Field.QUERY, Field.NAMESPACE, Field.WORKER_ID, Field.FLOW_ID
);
}
};
@@ -297,26 +289,4 @@ public record QueryFilter(
public record Operation(String name, String value) {
}
public static void validateQueryFilters(List<QueryFilter> filters, Resource resource){
if (filters == null) {
return;
}
List<String> errors = new ArrayList<>();
filters.forEach(filter -> {
if (!filter.field().supportedOp().contains(filter.operation())) {
errors.add("Operation %s is not supported for field %s. Supported operations are %s".formatted(
filter.operation(), filter.field().name(),
filter.field().supportedOp().stream().map(Op::name).collect(Collectors.joining(", "))));
}
if (!resource.supportedField().contains(filter.field())){
errors.add("Field %s is not supported for resource %s. Supported fields are %s".formatted(
filter.field().name(), resource.name(),
resource.supportedField().stream().map(Field::name).collect(Collectors.joining(", "))));
}
});
if (!errors.isEmpty()){
throw new InvalidQueryFiltersException(errors);
}
}
}

View File

@@ -15,8 +15,6 @@ import jakarta.validation.constraints.NotNull;
@NoArgsConstructor
public class Setting {
public static final String INSTANCE_UUID = "instance.uuid";
public static final String INSTANCE_VERSION = "instance.version";
@NotNull
private String key;

View File

@@ -22,10 +22,6 @@ import java.util.stream.Stream;
@Jacksonized
@Introspected
public class FlowUsage {
// Namespace used for 'Getting Started' flows.
private static final String TUTORIAL_NAMESPACE = "tutorial";
private final Integer count;
private final Long namespacesCount;
private final Map<String, Long> taskTypeCount;
@@ -41,13 +37,12 @@ public class FlowUsage {
}
public static FlowUsage of(List<Flow> flows) {
List<Flow> filtered = flows.stream().filter(flow -> !TUTORIAL_NAMESPACE.equals(flow.getNamespace())).toList();
return FlowUsage.builder()
.count(count(filtered))
.namespacesCount(namespacesCount(filtered))
.taskTypeCount(taskTypeCount(filtered))
.triggerTypeCount(triggerTypeCount(filtered))
.taskRunnerTypeCount(taskRunnerTypeCount(filtered))
.count(count(flows))
.namespacesCount(namespacesCount(flows))
.taskTypeCount(taskTypeCount(flows))
.triggerTypeCount(triggerTypeCount(flows))
.taskRunnerTypeCount(taskRunnerTypeCount(flows))
.build();
}

View File

@@ -1,4 +0,0 @@
package io.kestra.core.models.collectors;
public record PluginMetric(String type, double count, double totalTime, double meanTime){
}

View File

@@ -2,7 +2,7 @@ package io.kestra.core.models.collectors;
import io.kestra.core.models.ServerType;
import io.micronaut.core.annotation.Introspected;
import jakarta.annotation.Nullable;
import io.micronaut.core.annotation.Nullable;
import jakarta.validation.Valid;
import jakarta.validation.constraints.NotNull;
import lombok.AllArgsConstructor;
@@ -13,7 +13,6 @@ import lombok.extern.jackson.Jacksonized;
import java.time.Instant;
import java.time.ZoneId;
import java.util.List;
import java.util.Map;
import java.util.Set;
@SuperBuilder(toBuilder = true)
@@ -67,8 +66,4 @@ public class Usage {
@Valid
@Nullable
private ServiceUsage services;
@Valid
@Nullable
private List<PluginMetric> pluginMetrics;
}

View File

@@ -1,10 +1,7 @@
package io.kestra.core.models.dashboards;
import jakarta.validation.constraints.Max;
import jakarta.validation.constraints.Min;
import jakarta.validation.constraints.NotBlank;
import jakarta.validation.constraints.NotNull;
import lombok.Builder;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.NoArgsConstructor;
@@ -24,11 +21,6 @@ public class ChartOption {
private String description;
@Builder.Default
@Min(1)
@Max(12)
private int width = 6;
public List<String> neededColumns() {
return Collections.emptyList();
}

View File

@@ -4,7 +4,6 @@ import io.kestra.core.models.QueryFilter;
import io.kestra.core.models.annotations.Plugin;
import io.kestra.core.models.dashboards.filters.AbstractFilter;
import io.kestra.core.repositories.QueryBuilderInterface;
import io.kestra.plugin.core.dashboard.data.IData;
import jakarta.validation.constraints.NotBlank;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Pattern;
@@ -25,12 +24,13 @@ import java.util.Set;
@NoArgsConstructor
@Plugin
@EqualsAndHashCode
public abstract class DataFilter<F extends Enum<F>, C extends ColumnDescriptor<F>> implements io.kestra.core.models.Plugin, IData<F> {
public abstract class DataFilter<F extends Enum<F>, C extends ColumnDescriptor<F>> implements io.kestra.core.models.Plugin {
@NotNull
@NotBlank
@Pattern(regexp = "\\p{javaJavaIdentifierStart}\\p{javaJavaIdentifierPart}*(\\.\\p{javaJavaIdentifierStart}\\p{javaJavaIdentifierPart}*)*")
private String type;
private Map<String, C> columns;
@Setter
@@ -42,10 +42,8 @@ public abstract class DataFilter<F extends Enum<F>, C extends ColumnDescriptor<F
return Collections.emptySet();
}
public void updateWhereWithGlobalFilters(List<QueryFilter> queryFilterList, ZonedDateTime startDate, ZonedDateTime endDate) {
this.where = whereWithGlobalFilters(queryFilterList, startDate, endDate, this.where);
}
public abstract Class<? extends QueryBuilderInterface<F>> repositoryClass();
public abstract void setGlobalFilter(List<QueryFilter> queryFilterList, ZonedDateTime startDate, ZonedDateTime endDate);
}

Some files were not shown because too many files have changed in this diff Show More