Mirror of https://github.com/kestra-io/kestra.git (synced 2025-12-26 14:00:23 -05:00)

Compare commits: retry-flow...fix-execut (291 commits)
[Commit list (291 commits, 01af20ad6d ... 1b934d31f1): the Author / SHA1 / Date table did not survive extraction; only bare commit SHAs were captured, with no authors, dates, or messages.]
.github/actions/plugins-list/action.yml (vendored), deleted (29 lines)
@@ -1,29 +0,0 @@
name: 'Load Kestra Plugin List'
description: 'Composite action to load list of plugins'
inputs:
  plugin-version:
    description: "Kestra version"
    default: 'LATEST'
    required: true
  plugin-file:
    description: "File of the plugins"
    default: './.plugins'
    required: true
outputs:
  plugins:
    description: "List of all Kestra plugins"
    value: ${{ steps.plugins.outputs.plugins }}
  repositories:
    description: "List of all Kestra repositories of plugins"
    value: ${{ steps.plugins.outputs.repositories }}
runs:
  using: composite
  steps:
    - name: Get Plugins List
      id: plugins
      shell: bash
      run: |
        PLUGINS=$([ -f ${{ inputs.plugin-file }} ] && cat ${{ inputs.plugin-file }} | grep "io\\.kestra\\." | sed -e '/#/s/^.//' | sed -e "s/LATEST/${{ inputs.plugin-version }}/g" | cut -d':' -f2- | xargs || echo '');
        REPOSITORIES=$([ -f ${{ inputs.plugin-file }} ] && cat ${{ inputs.plugin-file }} | grep "io\\.kestra\\." | sed -e '/#/s/^.//' | cut -d':' -f1 | uniq | sort | xargs || echo '')
        echo "plugins=$PLUGINS" >> $GITHUB_OUTPUT
        echo "repositories=$REPOSITORIES" >> $GITHUB_OUTPUT
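The removed action's shell pipeline is dense, so here is a minimal sketch of what it does, assuming a hypothetical .plugins file (the file format below is inferred from the pipeline, not documented in this diff):

  # hypothetical .plugins file: one "repository:group:artifact:version" entry per line
  cat > .plugins <<'EOF'
  #plugin-jdbc:io.kestra.plugin:plugin-jdbc-postgres:LATEST
  plugin-scripts:io.kestra.plugin:plugin-script-python:LATEST
  EOF

  # "plugins" output: keep io.kestra.* lines, strip the first character of
  # "#"-prefixed lines (so commented entries are still included), substitute the
  # requested version for LATEST, and drop the leading repository field
  cat .plugins | grep "io\.kestra\." | sed -e '/#/s/^.//' \
    | sed -e "s/LATEST/0.20.0/g" | cut -d':' -f2- | xargs
  # -> io.kestra.plugin:plugin-jdbc-postgres:0.20.0 io.kestra.plugin:plugin-script-python:0.20.0

  # "repositories" output: the first ":"-separated field, de-duplicated and sorted
  cat .plugins | grep "io\.kestra\." | sed -e '/#/s/^.//' | cut -d':' -f1 | uniq | sort | xargs
  # -> plugin-jdbc plugin-scripts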
.github/actions/setup-vars/action.yml (vendored), deleted (20 lines)
@@ -1,20 +0,0 @@
name: 'Setup vars'
description: 'Composite action to setup common vars'
outputs:
  tag:
    description: "Git tag"
    value: ${{ steps.vars.outputs.tag }}
  commit:
    description: "Git commit"
    value: ${{ steps.vars.outputs.commit }}
runs:
  using: composite
  steps:
    # Setup vars
    - name: Set variables
      id: vars
      shell: bash
      run: |
        TAG=${GITHUB_REF#refs/*/}
        echo "tag=${TAG}" >> $GITHUB_OUTPUT
        echo "commit=$(git rev-parse --short "$GITHUB_SHA")" >> $GITHUB_OUTPUT
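The TAG line relies on bash prefix removal rather than an external tool. A quick sketch of how ${GITHUB_REF#refs/*/} behaves (ref values illustrative):

  REF="refs/tags/v0.20.0";  echo "${REF#refs/*/}"   # v0.20.0 -- '#' strips the shortest leading match of refs/*/
  REF="refs/heads/develop"; echo "${REF#refs/*/}"   # develop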
.github/workflows/auto-translate-ui-keys.yml (vendored), 4 lines changed
@@ -26,7 +26,7 @@ jobs:
          fetch-depth: 0

      - name: Set up Python
-       uses: actions/setup-python@v5
+       uses: actions/setup-python@v6
        with:
          python-version: "3.x"

@@ -39,7 +39,7 @@ jobs:
          OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}

      - name: Set up Node
-       uses: actions/setup-node@v4
+       uses: actions/setup-node@v5
        with:
          node-version: "20.x"
.github/workflows/e2e-scheduling.yml (vendored), new file (15 lines)
@@ -0,0 +1,15 @@
name: 'E2E tests scheduling'
# 'New E2E tests implementation started by Roman. Based on playwright in npm UI project, tests Kestra OSS develop docker image. These tests are written from zero, lets make them unflaky from the start!.'
on:
  schedule:
    - cron: "0 * * * *" # Every hour
  workflow_dispatch:
    inputs:
      noInputYet:
        description: 'not input yet.'
        required: false
        type: string
        default: "no input"
jobs:
  e2e:
    uses: kestra-io/actions/.github/workflows/kestra-oss-e2e-tests.yml@main
.github/workflows/e2e.yml (vendored), deleted (86 lines)
@@ -1,86 +0,0 @@
name: 'E2E tests revival'
description: 'New E2E tests implementation started by Roman. Based on playwright in npm UI project, tests Kestra OSS develop docker image. These tests are written from zero, lets make them unflaky from the start!.'
on:
  schedule:
    - cron: "0 * * * *" # Every hour
  workflow_call:
    inputs:
      noInputYet:
        description: 'not input yet.'
        required: false
        type: string
        default: "no input"
  workflow_dispatch:
    inputs:
      noInputYet:
        description: 'not input yet.'
        required: false
        type: string
        default: "no input"
jobs:
  check:
    timeout-minutes: 15
    runs-on: ubuntu-latest
    env:
      GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
    steps:
      - name: Login to DockerHub
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ github.token }}

      - name: Checkout kestra
        uses: actions/checkout@v5
        with:
          path: kestra

      # Setup build
      - uses: kestra-io/actions/.github/actions/setup-build@main
        name: Setup - Build
        id: build
        with:
          java-enabled: true
          node-enabled: true
          python-enabled: true

      - name: Install Npm dependencies
        run: |
          cd kestra/ui
          npm i
          npx playwright install --with-deps chromium

      - name: Run E2E Tests
        run: |
          cd kestra
          sh build-and-start-e2e-tests.sh

      - name: Upload Playwright Report as Github artifact
        # 'With this report, you can analyze locally the results of the tests. see https://playwright.dev/docs/ci-intro#html-report'
        uses: actions/upload-artifact@v4
        if: ${{ !cancelled() }}
        with:
          name: playwright-report
          path: kestra/ui/playwright-report/
          retention-days: 7
      # Allure check
      # TODO I don't know what it should do
      # - uses: rlespinasse/github-slug-action@v5
      #   name: Allure - Generate slug variables
      #
      # - name: Allure - Publish report
      #   uses: andrcuns/allure-publish-action@v2.9.0
      #   if: always() && env.GOOGLE_SERVICE_ACCOUNT != ''
      #   continue-on-error: true
      #   env:
      #     GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_AUTH_TOKEN }}
      #     JAVA_HOME: /usr/lib/jvm/default-jvm/
      #   with:
      #     storageType: gcs
      #     resultsGlob: "**/build/allure-results"
      #     bucket: internal-kestra-host
      #     baseUrl: "https://internal.dev.kestra.io"
      #     prefix: ${{ format('{0}/{1}', github.repository, 'allure/java') }}
      #     copyLatest: true
      #     ignoreMissingResults: true
[workflow file name not captured by the mirror; from context, the Gradle release workflow]
@@ -1,10 +1,10 @@
-name: Run Gradle Release
-run-name: "Releasing Kestra ${{ github.event.inputs.releaseVersion }} 🚀"
+name: Create new release branch
+run-name: "Create new release branch Kestra ${{ github.event.inputs.releaseVersion }} 🚀"
 on:
   workflow_dispatch:
     inputs:
       releaseVersion:
-        description: 'The release version (e.g., 0.21.0-rc1)'
+        description: 'The release version (e.g., 0.21.0)'
         required: true
         type: string
       nextVersion:
@@ -23,8 +23,8 @@ jobs:
      # Checks
      - name: Check Inputs
        run: |
-         if ! [[ "$RELEASE_VERSION" =~ ^[0-9]+(\.[0-9]+)\.0-rc[01](-SNAPSHOT)?$ ]]; then
-           echo "Invalid release version. Must match regex: ^[0-9]+(\.[0-9]+)\.0-rc[01](-SNAPSHOT)?$"
+         if ! [[ "$RELEASE_VERSION" =~ ^[0-9]+(\.[0-9]+)\.0$ ]]; then
+           echo "Invalid release version. Must match regex: ^[0-9]+(\.[0-9]+)\.0$"
            exit 1
          fi

@@ -38,15 +38,8 @@ jobs:
          fetch-depth: 0
          path: kestra

-     # Checkout GitHub Actions
-     - uses: actions/checkout@v5
-       with:
-         repository: kestra-io/actions
-         path: actions
-         ref: main
-
      # Setup build
-     - uses: ./actions/.github/actions/setup-build
+     - uses: kestra-io/actions/composite/setup-build@main
        id: build
        with:
          java-enabled: true
@@ -78,7 +71,6 @@ jobs:
          git checkout develop;

          if [[ "$RELEASE_VERSION" == *"-SNAPSHOT" ]]; then
-           # -SNAPSHOT qualifier maybe used to test release-candidates
            ./gradlew release -Prelease.useAutomaticVersion=true \
              -Prelease.releaseVersion="${RELEASE_VERSION}" \
              -Prelease.newVersion="${NEXT_VERSION}" \
@@ -89,4 +81,4 @@ jobs:
            -Prelease.releaseVersion="${RELEASE_VERSION}" \
            -Prelease.newVersion="${NEXT_VERSION}" \
            -Prelease.pushReleaseVersionBranch="${PUSH_RELEASE_BRANCH}"
          fi
        fi
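The tightened check means the branch-creation workflow now only accepts plain x.y.0 versions. A quick bash illustration of the new pattern (values illustrative):

  RELEASE_VERSION="0.21.0"
  [[ "$RELEASE_VERSION" =~ ^[0-9]+(\.[0-9]+)\.0$ ]] && echo "accepted"   # accepted
  RELEASE_VERSION="0.21.0-rc1"
  [[ "$RELEASE_VERSION" =~ ^[0-9]+(\.[0-9]+)\.0$ ]] || echo "rejected"   # rejected: rc suffixes no longer pass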
[workflow file name not captured by the mirror; from context, the plugin release workflow]
@@ -4,7 +4,7 @@ on:
  workflow_dispatch:
    inputs:
      releaseVersion:
-       description: 'The release version (e.g., 0.21.0-rc1)'
+       description: 'The release version (e.g., 0.21.0)'
        required: true
        type: string
      nextVersion:
@@ -25,25 +25,17 @@ jobs:
        with:
          fetch-depth: 0

-     # Checkout GitHub Actions
-     - uses: actions/checkout@v5
-       with:
-         repository: kestra-io/actions
-         path: actions
-         ref: main
-
      # Setup build
-     - uses: ./actions/.github/actions/setup-build
+     - uses: kestra-io/actions/composite/setup-build@main
        id: build
        with:
          java-enabled: true
          node-enabled: true
          python-enabled: true
          caches-enabled: true

      # Get Plugins List
      - name: Get Plugins List
-       uses: ./.github/actions/plugins-list
+       uses: kestra-io/actions/composite/kestra-oss/kestra-oss-plugins-list@main
        id: plugins-list
        with:
          plugin-version: 'LATEST'
@@ -60,7 +52,7 @@ jobs:
          GITHUB_PAT: ${{ secrets.GH_PERSONAL_TOKEN }}
        run: |
          chmod +x ./dev-tools/release-plugins.sh;

          ./dev-tools/release-plugins.sh \
            --release-version=${{github.event.inputs.releaseVersion}} \
            --next-version=${{github.event.inputs.nextVersion}} \
@@ -73,10 +65,10 @@ jobs:
          GITHUB_PAT: ${{ secrets.GH_PERSONAL_TOKEN }}
        run: |
          chmod +x ./dev-tools/release-plugins.sh;

          ./dev-tools/release-plugins.sh \
            --release-version=${{github.event.inputs.releaseVersion}} \
            --next-version=${{github.event.inputs.nextVersion}} \
            --dry-run \
            --yes \
-           ${{ steps.plugins-list.outputs.repositories }}
+           ${{ steps.plugins-list.outputs.repositories }}
[workflow file name not captured by the mirror; from context, the plugin set-version/tag workflow]
@@ -23,7 +23,7 @@ jobs:

      # Get Plugins List
      - name: Get Plugins List
-       uses: ./.github/actions/plugins-list
+       uses: kestra-io/actions/composite/kestra-oss/kestra-oss-plugins-list@main
        id: plugins-list
        with:
          plugin-version: 'LATEST'
@@ -40,7 +40,7 @@ jobs:
          GITHUB_PAT: ${{ secrets.GH_PERSONAL_TOKEN }}
        run: |
          chmod +x ./dev-tools/setversion-tag-plugins.sh;

          ./dev-tools/setversion-tag-plugins.sh \
            --release-version=${{github.event.inputs.releaseVersion}} \
            --yes \
@@ -52,7 +52,7 @@ jobs:
          GITHUB_PAT: ${{ secrets.GH_PERSONAL_TOKEN }}
        run: |
          chmod +x ./dev-tools/setversion-tag-plugins.sh;

          ./dev-tools/setversion-tag-plugins.sh \
            --release-version=${{github.event.inputs.releaseVersion}} \
            --dry-run \
[workflow file name not captured by the mirror; from context, the set-version-and-tag workflow]
@@ -1,5 +1,5 @@
-name: Set Version and Tag
-run-name: "Set version and Tag Kestra to ${{ github.event.inputs.releaseVersion }} 🚀"
+name: Start release
+run-name: "Start release of Kestra ${{ github.event.inputs.releaseVersion }} 🚀"
 on:
   workflow_dispatch:
     inputs:
@@ -7,17 +7,26 @@ on:
      description: 'The release version (e.g., 0.21.1)'
      required: true
      type: string
+
+permissions:
+  contents: write
+
 env:
   RELEASE_VERSION: "${{ github.event.inputs.releaseVersion }}"
 jobs:
   release:
     name: Release Kestra
     runs-on: ubuntu-latest
-    if: startsWith(github.ref, 'refs/heads/releases/v')
     steps:
       # Checks
-      - name: Check Inputs
+      - name: Parse and Check Inputs
+        id: parse-and-check-inputs
        run: |
+         CURRENT_BRANCH="${{ github.ref_name }}"
+         if ! [[ "$CURRENT_BRANCH" == "develop" ]]; then
+           echo "You can only run this workflow on develop, but you ran it on $CURRENT_BRANCH"
+           exit 1
+         fi
+
          if ! [[ "$RELEASE_VERSION" =~ ^[0-9]+(\.[0-9]+)(\.[0-9]+)(-rc[0-9])?(-SNAPSHOT)?$ ]]; then
            echo "Invalid release version. Must match regex: ^[0-9]+(\.[0-9]+)(\.[0-9]+)-(rc[0-9])?(-SNAPSHOT)?$"
            exit 1
@@ -25,13 +34,8 @@ jobs:

          # Extract the major and minor versions
          BASE_VERSION=$(echo "$RELEASE_VERSION" | sed -E 's/^([0-9]+\.[0-9]+)\..*/\1/')
-         RELEASE_BRANCH="refs/heads/releases/v${BASE_VERSION}.x"
-
-         CURRENT_BRANCH="$GITHUB_REF"
-         if ! [[ "$CURRENT_BRANCH" == "$RELEASE_BRANCH" ]]; then
-           echo "Invalid release branch. Expected $RELEASE_BRANCH, was $CURRENT_BRANCH"
-           exit 1
-         fi
+         RELEASE_BRANCH="releases/v${BASE_VERSION}.x"
+         echo "release_branch=${RELEASE_BRANCH}" >> $GITHUB_OUTPUT

      # Checkout
      - name: Checkout
@@ -39,6 +43,7 @@ jobs:
        with:
          fetch-depth: 0
          token: ${{ secrets.GH_PERSONAL_TOKEN }}
+         ref: ${{ steps.parse-and-check-inputs.outputs.release_branch }}

      # Configure
      - name: Git - Configure
@@ -47,7 +52,7 @@ jobs:
          git config --global user.name "github-actions[bot]"

      # Execute
-     - name: Run Gradle Release
+     - name: Start release by updating version and pushing a new tag
        env:
          GITHUB_PAT: ${{ secrets.GH_PERSONAL_TOKEN }}
        run: |
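The branch derivation in this step is a one-line sed. A quick sketch of what it produces (version value illustrative):

  echo "0.21.1" | sed -E 's/^([0-9]+\.[0-9]+)\..*/\1/'   # 0.21
  # so RELEASE_BRANCH becomes releases/v0.21.x, which the checkout step then uses as its ref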
.github/workflows/main-build.yml (vendored), new file (86 lines)
@@ -0,0 +1,86 @@
name: Main Workflow

on:
  push:
    branches:
      - releases/*
      - develop

  workflow_dispatch:
    inputs:
      skip-test:
        description: 'Skip test'
        type: choice
        required: true
        default: 'false'
        options:
          - "true"
          - "false"

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-main
  cancel-in-progress: true

jobs:
  backend-tests:
    name: Backend tests
    if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}
    uses: kestra-io/actions/.github/workflows/kestra-oss-backend-tests.yml@main
    secrets:
      GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
      SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
      GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}

  frontend-tests:
    name: Frontend tests
    if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}
    uses: kestra-io/actions/.github/workflows/kestra-oss-frontend-tests.yml@main
    secrets:
      CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
      GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}

  publish-develop-docker:
    name: Publish Docker
    needs: [backend-tests, frontend-tests]
    if: "!failure() && !cancelled() && github.ref == 'refs/heads/develop'"
    uses: kestra-io/actions/.github/workflows/kestra-oss-publish-docker.yml@main
    with:
      plugin-version: 'LATEST-SNAPSHOT'
    secrets:
      DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
      DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }}
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

  publish-develop-maven:
    name: Publish develop Maven
    needs: [ backend-tests, frontend-tests ]
    if: "!failure() && !cancelled() && github.ref == 'refs/heads/develop'"
    uses: kestra-io/actions/.github/workflows/kestra-oss-publish-maven.yml@main
    secrets:
      SONATYPE_USER: ${{ secrets.SONATYPE_USER }}
      SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }}
      SONATYPE_GPG_KEYID: ${{ secrets.SONATYPE_GPG_KEYID }}
      SONATYPE_GPG_PASSWORD: ${{ secrets.SONATYPE_GPG_PASSWORD }}
      SONATYPE_GPG_FILE: ${{ secrets.SONATYPE_GPG_FILE }}

  end:
    runs-on: ubuntu-latest
    needs: [publish-develop-docker, publish-develop-maven]
    if: always()
    steps:
      - name: Trigger EE Workflow
        uses: peter-evans/repository-dispatch@v3
        if: github.ref == 'refs/heads/develop' && needs.release.result == 'success'
        with:
          token: ${{ secrets.GH_PERSONAL_TOKEN }}
          repository: kestra-io/kestra-ee
          event-type: "oss-updated"

      # Slack
      - name: Slack - Notification
        if: ${{ failure() && env.SLACK_WEBHOOK_URL != 0 && (github.ref == 'refs/heads/master' || github.ref == 'refs/heads/main' || github.ref == 'refs/heads/develop') }}
        uses: kestra-io/actions/composite/slack-status@main
        with:
          webhook-url: ${{ secrets.SLACK_WEBHOOK_URL }}
.github/workflows/main.yml (vendored), deleted (83 lines)
@@ -1,83 +0,0 @@
name: Main Workflow

on:
  workflow_dispatch:
    inputs:
      skip-test:
        description: 'Skip test'
        type: choice
        required: true
        default: 'false'
        options:
          - "true"
          - "false"
      plugin-version:
        description: "plugins version"
        required: false
        type: string
  push:
    branches:
      - master
      - main
      - releases/*
      - develop
    tags:
      - v*

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-main
  cancel-in-progress: true

jobs:
  tests:
    name: Execute tests
    uses: ./.github/workflows/workflow-test.yml
    if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}
    with:
      report-status: false

  release:
    name: Release
    needs: [tests]
    if: "!failure() && !cancelled() && !startsWith(github.ref, 'refs/heads/releases')"
    uses: ./.github/workflows/workflow-release.yml
    with:
      plugin-version: ${{ inputs.plugin-version != '' && inputs.plugin-version || (github.ref == 'refs/heads/develop' && 'LATEST-SNAPSHOT' || 'LATEST') }}
    secrets:
      DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
      DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }}
      SONATYPE_USER: ${{ secrets.SONATYPE_USER }}
      SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }}
      SONATYPE_GPG_KEYID: ${{ secrets.SONATYPE_GPG_KEYID }}
      SONATYPE_GPG_PASSWORD: ${{ secrets.SONATYPE_GPG_PASSWORD }}
      SONATYPE_GPG_FILE: ${{ secrets.SONATYPE_GPG_FILE }}
      GH_PERSONAL_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}
      SLACK_RELEASES_WEBHOOK_URL: ${{ secrets.SLACK_RELEASES_WEBHOOK_URL }}

  end:
    runs-on: ubuntu-latest
    needs:
      - release
    if: always()
    env:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
    steps:
      - name: Trigger EE Workflow
        uses: peter-evans/repository-dispatch@v3
        if: github.ref == 'refs/heads/develop' && needs.release.result == 'success'
        with:
          token: ${{ secrets.GH_PERSONAL_TOKEN }}
          repository: kestra-io/kestra-ee
          event-type: "oss-updated"

      # Slack
      - name: Slack - Notification
        uses: Gamesight/slack-workflow-status@master
        if: ${{ always() && env.SLACK_WEBHOOK_URL != 0 }}
        with:
          repo_token: ${{ secrets.GITHUB_TOKEN }}
          slack_webhook_url: ${{ secrets.SLACK_WEBHOOK_URL }}
          name: GitHub Actions
          icon_emoji: ":github-actions:"
          channel: "C02DQ1A7JLR" # _int_git channel
.github/workflows/pre-release.yml (vendored), new file (49 lines)
@@ -0,0 +1,49 @@
name: Pre Release

on:
  push:
    tags:
      - 'v*'
  workflow_dispatch:

jobs:
  build-artifacts:
    name: Build Artifacts
    uses: kestra-io/actions/.github/workflows/kestra-oss-build-artifacts.yml@main

  backend-tests:
    name: Backend tests
    uses: kestra-io/actions/.github/workflows/kestra-oss-backend-tests.yml@main
    secrets:
      GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
      SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
      GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}

  frontend-tests:
    name: Frontend tests
    uses: kestra-io/actions/.github/workflows/kestra-oss-frontend-tests.yml@main
    secrets:
      GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

  publish-maven:
    name: Publish Maven
    needs: [ backend-tests, frontend-tests ]
    if: "!failure() && !cancelled()"
    uses: kestra-io/actions/.github/workflows/kestra-oss-publish-maven.yml@main
    secrets:
      SONATYPE_USER: ${{ secrets.SONATYPE_USER }}
      SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }}
      SONATYPE_GPG_KEYID: ${{ secrets.SONATYPE_GPG_KEYID }}
      SONATYPE_GPG_PASSWORD: ${{ secrets.SONATYPE_GPG_PASSWORD }}
      SONATYPE_GPG_FILE: ${{ secrets.SONATYPE_GPG_FILE }}

  publish-github:
    name: Github Release
    needs: [build-artifacts, backend-tests, frontend-tests]
    if: "!failure() && !cancelled()"
    uses: kestra-io/actions/.github/workflows/kestra-oss-publish-github.yml@main
    secrets:
      GH_PERSONAL_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}
      SLACK_RELEASES_WEBHOOK_URL: ${{ secrets.SLACK_RELEASES_WEBHOOK_URL }}
[workflow file name not captured by the mirror; the "Pull Request - Delete Docker" workflow]
@@ -3,11 +3,11 @@ name: Pull Request - Delete Docker
 on:
   pull_request:
     types: [closed]

 # TODO import a reusable one
 jobs:
   publish:
     name: Pull Request - Delete Docker
-    if: github.repository == github.event.pull_request.head.repo.full_name # prevent running on forks
+    if: github.repository == 'kestra-io/kestra' # prevent running on forks
     runs-on: ubuntu-latest
     steps:
       - uses: dataaxiom/ghcr-cleanup-action@v1
.github/workflows/pull-request.yml (vendored), 33 lines changed
@@ -2,17 +2,12 @@ name: Pull Request Workflow

 on:
   pull_request:
-    branches:
-      - develop

 concurrency:
   group: ${{ github.workflow }}-${{ github.ref_name }}-pr
   cancel-in-progress: true

 jobs:
-  # ********************************************************************************************************************
-  # File changes detection
-  # ********************************************************************************************************************
   file-changes:
     if: ${{ github.event.pull_request.draft == false }}
     name: File changes detection
@@ -33,14 +28,11 @@ jobs:
          - '!{ui,.github}/**'
        token: ${{ secrets.GITHUB_TOKEN }}

-  # ********************************************************************************************************************
-  # Tests
-  # ********************************************************************************************************************
   frontend:
     name: Frontend - Tests
     needs: [file-changes]
     if: "needs.file-changes.outputs.ui == 'true'"
-    uses: ./.github/workflows/workflow-frontend-test.yml
+    uses: kestra-io/actions/.github/workflows/kestra-oss-frontend-tests.yml@main
     secrets:
       GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
       CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
@@ -49,7 +41,7 @@ jobs:
   backend:
     name: Backend - Tests
     needs: file-changes
     if: "needs.file-changes.outputs.backend == 'true'"
-    uses: ./.github/workflows/workflow-backend-test.yml
+    uses: kestra-io/actions/.github/workflows/kestra-oss-backend-tests.yml@main
     secrets:
       GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
       CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
@@ -58,21 +50,8 @@ jobs:

   e2e-tests:
     name: E2E - Tests
-    uses: ./.github/workflows/e2e.yml
+    uses: kestra-io/actions/.github/workflows/kestra-oss-e2e-tests.yml@main

-  end:
-    name: End
-    runs-on: ubuntu-latest
-    if: always()
-    needs: [frontend, backend]
-    steps:
-      # Slack
-      - name: Slack notification
-        uses: Gamesight/slack-workflow-status@master
-        if: ${{ always() && env.SLACK_WEBHOOK_URL != 0 }}
-        with:
-          repo_token: ${{ secrets.GITHUB_TOKEN }}
-          slack_webhook_url: ${{ secrets.SLACK_WEBHOOK_URL }}
-          name: GitHub Actions
-          icon_emoji: ":github-actions:"
-          channel: "C02DQ1A7JLR"
+  generate-pull-request-docker-image:
+    name: Generate PR docker image
+    uses: kestra-io/actions/.github/workflows/kestra-oss-pullrequest-publish-docker.yml@main
.github/workflows/release-docker.yml (vendored), new file (34 lines)
@@ -0,0 +1,34 @@
name: Publish docker

on:
  workflow_dispatch:
    inputs:
      retag-latest:
        description: 'Retag latest Docker images'
        required: true
        type: boolean
        default: false
      retag-lts:
        description: 'Retag LTS Docker images'
        required: true
        type: boolean
        default: false
      plugin-version:
        description: 'Plugin version'
        required: false
        type: string
        default: "LATEST"

jobs:
  publish-docker:
    name: Publish Docker
    if: startsWith(github.ref, 'refs/tags/v')
    uses: kestra-io/actions/.github/workflows/kestra-oss-publish-docker.yml@main
    with:
      plugin-version: ${{ inputs.plugin-version }}
      retag-latest: ${{ inputs.retag-latest }}
      retag-lts: ${{ inputs.retag-lts }}
    secrets:
      DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
      DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }}
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
.github/workflows/vulnerabilities-check.yml (vendored), 30 lines changed
@@ -21,13 +21,6 @@ jobs:
        with:
          fetch-depth: 0

-     # Checkout GitHub Actions
-     - uses: actions/checkout@v5
-       with:
-         repository: kestra-io/actions
-         path: actions
-         ref: main
-
      # Setup build
      - uses: ./actions/.github/actions/setup-build
        id: build
@@ -70,15 +63,8 @@ jobs:
        with:
          fetch-depth: 0

-     # Checkout GitHub Actions
-     - uses: actions/checkout@v5
-       with:
-         repository: kestra-io/actions
-         path: actions
-         ref: main
-
      # Setup build
-     - uses: ./actions/.github/actions/setup-build
+     - uses: kestra-io/actions/composite/setup-build@main
        id: build
        with:
          java-enabled: false
@@ -87,7 +73,7 @@ jobs:

      # Run Trivy image scan for Docker vulnerabilities, see https://github.com/aquasecurity/trivy-action
      - name: Docker Vulnerabilities Check
-       uses: aquasecurity/trivy-action@0.33.0
+       uses: aquasecurity/trivy-action@0.33.1
        with:
          image-ref: kestra/kestra:develop
          format: 'template'
@@ -115,24 +101,16 @@ jobs:
        with:
          fetch-depth: 0

-     # Checkout GitHub Actions
-     - uses: actions/checkout@v5
-       with:
-         repository: kestra-io/actions
-         path: actions
-         ref: main
-
      # Setup build
-     - uses: ./actions/.github/actions/setup-build
+     - uses: kestra-io/actions/composite/setup-build@main
        id: build
        with:
          java-enabled: false
          node-enabled: false
          caches-enabled: true

      # Run Trivy image scan for Docker vulnerabilities, see https://github.com/aquasecurity/trivy-action
      - name: Docker Vulnerabilities Check
-       uses: aquasecurity/trivy-action@0.33.0
+       uses: aquasecurity/trivy-action@0.33.1
        with:
          image-ref: kestra/kestra:latest
          format: table
.github/workflows/workflow-backend-test.yml (vendored), deleted (142 lines)
@@ -1,142 +0,0 @@
name: Backend - Tests

on:
  workflow_call:
    secrets:
      GITHUB_AUTH_TOKEN:
        description: "The GitHub Token."
        required: true
      CODECOV_TOKEN:
        description: 'Codecov Token'
        required: true
      SONAR_TOKEN:
        description: 'Sonar Token'
        required: true
      GOOGLE_SERVICE_ACCOUNT:
        description: 'Google Service Account'
        required: true

permissions:
  contents: write
  checks: write
  actions: read

jobs:
  test:
    name: Backend - Tests
    runs-on: ubuntu-latest
    env:
      GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
      SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
    steps:
      - uses: actions/checkout@v5
        name: Checkout - Current ref
        with:
          fetch-depth: 0

      # Setup build
      - uses: kestra-io/actions/.github/actions/setup-build@main
        name: Setup - Build
        id: build
        with:
          java-enabled: true
          node-enabled: true
          python-enabled: true

      # Services
      - name: Setup - Start docker compose
        shell: bash
        run: docker compose -f docker-compose-ci.yml up -d

      # Gradle check
      - name: Gradle - Build
        if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}
        env:
          GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
        shell: bash
        run: |
          echo $GOOGLE_SERVICE_ACCOUNT | base64 -d > ~/.gcp-service-account.json
          export GOOGLE_APPLICATION_CREDENTIALS=$HOME/.gcp-service-account.json
          ./gradlew check javadoc --parallel

      # report test
      - name: Test - Publish Test Results
        uses: dorny/test-reporter@v2
        if: always()
        with:
          name: Java Tests Report
          reporter: java-junit
          path: '**/build/test-results/test/TEST-*.xml'
          list-suites: 'failed'
          list-tests: 'failed'
          fail-on-error: 'false'
          token: ${{ secrets.GITHUB_AUTH_TOKEN }}

      # Sonar
      - name: Test - Analyze with Sonar
        if: env.SONAR_TOKEN != ''
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_AUTH_TOKEN }}
          SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
        shell: bash
        run: ./gradlew sonar --info

      # GCP
      - name: GCP - Auth with unit test account
        id: auth
        if: always() && env.GOOGLE_SERVICE_ACCOUNT != ''
        continue-on-error: true
        uses: "google-github-actions/auth@v3"
        with:
          credentials_json: "${{ secrets.GOOGLE_SERVICE_ACCOUNT }}"

      - name: GCP - Setup Cloud SDK
        if: env.GOOGLE_SERVICE_ACCOUNT != ''
        uses: "google-github-actions/setup-gcloud@v3"

      # Allure check
      - uses: rlespinasse/github-slug-action@v5
        name: Allure - Generate slug variables

      - name: Allure - Publish report
        uses: andrcuns/allure-publish-action@v2.9.0
        if: always() && env.GOOGLE_SERVICE_ACCOUNT != ''
        continue-on-error: true
        env:
          GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_AUTH_TOKEN }}
          JAVA_HOME: /usr/lib/jvm/default-jvm/
        with:
          storageType: gcs
          resultsGlob: "**/build/allure-results"
          bucket: internal-kestra-host
          baseUrl: "https://internal.dev.kestra.io"
          prefix: ${{ format('{0}/{1}', github.repository, 'allure/java') }}
          copyLatest: true
          ignoreMissingResults: true

      # Jacoco
      - name: Jacoco - Copy reports
        if: env.GOOGLE_SERVICE_ACCOUNT != ''
        continue-on-error: true
        shell: bash
        run: |
          mv build/reports/jacoco/testCodeCoverageReport build/reports/jacoco/test/
          mv build/reports/jacoco/test/testCodeCoverageReport.xml build/reports/jacoco/test/jacocoTestReport.xml
          gsutil -m rsync -d -r build/reports/jacoco/test/ gs://internal-kestra-host/${{ format('{0}/{1}', github.repository, 'jacoco') }}

      # Codecov
      - name: Codecov - Upload coverage reports
        uses: codecov/codecov-action@v5
        if: ${{ !cancelled() }}
        continue-on-error: true
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          flags: backend

      - name: Codecov - Upload test results
        uses: codecov/test-results-action@v1
        if: ${{ !cancelled() }}
        continue-on-error: true
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          flags: backend
.github/workflows/workflow-build-artifacts.yml (vendored), deleted (80 lines)
@@ -1,80 +0,0 @@
name: Build Artifacts

on:
  workflow_call: {}

jobs:
  build:
    name: Build - Artifacts
    runs-on: ubuntu-latest
    outputs:
      docker-tag: ${{ steps.vars.outputs.tag }}
      docker-artifact-name: ${{ steps.vars.outputs.artifact }}
      plugins: ${{ steps.plugins.outputs.plugins }}
    env:
      PLUGIN_VERSION: ${{ github.event.inputs.plugin-version != null && github.event.inputs.plugin-version || 'LATEST' }}
    steps:
      - name: Checkout - Current ref
        uses: actions/checkout@v5
        with:
          fetch-depth: 0

      # Npm
      - name: Setup - Npm install
        shell: bash
        working-directory: ui
        run: npm ci

      # Setup build
      - uses: kestra-io/actions/.github/actions/setup-build@main
        name: Setup - Build
        id: build
        with:
          java-enabled: true
          node-enabled: true

      # Get Plugins List
      - name: Plugins - Get List
        uses: ./.github/actions/plugins-list
        if: "!startsWith(github.ref, 'refs/tags/v')"
        id: plugins-list
        with:
          plugin-version: ${{ env.PLUGIN_VERSION }}

      # Set Plugins List
      - name: Plugins - Set List
        id: plugins
        if: "!startsWith(github.ref, 'refs/tags/v')"
        shell: bash
        run: |
          PLUGINS="${{ steps.plugins-list.outputs.plugins }}"
          TAG=${GITHUB_REF#refs/*/}
          if [[ $TAG = "master" || $TAG == v* ]]; then
            echo "plugins=$PLUGINS" >> $GITHUB_OUTPUT
          else
            echo "plugins=--repositories=https://central.sonatype.com/repository/maven-snapshots/ $PLUGINS" >> $GITHUB_OUTPUT
          fi

      # Build
      - name: Gradle - Build
        shell: bash
        run: |
          ./gradlew executableJar

      - name: Artifacts - Copy exe to image
        shell: bash
        run: |
          cp build/executable/* docker/app/kestra && chmod +x docker/app/kestra

      # Upload artifacts
      - name: Artifacts - Upload JAR
        uses: actions/upload-artifact@v4
        with:
          name: jar
          path: build/libs/

      - name: Artifacts - Upload Executable
        uses: actions/upload-artifact@v4
        with:
          name: exe
          path: build/executable/
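The "Plugins - Set List" step keys off the ref name to decide where plugin artifacts come from. A small illustration of that branch (tag values illustrative):

  TAG="v0.20.0"; [[ $TAG = "master" || $TAG == v* ]] && echo "release ref: use the plugin list as-is"
  TAG="develop"; [[ $TAG = "master" || $TAG == v* ]] || \
    echo "any other ref: prepend --repositories=https://central.sonatype.com/repository/maven-snapshots/"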
.github/workflows/workflow-frontend-test.yml (vendored), deleted (70 lines)
@@ -1,70 +0,0 @@
name: Frontend - Tests

on:
  workflow_call:
    secrets:
      GITHUB_AUTH_TOKEN:
        description: "The GitHub Token."
        required: true
      CODECOV_TOKEN:
        description: 'Codecov Token'
        required: true

env:
  # to save corepack from itself
  COREPACK_INTEGRITY_KEYS: 0

jobs:
  test:
    name: Frontend - Tests
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v5

      - name: Cache Node Modules
        id: cache-node-modules
        uses: actions/cache@v4
        with:
          path: |
            ui/node_modules
          key: modules-${{ hashFiles('ui/package-lock.json') }}

      - name: Cache Playwright Binaries
        id: cache-playwright
        uses: actions/cache@v4
        with:
          path: |
            ~/.cache/ms-playwright
          key: playwright-${{ hashFiles('ui/package-lock.json') }}

      - name: Npm - install
        if: steps.cache-node-modules.outputs.cache-hit != 'true'
        working-directory: ui
        run: npm ci

      - name: Npm - lint
        uses: reviewdog/action-eslint@v1
        with:
          github_token: ${{ secrets.GITHUB_AUTH_TOKEN }}
          reporter: github-pr-review
          workdir: ui

      - name: Npm - Run build
        working-directory: ui
        env:
          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
        run: npm run build

      - name: Run front-end unit tests
        working-directory: ui
        run: npm run test:unit -- --coverage

      - name: Storybook - Install Playwright
        working-directory: ui
        if: steps.cache-playwright.outputs.cache-hit != 'true'
        run: npx playwright install --with-deps

      - name: Run storybook component tests
        working-directory: ui
        run: npm run test:storybook -- --coverage
.github/workflows/workflow-github-release.yml (vendored), deleted (88 lines)
@@ -1,88 +0,0 @@
name: Github - Release

on:
  workflow_dispatch:
  workflow_call:
    secrets:
      GH_PERSONAL_TOKEN:
        description: "The Github personal token."
        required: true
      SLACK_RELEASES_WEBHOOK_URL:
        description: "The Slack webhook URL."
        required: true

jobs:
  publish:
    name: Github - Release
    runs-on: ubuntu-latest
    steps:
      # Check out
      - name: Checkout - Repository
        uses: actions/checkout@v5
        with:
          fetch-depth: 0
          submodules: true

      # Checkout GitHub Actions
      - name: Checkout - Actions
        uses: actions/checkout@v5
        with:
          repository: kestra-io/actions
          sparse-checkout-cone-mode: true
          path: actions
          sparse-checkout: |
            .github/actions

      # Download Exec
      # Must be done after checkout actions
      - name: Artifacts - Download executable
        uses: actions/download-artifact@v5
        if: startsWith(github.ref, 'refs/tags/v')
        with:
          name: exe
          path: build/executable

      - name: Check if current tag is latest
        id: is_latest
        run: |
          latest_tag=$(git tag | grep -E '^v[0-9]+\.[0-9]+\.[0-9]+$' | sed 's/^v//' | sort -V | tail -n1)
          current_tag="${GITHUB_REF_NAME#v}"
          if [ "$current_tag" = "$latest_tag" ]; then
            echo "latest=true" >> $GITHUB_OUTPUT
          else
            echo "latest=false" >> $GITHUB_OUTPUT
          fi
        env:
          GITHUB_REF_NAME: ${{ github.ref_name }}

      # GitHub Release
      - name: Create GitHub release
        uses: ./actions/.github/actions/github-release
        if: ${{ startsWith(github.ref, 'refs/tags/v') }}
        env:
          MAKE_LATEST: ${{ steps.is_latest.outputs.latest }}
          GITHUB_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}
          SLACK_RELEASES_WEBHOOK_URL: ${{ secrets.SLACK_RELEASES_WEBHOOK_URL }}

      # Trigger gha workflow to bump helm chart version
      - name: GitHub - Trigger the Helm chart version bump
        uses: peter-evans/repository-dispatch@v3
        with:
          token: ${{ secrets.GH_PERSONAL_TOKEN }}
          repository: kestra-io/helm-charts
          event-type: update-helm-chart-version
          client-payload: |-
            {
              "new_version": "${{ github.ref_name }}",
              "github_repository": "${{ github.repository }}",
              "github_actor": "${{ github.actor }}"
            }

      - name: Merge Release Notes
        if: ${{ startsWith(github.ref, 'refs/tags/v') }}
        uses: ./actions/.github/actions/github-release-note-merge
        env:
          GITHUB_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}
          RELEASE_TAG: ${{ github.ref_name }}
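The is_latest step leans on GNU sort's version ordering. A quick illustration of why sort -V, rather than plain lexicographic sort, is needed (tags illustrative):

  printf 'v0.9.2\nv0.10.0\nv0.10.1\n' | sed 's/^v//' | sort -V | tail -n1   # 0.10.1
  printf 'v0.9.2\nv0.10.0\nv0.10.1\n' | sed 's/^v//' | sort    | tail -n1   # 0.9.2 (wrong: as text, "0.9" sorts after "0.10")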
.github/workflows/workflow-publish-docker.yml (vendored), deleted (200 lines)
@@ -1,200 +0,0 @@
name: Create Docker images on Release

on:
  workflow_dispatch:
    inputs:
      retag-latest:
        description: 'Retag latest Docker images'
        required: true
        type: choice
        default: "false"
        options:
          - "true"
          - "false"
      release-tag:
        description: 'Kestra Release Tag (by default, deduced with the ref)'
        required: false
        type: string
      plugin-version:
        description: 'Plugin version'
        required: false
        type: string
        default: "LATEST"
      force-download-artifact:
        description: 'Force download artifact'
        required: false
        type: choice
        default: "true"
        options:
          - "true"
          - "false"
  workflow_call:
    inputs:
      plugin-version:
        description: "Plugin version"
        default: 'LATEST'
        required: false
        type: string
      force-download-artifact:
        description: 'Force download artifact'
        required: false
        type: string
        default: "true"
    secrets:
      DOCKERHUB_USERNAME:
        description: "The Dockerhub username."
        required: true
      DOCKERHUB_PASSWORD:
        description: "The Dockerhub password."
        required: true

env:
  PLUGIN_VERSION: ${{ inputs.plugin-version != null && inputs.plugin-version || 'LATEST' }}
jobs:
  plugins:
    name: List Plugins
    runs-on: ubuntu-latest
    outputs:
      plugins: ${{ steps.plugins.outputs.plugins }}
    steps:
      # Checkout
      - uses: actions/checkout@v5

      # Get Plugins List
      - name: Get Plugins List
        uses: ./.github/actions/plugins-list
        id: plugins
        with: # remap LATEST-SNAPSHOT to LATEST
          plugin-version: ${{ env.PLUGIN_VERSION == 'LATEST-SNAPSHOT' && 'LATEST' || env.PLUGIN_VERSION }}

  # ********************************************************************************************************************
  # Build
  # ********************************************************************************************************************
  build-artifacts:
    name: Build Artifacts
    if: ${{ inputs.force-download-artifact == 'true' }}
    uses: ./.github/workflows/workflow-build-artifacts.yml

  docker:
    name: Publish Docker
    needs: [ plugins, build-artifacts ]
    if: always()
    runs-on: ubuntu-latest
    strategy:
      matrix:
        image:
          - name: "-no-plugins"
            plugins: ""
            packages: jattach
            python-libs: ""
          - name: ""
            plugins: ${{needs.plugins.outputs.plugins}}
            packages: python3 python-is-python3 python3-pip curl jattach
            python-libs: kestra
    steps:
      - uses: actions/checkout@v5

      # Vars
      - name: Set image name
        id: vars
        run: |
          if [[ "${{ inputs.release-tag }}" == "" ]]; then
            TAG=${GITHUB_REF#refs/*/}
            echo "tag=${TAG}" >> $GITHUB_OUTPUT
          else
            TAG="${{ inputs.release-tag }}"
            echo "tag=${TAG}" >> $GITHUB_OUTPUT
          fi

          if [[ $GITHUB_REF == refs/tags/* ]]; then
            if [[ $TAG =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
              # this will remove the patch version number
              MINOR_SEMVER=${TAG%.*}
              echo "minor_semver=${MINOR_SEMVER}" >> $GITHUB_OUTPUT
            else
              echo "Tag '$TAG' is not a valid semver (vMAJOR.MINOR.PATCH), skipping minor_semver"
            fi
          fi

          if [[ "${{ env.PLUGIN_VERSION }}" == *"-SNAPSHOT" ]]; then
            echo "plugins=--repositories=https://central.sonatype.com/repository/maven-snapshots/ ${{ matrix.image.plugins }}" >> $GITHUB_OUTPUT;
          else
            echo "plugins=${{ matrix.image.plugins }}" >> $GITHUB_OUTPUT
          fi

      # Download executable from artifact
      - name: Artifacts - Download executable
        uses: actions/download-artifact@v5
        with:
          name: exe
          path: build/executable

      - name: Copy exe to image
        run: |
          cp build/executable/* docker/app/kestra && chmod +x docker/app/kestra

      # Docker setup
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3

      - name: Docker - Fix Qemu
        shell: bash
        run: |
          docker run --rm --privileged multiarch/qemu-user-static --reset -p yes -c yes

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      # Docker Login
      - name: Login to DockerHub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_PASSWORD }}

      # Docker Build and push
      - name: Push to Docker Hub
        uses: docker/build-push-action@v6
        with:
          context: .
          push: true
          tags: ${{ format('kestra/kestra:{0}{1}', steps.vars.outputs.tag, matrix.image.name) }}
          platforms: linux/amd64,linux/arm64
          build-args: |
            KESTRA_PLUGINS=${{ steps.vars.outputs.plugins }}
            APT_PACKAGES=${{ matrix.image.packages }}
            PYTHON_LIBRARIES=${{ matrix.image.python-libs }}

      - name: Install regctl
        if: startsWith(github.ref, 'refs/tags/v')
        uses: regclient/actions/regctl-installer@main

      - name: Retag to minor semver version
        if: startsWith(github.ref, 'refs/tags/v') && steps.vars.outputs.minor_semver != ''
        run: |
          regctl image copy ${{ format('kestra/kestra:{0}{1}', steps.vars.outputs.tag, matrix.image.name) }} ${{ format('kestra/kestra:{0}{1}', steps.vars.outputs.minor_semver, matrix.image.name) }}

      - name: Retag to latest
        if: startsWith(github.ref, 'refs/tags/v') && inputs.retag-latest == 'true'
        run: |
          regctl image copy ${{ format('kestra/kestra:{0}{1}', steps.vars.outputs.tag, matrix.image.name) }} ${{ format('kestra/kestra:latest{0}', matrix.image.name) }}

  end:
    runs-on: ubuntu-latest
    needs:
      - docker
    if: always()
    env:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
    steps:
      # Slack
      - name: Slack notification
        uses: Gamesight/slack-workflow-status@master
        if: ${{ always() && env.SLACK_WEBHOOK_URL != 0 }}
        with:
          repo_token: ${{ secrets.GITHUB_TOKEN }}
          slack_webhook_url: ${{ secrets.SLACK_WEBHOOK_URL }}
          name: GitHub Actions
          icon_emoji: ':github-actions:'
          channel: 'C02DQ1A7JLR' # _int_git channel
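The minor-semver retag relies on bash suffix stripping. A short illustration (tag value illustrative):

  TAG="v0.20.3"
  echo "${TAG%.*}"   # v0.20 -- '%' strips the shortest trailing match of '.*', i.e. the patch number
  # so kestra/kestra:v0.20.3 is also copied to kestra/kestra:v0.20 by regctl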
.github/workflows/workflow-publish-maven.yml (vendored), deleted (57 lines)
@@ -1,57 +0,0 @@
name: Publish - Maven

on:
  workflow_call:
    secrets:
      SONATYPE_USER:
        description: "The Sonatype username."
        required: true
      SONATYPE_PASSWORD:
        description: "The Sonatype password."
        required: true
      SONATYPE_GPG_KEYID:
        description: "The Sonatype GPG key id."
        required: true
      SONATYPE_GPG_PASSWORD:
        description: "The Sonatype GPG password."
        required: true
      SONATYPE_GPG_FILE:
        description: "The Sonatype GPG file."
        required: true

jobs:
  publish:
    name: Publish - Maven
    runs-on: ubuntu-latest
    steps:
      - name: Checkout - Current ref
        uses: actions/checkout@v5

      # Setup build
      - name: Setup - Build
        uses: kestra-io/actions/.github/actions/setup-build@main
        id: build
        with:
          java-enabled: true
          node-enabled: true

      # Publish
      - name: Publish - Release package to Maven Central
        shell: bash
        env:
          ORG_GRADLE_PROJECT_mavenCentralUsername: ${{ secrets.SONATYPE_USER }}
          ORG_GRADLE_PROJECT_mavenCentralPassword: ${{ secrets.SONATYPE_PASSWORD }}
          SONATYPE_GPG_KEYID: ${{ secrets.SONATYPE_GPG_KEYID }}
          SONATYPE_GPG_PASSWORD: ${{ secrets.SONATYPE_GPG_PASSWORD }}
          SONATYPE_GPG_FILE: ${{ secrets.SONATYPE_GPG_FILE}}
        run: |
          mkdir -p ~/.gradle/
          echo "signing.keyId=${SONATYPE_GPG_KEYID}" > ~/.gradle/gradle.properties
          echo "signing.password=${SONATYPE_GPG_PASSWORD}" >> ~/.gradle/gradle.properties
          echo "signing.secretKeyRingFile=${HOME}/.gradle/secring.gpg" >> ~/.gradle/gradle.properties
          echo ${SONATYPE_GPG_FILE} | base64 -d > ~/.gradle/secring.gpg
          ./gradlew publishToMavenCentral

      # Gradle dependency
      - name: Java - Gradle dependency graph
        uses: gradle/actions/dependency-submission@v4
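The signing setup assumes the GPG keyring secret is stored as a base64 string. A sketch of the round trip (the encode side is an assumption; this diff only shows the decode):

  # one-time, locally: encode the secret keyring and store the output as the SONATYPE_GPG_FILE secret
  base64 -w0 secring.gpg
  # in CI, as above: decode it back into the file Gradle's signing plugin expects
  echo "$SONATYPE_GPG_FILE" | base64 -d > ~/.gradle/secring.gpg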
[workflow file name not captured by the mirror; the "Pull Request - Publish Docker" workflow], deleted (78 lines)
@@ -1,78 +0,0 @@
name: Pull Request - Publish Docker

on:
  pull_request:
    branches:
      - develop

jobs:
  build-artifacts:
    name: Build Artifacts
    if: github.repository == github.event.pull_request.head.repo.full_name # prevent running on forks
    uses: ./.github/workflows/workflow-build-artifacts.yml

  publish:
    name: Publish Docker
    if: github.repository == github.event.pull_request.head.repo.full_name # prevent running on forks
    runs-on: ubuntu-latest
    needs: build-artifacts
    env:
      GITHUB_IMAGE_PATH: "ghcr.io/kestra-io/kestra-pr"
    steps:
      - name: Checkout - Current ref
        uses: actions/checkout@v5
        with:
          fetch-depth: 0

      # Docker setup
      - name: Docker - Setup QEMU
        uses: docker/setup-qemu-action@v3

      - name: Docker - Setup Docker Buildx
        uses: docker/setup-buildx-action@v3

      # Docker Login
      - name: Login to GHCR
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      # Build Docker Image
      - name: Artifacts - Download executable
        uses: actions/download-artifact@v5
        with:
          name: exe
          path: build/executable

      - name: Docker - Copy exe to image
        shell: bash
        run: |
          cp build/executable/* docker/app/kestra && chmod +x docker/app/kestra

      - name: Docker - Build image
        uses: docker/build-push-action@v6
        with:
          context: .
          file: ./Dockerfile.pr
          push: true
          tags: ${{ env.GITHUB_IMAGE_PATH }}:${{ github.event.pull_request.number }}
          platforms: linux/amd64,linux/arm64

      # Add comment on pull request
      - name: Add comment to PR
        uses: actions/github-script@v7
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
            await github.rest.issues.createComment({
              issue_number: context.issue.number,
              owner: context.repo.owner,
              repo: context.repo.repo,
              body: `**🐋 Docker image**: \`${{ env.GITHUB_IMAGE_PATH }}:${{ github.event.pull_request.number }}\`\n` +
                `\n` +
                `\`\`\`bash\n` +
                `docker run --pull=always --rm -it -p 8080:8080 --user=root -v /var/run/docker.sock:/var/run/docker.sock -v /tmp:/tmp ${{ env.GITHUB_IMAGE_PATH }}:${{ github.event.pull_request.number }} server local\n` +
                `\`\`\``
            })
.github/workflows/workflow-release.yml
@@ -1,85 +0,0 @@
name: Release

on:
  workflow_dispatch:
    inputs:
      plugin-version:
        description: "plugins version"
        default: 'LATEST'
        required: false
        type: string
      publish-docker:
        description: "Publish Docker image"
        default: 'false'
        required: false
        type: string
  workflow_call:
    inputs:
      plugin-version:
        description: "plugins version"
        default: 'LATEST'
        required: false
        type: string
    secrets:
      DOCKERHUB_USERNAME:
        description: "The Dockerhub username."
        required: true
      DOCKERHUB_PASSWORD:
        description: "The Dockerhub password."
        required: true
      SONATYPE_USER:
        description: "The Sonatype username."
        required: true
      SONATYPE_PASSWORD:
        description: "The Sonatype password."
        required: true
      SONATYPE_GPG_KEYID:
        description: "The Sonatype GPG key id."
        required: true
      SONATYPE_GPG_PASSWORD:
        description: "The Sonatype GPG password."
        required: true
      SONATYPE_GPG_FILE:
        description: "The Sonatype GPG file."
        required: true
      GH_PERSONAL_TOKEN:
        description: "GH personal token."
        required: true
      SLACK_RELEASES_WEBHOOK_URL:
        description: "Slack webhook for releases channel."
        required: true

jobs:
  build-artifacts:
    name: Build - Artifacts
    uses: ./.github/workflows/workflow-build-artifacts.yml

  Docker:
    name: Publish Docker
    needs: build-artifacts
    uses: ./.github/workflows/workflow-publish-docker.yml
    if: github.ref == 'refs/heads/develop' || inputs.publish-docker == 'true'
    with:
      force-download-artifact: 'false'
      plugin-version: ${{ inputs.plugin-version != null && inputs.plugin-version || 'LATEST' }}
    secrets:
      DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
      DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }}

  Maven:
    name: Publish Maven
    uses: ./.github/workflows/workflow-publish-maven.yml
    secrets:
      SONATYPE_USER: ${{ secrets.SONATYPE_USER }}
      SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }}
      SONATYPE_GPG_KEYID: ${{ secrets.SONATYPE_GPG_KEYID }}
      SONATYPE_GPG_PASSWORD: ${{ secrets.SONATYPE_GPG_PASSWORD }}
      SONATYPE_GPG_FILE: ${{ secrets.SONATYPE_GPG_FILE }}

  Github:
    name: Github Release
    needs: build-artifacts
    if: startsWith(github.ref, 'refs/tags/v')
    uses: ./.github/workflows/workflow-github-release.yml
    secrets:
      GH_PERSONAL_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}
      SLACK_RELEASES_WEBHOOK_URL: ${{ secrets.SLACK_RELEASES_WEBHOOK_URL }}
.github/workflows/workflow-test.yml
@@ -1,97 +0,0 @@
name: Tests

on:
  schedule:
    - cron: '0 4 * * 1,2,3,4,5'
  workflow_call:
    inputs:
      report-status:
        description: "Report status of the jobs in outputs"
        type: string
        required: false
        default: false
    outputs:
      frontend_status:
        description: "Status of the frontend job"
        value: ${{ jobs.set-frontend-status.outputs.frontend_status }}
      backend_status:
        description: "Status of the backend job"
        value: ${{ jobs.set-backend-status.outputs.backend_status }}

jobs:
  file-changes:
    name: File changes detection
    runs-on: ubuntu-latest
    timeout-minutes: 60
    outputs:
      ui: ${{ steps.changes.outputs.ui }}
      backend: ${{ steps.changes.outputs.backend }}
    steps:
      - uses: actions/checkout@v5
        if: "!startsWith(github.ref, 'refs/tags/v')"
      - uses: dorny/paths-filter@v3
        if: "!startsWith(github.ref, 'refs/tags/v')"
        id: changes
        with:
          filters: |
            ui:
              - 'ui/**'
            backend:
              - '!{ui,.github}/**'
          token: ${{ secrets.GITHUB_TOKEN }}

  frontend:
    name: Frontend - Tests
    needs: file-changes
    if: "needs.file-changes.outputs.ui == 'true' || startsWith(github.ref, 'refs/tags/v')"
    uses: ./.github/workflows/workflow-frontend-test.yml
    secrets:
      GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

  backend:
    name: Backend - Tests
    needs: file-changes
    if: "needs.file-changes.outputs.backend == 'true' || startsWith(github.ref, 'refs/tags/v')"
    uses: ./.github/workflows/workflow-backend-test.yml
    secrets:
      GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
      SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
      GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}

  # Output every job status
  # To be used in other workflows
  report-status:
    name: Report Status
    runs-on: ubuntu-latest
    needs: [ frontend, backend ]
    if: always() && (inputs.report-status == 'true')
    outputs:
      frontend_status: ${{ steps.set-frontend-status.outputs.frontend_status }}
      backend_status: ${{ steps.set-backend-status.outputs.backend_status }}
    steps:
      - id: set-frontend-status
        name: Set frontend job status
        run: echo "::set-output name=frontend_status::${{ needs.frontend.result }}"

      - id: set-backend-status
        name: Set backend job status
        run: echo "::set-output name=backend_status::${{ needs.backend.result }}"

  notify:
    name: Notify - Slack
    runs-on: ubuntu-latest
    needs: [ frontend, backend ]
    if: github.event_name == 'schedule'
    steps:
      - name: Notify failed CI
        id: send-ci-failed
        if: |
          always() && (needs.frontend.result != 'success' ||
          needs.backend.result != 'success')
        uses: kestra-io/actions/.github/actions/send-ci-failed@main
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
README.md
@@ -19,9 +19,12 @@
<br />

<p align="center">
    <a href="https://x.com/kestra_io"><img height="25" src="https://kestra.io/twitter.svg" alt="X(formerly Twitter)" /></a>
    <a href="https://www.linkedin.com/company/kestra/"><img height="25" src="https://kestra.io/linkedin.svg" alt="linkedin" /></a>
    <a href="https://www.youtube.com/@kestra-io"><img height="25" src="https://kestra.io/youtube.svg" alt="youtube" /></a>
    <a href="https://twitter.com/kestra_io" style="margin: 0 10px;">
        <img height="25" src="https://kestra.io/twitter.svg" alt="twitter" width="35" height="25" /></a>
    <a href="https://www.linkedin.com/company/kestra/" style="margin: 0 10px;">
        <img height="25" src="https://kestra.io/linkedin.svg" alt="linkedin" width="35" height="25" /></a>
    <a href="https://www.youtube.com/@kestra-io" style="margin: 0 10px;">
        <img height="25" src="https://kestra.io/youtube.svg" alt="youtube" width="35" height="25" /></a>
</p>

<p align="center">
@@ -33,10 +36,10 @@

<p align="center">
    <a href="https://go.kestra.io/video/product-overview" target="_blank">
        <img src="https://kestra.io/startvideo.png" alt="Get started in 4 minutes with Kestra" width="640px" />
        <img src="https://kestra.io/startvideo.png" alt="Get started in 3 minutes with Kestra" width="640px" />
    </a>
</p>
<p align="center" style="color:grey;"><i>Click on the image to learn how to get started with Kestra in 4 minutes.</i></p>
<p align="center" style="color:grey;"><i>Click on the image to learn how to get started with Kestra in 3 minutes.</i></p>

## 🌟 What is Kestra?
build.gradle
@@ -25,19 +25,19 @@ plugins {
    id 'jacoco-report-aggregation'

    // helper
    id "com.github.ben-manes.versions" version "0.52.0"
    id "com.github.ben-manes.versions" version "0.53.0"

    // front
    id 'com.github.node-gradle.node' version '7.1.0'

    // release
    id 'net.researchgate.release' version '3.1.0'
    id "com.gorylenko.gradle-git-properties" version "2.5.2"
    id "com.gorylenko.gradle-git-properties" version "2.5.3"
    id 'signing'
    id "com.vanniktech.maven.publish" version "0.34.0"

    // OWASP dependency check
    id "org.owasp.dependencycheck" version "12.1.3" apply false
    id "org.owasp.dependencycheck" version "12.1.5" apply false
}

idea {
@@ -168,8 +168,9 @@ allprojects {
/**********************************************************************************************************************\
 * Test
 **********************************************************************************************************************/
subprojects {
    if (it.name != 'platform' && it.name != 'jmh-benchmarks') {
subprojects { subProj ->
    if (subProj.name != 'platform' && subProj.name != 'jmh-benchmarks') {
        apply plugin: "com.adarshr.test-logger"

        java {
@@ -207,6 +208,13 @@ subprojects {

        test {
            useJUnitPlatform()
            reports {
                junitXml.required = true
                junitXml.outputPerTestCase = true
                junitXml.mergeReruns = true
                junitXml.includeSystemErrLog = true
                junitXml.outputLocation = layout.buildDirectory.dir("test-results/test")
            }

            // set Xmx for test workers
            maxHeapSize = '4g'
@@ -222,6 +230,15 @@ subprojects {
            environment 'SECRET_PASSWORD', "cGFzc3dvcmQ="
            environment 'ENV_TEST1', "true"
            environment 'ENV_TEST2', "Pass by env"

            if (subProj.name == 'core' || subProj.name == 'jdbc-h2' || subProj.name == 'jdbc-mysql' || subProj.name == 'jdbc-postgres') {
                // JUnit 5 parallel settings
                systemProperty 'junit.jupiter.execution.parallel.enabled', 'true'
                systemProperty 'junit.jupiter.execution.parallel.mode.default', 'concurrent'
                systemProperty 'junit.jupiter.execution.parallel.mode.classes.default', 'same_thread'
                systemProperty 'junit.jupiter.execution.parallel.config.strategy', 'dynamic'
            }
        }

        testlogger {
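For illustration, a minimal JUnit 5 test class (hypothetical, not from the repository) showing what this property combination means in practice: with `enabled=true`, `mode.default=concurrent` and `mode.classes.default=same_thread`, top-level classes start sequentially while methods inside a class may run concurrently, and any class can opt out explicitly.

import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.parallel.Execution;
import org.junit.jupiter.api.parallel.ExecutionMode;

// Hypothetical example: opt this class out of method-level parallelism.
@Execution(ExecutionMode.SAME_THREAD)
class NotThreadSafeTest {
    @Test
    void usesSharedMutableState() {
        // runs serially even when the rest of the suite is parallel
    }
}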
@@ -40,5 +40,6 @@ dependencies {
    implementation project(":worker")

    //test
    testImplementation project(':tests')
    testImplementation "org.wiremock:wiremock-jetty12"
}
@@ -49,7 +49,7 @@ import java.util.concurrent.Callable;
@Introspected
public class App implements Callable<Integer> {
    public static void main(String[] args) {
        execute(App.class, args);
        execute(App.class, new String[] { Environment.CLI }, args);
    }

    @Override
@@ -57,13 +57,13 @@ public class App implements Callable<Integer> {
        return PicocliRunner.call(App.class, "--help");
    }

    protected static void execute(Class<?> cls, String... args) {
    protected static void execute(Class<?> cls, String[] environments, String... args) {
        // Log Bridge
        SLF4JBridgeHandler.removeHandlersForRootLogger();
        SLF4JBridgeHandler.install();

        // Init ApplicationContext
        ApplicationContext applicationContext = App.applicationContext(cls, args);
        ApplicationContext applicationContext = App.applicationContext(cls, environments, args);

        // Call Picocli command
        int exitCode = 0;
@@ -80,6 +80,7 @@ public class App implements Callable<Integer> {
        System.exit(Objects.requireNonNullElse(exitCode, 0));
    }

    /**
     * Create an {@link ApplicationContext} with additional properties based on configuration files (--config) and
     * forced Properties from current command.
@@ -88,12 +89,13 @@ public class App implements Callable<Integer> {
     * @return the application context created
     */
    protected static ApplicationContext applicationContext(Class<?> mainClass,
                                                           String[] environments,
                                                           String[] args) {
        ApplicationContextBuilder builder = ApplicationContext
            .builder()
            .mainClass(mainClass)
            .environments(Environment.CLI);
            .environments(environments);

        CommandLine cmd = new CommandLine(mainClass, CommandLine.defaultFactory());
        continueOnParsingErrors(cmd);
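A short sketch (illustrative, not repository code) of the effect of the new environments parameter: callers now decide which Micronaut environments are active instead of the hard-coded Environment.CLI, which is what lets the tests below pass a custom array.

import io.micronaut.context.ApplicationContext;
import io.micronaut.context.env.Environment;

public class EnvDemo {
    public static void main(String[] args) {
        // Roughly what App.applicationContext(...) now receives explicitly.
        try (ApplicationContext ctx = ApplicationContext.builder()
                .mainClass(EnvDemo.class)
                .environments(Environment.CLI, Environment.TEST) // e.g. a test can add TEST
                .start()) {
            System.out.println(ctx.getEnvironment().getActiveNames());
        }
    }
}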
@@ -2,19 +2,27 @@ package io.kestra.cli.commands.servers;

import io.kestra.cli.AbstractCommand;
import io.kestra.core.contexts.KestraContext;
import jakarta.annotation.PostConstruct;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;

abstract public class AbstractServerCommand extends AbstractCommand implements ServerCommandInterface {
@Slf4j
public abstract class AbstractServerCommand extends AbstractCommand implements ServerCommandInterface {
    @CommandLine.Option(names = {"--port"}, description = "The port to bind")
    Integer serverPort;

    @Override
    public Integer call() throws Exception {
        log.info("Machine information: {} available cpu(s), {}MB max memory, Java version {}", Runtime.getRuntime().availableProcessors(), maxMemoryInMB(), Runtime.version());

        this.shutdownHook(true, () -> KestraContext.getContext().shutdown());

        return super.call();
    }

    private long maxMemoryInMB() {
        return Runtime.getRuntime().maxMemory() / 1024 / 1024;
    }

    protected static int defaultWorkerThread() {
        return Runtime.getRuntime().availableProcessors() * 8;
    }
@@ -3,7 +3,7 @@ package io.kestra.cli.commands.servers;
import com.google.common.collect.ImmutableMap;
import io.kestra.core.models.ServerType;
import io.kestra.core.runners.ExecutorInterface;
import io.kestra.executor.SkipExecutionService;
import io.kestra.core.services.SkipExecutionService;
import io.kestra.core.services.StartExecutorService;
import io.kestra.core.utils.Await;
import io.micronaut.context.ApplicationContext;
@@ -4,10 +4,13 @@ import com.google.common.collect.ImmutableMap;
import io.kestra.core.models.ServerType;
import io.kestra.core.runners.Indexer;
import io.kestra.core.utils.Await;
import io.kestra.core.services.SkipExecutionService;
import io.micronaut.context.ApplicationContext;
import jakarta.inject.Inject;
import picocli.CommandLine;

import java.util.Collections;
import java.util.List;
import java.util.Map;

@CommandLine.Command(
@@ -17,6 +20,11 @@ import java.util.Map;
public class IndexerCommand extends AbstractServerCommand {
    @Inject
    private ApplicationContext applicationContext;

    @Inject
    private SkipExecutionService skipExecutionService;

    @CommandLine.Option(names = {"--skip-indexer-records"}, split = ",", description = "a list of indexer record keys, separated by a comma; for troubleshooting purpose only")
    private List<String> skipIndexerRecords = Collections.emptyList();

    @SuppressWarnings("unused")
    public static Map<String, Object> propertiesOverrides() {
@@ -27,6 +35,8 @@ public class IndexerCommand extends AbstractServerCommand {

    @Override
    public Integer call() throws Exception {
        this.skipExecutionService.setSkipIndexerRecords(skipIndexerRecords);

        super.call();

        Indexer indexer = applicationContext.getBean(Indexer.class);
@@ -7,7 +7,7 @@ import io.kestra.core.contexts.KestraContext;
import io.kestra.core.models.ServerType;
import io.kestra.core.repositories.LocalFlowRepositoryLoader;
import io.kestra.cli.StandAloneRunner;
import io.kestra.executor.SkipExecutionService;
import io.kestra.core.services.SkipExecutionService;
import io.kestra.core.services.StartExecutorService;
import io.kestra.core.utils.Await;
import io.micronaut.context.ApplicationContext;
@@ -63,6 +63,9 @@ public class StandAloneCommand extends AbstractServerCommand {
    @CommandLine.Option(names = {"--skip-tenants"}, split = ",", description = "a list of tenants to skip, separated by a comma; for troubleshooting purpose only")
    private List<String> skipTenants = Collections.emptyList();

    @CommandLine.Option(names = {"--skip-indexer-records"}, split = ",", description = "a list of indexer record keys, separated by a comma; for troubleshooting purpose only")
    private List<String> skipIndexerRecords = Collections.emptyList();

    @CommandLine.Option(names = {"--no-tutorials"}, description = "Flag to disable auto-loading of tutorial flows.")
    boolean tutorialsDisabled = false;

@@ -93,6 +96,7 @@ public class StandAloneCommand extends AbstractServerCommand {
        this.skipExecutionService.setSkipFlows(skipFlows);
        this.skipExecutionService.setSkipNamespaces(skipNamespaces);
        this.skipExecutionService.setSkipTenants(skipTenants);
        this.skipExecutionService.setSkipIndexerRecords(skipIndexerRecords);
        this.startExecutorService.applyOptions(startExecutors, notStartExecutors);

        KestraContext.getContext().injectWorkerConfigs(workerThread, null);
@@ -5,12 +5,15 @@ import io.kestra.core.models.ServerType;
import io.kestra.core.runners.Indexer;
import io.kestra.core.utils.Await;
import io.kestra.core.utils.ExecutorsUtils;
import io.kestra.core.services.SkipExecutionService;
import io.micronaut.context.ApplicationContext;
import jakarta.inject.Inject;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
import picocli.CommandLine.Option;

import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;

@@ -28,11 +31,17 @@ public class WebServerCommand extends AbstractServerCommand {
    @Inject
    private ExecutorsUtils executorsUtils;

    @Inject
    private SkipExecutionService skipExecutionService;

    @Option(names = {"--no-tutorials"}, description = "Flag to disable auto-loading of tutorial flows.")
    boolean tutorialsDisabled = false;
    private boolean tutorialsDisabled = false;

    @Option(names = {"--no-indexer"}, description = "Flag to disable starting an embedded indexer.")
    boolean indexerDisabled = false;
    private boolean indexerDisabled = false;

    @CommandLine.Option(names = {"--skip-indexer-records"}, split = ",", description = "a list of indexer record keys, separated by a comma; for troubleshooting purpose only")
    private List<String> skipIndexerRecords = Collections.emptyList();

    @Override
    public boolean isFlowAutoLoadEnabled() {
@@ -48,6 +57,8 @@ public class WebServerCommand extends AbstractServerCommand {

    @Override
    public Integer call() throws Exception {
        this.skipExecutionService.setSkipIndexerRecords(skipIndexerRecords);

        super.call();

        // start the indexer
@@ -262,6 +262,8 @@ public class FileChangedEventListener {
    }

    private String getTenantIdFromPath(Path path) {
        // FIXME there is probably a bug here when a tenant has '_' in its name,
        // a valid tenant name is defined with the following regex: "^[a-z0-9][a-z0-9_-]*"
        return path.getFileName().toString().split("_")[0];
    }
}
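A tiny sketch (hypothetical file name) of the failure mode the FIXME describes: tenant ids may themselves contain '_', so taking the first '_'-separated token truncates such tenants.

public class TenantIdParseDemo {
    public static void main(String[] args) {
        String fileName = "my_tenant_myflow.yaml"; // hypothetical file for tenant "my_tenant"
        String tenantId = fileName.split("_")[0];  // the parsing logic shown above
        System.out.println(tenantId);              // prints "my", not "my_tenant"
    }
}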
@@ -167,6 +167,9 @@ kestra:
    open-urls:
      - "/ping"
      - "/api/v1/executions/webhook/"
      - "/api/v1/main/executions/webhook/"
      - "/api/v1/*/executions/webhook/"
      - "/api/v1/basicAuthValidationErrors"

  preview:
    initial-rows: 100
@@ -37,7 +37,7 @@ class AppTest {

    final String[] args = new String[]{"server", serverType, "--help"};

    try (ApplicationContext ctx = App.applicationContext(App.class, args)) {
    try (ApplicationContext ctx = App.applicationContext(App.class, new String[] { Environment.CLI }, args)) {
        new CommandLine(App.class, new MicronautFactory(ctx)).execute(args);

        assertTrue(ctx.getProperty("kestra.server-type", ServerType.class).isEmpty());
@@ -52,7 +52,7 @@ class AppTest {

    final String[] argsWithMissingParams = new String[]{"flow", "namespace", "update"};

    try (ApplicationContext ctx = App.applicationContext(App.class, argsWithMissingParams)) {
    try (ApplicationContext ctx = App.applicationContext(App.class, new String[] { Environment.CLI }, argsWithMissingParams)) {
        new CommandLine(App.class, new MicronautFactory(ctx)).execute(argsWithMissingParams);

        assertThat(out.toString()).startsWith("Missing required parameters: ");
@@ -1,14 +1,15 @@
package io.kestra.cli.services;

import io.kestra.core.junit.annotations.FlakyTest;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.utils.Await;
import io.kestra.core.utils.TestsUtils;
import io.micronaut.test.extensions.junit5.annotation.MicronautTest;
import jakarta.inject.Inject;
import org.apache.commons.io.FileUtils;
import org.junit.jupiter.api.*;
import org.junitpioneer.jupiter.RetryingTest;

import java.io.IOException;
import java.nio.file.Files;
@@ -18,8 +19,8 @@ import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import org.junitpioneer.jupiter.RetryingTest;

import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
import static io.kestra.core.utils.Rethrow.throwRunnable;
import static org.assertj.core.api.Assertions.assertThat;

@@ -57,10 +58,12 @@ class FileChangedEventListenerTest {
        }
    }

    @RetryingTest(5) // Flaky on CI but always pass locally
    @FlakyTest
    @RetryingTest(2)
    void test() throws IOException, TimeoutException {
        var tenant = TestsUtils.randomTenant(FileChangedEventListenerTest.class.getSimpleName(), "test");
        // remove the flow if it already exists
        flowRepository.findByIdWithSource(MAIN_TENANT, "io.kestra.tests.watch", "myflow").ifPresent(flow -> flowRepository.delete(flow));
        flowRepository.findByIdWithSource(tenant, "io.kestra.tests.watch", "myflow").ifPresent(flow -> flowRepository.delete(flow));

        // create a basic flow
        String flow = """
@@ -73,14 +76,14 @@ class FileChangedEventListenerTest {
                message: Hello World! 🚀
            """;

        GenericFlow genericFlow = GenericFlow.fromYaml(MAIN_TENANT, flow);
        GenericFlow genericFlow = GenericFlow.fromYaml(tenant, flow);
        Files.write(Path.of(FILE_WATCH + "/" + genericFlow.uidWithoutRevision() + ".yaml"), flow.getBytes());
        Await.until(
            () -> flowRepository.findById(MAIN_TENANT, "io.kestra.tests.watch", "myflow").isPresent(),
            () -> flowRepository.findById(tenant, "io.kestra.tests.watch", "myflow").isPresent(),
            Duration.ofMillis(100),
            Duration.ofSeconds(10)
        );
        Flow myflow = flowRepository.findById(MAIN_TENANT, "io.kestra.tests.watch", "myflow").orElseThrow();
        Flow myflow = flowRepository.findById(tenant, "io.kestra.tests.watch", "myflow").orElseThrow();
        assertThat(myflow.getTasks()).hasSize(1);
        assertThat(myflow.getTasks().getFirst().getId()).isEqualTo("hello");
        assertThat(myflow.getTasks().getFirst().getType()).isEqualTo("io.kestra.plugin.core.log.Log");
@@ -88,16 +91,18 @@ class FileChangedEventListenerTest {
        // delete the flow
        Files.delete(Path.of(FILE_WATCH + "/" + genericFlow.uidWithoutRevision() + ".yaml"));
        Await.until(
            () -> flowRepository.findById(MAIN_TENANT, "io.kestra.tests.watch", "myflow").isEmpty(),
            () -> flowRepository.findById(tenant, "io.kestra.tests.watch", "myflow").isEmpty(),
            Duration.ofMillis(100),
            Duration.ofSeconds(10)
        );
    }

    @RetryingTest(5) // Flaky on CI but always pass locally
    @FlakyTest
    @RetryingTest(2)
    void testWithPluginDefault() throws IOException, TimeoutException {
        var tenant = TestsUtils.randomTenant(FileChangedEventListenerTest.class.getName(), "testWithPluginDefault");
        // remove the flow if it already exists
        flowRepository.findByIdWithSource(MAIN_TENANT, "io.kestra.tests.watch", "pluginDefault").ifPresent(flow -> flowRepository.delete(flow));
        flowRepository.findByIdWithSource(tenant, "io.kestra.tests.watch", "pluginDefault").ifPresent(flow -> flowRepository.delete(flow));

        // create a flow with plugin default
        String pluginDefault = """
@@ -113,14 +118,14 @@ class FileChangedEventListenerTest {
                values:
                  message: Hello World!
            """;
        GenericFlow genericFlow = GenericFlow.fromYaml(MAIN_TENANT, pluginDefault);
        GenericFlow genericFlow = GenericFlow.fromYaml(tenant, pluginDefault);
        Files.write(Path.of(FILE_WATCH + "/" + genericFlow.uidWithoutRevision() + ".yaml"), pluginDefault.getBytes());
        Await.until(
            () -> flowRepository.findById(MAIN_TENANT, "io.kestra.tests.watch", "pluginDefault").isPresent(),
            () -> flowRepository.findById(tenant, "io.kestra.tests.watch", "pluginDefault").isPresent(),
            Duration.ofMillis(100),
            Duration.ofSeconds(10)
        );
        Flow pluginDefaultFlow = flowRepository.findById(MAIN_TENANT, "io.kestra.tests.watch", "pluginDefault").orElseThrow();
        Flow pluginDefaultFlow = flowRepository.findById(tenant, "io.kestra.tests.watch", "pluginDefault").orElseThrow();
        assertThat(pluginDefaultFlow.getTasks()).hasSize(1);
        assertThat(pluginDefaultFlow.getTasks().getFirst().getId()).isEqualTo("helloWithDefault");
        assertThat(pluginDefaultFlow.getTasks().getFirst().getType()).isEqualTo("io.kestra.plugin.core.log.Log");
@@ -128,7 +133,7 @@ class FileChangedEventListenerTest {
        // delete both files
        Files.delete(Path.of(FILE_WATCH + "/" + genericFlow.uidWithoutRevision() + ".yaml"));
        Await.until(
            () -> flowRepository.findById(MAIN_TENANT, "io.kestra.tests.watch", "pluginDefault").isEmpty(),
            () -> flowRepository.findById(tenant, "io.kestra.tests.watch", "pluginDefault").isEmpty(),
            Duration.ofMillis(100),
            Duration.ofSeconds(10)
        );
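The Await.until(...) calls above poll a condition at a fixed interval until a timeout. A minimal self-contained equivalent (a sketch, not Kestra's implementation) could look like:

import java.time.Duration;
import java.util.concurrent.TimeoutException;
import java.util.function.BooleanSupplier;

final class Poll {
    // Polls `condition` every `interval` until it returns true or `timeout` elapses.
    static void until(BooleanSupplier condition, Duration interval, Duration timeout)
            throws TimeoutException, InterruptedException {
        long deadline = System.nanoTime() + timeout.toNanos();
        while (!condition.getAsBoolean()) {
            if (System.nanoTime() > deadline) {
                throw new TimeoutException("Condition not met within " + timeout);
            }
            Thread.sleep(interval.toMillis());
        }
    }
}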
@@ -84,7 +84,7 @@ dependencies {

    testImplementation "org.testcontainers:testcontainers:1.21.3"
    testImplementation "org.testcontainers:junit-jupiter:1.21.3"
    testImplementation "org.bouncycastle:bcpkix-jdk18on:1.81"
    testImplementation "org.bouncycastle:bcpkix-jdk18on"

    testImplementation "org.wiremock:wiremock-jetty12"
}
@@ -118,7 +118,7 @@ public class JsonSchemaGenerator {
        removeRequiredOnPropsWithDefaults(objectNode);

        return MAPPER.convertValue(objectNode, MAP_TYPE_REFERENCE);
    } catch (IllegalArgumentException e) {
    } catch (Exception e) {
        throw new IllegalArgumentException("Unable to generate jsonschema for '" + cls.getName() + "'", e);
    }
}
@@ -3,30 +3,88 @@ package io.kestra.core.events;
import io.micronaut.core.annotation.Nullable;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.context.ServerRequestContext;
import lombok.AllArgsConstructor;
import lombok.Getter;

@AllArgsConstructor
import java.util.Objects;

@Getter
public class CrudEvent<T> {
    T model;
    private final T model;
    @Nullable
    T previousModel;
    CrudEventType type;
    HttpRequest<?> request;

    private final T previousModel;
    private final CrudEventType type;
    private final HttpRequest<?> request;

    /**
     * Static helper method for creating a new {@link CrudEventType#CREATE} CrudEvent.
     *
     * @param model the new created model.
     * @param <T> type of the model.
     * @return the new {@link CrudEvent}.
     */
    public static <T> CrudEvent<T> create(T model) {
        Objects.requireNonNull(model, "Can't create CREATE event with a null model");
        return new CrudEvent<>(model, null, CrudEventType.CREATE);
    }

    /**
     * Static helper method for creating a new {@link CrudEventType#DELETE} CrudEvent.
     *
     * @param model the deleted model.
     * @param <T> type of the model.
     * @return the new {@link CrudEvent}.
     */
    public static <T> CrudEvent<T> delete(T model) {
        Objects.requireNonNull(model, "Can't create DELETE event with a null model");
        return new CrudEvent<>(null, model, CrudEventType.DELETE);
    }

    /**
     * Static helper method for creating a new CrudEvent.
     *
     * @param before the model before the update.
     * @param after the model after the update.
     * @param <T> type of the model.
     * @return the new {@link CrudEvent}.
     */
    public static <T> CrudEvent<T> of(T before, T after) {
        if (before == null && after == null) {
            throw new IllegalArgumentException("Both before and after cannot be null");
        }

        if (before == null) {
            return create(after);
        }

        if (after == null) {
            return delete(before);
        }

        return new CrudEvent<>(after, before, CrudEventType.UPDATE);
    }

    /**
     * @deprecated use the static factory methods.
     */
    @Deprecated
    public CrudEvent(T model, CrudEventType type) {
        this.model = model;
        this.type = type;
        this.previousModel = null;
        this.request = ServerRequestContext.currentRequest().orElse(null);
        this(
            CrudEventType.DELETE.equals(type) ? null : model,
            CrudEventType.DELETE.equals(type) ? model : null,
            type,
            ServerRequestContext.currentRequest().orElse(null)
        );
    }

    public CrudEvent(T model, T previousModel, CrudEventType type) {
        this(model, previousModel, type, ServerRequestContext.currentRequest().orElse(null));
    }

    public CrudEvent(T model, T previousModel, CrudEventType type, HttpRequest<?> request) {
        this.model = model;
        this.previousModel = previousModel;
        this.type = type;
        this.request = ServerRequestContext.currentRequest().orElse(null);
        this.request = request;
    }
}
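A usage sketch of the new factories (the String payloads are placeholders for real models; it assumes the class above is on the classpath):

public class CrudEventDemo {
    public static void main(String[] args) {
        // of(before, after) dispatches on nullability:
        //   (null, x) -> CREATE, (x, null) -> DELETE, (x, y) -> UPDATE,
        //   (null, null) -> IllegalArgumentException.
        CrudEvent<String> created = CrudEvent.of(null, "flow-v1");      // CREATE
        CrudEvent<String> updated = CrudEvent.of("flow-v1", "flow-v2"); // UPDATE, previousModel = "flow-v1"
        CrudEvent<String> deleted = CrudEvent.of("flow-v2", null);      // DELETE, model is null
        System.out.println(created.getType() + " " + updated.getType() + " " + deleted.getType());
    }
}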
@@ -1,13 +1,16 @@
package io.kestra.core.models;

import io.kestra.core.utils.MapUtils;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.annotation.Nullable;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.NotEmpty;

import java.util.*;
import java.util.function.Predicate;
import java.util.stream.Collectors;

public record Label(@NotNull String key, @NotNull String value) {
@Schema(description = "A key/value pair that can be attached to a Flow or Execution. Labels are often used to organize and categorize objects.")
public record Label(@NotEmpty String key, @NotEmpty String value) {
    public static final String SYSTEM_PREFIX = "system.";

    // system labels
@@ -41,7 +44,7 @@ public record Label(@NotNull String key, @NotNull String value) {
    public static Map<String, String> toMap(@Nullable List<Label> labels) {
        if (labels == null || labels.isEmpty()) return Collections.emptyMap();
        return labels.stream()
            .filter(label -> label.value() != null && label.key() != null)
            .filter(label -> label.value() != null && !label.value().isEmpty() && label.key() != null && !label.key().isEmpty())
            // using an accumulator in case labels with the same key exist: the second is kept
            .collect(Collectors.toMap(Label::key, Label::value, (first, second) -> second, LinkedHashMap::new));
    }
@@ -56,6 +59,7 @@ public record Label(@NotNull String key, @NotNull String value) {
    public static List<Label> deduplicate(@Nullable List<Label> labels) {
        if (labels == null || labels.isEmpty()) return Collections.emptyList();
        return toMap(labels).entrySet().stream()
            .filter(getEntryNotEmptyPredicate())
            .map(entry -> new Label(entry.getKey(), entry.getValue()))
            .collect(Collectors.toCollection(ArrayList::new));
    }
@@ -70,6 +74,7 @@ public record Label(@NotNull String key, @NotNull String value) {
        if (map == null || map.isEmpty()) return List.of();
        return map.entrySet()
            .stream()
            .filter(getEntryNotEmptyPredicate())
            .map(entry -> new Label(entry.getKey(), entry.getValue()))
            .toList();
    }
@@ -88,4 +93,14 @@ public record Label(@NotNull String key, @NotNull String value) {
        }
        return map;
    }

    /**
     * Provides a predicate for non-empty entries.
     *
     * @return The non-empty filter
     */
    public static Predicate<Map.Entry<String, String>> getEntryNotEmptyPredicate() {
        return entry -> entry.getKey() != null && !entry.getKey().isEmpty() &&
            entry.getValue() != null && !entry.getValue().isEmpty();
    }
}
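A sketch of the resulting semantics (values are made up): empty keys or values are dropped, and for duplicate keys the last occurrence wins while the LinkedHashMap accumulator keeps insertion order.

import java.util.List;
import java.util.Map;

public class LabelDedupDemo {
    public static void main(String[] args) {
        List<Label> labels = List.of(
            new Label("env", "dev"),
            new Label("env", "prod"),  // duplicate key: the second value is kept
            new Label("", "ignored")   // empty key: filtered out
        );
        Map<String, String> map = Label.toMap(labels); // {env=prod}
        System.out.println(map);
    }
}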
@@ -1,16 +1,33 @@
package io.kestra.core.models;

import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.validation.Valid;
import jakarta.validation.constraints.Pattern;

import java.util.List;
import java.util.Map;

/**
 * Interface that can be implemented by classes supporting plugin versioning.
 *
 * @see Plugin
 */
public interface PluginVersioning {

    @Pattern(regexp="\\d+\\.\\d+\\.\\d+(-[a-zA-Z0-9-]+)?|([a-zA-Z0-9]+)")
    @Schema(title = "The version of the plugin to use.")
    String TITLE = "Plugin Version";
    String DESCRIPTION = """
        Defines the version of the plugin to use.

        The version must follow the Semantic Versioning (SemVer) specification:
        - A single-digit MAJOR version (e.g., `1`).
        - A MAJOR.MINOR version (e.g., `1.1`).
        - A MAJOR.MINOR.PATCH version, optionally with any qualifier
          (e.g., `1.1.2`, `1.1.0-SNAPSHOT`).
        """;

    @Schema(
        title = TITLE,
        description = DESCRIPTION
    )
    String getVersion();
}
@@ -254,19 +254,7 @@ public record QueryFilter(
     *
     * @return List of {@code ResourceField} with resource names, fields, and operations.
     */
    public static List<ResourceField> asResourceList() {
        return Arrays.stream(values())
            .map(Resource::toResourceField)
            .toList();
    }

    private static ResourceField toResourceField(Resource resource) {
        List<FieldOp> fieldOps = resource.supportedField().stream()
            .map(Resource::toFieldInfo)
            .toList();
        return new ResourceField(resource.name().toLowerCase(), fieldOps);
    }

    private static FieldOp toFieldInfo(Field field) {
        List<Operation> operations = field.supportedOp().stream()
            .map(Resource::toOperation)
@@ -279,9 +267,6 @@ public record QueryFilter(
        }
    }

    public record ResourceField(String name, List<FieldOp> fields) {
    }

    public record FieldOp(String name, String value, List<Operation> operations) {
    }
@@ -17,31 +17,12 @@
@Introspected
public class ExecutionUsage {
    private final List<DailyExecutionStatistics> dailyExecutionsCount;
    private final List<DailyExecutionStatistics> dailyTaskRunsCount;

    public static ExecutionUsage of(final String tenantId,
                                    final ExecutionRepositoryInterface executionRepository,
                                    final ZonedDateTime from,
                                    final ZonedDateTime to) {
        List<DailyExecutionStatistics> dailyTaskRunsCount = null;

        try {
            dailyTaskRunsCount = executionRepository.dailyStatistics(
                null,
                tenantId,
                null,
                null,
                null,
                from,
                to,
                DateUtils.GroupType.DAY,
                null,
                true);
        } catch (UnsupportedOperationException ignored) {
        }

        return ExecutionUsage.builder()
            .dailyExecutionsCount(executionRepository.dailyStatistics(
                null,
@@ -52,28 +33,13 @@ public class ExecutionUsage {
                from,
                to,
                DateUtils.GroupType.DAY,
                null,
                false))
            .dailyTaskRunsCount(dailyTaskRunsCount)
                null))
            .build();
    }

    public static ExecutionUsage of(final ExecutionRepositoryInterface repository,
                                    final ZonedDateTime from,
                                    final ZonedDateTime to) {
        List<DailyExecutionStatistics> dailyTaskRunsCount = null;
        try {
            dailyTaskRunsCount = repository.dailyStatisticsForAllTenants(
                null,
                null,
                null,
                from,
                to,
                DateUtils.GroupType.DAY,
                true
            );
        } catch (UnsupportedOperationException ignored) {}

        return ExecutionUsage.builder()
            .dailyExecutionsCount(repository.dailyStatisticsForAllTenants(
                null,
@@ -81,10 +47,8 @@ public class ExecutionUsage {
                null,
                from,
                to,
                DateUtils.GroupType.DAY,
                false
                DateUtils.GroupType.DAY
            ))
            .dailyTaskRunsCount(dailyTaskRunsCount)
            .build();
    }
}
@@ -272,7 +272,7 @@ public class Execution implements DeletedInterface, TenantInterface {
    }

    public Execution withTaskRun(TaskRun taskRun) throws InternalException {
        ArrayList<TaskRun> newTaskRunList = new ArrayList<>(this.taskRunList);
        ArrayList<TaskRun> newTaskRunList = this.taskRunList == null ? new ArrayList<>() : new ArrayList<>(this.taskRunList);

        boolean b = Collections.replaceAll(
            newTaskRunList,
@@ -865,20 +865,18 @@
     * @param e the exception raised
     * @return new taskRun with updated attempt with logs
     */
    private FailedTaskRunWithLog lastAttemptsTaskRunForFailedExecution(TaskRun taskRun,
                                                                       TaskRunAttempt lastAttempt, Exception e) {
    private FailedTaskRunWithLog lastAttemptsTaskRunForFailedExecution(TaskRun taskRun, TaskRunAttempt lastAttempt, Exception e) {
        TaskRun failed = taskRun
            .withAttempts(
                Stream
                    .concat(
                        taskRun.getAttempts().stream().limit(taskRun.getAttempts().size() - 1),
                        Stream.of(lastAttempt.getState().isFailed() ? lastAttempt : lastAttempt.withState(State.Type.FAILED))
                    )
                    .toList()
            );
        return new FailedTaskRunWithLog(
            taskRun
                .withAttempts(
                    Stream
                        .concat(
                            taskRun.getAttempts().stream().limit(taskRun.getAttempts().size() - 1),
                            Stream.of(lastAttempt
                                .withState(State.Type.FAILED))
                        )
                        .toList()
                )
                .withState(State.Type.FAILED),
            failed.getState().isFailed() ? failed : failed.withState(State.Type.FAILED),
            RunContextLogger.logEntries(loggingEventFromException(e), LogEntry.of(taskRun, kind))
        );
    }
@@ -296,7 +296,7 @@ public class TaskRun implements TenantInterface {
    }

    public TaskRun incrementIteration() {
        int iteration = this.iteration == null ? 1 : this.iteration;
        int iteration = this.iteration == null ? 0 : this.iteration;
        return this.toBuilder()
            .iteration(iteration + 1)
            .build();
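The default changes from 1 to 0, fixing an off-by-one: with no iteration recorded yet, the first increment now yields 1 rather than 2 (the same change appears in ResolvedTask below). A reduced sketch of the arithmetic:

public class IterationDemo {
    public static void main(String[] args) {
        Integer stored = null;                          // no iteration recorded yet
        int before = (stored == null ? 1 : stored) + 1; // old logic -> 2
        int after = (stored == null ? 0 : stored) + 1;  // new logic -> 1
        System.out.println(before + " vs " + after);
    }
}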
@@ -3,7 +3,6 @@ package io.kestra.core.models.flows;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import io.kestra.core.models.Label;
import io.kestra.core.models.annotations.PluginProperty;
import io.kestra.core.models.tasks.WorkerGroup;
import io.kestra.core.serializers.ListOrMapOfLabelDeserializer;
import io.kestra.core.serializers.ListOrMapOfLabelSerializer;
@@ -61,7 +60,14 @@ public abstract class AbstractFlow implements FlowInterface {

    @JsonSerialize(using = ListOrMapOfLabelSerializer.class)
    @JsonDeserialize(using = ListOrMapOfLabelDeserializer.class)
    @Schema(implementation = Object.class, oneOf = {List.class, Map.class})
    @Schema(
        description = "Labels as a list of Label (key/value pairs) or as a map of string to string.",
        oneOf = {
            Label[].class,
            Map.class
        }
    )
    @Valid
    List<Label> labels;

    @Schema(additionalProperties = Schema.AdditionalPropertiesValue.TRUE)
@@ -69,4 +75,5 @@ public abstract class AbstractFlow implements FlowInterface {

    @Valid
    private WorkerGroup workerGroup;
}
@@ -86,10 +86,11 @@ public class State {

    @JsonProperty(access = JsonProperty.Access.READ_ONLY)
    public Duration getDuration() {
        return Duration.between(
            this.histories.getFirst().getDate(),
            this.histories.size() > 1 ? this.histories.get(this.histories.size() - 1).getDate() : Instant.now()
        );
        if (this.getEndDate().isPresent()) {
            return Duration.between(this.getStartDate(), this.getEndDate().get());
        } else {
            return Duration.between(this.getStartDate(), Instant.now());
        }
    }

    @JsonProperty(access = JsonProperty.Access.READ_ONLY)
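The rewritten getDuration() can be read as: a finished state measures start to end, a still-running one measures start to now. A standalone sketch of that rule (method shape assumed, not repository code):

import java.time.Duration;
import java.time.Instant;
import java.util.Optional;

public class DurationDemo {
    static Duration durationOf(Instant start, Optional<Instant> end) {
        // terminal state: start -> end; still running: start -> now
        return Duration.between(start, end.orElseGet(Instant::now));
    }

    public static void main(String[] args) {
        Instant start = Instant.now().minusSeconds(5);
        System.out.println(durationOf(start, Optional.empty()));                  // ~PT5S, still running
        System.out.println(durationOf(start, Optional.of(start.plusSeconds(2)))); // PT2S, finished
    }
}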
@@ -30,7 +30,7 @@ public class ResolvedTask {

    public NextTaskRun toNextTaskRunIncrementIteration(Execution execution, Integer iteration) {
        return new NextTaskRun(
            TaskRun.of(execution, this).withIteration(iteration != null ? iteration : 1),
            TaskRun.of(execution, this).withIteration(iteration != null ? iteration : 0),
            this.getTask()
        );
    }
@@ -185,34 +185,6 @@ public class Trigger extends TriggerContext implements HasUID {
            .build();
    }

    public static Trigger update(Trigger currentTrigger, Trigger newTrigger, ZonedDateTime nextExecutionDate) throws Exception {
        Trigger updated = currentTrigger;

        // If a backfill is created, we update the currentTrigger
        // and set the nextExecutionDate() as the previous one
        if (newTrigger.getBackfill() != null) {
            updated = currentTrigger.toBuilder()
                .backfill(
                    newTrigger
                        .getBackfill()
                        .toBuilder()
                        .end(newTrigger.getBackfill().getEnd() != null ? newTrigger.getBackfill().getEnd() : ZonedDateTime.now())
                        .currentDate(
                            newTrigger.getBackfill().getStart()
                        )
                        .previousNextExecutionDate(
                            currentTrigger.getNextExecutionDate())
                        .build())
                .build();
        }

        return updated.toBuilder()
            .nextExecutionDate(newTrigger.getDisabled() ?
                null : nextExecutionDate)
            .disabled(newTrigger.getDisabled())
            .build();
    }

    public Trigger resetExecution(Flow flow, Execution execution, ConditionContext conditionContext) {
        boolean disabled = this.getStopAfter() != null ? this.getStopAfter().contains(execution.getState().getCurrent()) : this.getDisabled();
        if (!disabled) {
@@ -276,27 +248,22 @@ public class Trigger extends TriggerContext implements HasUID {
            .build();
    }

    public Trigger initBackfill(Trigger newTrigger) {
        // If a backfill is created, we update the currentTrigger
    public Trigger withBackfill(final Backfill backfill) {
        Trigger updated = this;
        // If a backfill is created, we update the trigger
        // and set the nextExecutionDate() as the previous one
        if (newTrigger.getBackfill() != null) {
            return this.toBuilder()
        if (backfill != null) {
            updated = this.toBuilder()
                .backfill(
                    newTrigger
                        .getBackfill()
                    backfill
                        .toBuilder()
                        .end(newTrigger.getBackfill().getEnd() != null ? newTrigger.getBackfill().getEnd() : ZonedDateTime.now())
                        .currentDate(
                            newTrigger.getBackfill().getStart()
                        )
                        .previousNextExecutionDate(
                            this.getNextExecutionDate())
                        .end(backfill.getEnd() != null ? backfill.getEnd() : ZonedDateTime.now())
                        .currentDate(backfill.getStart())
                        .previousNextExecutionDate(this.getNextExecutionDate())
                        .build())
                .build();
        }

        return this;
        return updated;
    }

    // if the next date is after the backfill end, we remove the backfill
@@ -56,7 +56,7 @@ public class DefaultPluginRegistry implements PluginRegistry {
     *
     * @return the {@link DefaultPluginRegistry}.
     */
    public static DefaultPluginRegistry getOrCreate() {
    public synchronized static DefaultPluginRegistry getOrCreate() {
        DefaultPluginRegistry instance = LazyHolder.INSTANCE;
        if (!instance.isInitialized()) {
            instance.init();
@@ -74,7 +74,7 @@ public class DefaultPluginRegistry implements PluginRegistry {
    /**
     * Initializes the registry by loading all core plugins.
     */
    protected void init() {
    protected synchronized void init() {
        if (initialized.compareAndSet(false, true)) {
            register(scanner.scan());
        }
@@ -200,7 +200,7 @@ public class DefaultPluginRegistry implements PluginRegistry {
        if (existing != null && existing.crc32() == plugin.crc32()) {
            return; // same plugin already registered
        }

        lock.lock();
        try {
            if (existing != null) {
@@ -212,7 +212,7 @@ public class DefaultPluginRegistry implements PluginRegistry {
            lock.unlock();
        }
    }

    protected void registerAll(Map<PluginIdentifier, PluginClassAndMetadata<? extends Plugin>> plugins) {
        pluginClassByIdentifier.putAll(plugins);
    }
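Reduced to its core, the initialization pattern here is a synchronized accessor plus an AtomicBoolean guard so the body runs at most once; a generic sketch (not the repository class):

import java.util.concurrent.atomic.AtomicBoolean;

final class OnceInit {
    private final AtomicBoolean initialized = new AtomicBoolean(false);

    // compareAndSet ensures the body runs exactly once even under races;
    // marking callers synchronized (as in the hunk above) additionally orders them.
    void init(Runnable body) {
        if (initialized.compareAndSet(false, true)) {
            body.run();
        }
    }
}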
@@ -6,6 +6,12 @@ import lombok.Getter;
import lombok.ToString;

import java.net.URL;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.Enumeration;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.zip.CRC32;

@AllArgsConstructor
@Getter
@@ -14,5 +20,59 @@ import java.net.URL;
public class ExternalPlugin {
    private final URL location;
    private final URL[] resources;
    private final long crc32;
    private volatile Long crc32; // lazy-val

    public ExternalPlugin(URL location, URL[] resources) {
        this.location = location;
        this.resources = resources;
    }

    public Long getCrc32() {
        if (this.crc32 == null) {
            synchronized (this) {
                if (this.crc32 == null) {
                    this.crc32 = computeJarCrc32(location);
                }
            }
        }
        return crc32;
    }

    /**
     * Compute a CRC32 of the JAR file without reading the whole file.
     *
     * @param location of the JAR file.
     * @return the CRC32, or {@code -1} if the checksum can't be computed.
     */
    private static long computeJarCrc32(final URL location) {
        CRC32 crc = new CRC32();
        try (JarFile jar = new JarFile(location.toURI().getPath(), false)) {
            Enumeration<JarEntry> entries = jar.entries();
            byte[] buffer = new byte[Long.BYTES]; // reusable buffer to avoid re-allocation

            while (entries.hasMoreElements()) {
                JarEntry entry = entries.nextElement();
                crc.update(entry.getName().getBytes(StandardCharsets.UTF_8));
                updateCrc32WithLong(crc, buffer, entry.getSize());
                updateCrc32WithLong(crc, buffer, entry.getCrc());
            }

            return crc.getValue();
        } catch (Exception e) {
            return -1;
        }
    }

    private static void updateCrc32WithLong(CRC32 crc32, byte[] reusable, long val) {
        // fast long -> byte conversion
        reusable[0] = (byte) (val >>> 56);
        reusable[1] = (byte) (val >>> 48);
        reusable[2] = (byte) (val >>> 40);
        reusable[3] = (byte) (val >>> 32);
        reusable[4] = (byte) (val >>> 24);
        reusable[5] = (byte) (val >>> 16);
        reusable[6] = (byte) (val >>> 8);
        reusable[7] = (byte) val;
        crc32.update(reusable);
    }
}
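The lazy crc32 field above is the classic double-checked locking idiom over a volatile field; a generic sketch of the same shape (illustrative, not repository code):

import java.util.function.Supplier;

final class Lazy<T> {
    private volatile T value; // volatile publication makes double-checked locking safe

    T get(Supplier<T> compute) {
        T local = value;          // first (unsynchronized) check
        if (local == null) {
            synchronized (this) {
                local = value;    // second check, under the lock
                if (local == null) {
                    value = local = compute.get();
                }
            }
        }
        return local;
    }
}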
@@ -46,6 +46,7 @@ public class PluginClassLoader extends URLClassLoader {
        + "|dev.failsafe"
        + "|reactor"
        + "|io.opentelemetry"
        + "|io.netty"
        + ")\\..*$");

    private final ClassLoader parent;
@@ -51,8 +51,7 @@ public class PluginResolver {
            final List<URL> resources = resolveUrlsForPluginPath(path);
            plugins.add(new ExternalPlugin(
                path.toUri().toURL(),
                resources.toArray(new URL[0]),
                computeJarCrc32(path)
                resources.toArray(new URL[0])
            ));
        }
    } catch (final InvalidPathException | MalformedURLException e) {
@@ -124,33 +123,5 @@ public class PluginResolver {

        return urls;
    }

    /**
     * Compute a CRC32 of the JAR file without reading the whole file.
     *
     * @param location of the JAR file.
     * @return the CRC32, or {@code -1} if the checksum can't be computed.
     */
    private static long computeJarCrc32(final Path location) {
        CRC32 crc = new CRC32();
        try (JarFile jar = new JarFile(location.toFile(), false)) {
            Enumeration<JarEntry> entries = jar.entries();
            while (entries.hasMoreElements()) {
                JarEntry entry = entries.nextElement();
                crc.update(entry.getName().getBytes());
                crc.update(longToBytes(entry.getSize()));
                crc.update(longToBytes(entry.getCrc()));
            }
        } catch (Exception e) {
            return -1;
        }
        return crc.getValue();
    }

    private static byte[] longToBytes(long x) {
        ByteBuffer buffer = ByteBuffer.allocate(Long.BYTES);
        buffer.putLong(x);
        return buffer.array();
    }
}
@@ -144,7 +144,7 @@ public final class PluginDeserializer<T extends Plugin> extends JsonDeserializer

    static String extractPluginRawIdentifier(final JsonNode node, final boolean isVersioningSupported) {
        String type = Optional.ofNullable(node.get(TYPE)).map(JsonNode::textValue).orElse(null);
        String version = Optional.ofNullable(node.get(VERSION)).map(JsonNode::textValue).orElse(null);
        String version = Optional.ofNullable(node.get(VERSION)).map(JsonNode::asText).orElse(null);

        if (type == null || type.isEmpty()) {
            return null;
@@ -27,6 +27,7 @@ public interface QueueFactoryInterface {
    String CLUSTER_EVENT_NAMED = "clusterEventQueue";
    String SUBFLOWEXECUTIONEND_NAMED = "subflowExecutionEndQueue";
    String EXECUTION_RUNNING_NAMED = "executionRunningQueue";
    String MULTIPLE_CONDITION_EVENT_NAMED = "multipleConditionEventQueue";

    QueueInterface<Execution> execution();

@@ -59,4 +60,6 @@ public interface QueueFactoryInterface {
    QueueInterface<SubflowExecutionEnd> subflowExecutionEnd();

    QueueInterface<ExecutionRunning> executionRunning();

    QueueInterface<MultipleConditionEvent> multipleConditionEvent();
}
@@ -25,8 +25,6 @@ import java.util.Optional;
import java.util.function.Function;

public interface ExecutionRepositoryInterface extends SaveRepositoryInterface<Execution>, QueryBuilderInterface<Executions.Fields> {
    Boolean isTaskRunEnabled();

    default Optional<Execution> findById(String tenantId, String id) {
        return findById(tenantId, id, false);
    }
@@ -96,24 +94,19 @@ public interface ExecutionRepositoryInterface extends SaveRepositoryInterface<Ex

    Flux<Execution> findAllAsync(@Nullable String tenantId);

    ArrayListTotal<TaskRun> findTaskRun(
        Pageable pageable,
        @Nullable String tenantId,
        List<QueryFilter> filters
    );

    Execution delete(Execution execution);

    Integer purge(Execution execution);

    Integer purge(List<Execution> executions);

    List<DailyExecutionStatistics> dailyStatisticsForAllTenants(
        @Nullable String query,
        @Nullable String namespace,
        @Nullable String flowId,
        @Nullable ZonedDateTime startDate,
        @Nullable ZonedDateTime endDate,
        @Nullable DateUtils.GroupType groupBy,
        boolean isTaskRun
        @Nullable DateUtils.GroupType groupBy
    );

    List<DailyExecutionStatistics> dailyStatistics(
@@ -125,8 +118,7 @@ public interface ExecutionRepositoryInterface extends SaveRepositoryInterface<Ex
        @Nullable ZonedDateTime startDate,
        @Nullable ZonedDateTime endDate,
        @Nullable DateUtils.GroupType groupBy,
        List<State.Type> state,
        boolean isTaskRun
        List<State.Type> state
    );

    @Getter
@@ -4,6 +4,7 @@ import io.kestra.core.models.QueryFilter;
import io.kestra.core.models.SearchResult;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.*;
import io.kestra.plugin.core.dashboard.data.Flows;
import io.micronaut.data.model.Pageable;
import jakarta.annotation.Nullable;
import jakarta.validation.ConstraintViolationException;
@@ -11,7 +12,7 @@ import jakarta.validation.ConstraintViolationException;
import java.util.List;
import java.util.Optional;

public interface FlowRepositoryInterface {
public interface FlowRepositoryInterface extends QueryBuilderInterface<Flows.Fields> {

    Optional<Flow> findById(String tenantId, String namespace, String id, Optional<Integer> revision, Boolean allowDeleted);

@@ -162,4 +163,6 @@ public interface FlowRepositoryInterface {
    FlowWithSource update(GenericFlow flow, FlowInterface previous) throws ConstraintViolationException;

    FlowWithSource delete(FlowInterface flow);

    Boolean existAnyNoAcl(String tenantId);
}
@@ -83,7 +83,9 @@ public class LocalFlowRepositoryLoader {
    }

    public void load(String tenantId, File basePath) throws IOException {
        Map<String, FlowInterface> flowByUidInRepository = flowRepository.findAllForAllTenants().stream()
        Map<String, FlowInterface> flowByUidInRepository = flowRepository.findAllForAllTenants()
            .stream()
            .filter(flow -> tenantId.equals(flow.getTenantId()))
            .collect(Collectors.toMap(FlowId::uidWithoutRevision, Function.identity()));

        try (Stream<Path> pathStream = Files.walk(basePath.toPath())) {
@@ -90,6 +90,8 @@ public interface LogRepositoryInterface extends SaveRepositoryInterface<LogEntry

    Integer purge(Execution execution);

    Integer purge(List<Execution> executions);

    void deleteByQuery(String tenantId, String executionId, String taskId, String taskRunId, Level minLevel, Integer attempt);

    void deleteByQuery(String tenantId, String namespace, String flowId, String triggerId);
@@ -29,6 +29,8 @@ public interface MetricRepositoryInterface extends SaveRepositoryInterface<Metri

    Integer purge(Execution execution);

    Integer purge(List<Execution> executions);

    Flux<MetricEntry> findAllAsync(@Nullable String tenantId);

    default Function<String, String> sortMapping() throws IllegalArgumentException {
@@ -5,10 +5,7 @@ import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.exceptions.InternalException;
import io.kestra.core.models.Label;
import io.kestra.core.models.executions.*;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithException;
import io.kestra.core.models.flows.State;
import io.kestra.core.models.flows.*;
import io.kestra.core.models.property.Property;
import io.kestra.core.models.tasks.ExecutableTask;
import io.kestra.core.models.tasks.Task;
@@ -29,6 +26,7 @@ import org.apache.commons.lang3.stream.Streams;
import java.time.Instant;
import java.time.ZonedDateTime;
import java.util.*;
import java.util.stream.Collectors;

import static io.kestra.core.trace.Tracer.throwCallable;
import static io.kestra.core.utils.Rethrow.throwConsumer;
@@ -153,14 +151,22 @@ public final class ExecutableUtils {
            currentFlow.getNamespace(),
            currentFlow.getId()
        )
        .orElseThrow(() -> new IllegalStateException("Unable to find flow '" + subflowNamespace + "'.'" + subflowId + "' with revision '" + subflowRevision.orElse(0) + "'"));
        .orElseThrow(() -> {
            String msg = "Unable to find flow '" + subflowNamespace + "'.'" + subflowId + "' with revision '" + subflowRevision.orElse(0) + "'";
            runContext.logger().error(msg);
            return new IllegalStateException(msg);
        });

    if (flow.isDisabled()) {
        throw new IllegalStateException("Cannot execute a flow which is disabled");
        String msg = "Cannot execute a flow which is disabled";
        runContext.logger().error(msg);
        throw new IllegalStateException(msg);
    }

    if (flow instanceof FlowWithException fwe) {
        throw new IllegalStateException("Cannot execute an invalid flow: " + fwe.getException());
        String msg = "Cannot execute an invalid flow: " + fwe.getException();
        runContext.logger().error(msg);
        throw new IllegalStateException(msg);
    }

    List<Label> newLabels = inheritLabels ? new ArrayList<>(filterLabels(currentExecution.getLabels(), flow)) : new ArrayList<>(systemLabels(currentExecution));
@@ -201,7 +207,20 @@ public final class ExecutableUtils {
            .build()
        )
        .withScheduleDate(scheduleOnDate);

    if (execution.getInputs().size() < inputs.size()) {
        Map<String, Object> resolvedInputs = execution.getInputs();
        for (var inputKey : inputs.keySet()) {
            if (!resolvedInputs.containsKey(inputKey)) {
                runContext.logger().warn(
                    "Input {} was provided by parent execution {} for subflow {}.{} but isn't declared at the subflow inputs",
                    inputKey,
                    currentExecution.getId(),
                    currentTask.subflowId().namespace(),
                    currentTask.subflowId().flowId()
                );
            }
        }
    }
    // inject the traceparent into the new execution
    propagator.ifPresent(pg -> pg.inject(Context.current(), execution, ExecutionTextMapSetter.INSTANCE));

@@ -32,5 +32,7 @@ public class ExecutionRunning implements HasUID {
        return IdUtils.fromPartsAndSeparator('|', this.tenantId, this.namespace, this.flowId, this.execution.getId());
    }

    public enum ConcurrencyState { CREATED, RUNNING, QUEUED, CANCELLED, FAILED }
    // Note: the KILLED state is only used in the Kafka implementation to difference between purging terminated running execution (null)
    // and purging killed execution which need special treatment
    public enum ConcurrencyState { CREATED, RUNNING, QUEUED, CANCELLED, FAILED, KILLED }
}
@@ -49,15 +49,7 @@ import java.time.Duration;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalTime;
import java.util.AbstractMap;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.*;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.regex.Matcher;
@@ -231,6 +223,19 @@ public class FlowInputOutput {
            return new AbstractMap.SimpleEntry<>(it.input().getId(), it.value());
        })
        .collect(HashMap::new, (m, v) -> m.put(v.getKey(), v.getValue()), HashMap::putAll);
    if (resolved.size() < data.size()) {
        RunContext runContext = runContextFactory.of(flow, execution);
        for (var inputKey : data.keySet()) {
            if (!resolved.containsKey(inputKey)) {
                runContext.logger().warn(
                    "Input {} was provided for workflow {}.{} but isn't declared in the workflow inputs",
                    inputKey,
                    flow.getNamespace(),
                    flow.getId()
                );
            }
        }
    }
    return MapUtils.flattenToNestedMap(resolved);
}

@@ -313,15 +318,15 @@ public class FlowInputOutput {
        });
        resolvable.setInput(input);

        Object value = resolvable.get().value();

        // resolve default if needed
        if (value == null && input.getDefaults() != null) {
            value = resolveDefaultValue(input, runContext);
            resolvable.isDefault(true);
        }

        // validate and parse input value
        if (value == null) {
            if (input.getRequired()) {
@@ -350,7 +355,7 @@ public class FlowInputOutput {

        return resolvable.get();
    }

    public static Object resolveDefaultValue(Input<?> input, PropertyContext renderer) throws IllegalVariableEvaluationException {
        return switch (input.getType()) {
            case STRING, ENUM, SELECT, SECRET, EMAIL -> resolveDefaultPropertyAs(input, renderer, String.class);
@@ -367,7 +372,7 @@ public class FlowInputOutput {
            case MULTISELECT -> resolveDefaultPropertyAsList(input, renderer, String.class);
        };
    }

    @SuppressWarnings("unchecked")
    private static <T> Object resolveDefaultPropertyAs(Input<?> input, PropertyContext renderer, Class<T> clazz) throws IllegalVariableEvaluationException {
        return Property.as((Property<T>) input.getDefaults(), renderer, clazz);
@@ -376,7 +381,7 @@ public class FlowInputOutput {
    private static <T> Object resolveDefaultPropertyAsList(Input<?> input, PropertyContext renderer, Class<T> clazz) throws IllegalVariableEvaluationException {
        return Property.asList((Property<List<T>>) input.getDefaults(), renderer, clazz);
    }

    private RunContext buildRunContextForExecutionAndInputs(final FlowInterface flow, final Execution execution, Map<String, InputAndValue> dependencies) {
        Map<String, Object> flattenInputs = MapUtils.flattenToNestedMap(dependencies.entrySet()
            .stream()
@@ -453,7 +458,7 @@ public class FlowInputOutput {
        if (data.getType() == null) {
            return Optional.of(new AbstractMap.SimpleEntry<>(data.getId(), current));
        }

        final Type elementType = data instanceof ItemTypeInterface itemTypeInterface ? itemTypeInterface.getItemType() : null;

        return Optional.of(new AbstractMap.SimpleEntry<>(
@@ -530,17 +535,17 @@ public class FlowInputOutput {
            throw new Exception("Expected `" + type + "` but received `" + current + "` with errors:\n```\n" + e.getMessage() + "\n```");
        }
    }

    public static Map<String, Object> renderFlowOutputs(List<Output> outputs, RunContext runContext) throws IllegalVariableEvaluationException {
        if (outputs == null) return Map.of();

        // render required outputs
        Map<String, Object> outputsById = outputs
            .stream()
            .filter(output -> output.getRequired() == null || output.getRequired())
            .collect(HashMap::new, (map, entry) -> map.put(entry.getId(), entry.getValue()), Map::putAll);
        outputsById = runContext.render(outputsById);

        // render optional outputs one by one to catch, log, and skip any error.
        for (io.kestra.core.models.flows.Output output : outputs) {
            if (Boolean.FALSE.equals(output.getRequired())) {
@@ -583,9 +588,9 @@ public class FlowInputOutput {
    }

    public void isDefault(boolean isDefault) {
        this.input = new InputAndValue(this.input.input(), this.input.value(), this.input.enabled(), isDefault, this.input.exception());
    }

    public void setInput(final Input<?> input) {
        this.input = new InputAndValue(input, this.input.value(), this.input.enabled(), this.input.isDefault(), this.input.exception());
    }
@@ -500,7 +500,7 @@ public class FlowableUtils {

    ArrayList<ResolvedTask> result = new ArrayList<>();

    int index = 0;
    int iteration = 0;
    for (Object current : distinctValue) {
        try {
            String resolvedValue = current instanceof String stringValue ? stringValue : MAPPER.writeValueAsString(current);
@@ -508,7 +508,7 @@ public class FlowableUtils {
            result.add(ResolvedTask.builder()
                .task(task)
                .value(resolvedValue)
                .iteration(index++)
                .iteration(iteration)
                .parentId(parentTaskRun.getId())
                .build()
            );
@@ -516,6 +516,7 @@ public class FlowableUtils {
        } catch (JsonProcessingException e) {
            throw new IllegalVariableEvaluationException(e);
        }
        iteration++;
    }

    return result;
@@ -0,0 +1,13 @@
package io.kestra.core.runners;

import io.kestra.core.models.HasUID;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.utils.IdUtils;

public record MultipleConditionEvent(Flow flow, Execution execution) implements HasUID {
    @Override
    public String uid() {
        return IdUtils.fromParts(flow.uidWithoutRevision(), execution.getId());
    }
}
@@ -10,6 +10,7 @@ import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.Input;
import io.kestra.core.models.flows.State;
import io.kestra.core.models.flows.input.SecretInput;
import io.kestra.core.models.property.Property;
import io.kestra.core.models.property.PropertyContext;
import io.kestra.core.models.tasks.Task;
import io.kestra.core.models.triggers.AbstractTrigger;
@@ -282,15 +283,15 @@ public final class RunVariables {

        if (flow != null && flow.getInputs() != null) {
            // we add default inputs value from the flow if not already set, this will be useful for triggers
            flow.getInputs().stream()
                .filter(input -> input.getDefaults() != null && !inputs.containsKey(input.getId()))
                .forEach(input -> {
                    try {
                        inputs.put(input.getId(), FlowInputOutput.resolveDefaultValue(input, propertyContext));
                    } catch (IllegalVariableEvaluationException e) {
                        throw new RuntimeException("Unable to inject default value for input '" + input.getId() + "'", e);
                    }
                });
            flow.getInputs().stream()
                .filter(input -> input.getDefaults() != null && !inputs.containsKey(input.getId()))
                .forEach(input -> {
                    try {
                        inputs.put(input.getId(), FlowInputOutput.resolveDefaultValue(input, propertyContext));
                    } catch (IllegalVariableEvaluationException e) {
                        // Silent catch, if an input depends on another input, or a variable that is populated at runtime / input filling time, we can't resolve it here.
                    }
                });
        }

        if (!inputs.isEmpty()) {
@@ -45,7 +45,7 @@ final class Secret {
    for (var entry: data.entrySet()) {
        if (entry.getValue() instanceof Map map) {
            // if some value are of type EncryptedString we decode them and replace the object
            if (EncryptedString.TYPE.equalsIgnoreCase((String) map.get("type"))) {
            if (map.get("type") instanceof String typeStr && EncryptedString.TYPE.equalsIgnoreCase(typeStr)) {
                try {
                    String decoded = decrypt((String) map.get("value"));
                    decryptedMap.put(entry.getKey(), decoded);
@@ -168,6 +168,7 @@ public class Extension extends AbstractExtension {
    functions.put("randomPort", new RandomPortFunction());
    functions.put("fileExists", fileExistsFunction);
    functions.put("isFileEmpty", isFileEmptyFunction);
    functions.put("nanoId", new NanoIDFunction());
    functions.put("tasksWithState", new TasksWithStateFunction());
    functions.put(HttpFunction.NAME, httpFunction);
    return functions;
@@ -30,6 +30,6 @@ public class TimestampMicroFilter extends AbstractDate implements Filter {
        ZoneId zoneId = zoneId(timeZone);
        ZonedDateTime date = convert(input, zoneId, existingFormat);

        return String.valueOf(TimeUnit.SECONDS.toNanos(date.toEpochSecond()) + TimeUnit.NANOSECONDS.toMicros(date.getNano()));
        return String.valueOf(TimeUnit.SECONDS.toMicros(date.toEpochSecond()) + TimeUnit.NANOSECONDS.toMicros(date.getNano()));
    }
}
@@ -5,6 +5,8 @@ import io.kestra.core.http.HttpRequest;
import io.kestra.core.http.HttpResponse;
import io.kestra.core.http.client.HttpClient;
import io.kestra.core.http.client.HttpClientException;
import io.kestra.core.http.client.HttpClientRequestException;
import io.kestra.core.http.client.HttpClientResponseException;
import io.kestra.core.http.client.configurations.HttpConfiguration;
import io.kestra.core.runners.RunContext;
import io.kestra.core.runners.RunContextFactory;
@@ -101,8 +103,15 @@ public class HttpFunction<T> implements Function {
        try (HttpClient httpClient = new HttpClient(runContext, httpConfiguration)) {
            HttpResponse<Object> response = httpClient.request(httpRequest, Object.class);
            return response.getBody();
        } catch (HttpClientException | IllegalVariableEvaluationException | IOException e) {
            throw new PebbleException(e, "Unable to execute HTTP request", lineNumber, self.getName());
        } catch (HttpClientResponseException e) {
            if (e.getResponse() != null) {
                String msg = "Failed to execute HTTP Request, server respond with status " + e.getResponse().getStatus().getCode() + " : " + e.getResponse().getStatus().getReason();
                throw new PebbleException(e, msg, lineNumber, self.getName());
            } else {
                throw new PebbleException(e, "Failed to execute HTTP request ", lineNumber, self.getName());
            }
        } catch (HttpClientException | IllegalVariableEvaluationException | IOException e) {
            throw new PebbleException(e, "Failed to execute HTTP request ", lineNumber, self.getName());
        }
    }
@@ -0,0 +1,66 @@
package io.kestra.core.runners.pebble.functions;

import io.pebbletemplates.pebble.error.PebbleException;
import io.pebbletemplates.pebble.extension.Function;
import io.pebbletemplates.pebble.template.EvaluationContext;
import io.pebbletemplates.pebble.template.PebbleTemplate;

import java.security.SecureRandom;
import java.util.List;
import java.util.Map;

public class NanoIDFunction implements Function {

    private static final int DEFAULT_LENGTH = 21;
    private static final char[] DEFAULT_ALPHABET = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-_".toCharArray();
    private static final SecureRandom secureRandom = new SecureRandom();

    private static final String LENGTH = "length";
    private static final String ALPHABET = "alphabet";

    private static final int MAX_LENGTH = 1000;

    @Override
    public Object execute(
        Map<String, Object> args, PebbleTemplate self, EvaluationContext context, int lineNumber) {
        int length = DEFAULT_LENGTH;
        if (args.containsKey(LENGTH) && (args.get(LENGTH) instanceof Long)) {
            length = parseLength(args, self, lineNumber);
        }
        char[] alphabet = DEFAULT_ALPHABET;
        if (args.containsKey(ALPHABET) && (args.get(ALPHABET) instanceof String)) {
            alphabet = ((String) args.get(ALPHABET)).toCharArray();
        }
        return createNanoID(length, alphabet);
    }

    private static int parseLength(Map<String, Object> args, PebbleTemplate self, int lineNumber) {
        var value = (Long) args.get(LENGTH);
        if (value > MAX_LENGTH) {
            throw new PebbleException(
                null,
                "The 'nanoId()' function field 'length' must be lower than: " + MAX_LENGTH,
                lineNumber,
                self.getName());
        }
        return Math.toIntExact(value);
    }

    @Override
    public List<String> getArgumentNames() {
        return List.of(LENGTH, ALPHABET);
    }

    String createNanoID(int length, char[] alphabet) {
        final char[] data = new char[length];
        final byte[] bytes = new byte[length];
        final int mask = alphabet.length - 1;
        secureRandom.nextBytes(bytes);
        for (int i = 0; i < length; ++i) {
            data[i] = alphabet[bytes[i] & mask];
        }
        return String.valueOf(data);
    }

}
@@ -180,23 +180,13 @@ public final class FileSerde {
    }

    private static <T> MappingIterator<T> createMappingIterator(ObjectMapper objectMapper, Reader reader, TypeReference<T> type) throws IOException {
        // See https://github.com/FasterXML/jackson-dataformats-binary/issues/493
        // There is a limitation with the MappingIterator that cannot differentiate between an array of things (of whatever shape)
        // and a sequence/stream of things (of Array shape).
        // To work around that, we need to create a JsonParser and advance to the first token.
        try (var parser = objectMapper.createParser(reader)) {
            parser.nextToken();
            return objectMapper.readerFor(type).readValues(parser);
        }
    }

    private static <T> MappingIterator<T> createMappingIterator(ObjectMapper objectMapper, Reader reader, Class<T> type) throws IOException {
        // See https://github.com/FasterXML/jackson-dataformats-binary/issues/493
        // There is a limitation with the MappingIterator that cannot differentiate between an array of things (of whatever shape)
        // and a sequence/stream of things (of Array shape).
        // To work around that, we need to create a JsonParser and advance to the first token.
        try (var parser = objectMapper.createParser(reader)) {
            parser.nextToken();
            return objectMapper.readerFor(type).readValues(parser);
        }
    }
@@ -163,31 +163,28 @@ public final class JacksonMapper {
            .build();
    }

    public static Pair<JsonNode, JsonNode> getBiDirectionalDiffs(Object previous, Object current) {
        JsonNode previousJson = MAPPER.valueToTree(previous);
        JsonNode newJson = MAPPER.valueToTree(current);
    public static Pair<JsonNode, JsonNode> getBiDirectionalDiffs(Object before, Object after) {
        JsonNode beforeNode = MAPPER.valueToTree(before);
        JsonNode afterNode = MAPPER.valueToTree(after);

        JsonNode patchPrevToNew = JsonDiff.asJson(previousJson, newJson);
        JsonNode patchNewToPrev = JsonDiff.asJson(newJson, previousJson);
        JsonNode patch = JsonDiff.asJson(beforeNode, afterNode);
        JsonNode revert = JsonDiff.asJson(afterNode, beforeNode);

        return Pair.of(patchPrevToNew, patchNewToPrev);
        return Pair.of(patch, revert);
    }

    public static String applyPatches(Object object, List<JsonNode> patches) throws JsonProcessingException {

    public static JsonNode applyPatchesOnJsonNode(JsonNode jsonObject, List<JsonNode> patches) {
        for (JsonNode patch : patches) {
            try {
                // Required for ES
                if (patch.findValue("value") == null) {
                    ((ObjectNode) patch.get(0)).set("value", (JsonNode) null);
                if (patch.findValue("value") == null && !patch.isEmpty()) {
                    ((ObjectNode) patch.get(0)).set("value", null);
                }
                JsonNode current = MAPPER.valueToTree(object);
                object = JsonPatch.fromJson(patch).apply(current);
                jsonObject = JsonPatch.fromJson(patch).apply(jsonObject);
            } catch (IOException | JsonPatchException e) {
                throw new RuntimeException(e);
            }
        }
        return MAPPER.writeValueAsString(object);
        return jsonObject;
    }

}
@@ -56,8 +56,7 @@ import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import static io.kestra.core.utils.Rethrow.throwFunction;
import static io.kestra.core.utils.Rethrow.throwPredicate;
import static io.kestra.core.utils.Rethrow.*;

@Singleton
@Slf4j
@@ -431,7 +430,8 @@ public class ExecutionService {
        @Nullable String flowId,
        @Nullable ZonedDateTime startDate,
        @Nullable ZonedDateTime endDate,
        @Nullable List<State.Type> state
        @Nullable List<State.Type> state,
        int batchSize
    ) throws IOException {
        PurgeResult purgeResult = this.executionRepository
            .find(
@@ -448,24 +448,27 @@ public class ExecutionService {
                null,
                true
            )
            .map(throwFunction(execution -> {
            .buffer(batchSize)
            .map(throwFunction(executions -> {
                PurgeResult.PurgeResultBuilder<?, ?> builder = PurgeResult.builder();

                if (purgeExecution) {
                    builder.executionsCount(this.executionRepository.purge(execution));
                    builder.executionsCount(this.executionRepository.purge(executions));
                }

                if (purgeLog) {
                    builder.logsCount(this.logRepository.purge(execution));
                    builder.logsCount(this.logRepository.purge(executions));
                }

                if (purgeMetric) {
                    builder.metricsCount(this.metricRepository.purge(execution));
                    builder.metricsCount(this.metricRepository.purge(executions));
                }

                if (purgeStorage) {
                    URI uri = StorageContext.forExecution(execution).getExecutionStorageURI(StorageContext.KESTRA_SCHEME);
                    builder.storagesCount(storageInterface.deleteByPrefix(execution.getTenantId(), execution.getNamespace(), uri).size());
                    executions.forEach(throwConsumer(execution -> {
                        URI uri = StorageContext.forExecution(execution).getExecutionStorageURI(StorageContext.KESTRA_SCHEME);
                        builder.storagesCount(storageInterface.deleteByPrefix(execution.getTenantId(), execution.getNamespace(), uri).size());
                    }));
                }

                return (PurgeResult) builder.build();
@@ -716,7 +719,8 @@ public class ExecutionService {
            newExecution = execution.withState(killingOrAfterKillState);
        }

        eventPublisher.publishEvent(new CrudEvent<>(newExecution, execution, CrudEventType.UPDATE));
        // Because this method is expected to be called by the Executor we can return the Execution
        // immediately without publishing a CrudEvent like it's done on pause/resume method.
        return newExecution;
    }
    public Execution kill(Execution execution, FlowInterface flow) {
@@ -3,12 +3,7 @@ package io.kestra.core.services;
import com.fasterxml.jackson.core.JsonProcessingException;
import io.kestra.core.exceptions.FlowProcessingException;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowId;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithException;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.models.flows.*;
import io.kestra.core.models.tasks.RunnableTask;
import io.kestra.core.models.topologies.FlowTopology;
import io.kestra.core.models.triggers.AbstractTrigger;
@@ -30,16 +25,7 @@ import org.apache.commons.lang3.builder.EqualsBuilder;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Objects;
import java.util.Optional;
import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
@@ -551,23 +537,24 @@ public class FlowService {
        return expandAll ? recursiveFlowTopology(new ArrayList<>(), tenant, namespace, id, destinationOnly) : flowTopologyRepository.get().findByFlow(tenant, namespace, id, destinationOnly).stream();
    }

    private Stream<FlowTopology> recursiveFlowTopology(List<FlowId> flowIds, String tenantId, String namespace, String id, boolean destinationOnly) {
    private Stream<FlowTopology> recursiveFlowTopology(List<String> visitedTopologies, String tenantId, String namespace, String id, boolean destinationOnly) {
        if (flowTopologyRepository.isEmpty()) {
            throw noRepositoryException();
        }

        List<FlowTopology> flowTopologies = flowTopologyRepository.get().findByFlow(tenantId, namespace, id, destinationOnly);

        FlowId flowId = FlowId.of(tenantId, namespace, id, null);
        if (flowIds.contains(flowId)) {
            return flowTopologies.stream();
        }
        flowIds.add(flowId);
        var flowTopologies = flowTopologyRepository.get().findByFlow(tenantId, namespace, id, destinationOnly);

        return flowTopologies.stream()
            .flatMap(topology -> Stream.of(topology.getDestination(), topology.getSource()))
            // recursively fetch child nodes
            .flatMap(node -> recursiveFlowTopology(flowIds, node.getTenantId(), node.getNamespace(), node.getId(), destinationOnly));
            // ignore already visited topologies
            .filter(x -> !visitedTopologies.contains(x.uid()))
            .flatMap(topology -> {
                visitedTopologies.add(topology.uid());
                Stream<FlowTopology> subTopologies = Stream
                    .of(topology.getDestination(), topology.getSource())
                    // recursively visit children and parents nodes
                    .flatMap(relationNode -> recursiveFlowTopology(visitedTopologies, relationNode.getTenantId(), relationNode.getNamespace(), relationNode.getId(), destinationOnly));
                return Stream.concat(Stream.of(topology), subTopologies);
            });
    }

    private IllegalStateException noRepositoryException() {
@@ -1,36 +1,45 @@
package io.kestra.executor;
package io.kestra.core.services;

import com.google.common.annotations.VisibleForTesting;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.TaskRun;
import jakarta.annotation.Nullable;
import jakarta.inject.Singleton;
import lombok.extern.slf4j.Slf4j;

import java.util.Collections;
import java.util.List;
import java.util.Objects;

@Singleton
@Slf4j
public class SkipExecutionService {
    private volatile List<String> skipExecutions = Collections.emptyList();
    private volatile List<FlowId> skipFlows = Collections.emptyList();
    private volatile List<NamespaceId> skipNamespaces = Collections.emptyList();
    private volatile List<String> skipTenants = Collections.emptyList();
    private volatile List<String> skipIndexerRecords = Collections.emptyList();

    public synchronized void setSkipExecutions(List<String> skipExecutions) {
        this.skipExecutions = skipExecutions == null ? Collections.emptyList() : skipExecutions;
    }

    public synchronized void setSkipFlows(List<String> skipFlows) {
        this.skipFlows = skipFlows == null ? Collections.emptyList() : skipFlows.stream().map(FlowId::from).toList();
        this.skipFlows = skipFlows == null ? Collections.emptyList() : skipFlows.stream().map(FlowId::from).filter(Objects::nonNull).toList();
    }

    public synchronized void setSkipNamespaces(List<String> skipNamespaces) {
        this.skipNamespaces = skipNamespaces == null ? Collections.emptyList() : skipNamespaces.stream().map(NamespaceId::from).toList();
        this.skipNamespaces = skipNamespaces == null ? Collections.emptyList() : skipNamespaces.stream().map(NamespaceId::from).filter(Objects::nonNull).toList();
    }

    public synchronized void setSkipTenants(List<String> skipTenants) {
        this.skipTenants = skipTenants == null ? Collections.emptyList() : skipTenants;
    }

    public synchronized void setSkipIndexerRecords(List<String> skipIndexerRecords) {
        this.skipIndexerRecords = skipIndexerRecords == null ? Collections.emptyList() : skipIndexerRecords;
    }

    /**
     * Warning: this method didn't check the flow, so it must be used only when neither of the others can be used.
     */
@@ -46,6 +55,14 @@ public class SkipExecutionService {
        return skipExecution(taskRun.getTenantId(), taskRun.getNamespace(), taskRun.getFlowId(), taskRun.getExecutionId());
    }

    /**
     * Skip an indexer records based on its key.
     * @param key the record key as computed by <code>QueueService.key(record)</code>, can be null
     */
    public boolean skipIndexerRecord(@Nullable String key) {
        return key != null && skipIndexerRecords.contains(key);
    }

    @VisibleForTesting
    boolean skipExecution(String tenant, String namespace, String flow, String executionId) {
        return (tenant != null && skipTenants.contains(tenant)) ||
@@ -59,19 +76,31 @@ public class SkipExecutionService {
    }

    record FlowId(String tenant, String namespace, String flow) {
        static FlowId from(String flowId) {
        static @Nullable FlowId from(String flowId) {
            String[] parts = SkipExecutionService.splitIdParts(flowId);
            if (parts.length == 3) {
                return new FlowId(parts[0], parts[1], parts[2]);
            } else if (parts.length == 2) {
                return new FlowId(null, parts[0], parts[1]);
            } else {
                log.error("Invalid flow skip with values: '{}'", flowId);
            }
            return new FlowId(null, parts[0], parts[1]);

            return null;
        }
    };

    record NamespaceId(String tenant, String namespace) {
        static NamespaceId from(String namespaceId) {
        static @Nullable NamespaceId from(String namespaceId) {
            String[] parts = SkipExecutionService.splitIdParts(namespaceId);
            return new NamespaceId(parts[0], parts[1]);

            if (parts.length == 2) {
                return new NamespaceId(parts[0], parts[1]);
            } else {
                log.error("Invalid namespace skip with values:'{}'", namespaceId);
            }

            return null;
        }
    };
}
@@ -1,5 +1,6 @@
package io.kestra.core.storages.kv;

import lombok.EqualsAndHashCode;
import lombok.Getter;

import java.time.Duration;
@@ -9,6 +10,7 @@ import java.util.Map;
import java.util.Optional;

@Getter
@EqualsAndHashCode
public class KVMetadata {
    private String description;
    private Instant expirationDate;
@@ -17,14 +19,18 @@ public class KVMetadata {
        if (ttl != null && ttl.isNegative()) {
            throw new IllegalArgumentException("ttl cannot be negative");
        }

        this.description = description;
        if (ttl != null) {
            this.expirationDate = Instant.now().plus(ttl);
        }
    }

    public KVMetadata(String description, Instant expirationDate) {
        this.description = description;
        this.expirationDate = expirationDate;
    }

    public KVMetadata(Map<String, String> metadata) {
        if (metadata == null) {
            return;
@@ -46,4 +52,9 @@ public class KVMetadata {
        }
        return map;
    }

    @Override
    public String toString() {
        return "[description=" + description + ", expirationDate=" + expirationDate + "]";
    }
}
@@ -4,7 +4,9 @@ import io.kestra.core.exceptions.ResourceExpiredException;
import io.kestra.core.storages.StorageContext;

import java.io.IOException;
import java.io.UncheckedIOException;
import java.net.URI;
import java.time.Duration;
import java.util.List;
import java.util.Optional;
import java.util.regex.Pattern;
@@ -104,8 +106,33 @@ public interface KVStore {
    default boolean exists(String key) throws IOException {
        return list().stream().anyMatch(kvEntry -> kvEntry.key().equals(key));
    }

    /**
     * Finds a KV entry with associated metadata for a given key.
     *
     * @param key the KV entry key.
     * @return an optional of {@link KVValueAndMetadata}.
     *
     * @throws UncheckedIOException if an error occurred while executing the operation on the K/V store.
     */
    default Optional<KVValueAndMetadata> findMetadataAndValue(final String key) throws UncheckedIOException {
        try {
            return get(key).flatMap(entry -> {
                try {
                    return getValue(entry.key()).map(current -> new KVValueAndMetadata(new KVMetadata(entry.description(), entry.expirationDate()), current.value()));
                } catch (IOException e) {
                    throw new UncheckedIOException(e);
                } catch (ResourceExpiredException e) {
                    return Optional.empty();
                }
            });
        } catch (IOException e) {
            throw new UncheckedIOException(e);
        }
    }

    Pattern KEY_VALIDATOR_PATTERN = Pattern.compile("[a-zA-Z0-9][a-zA-Z0-9._-]*");

    /**
@@ -18,6 +18,7 @@ import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.repositories.FlowTopologyRepositoryInterface;
import io.kestra.core.services.ConditionService;
import io.kestra.core.utils.ListUtils;
import io.kestra.core.utils.MapUtils;
import io.kestra.plugin.core.condition.*;
import io.micronaut.core.annotation.Nullable;
import jakarta.inject.Inject;
@@ -175,9 +176,6 @@ public class FlowTopologyService {
    protected boolean isTriggerChild(Flow parent, Flow child) {
        List<AbstractTrigger> triggers = ListUtils.emptyOnNull(child.getTriggers());

        // simulated execution: we add a "simulated" label so conditions can know that the evaluation is for a simulated execution
        Execution execution = Execution.newExecution(parent, (f, e) -> null, List.of(SIMULATED_EXECUTION), Optional.empty());

        // keep only flow trigger
        List<io.kestra.plugin.core.trigger.Flow> flowTriggers = triggers
            .stream()
@@ -189,13 +187,16 @@ public class FlowTopologyService {
            return false;
        }

        // simulated execution: we add a "simulated" label so conditions can know that the evaluation is for a simulated execution
        Execution execution = Execution.newExecution(parent, (f, e) -> null, List.of(SIMULATED_EXECUTION), Optional.empty());

        boolean conditionMatch = flowTriggers
            .stream()
            .flatMap(flow -> ListUtils.emptyOnNull(flow.getConditions()).stream())
            .allMatch(condition -> validateCondition(condition, parent, execution));

        boolean preconditionMatch = flowTriggers.stream()
            .anyMatch(flow -> flow.getPreconditions() == null || validateMultipleConditions(flow.getPreconditions().getConditions(), parent, execution));
            .anyMatch(flow -> flow.getPreconditions() == null || validatePreconditions(flow.getPreconditions(), parent, execution));

        return conditionMatch && preconditionMatch;
    }
@@ -239,11 +240,24 @@ public class FlowTopologyService {
    }

    private boolean isMandatoryMultipleCondition(Condition condition) {
        return Stream
            .of(
                Expression.class
            )
            .anyMatch(aClass -> condition.getClass().isAssignableFrom(aClass));
        return condition.getClass().isAssignableFrom(Expression.class);
    }

    private boolean validatePreconditions(io.kestra.plugin.core.trigger.Flow.Preconditions preconditions, FlowInterface child, Execution execution) {
        boolean upstreamFlowMatched = MapUtils.emptyOnNull(preconditions.getUpstreamFlowsConditions())
            .values()
            .stream()
            .filter(c -> !isFilterCondition(c))
            .anyMatch(c -> validateCondition(c, child, execution));

        boolean whereMatched = MapUtils.emptyOnNull(preconditions.getWhereConditions())
            .values()
            .stream()
            .filter(c -> !isFilterCondition(c))
            .allMatch(c -> validateCondition(c, child, execution));

        // to be a dependency, if upstream flow is set it must be either inside it so it's a AND between upstream flow and where
        return upstreamFlowMatched && whereMatched;
    }

    private boolean isFilterCondition(Condition condition) {
@@ -206,22 +206,17 @@ public class MapUtils {

    /**
     * Utility method that flatten a nested map.
     * <p>
     * NOTE: for simplicity, this method didn't allow to flatten maps with conflicting keys that would end up in different flatten keys,
     * this could be related later if needed by flattening {k1: k2: {k3: v1}, k1: {k4: v2}} to {k1.k2.k3: v1, k1.k4: v2} is prohibited for now.
     *
     * @param nestedMap the nested map.
     * @return the flattened map.
     *
     * @throws IllegalArgumentException if any entry contains a map of more than one element.
     */
    public static Map<String, Object> nestedToFlattenMap(@NotNull Map<String, Object> nestedMap) {
        Map<String, Object> result = new TreeMap<>();

        for (Map.Entry<String, Object> entry : nestedMap.entrySet()) {
            if (entry.getValue() instanceof Map<?, ?> map) {
                Map.Entry<String, Object> flatten = flattenEntry(entry.getKey(), (Map<String, Object>) map);
                result.put(flatten.getKey(), flatten.getValue());
                Map<String, Object> flatten = flattenEntry(entry.getKey(), (Map<String, Object>) map);
                result.putAll(flatten);
            } else {
                result.put(entry.getKey(), entry.getValue());
            }
@@ -229,18 +224,19 @@ public class MapUtils {
        return result;
    }

    private static Map.Entry<String, Object> flattenEntry(String key, Map<String, Object> value) {
        if (value.size() > 1) {
            throw new IllegalArgumentException("You cannot flatten a map with an entry that is a map of more than one element, conflicting key: " + key);
    private static Map<String, Object> flattenEntry(String key, Map<String, Object> value) {
        Map<String, Object> result = new TreeMap<>();

        for (Map.Entry<String, Object> entry : value.entrySet()) {
            String newKey = key + "." + entry.getKey();
            Object newValue = entry.getValue();
            if (newValue instanceof Map<?, ?> map) {
                result.putAll(flattenEntry(newKey, (Map<String, Object>) map));
            } else {
                result.put(newKey, newValue);
            }
        }

        Map.Entry<String, Object> entry = value.entrySet().iterator().next();
        String newKey = key + "." + entry.getKey();
        Object newValue = entry.getValue();
        if (newValue instanceof Map<?, ?> map) {
            return flattenEntry(newKey, (Map<String, Object>) map);
        } else {
            return Map.entry(newKey, newValue);
        }
        return result;
    }
}
@@ -32,48 +32,84 @@ public class Version implements Comparable<Version> {
     * @param version the version.
     * @return a new {@link Version} instance.
     */
    public static Version of(String version) {
    public static Version of(final Object version) {

        if (version.startsWith("v")) {
            version = version.substring(1);
        if (Objects.isNull(version)) {
            throw new IllegalArgumentException("Invalid version, cannot parse null version");
        }

        String strVersion = version.toString();

        if (strVersion.startsWith("v")) {
            strVersion = strVersion.substring(1);
        }

        int qualifier = version.indexOf("-");
        int qualifier = strVersion.indexOf("-");

        final String[] versions = qualifier > 0 ?
            version.substring(0, qualifier).split("\\.") :
            version.split("\\.");
            strVersion.substring(0, qualifier).split("\\.") :
            strVersion.split("\\.");
        try {
            final int majorVersion = Integer.parseInt(versions[0]);
            final int minorVersion = versions.length > 1 ? Integer.parseInt(versions[1]) : 0;
            final int incrementalVersion = versions.length > 2 ? Integer.parseInt(versions[2]) : 0;
            final Integer minorVersion = versions.length > 1 ? Integer.parseInt(versions[1]) : null;
            final Integer incrementalVersion = versions.length > 2 ? Integer.parseInt(versions[2]) : null;

            return new Version(
                majorVersion,
                minorVersion,
                incrementalVersion,
                qualifier > 0 ? version.substring(qualifier + 1) : null,
                version
                qualifier > 0 ? strVersion.substring(qualifier + 1) : null,
                strVersion
            );
        } catch (NumberFormatException e) {
            throw new IllegalArgumentException("Invalid version, cannot parse '" + version + "'");
        }
    }

    /**
     * Static helper method for returning the most recent stable version for a current {@link Version}.
     * Resolves the most appropriate stable version from a collection, based on a given input version.
     * <p>
     * The matching rules are:
     * <ul>
     * <li>If {@code from} specifies only a major version (e.g. {@code 1}), return the latest stable version
     * with the same major (e.g. {@code 1.2.3}).</li>
     * <li>If {@code from} specifies a major and minor version only (e.g. {@code 1.2}), return the latest
     * stable version with the same major and minor (e.g. {@code 1.2.3}).</li>
     * <li>If {@code from} specifies a full version with major, minor, and patch (e.g. {@code 1.2.2}),
     * then only return it if it is exactly present (and stable) in {@code versions}.
     * No "upgrade" is performed in this case.</li>
     * <li>If no suitable version is found, returns {@code null}.</li>
     * </ul>
     *
     * @param from the current version.
     * @param versions the list of version.
     *
     * @return the last stable version.
     * @param from the reference version (may specify only major, or major+minor, or major+minor+patch).
     * @param versions the collection of candidate versions to resolve against.
     * @return the best matching stable version, or {@code null} if none match.
     */
    public static Version getStable(final Version from, final Collection<Version> versions) {
        List<Version> compatibleVersions = versions.stream()
            .filter(v -> v.majorVersion() == from.majorVersion() && v.minorVersion() == from.minorVersion())
            .toList();
        if (compatibleVersions.isEmpty()) return null;
        return Version.getLatest(compatibleVersions);
        // Case 1: "from" is only a major (e.g. 1)
        if (from.hasOnlyMajor()) {
            List<Version> sameMajor = versions.stream()
                .filter(v -> v.majorVersion() == from.majorVersion())
                .toList();
            return sameMajor.isEmpty() ? null : Version.getLatest(sameMajor);
        }

        // Case 2: "from" is major+minor only (e.g. 1.2)
        if (from.hasMajorAndMinorOnly()) {
            List<Version> sameMinor = versions.stream()
                .filter(v -> v.majorVersion() == from.majorVersion()
                    && v.minorVersion() == from.minorVersion())
                .toList();
            return sameMinor.isEmpty() ? null : Version.getLatest(sameMinor);
        }

        // Case 3: "from" is full version (major+minor+patch)
        if (versions.contains(from)) {
            return from;
        }

        // No match
        return null;
    }

    /**
@@ -123,8 +159,8 @@ public class Version implements Comparable<Version> {
    }

    private final int majorVersion;
    private final int minorVersion;
    private final int incrementalVersion;
    private final Integer minorVersion;
    private final Integer patchVersion;
    private final Qualifier qualifier;

    private final String originalVersion;
@@ -134,14 +170,14 @@ public class Version implements Comparable<Version> {
     *
     * @param majorVersion the major version (must be superior or equal to 0).
     * @param minorVersion the minor version (must be superior or equal to 0).
     * @param incrementalVersion the incremental version (must be superior or equal to 0).
     * @param patchVersion the incremental version (must be superior or equal to 0).
     * @param qualifier the qualifier.
     */
    public Version(final int majorVersion,
                   final int minorVersion,
                   final int incrementalVersion,
                   final int patchVersion,
                   final String qualifier) {
        this(majorVersion, minorVersion, incrementalVersion, qualifier, null);
        this(majorVersion, minorVersion, patchVersion, qualifier, null);
    }

    /**
@@ -149,25 +185,25 @@ public class Version implements Comparable<Version> {
     *
     * @param majorVersion the major version (must be superior or equal to 0).
     * @param minorVersion the minor version (must be superior or equal to 0).
     * @param incrementalVersion the incremental version (must be superior or equal to 0).
     * @param patchVersion the incremental version (must be superior or equal to 0).
     * @param qualifier the qualifier.
     * @param originalVersion the original string version.
     */
    private Version(final int majorVersion,
                    final int minorVersion,
                    final int incrementalVersion,
    private Version(final Integer majorVersion,
                    final Integer minorVersion,
                    final Integer patchVersion,
                    final String qualifier,
                    final String originalVersion) {
        this.majorVersion = requirePositive(majorVersion, "major");
        this.minorVersion = requirePositive(minorVersion, "minor");
        this.incrementalVersion = requirePositive(incrementalVersion, "incremental");
        this.patchVersion = requirePositive(patchVersion, "incremental");
        this.qualifier = qualifier != null ? new Qualifier(qualifier) : null;
        this.originalVersion = originalVersion;
    }

    private static int requirePositive(int version, final String message) {
        if (version < 0) {
    private static Integer requirePositive(Integer version, final String message) {
        if (version != null && version < 0) {
            throw new IllegalArgumentException(String.format("The '%s' version must super or equal to 0", message));
        }
        return version;
@@ -178,11 +214,11 @@ public class Version implements Comparable<Version> {
    }

    public int minorVersion() {
        return minorVersion;
        return minorVersion != null ? minorVersion : 0;
    }

    public int incrementalVersion() {
        return incrementalVersion;
    public int patchVersion() {
        return patchVersion != null ? patchVersion : 0;
    }

    public Qualifier qualifier() {
@@ -197,9 +233,9 @@ public class Version implements Comparable<Version> {
        if (this == o) return true;
        if (!(o instanceof Version)) return false;
        Version version = (Version) o;
        return majorVersion == version.majorVersion &&
            minorVersion == version.minorVersion &&
            incrementalVersion == version.incrementalVersion &&
        return Objects.equals(majorVersion, version.majorVersion) &&
            Objects.equals(minorVersion, version.minorVersion) &&
            Objects.equals(patchVersion, version.patchVersion) &&
            Objects.equals(qualifier, version.qualifier);
    }

@@ -208,7 +244,7 @@ public class Version implements Comparable<Version> {
     */
    @Override
    public int hashCode() {
        return Objects.hash(majorVersion, minorVersion, incrementalVersion, qualifier);
        return Objects.hash(majorVersion, minorVersion, patchVersion, qualifier);
    }

    /**
@@ -218,7 +254,7 @@ public class Version implements Comparable<Version> {
    public String toString() {
        if (originalVersion != null) return originalVersion;

        String version = majorVersion + "." + minorVersion + "." + incrementalVersion;
        String version = majorVersion + "." + minorVersion + "." + patchVersion;
        return (qualifier != null) ? version + "-" + qualifier : version;
    }

@@ -238,7 +274,7 @@ public class Version implements Comparable<Version> {
        return compareMinor;
    }

    int compareIncremental = Integer.compare(that.incrementalVersion, this.incrementalVersion);
    int compareIncremental = Integer.compare(that.patchVersion, this.patchVersion);
    if (compareIncremental != 0) {
        return compareIncremental;
    }
@@ -253,6 +289,21 @@ public class Version implements Comparable<Version> {

        return this.qualifier.compareTo(that.qualifier);
    }

    /**
     * @return true if only major is specified (e.g. "1")
     */
    private boolean hasOnlyMajor() {
        return minorVersion == null && patchVersion == null;
    }

    /**
     * @return true if major+minor are specified, but no patch (e.g. "1.2")
     */
    private boolean hasMajorAndMinorOnly() {
        return minorVersion != null && patchVersion == null;
    }

    /**
     * Checks whether this version is before the given one.
||||
@@ -46,16 +46,19 @@ public class VersionProvider {
|
||||
this.date = loadTime(gitProperties);
|
||||
this.version = loadVersion(buildProperties, gitProperties);
|
||||
|
||||
// check the version in the settings and update if needed, we did't use it would allow us to detect incompatible update later if needed
|
||||
if (settingRepository.isPresent()) {
|
||||
Optional<Setting> versionSetting = settingRepository.get().findByKey(Setting.INSTANCE_VERSION);
|
||||
if (versionSetting.isEmpty() || !versionSetting.get().getValue().equals(this.version)) {
|
||||
settingRepository.get().save(Setting.builder()
|
||||
.key(Setting.INSTANCE_VERSION)
|
||||
.value(this.version)
|
||||
.build()
|
||||
);
|
||||
}
|
||||
// check the version in the settings and update if needed, we didn't use it would allow us to detect incompatible update later if needed
|
||||
settingRepository.ifPresent(
|
||||
settingRepositoryInterface -> persistVersion(settingRepositoryInterface, version));
|
||||
}
|
||||
|
||||
private static synchronized void persistVersion(SettingRepositoryInterface settingRepositoryInterface, String version) {
|
||||
Optional<Setting> versionSetting = settingRepositoryInterface.findByKey(Setting.INSTANCE_VERSION);
|
||||
if (versionSetting.isEmpty() || !versionSetting.get().getValue().equals(version)) {
|
||||
settingRepositoryInterface.save(Setting.builder()
|
||||
.key(Setting.INSTANCE_VERSION)
|
||||
.value(version)
|
||||
.build()
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -63,7 +63,7 @@ public class FlowValidator implements ConstraintValidator<FlowValidation, Flow>

    List<String> violations = new ArrayList<>();

    if (RESERVED_FLOW_IDS.contains(value.getId())) {
    if (value.getId() != null && RESERVED_FLOW_IDS.contains(value.getId())) {
        violations.add("Flow id is a reserved keyword: " + value.getId() + ". List of reserved keywords: " + String.join(", ", RESERVED_FLOW_IDS));
    }
@@ -0,0 +1,52 @@
package io.kestra.plugin.core.dashboard.data;

import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonTypeName;
import io.kestra.core.models.annotations.Example;
import io.kestra.core.models.annotations.Plugin;
import io.kestra.core.models.dashboards.ColumnDescriptor;
import io.kestra.core.models.dashboards.DataFilter;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.repositories.QueryBuilderInterface;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.experimental.SuperBuilder;

@SuperBuilder(toBuilder = true)
@Getter
@NoArgsConstructor
@JsonInclude(JsonInclude.Include.NON_DEFAULT)
@EqualsAndHashCode
@Schema(
    title = "Display Flow data in a dashboard chart."
)
@Plugin(
    examples = {
        @Example(
            title = "Display a chart with a list of Flows.",
            full = true,
            code = { """
                charts:
                  - id: list_flows
                    type: io.kestra.plugin.core.dashboard.chart.Table
                    data:
                      type: io.kestra.plugin.core.dashboard.data.Flows
                      columns:
                        namespace:
                          field: NAMESPACE
                        id:
                          field: ID
                """
            }
        )
    }
)
@JsonTypeName("Flows")
public class Flows<C extends ColumnDescriptor<Flows.Fields>> extends DataFilter<Flows.Fields, C> implements IFlows {
    @Override
    public Class<? extends QueryBuilderInterface<Fields>> repositoryClass() {
        return FlowRepositoryInterface.class;
    }
}
@@ -0,0 +1,51 @@
package io.kestra.plugin.core.dashboard.data;

import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonTypeName;
import io.kestra.core.models.annotations.Example;
import io.kestra.core.models.annotations.Plugin;
import io.kestra.core.models.dashboards.ColumnDescriptor;
import io.kestra.core.models.dashboards.DataFilterKPI;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.repositories.QueryBuilderInterface;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.experimental.SuperBuilder;

@SuperBuilder(toBuilder = true)
@Getter
@NoArgsConstructor
@JsonInclude(JsonInclude.Include.NON_DEFAULT)
@EqualsAndHashCode
@Schema(
    title = "Display a chart with Flows KPI.",
    description = "Change."
)
@Plugin(
    examples = {
        @Example(
            title = "Display count of Flows.",
            full = true,
            code = { """
                charts:
                  - id: kpi
                    type: io.kestra.plugin.core.dashboard.chart.KPI
                    data:
                      type: io.kestra.plugin.core.dashboard.data.FlowsKPI
                      columns:
                        field: ID
                        agg: COUNT
                """
            }
        )
    }
)
@JsonTypeName("FlowsKPI")
public class FlowsKPI<C extends ColumnDescriptor<FlowsKPI.Fields>> extends DataFilterKPI<FlowsKPI.Fields, C> implements IFlows {
    @Override
    public Class<? extends QueryBuilderInterface<Fields>> repositoryClass() {
        return FlowRepositoryInterface.class;
    }
}
@@ -0,0 +1,37 @@
+package io.kestra.plugin.core.dashboard.data;
+
+import io.kestra.core.models.QueryFilter;
+import io.kestra.core.models.dashboards.filters.AbstractFilter;
+import io.kestra.core.utils.ListUtils;
+
+import java.time.ZonedDateTime;
+import java.util.ArrayList;
+import java.util.List;
+
+public interface IFlows extends IData<IFlows.Fields> {
+
+    default List<AbstractFilter<IFlows.Fields>> whereWithGlobalFilters(List<QueryFilter> filters, ZonedDateTime startDate, ZonedDateTime endDate, List<AbstractFilter<IFlows.Fields>> where) {
+        List<AbstractFilter<IFlows.Fields>> updatedWhere = where != null ? new ArrayList<>(where) : new ArrayList<>();
+
+        if (ListUtils.isEmpty(filters)) {
+            return updatedWhere;
+        }
+
+        List<QueryFilter> namespaceFilters = filters.stream().filter(f -> f.field().equals(QueryFilter.Field.NAMESPACE)).toList();
+        if (!namespaceFilters.isEmpty()) {
+            updatedWhere.removeIf(filter -> filter.getField().equals(IFlows.Fields.NAMESPACE));
+            namespaceFilters.forEach(f -> {
+                updatedWhere.add(f.toDashboardFilterBuilder(IFlows.Fields.NAMESPACE, f.value()));
+            });
+        }
+
+        return updatedWhere;
+    }
+
+    enum Fields {
+        ID,
+        NAMESPACE,
+        REVISION
+    }
+}
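Editor's note: the merge above gives dashboard-global namespace filters precedence over chart-level ones — existing `NAMESPACE` conditions are removed before the global ones are appended. A self-contained sketch of that remove-then-add pattern, with the Kestra filter classes deliberately simplified to a plain record for illustration:

```java
import java.util.ArrayList;
import java.util.List;

public class FilterPrecedence {
    record Filter(String field, String value) {}

    static List<Filter> applyGlobal(List<Filter> where, List<Filter> global) {
        List<Filter> updated = where != null ? new ArrayList<>(where) : new ArrayList<>();
        List<Filter> namespaceGlobals = global.stream()
            .filter(f -> f.field().equals("NAMESPACE"))
            .toList();
        if (!namespaceGlobals.isEmpty()) {
            // global filters win: drop chart-level NAMESPACE conditions first
            updated.removeIf(f -> f.field().equals("NAMESPACE"));
            updated.addAll(namespaceGlobals);
        }
        return updated;
    }

    public static void main(String[] args) {
        List<Filter> where = List.of(new Filter("NAMESPACE", "dev"), new Filter("ID", "my-flow"));
        List<Filter> global = List.of(new Filter("NAMESPACE", "prod"));
        // chart-level NAMESPACE=dev is replaced by the global NAMESPACE=prod
        System.out.println(applyGlobal(where, global));
    }
}
```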
@@ -73,7 +73,7 @@ public class Assert extends Task implements RunnableTask<VoidOutput> {
     @PluginProperty(dynamic = true)
     private List<String> conditions;

-    @Schema(title = "Optional error message.")
+    @Schema(title = "Optional error message")
     private Property<String> errorMessage;

     @Override
@@ -37,7 +37,7 @@ import static io.kestra.core.utils.Rethrow.throwPredicate;
 @Plugin(
     examples = {
         @Example(
-            title = "Send a slack notification if there is no execution for a flow for the last 24 hours.",
+            title = "Send a slack notification if there is no execution for a flow in the last 24 hours.",
             full = true,
             code = """
                 id: executions_count
@@ -78,32 +78,32 @@ import static io.kestra.core.utils.Rethrow.throwPredicate;
 )
 public class Count extends Task implements RunnableTask<Count.Output> {
     @Schema(
-        title = "A list of flows to be filtered.",
+        title = "A list of flows to be filtered",
         description = "If not provided, namespaces must be set."
     )
     @PluginProperty
     protected List<Flow> flows;

     @Schema(
-        title = "A list of states to be filtered."
+        title = "A list of states to be filtered"
     )
     protected Property<List<State.Type>> states;

     @NotNull
     @Schema(
-        title = "The start date."
+        title = "The start date"
     )
     protected Property<String> startDate;

     @Schema(
-        title = "The end date."
+        title = "The end date"
     )
     protected Property<String> endDate;

     @NotNull
     @Schema(
-        title = "The expression to look at against each flow.",
-        description = "The expression is such that expression must return `true` in order to keep the current line.\n" +
+        title = "The expression to check against each flow",
+        description = "The expression is such that the expression must return `true` in order to keep the current line.\n" +
             "Some examples: \n" +
             "- ```yaml {{ eq count 0 }} ```: no execution found\n" +
             "- ```yaml {{ gte count 5 }} ```: more than 5 executions\n"
@@ -121,7 +121,7 @@ public class Count extends Task implements RunnableTask<Count.Output> {
             .getBean(ExecutionRepositoryInterface.class);

         if (flows == null && namespaces == null) {
-            throw new IllegalArgumentException("You must provide a list of flows or namespaces");
+            throw new IllegalArgumentException("You must provide a list of flows or namespaces.");
         }

         var flowInfo = runContext.flowInfo();
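Editor's note: the guard above rejects configurations where neither `flows` nor `namespaces` is set. Downstream, the rendered `expression` keeps only the per-flow counts for which it evaluates to `true` (per the schema examples such as `{{ eq count 0 }}`). A rough sketch of that keep-if-true filtering, with the template engine reduced to a plain predicate for illustration:

```java
import java.util.List;
import java.util.function.Predicate;

public class CountFilterSketch {
    record FlowCount(String namespace, String flowId, long count) {}

    // Keep only the rows for which the rendered expression evaluates to true —
    // the equivalent of "expression must return `true` in order to keep the current line".
    static List<FlowCount> keepMatching(List<FlowCount> rows, Predicate<FlowCount> expression) {
        return rows.stream().filter(expression).toList();
    }

    public static void main(String[] args) {
        List<FlowCount> rows = List.of(
            new FlowCount("company.team", "daily", 0),
            new FlowCount("company.team", "hourly", 12)
        );
        // stand-in for {{ eq count 0 }}: flows with no executions
        System.out.println(keepMatching(rows, row -> row.count() == 0));
    }
}
```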
@@ -21,6 +21,7 @@ import io.swagger.v3.oas.annotations.media.Schema;
 import jakarta.validation.constraints.NotNull;
 import lombok.*;
 import lombok.experimental.SuperBuilder;
+import lombok.extern.slf4j.Slf4j;

 import java.util.Optional;
@@ -68,6 +69,7 @@ import java.util.Optional;
         )
     }
 )
+@Slf4j
 public class Exit extends Task implements ExecutionUpdatableTask {
     @NotNull
     @Schema(
@@ -104,12 +106,13 @@ public class Exit extends Task implements ExecutionUpdatableTask {
                 // ends all parents
                 while (newTaskRun.getParentTaskRunId() != null) {
                     newTaskRun = newExecution.findTaskRunByTaskRunId(newTaskRun.getParentTaskRunId()).withState(exitState);
-                    newExecution = execution.withTaskRun(newTaskRun);
+                    newExecution = newExecution.withTaskRun(newTaskRun);
                 }
                 return newExecution;
             } catch (InternalException e) {
                 // in case we cannot update the last not terminated task run, we ignore it
-                return execution;
+                log.warn("Unable to update the taskrun state", e);
+                return execution.withState(exitState);
             }
         })
         .orElse(execution)
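Editor's note: the loop change above fixes a real bug. Building each iteration from the original `execution` discards the task-run updates made in earlier iterations, so only the last parent received the exit state; accumulating on `newExecution` threads every update through. A minimal sketch of the difference, using an immutable with-er in the style of `Execution.withTaskRun`:

```java
import java.util.ArrayList;
import java.util.List;

public class AccumulateSketch {
    record Doc(List<String> entries) {
        Doc withEntry(String e) {
            var copy = new ArrayList<>(entries);
            copy.add(e);
            return new Doc(List.copyOf(copy));
        }
    }

    public static void main(String[] args) {
        Doc original = new Doc(List.of());
        Doc buggy = original, fixed = original;
        for (String parent : List.of("a", "b", "c")) {
            buggy = original.withEntry(parent); // bug: restarts from the original each time
            fixed = fixed.withEntry(parent);    // fix: accumulates the previous updates
        }
        System.out.println(buggy.entries()); // [c] — earlier updates lost
        System.out.println(fixed.entries()); // [a, b, c]
    }
}
```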
@@ -119,7 +119,7 @@ public class Fail extends Task implements RunnableTask<VoidOutput> {
     )
     private Property<String> condition;

-    @Schema(title = "Optional error message.")
+    @Schema(title = "Optional error message")
     @Builder.Default
     private Property<String> errorMessage = Property.ofValue("Task failure");
@@ -36,7 +36,7 @@ import static io.kestra.core.utils.Rethrow.throwFunction;
 @Getter
 @NoArgsConstructor
 @Schema(
-    title = "Allow to add or overwrite labels for the current execution at runtime.",
+    title = "Add or overwrite labels for the current execution at runtime.",
     description = "Trying to pass a system label (a label starting with `system.`) will fail the task."
 )
 @Plugin(
@@ -76,7 +76,7 @@ public class Labels extends Task implements ExecutionUpdatableTask {
     private static final ObjectMapper MAPPER = JacksonMapper.ofJson();

     @Schema(
-        title = "Labels to add to the current execution.",
+        title = "Labels to add to the current execution",
         description = "The value should result in a list of labels or a labelKey:labelValue map",
         oneOf = {
             String.class,
@@ -127,9 +127,24 @@ public class Labels extends Task implements ExecutionUpdatableTask {
         }

         // check for system labels: none can be passed at runtime
-        Optional<Map.Entry<String, String>> first = labelsAsMap.entrySet().stream().filter(entry -> entry.getKey().startsWith(SYSTEM_PREFIX)).findFirst();
-        if (first.isPresent()) {
-            throw new IllegalArgumentException("System labels can only be set by Kestra itself, offending label: " + first.get().getKey() + "=" + first.get().getValue());
+        Optional<Map.Entry<String, String>> systemLabel = labelsAsMap.entrySet().stream()
+            .filter(entry -> entry.getKey().startsWith(SYSTEM_PREFIX))
+            .findFirst();
+        if (systemLabel.isPresent()) {
+            throw new IllegalArgumentException(
+                "System labels can only be set by Kestra itself, offending label: " +
+                    systemLabel.get().getKey() + "=" + systemLabel.get().getValue()
+            );
+        }
+
+        // check for empty label values
+        Optional<Map.Entry<String, String>> emptyValue = labelsAsMap.entrySet().stream()
+            .filter(entry -> entry.getValue().isEmpty())
+            .findFirst();
+        if (emptyValue.isPresent()) {
+            throw new IllegalArgumentException(
+                "Label values cannot be empty, offending label: " + emptyValue.get().getKey()
+            );
         }

         Map<String, String> newLabels = ListUtils.emptyOnNull(execution.getLabels()).stream()
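Editor's note: both checks above fail fast and name the offending label in the message. A compact, stand-alone version of the same two validations (the `system.` prefix constant is inlined here for illustration):

```java
import java.util.Map;

public class LabelValidationSketch {
    private static final String SYSTEM_PREFIX = "system.";

    static void validate(Map<String, String> labels) {
        labels.forEach((key, value) -> {
            if (key.startsWith(SYSTEM_PREFIX)) {
                throw new IllegalArgumentException(
                    "System labels can only be set by Kestra itself, offending label: " + key + "=" + value);
            }
            if (value.isEmpty()) {
                throw new IllegalArgumentException("Label values cannot be empty, offending label: " + key);
            }
        });
    }

    public static void main(String[] args) {
        validate(Map.of("team", "data")); // passes
        try {
            validate(Map.of("system.origin", "manual"));
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage()); // names the offending label
        }
    }
}
```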
@@ -140,6 +155,7 @@ public class Labels extends Task implements ExecutionUpdatableTask {
         newLabels.putAll(labelsAsMap);

         return execution.withLabels(newLabels.entrySet().stream()
+            .filter(Label.getEntryNotEmptyPredicate())
             .map(entry -> new Label(
                 entry.getKey(),
                 entry.getValue()
@@ -45,63 +45,71 @@ import java.util.List;
 )
 public class PurgeExecutions extends Task implements RunnableTask<PurgeExecutions.Output> {
     @Schema(
-        title = "Namespace whose flows need to be purged, or namespace of the flow that needs to be purged.",
+        title = "Namespace whose flows need to be purged, or namespace of the flow that needs to be purged",
         description = "If `flowId` isn't provided, this is a namespace prefix, else the namespace of the flow."
     )
     private Property<String> namespace;

     @Schema(
-        title = "The flow ID to be purged.",
+        title = "The flow ID to be purged",
         description = "You need to provide the `namespace` properties if you want to purge a flow."
     )
     private Property<String> flowId;

     @Schema(
-        title = "The minimum date to be purged.",
+        title = "The date after which data should be purged",
         description = "All data of flows executed after this date will be purged."
     )
     private Property<String> startDate;

     @Schema(
-        title = "The maximum date to be purged.",
+        title = "The date before which data should be purged.",
         description = "All data of flows executed before this date will be purged."
     )
     @NotNull
     private Property<String> endDate;

     @Schema(
-        title = "The state of the executions to be purged.",
+        title = "The state of the executions to be purged",
         description = "If not set, executions for any states will be purged."
     )
     private Property<List<State.Type>> states;

     @Schema(
-        title = "Whether to purge executions."
+        title = "Flag specifying whether to purge executions"
     )
     @Builder.Default
     private Property<Boolean> purgeExecution = Property.ofValue(true);

     @Schema(
-        title = "Whether to purge execution's logs.",
+        title = "Flag specifying whether to purge execution logs",
         description = """
-            This will only purge logs from executions not from triggers, and it will do it execution by execution.
-            The `io.kestra.plugin.core.log.PurgeLogs` task is a better fit to purge logs as it will purge logs in bulk, and will also purge logs not tied to an execution like trigger logs."""
+            This will only purge logs from executions, not from triggers, and it will do it execution by execution.
+            The `io.kestra.plugin.core.log.PurgeLogs` task is a better fit to purge, as it will purge logs in bulk and will also purge logs not tied to an execution like trigger logs."""
     )
     @Builder.Default
     private Property<Boolean> purgeLog = Property.ofValue(true);

     @Schema(
-        title = "Whether to purge execution's metrics."
+        title = "Flag specifying whether to purge execution's metrics."
     )
     @Builder.Default
     private Property<Boolean> purgeMetric = Property.ofValue(true);

     @Schema(
-        title = "Whether to purge execution's files from the Kestra's internal storage."
+        title = "Flag specifying whether to purge execution's files from the Kestra's internal storage"
     )
     @Builder.Default
     private Property<Boolean> purgeStorage = Property.ofValue(true);

+    @Schema(
+        title = "The size of the bulk delete",
+        description = "For performance, deletion is made by batch of by default 100 executions/logs/metrics."
+    )
+    @Builder.Default
+    @NotNull
+    private Property<Integer> batchSize = Property.ofValue(100);
+
     @Override
     public PurgeExecutions.Output run(RunContext runContext) throws Exception {
         ExecutionService executionService = ((DefaultRunContext)runContext).getApplicationContext().getBean(ExecutionService.class);
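Editor's note: the new `batchSize` property caps how many executions/logs/metrics are deleted per round trip, keeping individual delete statements bounded. A generic sketch of chunked deletion under that assumption — the repository interface here is a hypothetical placeholder, not Kestra's API:

```java
import java.util.List;

public class BatchPurgeSketch {
    interface Repository {
        // hypothetical placeholder for the real bulk-delete call
        int deleteByIds(List<String> ids);
    }

    static int purgeInBatches(Repository repo, List<String> ids, int batchSize) {
        int deleted = 0;
        for (int i = 0; i < ids.size(); i += batchSize) {
            List<String> chunk = ids.subList(i, Math.min(i + batchSize, ids.size()));
            deleted += repo.deleteByIds(chunk); // one bounded statement per chunk
        }
        return deleted;
    }

    public static void main(String[] args) {
        Repository repo = chunk -> chunk.size(); // fake delete: report chunk size
        System.out.println(purgeInBatches(repo, List.of("a", "b", "c", "d", "e"), 2)); // 5
    }
}
```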
@@ -124,9 +132,10 @@ public class PurgeExecutions extends Task implements RunnableTask<PurgeExecution
             flowInfo.tenantId(),
             renderedNamespace,
             runContext.render(flowId).as(String.class).orElse(null),
-            startDate != null ? ZonedDateTime.parse(runContext.render(startDate).as(String.class).orElseThrow()) : null,
+            runContext.render(startDate).as(String.class).map(ZonedDateTime::parse).orElse(null),
             ZonedDateTime.parse(runContext.render(endDate).as(String.class).orElseThrow()),
-            this.states == null ? null : runContext.render(this.states).asList(State.Type.class)
+            this.states == null ? null : runContext.render(this.states).asList(State.Type.class),
+            runContext.render(this.batchSize).as(Integer.class).orElseThrow()
         );

         return Output.builder()
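Editor's note: the `startDate` change above replaces an explicit null ternary with an `Optional` pipeline — when the property is absent the optional is empty, `map(ZonedDateTime::parse)` is skipped, and `orElse(null)` falls through. The same pattern in isolation (using `Optional.ofNullable` as a stand-in for the optional Kestra's renderer returns):

```java
import java.time.ZonedDateTime;
import java.util.Optional;

public class OptionalParseSketch {
    static ZonedDateTime parseOrNull(String rendered) {
        return Optional.ofNullable(rendered)
            .map(ZonedDateTime::parse) // only runs when a value is present
            .orElse(null);
    }

    public static void main(String[] args) {
        System.out.println(parseOrNull("2024-01-01T00:00:00Z"));
        System.out.println(parseOrNull(null)); // null, no parse attempted
    }
}
```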
@@ -141,22 +150,22 @@ public class PurgeExecutions extends Task implements RunnableTask<PurgeExecution
     @Getter
     public static class Output implements io.kestra.core.models.tasks.Output {
         @Schema(
-            title = "The count of deleted executions."
+            title = "The count of deleted executions"
         )
         private int executionsCount;

         @Schema(
-            title = "The count of deleted logs."
+            title = "The count of deleted logs"
         )
         private int logsCount;

         @Schema(
-            title = "The count of deleted storage files."
+            title = "The count of deleted storage files"
         )
         private int storagesCount;

         @Schema(
-            title = "The count of deleted metrics."
+            title = "The count of deleted metrics"
         )
         private int metricsCount;
     }
@@ -64,14 +64,14 @@ public class Resume extends Task implements RunnableTask<VoidOutput> {
         description = """
             If you explicitly define an `executionId`, Kestra will use that specific ID.

-            If another `namespace` and `flowId` properties are set, Kestra will look for a paused execution for that corresponding flow.
+            If `executionId` is not set and `namespace` and `flowId` properties are set, Kestra will look for a paused execution for that corresponding flow.

             If `executionId` is not set, the task will use the ID of the current execution."""
     )
     private Property<String> executionId;

     @Schema(
-        title = "Inputs to be passed to the execution when it's resumed."
+        title = "Inputs to be passed to the execution when it's resumed"
     )
     private Property<Map<String, Object>> inputs;
@@ -93,7 +93,7 @@ public class Resume extends Task implements RunnableTask<VoidOutput> {

         Execution execution = executionRepository.findById(executionInfo.tenantId(), executionInfo.id())
             .orElseThrow(() -> new IllegalArgumentException("No execution found for execution id " + executionInfo.id()));
-        FlowInterface flow = flowExecutor.findByExecution(execution).orElseThrow(() -> new IllegalArgumentException("Flow not found for execution id " + executionInfo.id()));
+        FlowInterface flow = flowExecutor.findByExecution(execution).orElseThrow(() -> new IllegalArgumentException("Flow not found for execution ID " + executionInfo.id()));

         Map<String, Object> renderedInputs = runContext.render(this.inputs).asMap(String.class, Object.class);
         renderedInputs = !renderedInputs.isEmpty() ? renderedInputs : null;
@@ -22,7 +22,7 @@ import java.util.Map;
 @Getter
 @NoArgsConstructor
 @Schema(
-    title = "Allow to set execution variables. These variables will then be available via the `{{ vars.name }}` expression."
+    title = "Allow to set execution variables. These variables are available via the `{{ vars.name }}` expression."
 )
 @Plugin(
     examples = {
@@ -53,7 +53,7 @@ public class SetVariables extends Task implements ExecutionUpdatableTask {
     @NotNull
     private Property<Map<String, Object>> variables;

-    @Schema(title = "Whether to overwrite existing variables")
+    @Schema(title = "Flag specifying whether to overwrite existing variables")
     @NotNull
     @Builder.Default
     private Property<Boolean> overwrite = Property.ofValue(true);
@@ -21,7 +21,7 @@ import java.util.Map;
 @Getter
 @NoArgsConstructor
 @Schema(
-    title = "Allow to unset execution variables."
+    title = "Unset execution variables."
 )
 @Plugin(
     examples = {
@@ -57,7 +57,7 @@ public class UnsetVariables extends Task implements ExecutionUpdatableTask {
     @NotNull
     private Property<List<String>> variables;

-    @Schema(title = "Whether to ignore missing variables")
+    @Schema(title = "Flag specifying whether to ignore missing variables")
     @NotNull
     @Builder.Default
     private Property<Boolean> ignoreMissing = Property.ofValue(false);
@@ -91,7 +91,7 @@ public class Dag extends Task implements FlowableTask<VoidOutput> {
     @NotNull
     @Builder.Default
     @Schema(
-        title = "Number of concurrent parallel tasks that can be running at any point in time.",
+        title = "Number of concurrent parallel tasks that can be running at any point in time",
         description = "If the value is `0`, no concurrency limit exists for the tasks in a DAG and all tasks that can run in parallel will start at the same time."
     )
     private final Property<Integer> concurrent = Property.ofValue(0);
@@ -134,7 +134,7 @@ public class Dag extends Task implements FlowableTask<VoidOutput> {
     private void controlTask() throws IllegalVariableEvaluationException {
         List<String> dagCheckNotExistTasks = this.dagCheckNotExistTask(this.tasks);
         if (!dagCheckNotExistTasks.isEmpty()) {
-            throw new IllegalVariableEvaluationException("Some task doesn't exists on task '" + this.id + "': " + String.join(", ", dagCheckNotExistTasks));
+            throw new IllegalVariableEvaluationException("Some task doesn't exist on task '" + this.id + "': " + String.join(", ", dagCheckNotExistTasks));
         }

         ArrayList<String> cyclicDependenciesTasks = this.dagCheckCyclicDependencies(this.tasks);
@@ -238,14 +238,14 @@ public class Dag extends Task implements FlowableTask<VoidOutput> {
     public static class DagTask {
         @NotNull
         @Schema(
-            title = "The task within the DAG."
+            title = "The task within the DAG"
         )
         @PluginProperty
         private Task task;

         @PluginProperty
         @Schema(
-            title = "The list of task IDs that should have been successfully executed before starting this task."
+            title = "The list of task IDs that should have been successfully executed before starting this task"
         )
         private List<String> dependsOn;
     }
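Editor's note: `controlTask` (whose error message is corrected above) validates that every `dependsOn` entry refers to a declared task id. The check reduces to set membership; a minimal standalone version of that idea, not Kestra's actual implementation:

```java
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

public class DagValidationSketch {
    record DagTask(String id, List<String> dependsOn) {}

    static List<String> missingDependencies(List<DagTask> tasks) {
        Set<String> known = tasks.stream().map(DagTask::id).collect(Collectors.toSet());
        return tasks.stream()
            .flatMap(t -> t.dependsOn().stream())
            .filter(dep -> !known.contains(dep)) // referenced but never declared
            .distinct()
            .toList();
    }

    public static void main(String[] args) {
        List<DagTask> tasks = List.of(
            new DagTask("extract", List.of()),
            new DagTask("load", List.of("extract", "transform")) // "transform" is undeclared
        );
        System.out.println(missingDependencies(tasks)); // [transform]
    }
}
```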
@@ -126,15 +126,15 @@ public class EachParallel extends Parallel implements FlowableTask<VoidOutput> {
     @NotNull
     @Builder.Default
     @Schema(
-        title = "Number of concurrent parallel tasks that can be running at any point in time.",
-        description = "If the value is `0`, no limit exist and all the tasks will start at the same time."
+        title = "Number of concurrent parallel tasks that can be running at any point in time",
+        description = "If the value is `0`, no limit exists and all the tasks will start at the same time."
     )
     private final Property<Integer> concurrent = Property.ofValue(0);

     @NotNull
     @PluginProperty(dynamic = true)
     @Schema(
-        title = "The list of values for this task.",
+        title = "The list of values for this task",
         description = "The value can be passed as a string, a list of strings, or a list of objects.",
         oneOf = {String.class, Object[].class}
     )
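Editor's note: for the `concurrent` property above, `0` means unbounded — every branch starts immediately — while any positive value caps the number of in-flight branches. A sketch of that contract with a plain executor; this illustrates the semantics only, not Kestra's scheduler:

```java
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class ConcurrencySketch {
    static ExecutorService executorFor(int concurrent, int branches) {
        // concurrent == 0: no limit, one thread per branch; otherwise a fixed cap
        return concurrent == 0
            ? Executors.newFixedThreadPool(Math.max(1, branches))
            : Executors.newFixedThreadPool(concurrent);
    }

    public static void main(String[] args) throws InterruptedException {
        List<String> values = List.of("a", "b", "c", "d");
        ExecutorService pool = executorFor(2, values.size()); // at most 2 branches in flight
        values.forEach(v -> pool.submit(() -> System.out.println("running " + v)));
        pool.shutdown();
        pool.awaitTermination(5, TimeUnit.SECONDS);
    }
}
```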
@@ -98,8 +98,8 @@ public class EachSequential extends Sequential implements FlowableTask<VoidOutpu
     @NotNull
     @PluginProperty(dynamic = true)
     @Schema(
-        title = "The list of values for this task.",
-        description = "The value car be passed as a string, a list of strings, or a list of objects.",
+        title = "The list of values for this task",
+        description = "The value can be passed as a string, a list of strings, or a list of objects.",
         oneOf = {String.class, Object[].class}
     )
     private Object value;
Some files were not shown because too many files have changed in this diff.