Compare commits

...

147 Commits

Author SHA1 Message Date
AJ Emerich
42c8334e2e Merge branch 'develop' into docs/purgeFiles 2025-12-18 04:02:10 -06:00
Loïc Mathieu
7ea95f393e feat(execution): add a system.from label
Closes https://github.com/kestra-io/kestra-ee/issues/4699
2025-12-17 15:49:33 +01:00
Piyush Bhaskar
6935900699 fix(core): add a no-op update function to oss store to initialize update (#13732) 2025-12-17 19:47:40 +05:30
AJ Emerich
123d7fb426 Update core/src/main/java/io/kestra/plugin/core/namespace/PurgeFiles.java 2025-12-17 15:13:09 +01:00
Saif M
0bc8e8d74a fix(flow): Improve Exception Handling with clear error message (#13674)
* fix: improved error handling

* including the line

* added tests

* added unit tests
2025-12-17 14:26:53 +01:00
AJ Emerich
e0c3cfa1f9 docs(PurgeFiles): update documentation and example 2025-12-17 12:26:57 +01:00
dependabot[bot]
7f77b24ae0 build(deps): bump com.google.cloud:libraries-bom from 26.72.0 to 26.73.0
Bumps [com.google.cloud:libraries-bom](https://github.com/googleapis/java-cloud-bom) from 26.72.0 to 26.73.0.
- [Release notes](https://github.com/googleapis/java-cloud-bom/releases)
- [Changelog](https://github.com/googleapis/java-cloud-bom/blob/main/release-please-config.json)
- [Commits](https://github.com/googleapis/java-cloud-bom/compare/v26.72.0...v26.73.0)

---
updated-dependencies:
- dependency-name: com.google.cloud:libraries-bom
  dependency-version: 26.73.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-17 10:14:55 +01:00
dependabot[bot]
ec6820dc25 build(deps): bump org.aspectj:aspectjweaver from 1.9.25 to 1.9.25.1
Bumps [org.aspectj:aspectjweaver](https://github.com/eclipse/org.aspectj) from 1.9.25 to 1.9.25.1.
- [Release notes](https://github.com/eclipse/org.aspectj/releases)
- [Commits](https://github.com/eclipse/org.aspectj/commits)

---
updated-dependencies:
- dependency-name: org.aspectj:aspectjweaver
  dependency-version: 1.9.25.1
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-17 09:51:03 +01:00
dependabot[bot]
d94193c143 build(deps): bump software.amazon.awssdk.crt:aws-crt
Bumps [software.amazon.awssdk.crt:aws-crt](https://github.com/awslabs/aws-crt-java) from 0.40.3 to 0.41.0.
- [Release notes](https://github.com/awslabs/aws-crt-java/releases)
- [Commits](https://github.com/awslabs/aws-crt-java/compare/v0.40.3...v0.41.0)

---
updated-dependencies:
- dependency-name: software.amazon.awssdk.crt:aws-crt
  dependency-version: 0.41.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-17 09:50:42 +01:00
dependabot[bot]
c9628047fa build(deps): bump io.qameta.allure:allure-bom from 2.31.0 to 2.32.0
Bumps [io.qameta.allure:allure-bom](https://github.com/allure-framework/allure-java) from 2.31.0 to 2.32.0.
- [Release notes](https://github.com/allure-framework/allure-java/releases)
- [Commits](https://github.com/allure-framework/allure-java/compare/2.31.0...2.32.0)

---
updated-dependencies:
- dependency-name: io.qameta.allure:allure-bom
  dependency-version: 2.32.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-17 09:50:13 +01:00
dependabot[bot]
4cbc069af4 build(deps): bump nl.basjes.gitignore:gitignore-reader
Bumps [nl.basjes.gitignore:gitignore-reader](https://github.com/nielsbasjes/codeowners) from 1.13.0 to 1.14.1.
- [Release notes](https://github.com/nielsbasjes/codeowners/releases)
- [Changelog](https://github.com/nielsbasjes/codeowners/blob/main/CHANGELOG.md)
- [Commits](https://github.com/nielsbasjes/codeowners/compare/v1.13.0...v1.14.1)

---
updated-dependencies:
- dependency-name: nl.basjes.gitignore:gitignore-reader
  dependency-version: 1.14.1
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-17 09:49:11 +01:00
dependabot[bot]
eabe573fe6 build(deps): bump software.amazon.awssdk:bom from 2.40.5 to 2.40.10
Bumps software.amazon.awssdk:bom from 2.40.5 to 2.40.10.

---
updated-dependencies:
- dependency-name: software.amazon.awssdk:bom
  dependency-version: 2.40.10
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-17 09:46:17 +01:00
dependabot[bot]
ecd64617c3 build(deps): bump org.testcontainers:junit-jupiter from 1.21.3 to 1.21.4
Bumps [org.testcontainers:junit-jupiter](https://github.com/testcontainers/testcontainers-java) from 1.21.3 to 1.21.4.
- [Release notes](https://github.com/testcontainers/testcontainers-java/releases)
- [Changelog](https://github.com/testcontainers/testcontainers-java/blob/main/CHANGELOG.md)
- [Commits](https://github.com/testcontainers/testcontainers-java/compare/1.21.3...1.21.4)

---
updated-dependencies:
- dependency-name: org.testcontainers:junit-jupiter
  dependency-version: 1.21.4
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-17 09:45:26 +01:00
dependabot[bot]
a5650bca0f build(deps): bump org.sonarqube from 7.2.0.6526 to 7.2.1.6560
Bumps org.sonarqube from 7.2.0.6526 to 7.2.1.6560.

---
updated-dependencies:
- dependency-name: org.sonarqube
  dependency-version: 7.2.1.6560
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-17 09:44:58 +01:00
dependabot[bot]
ed59e262d4 build(deps): bump org.apache.logging.log4j:log4j-to-slf4j
Bumps org.apache.logging.log4j:log4j-to-slf4j from 2.25.2 to 2.25.3.

---
updated-dependencies:
- dependency-name: org.apache.logging.log4j:log4j-to-slf4j
  dependency-version: 2.25.3
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-17 09:44:30 +01:00
dependabot[bot]
a5f9d54f7d build(deps): bump io.pebbletemplates:pebble from 4.0.0 to 4.1.0
Bumps [io.pebbletemplates:pebble](https://github.com/PebbleTemplates/pebble) from 4.0.0 to 4.1.0.
- [Release notes](https://github.com/PebbleTemplates/pebble/releases)
- [Changelog](https://github.com/PebbleTemplates/pebble/blob/master/release.properties)
- [Commits](https://github.com/PebbleTemplates/pebble/compare/4.0.0...4.1.0)

---
updated-dependencies:
- dependency-name: io.pebbletemplates:pebble
  dependency-version: 4.1.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-17 09:38:13 +01:00
Aaryan meena
47f4f43198 refactor(core): remove usage of unnecessary i18n composable (#13686)
Closes https://github.com/kestra-io/kestra/issues/13640.

Co-authored-by: MilosPaunovic <paun992@hotmail.com>
2025-12-17 08:23:27 +01:00
dependabot[bot]
5d31c97f7f build(deps): bump the minor group in /ui with 6 updates (#13725)
Bumps the minor group in /ui with 6 updates:

| Package | From | To |
| --- | --- | --- |
| [posthog-js](https://github.com/PostHog/posthog-js) | `1.304.0` | `1.308.0` |
| [shiki](https://github.com/shikijs/shiki/tree/HEAD/packages/shiki) | `3.19.0` | `3.20.0` |
| [@shikijs/markdown-it](https://github.com/shikijs/shiki/tree/HEAD/packages/markdown-it) | `3.19.0` | `3.20.0` |
| [@typescript-eslint/parser](https://github.com/typescript-eslint/typescript-eslint/tree/HEAD/packages/parser) | `8.49.0` | `8.50.0` |
| [sass](https://github.com/sass/dart-sass) | `1.96.0` | `1.97.0` |
| [typescript-eslint](https://github.com/typescript-eslint/typescript-eslint/tree/HEAD/packages/typescript-eslint) | `8.49.0` | `8.50.0` |


Updates `posthog-js` from 1.304.0 to 1.308.0
- [Release notes](https://github.com/PostHog/posthog-js/releases)
- [Changelog](https://github.com/PostHog/posthog-js/blob/main/CHANGELOG.md)
- [Commits](https://github.com/PostHog/posthog-js/compare/posthog-js@1.304.0...posthog-js@1.308.0)

Updates `shiki` from 3.19.0 to 3.20.0
- [Release notes](https://github.com/shikijs/shiki/releases)
- [Commits](https://github.com/shikijs/shiki/commits/v3.20.0/packages/shiki)

Updates `@shikijs/markdown-it` from 3.19.0 to 3.20.0
- [Release notes](https://github.com/shikijs/shiki/releases)
- [Commits](https://github.com/shikijs/shiki/commits/v3.20.0/packages/markdown-it)

Updates `@typescript-eslint/parser` from 8.49.0 to 8.50.0
- [Release notes](https://github.com/typescript-eslint/typescript-eslint/releases)
- [Changelog](https://github.com/typescript-eslint/typescript-eslint/blob/main/packages/parser/CHANGELOG.md)
- [Commits](https://github.com/typescript-eslint/typescript-eslint/commits/v8.50.0/packages/parser)

Updates `sass` from 1.96.0 to 1.97.0
- [Release notes](https://github.com/sass/dart-sass/releases)
- [Changelog](https://github.com/sass/dart-sass/blob/main/CHANGELOG.md)
- [Commits](https://github.com/sass/dart-sass/compare/1.96.0...1.97.0)

Updates `typescript-eslint` from 8.49.0 to 8.50.0
- [Release notes](https://github.com/typescript-eslint/typescript-eslint/releases)
- [Changelog](https://github.com/typescript-eslint/typescript-eslint/blob/main/packages/typescript-eslint/CHANGELOG.md)
- [Commits](https://github.com/typescript-eslint/typescript-eslint/commits/v8.50.0/packages/typescript-eslint)

---
updated-dependencies:
- dependency-name: posthog-js
  dependency-version: 1.308.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: minor
- dependency-name: shiki
  dependency-version: 3.20.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: minor
- dependency-name: "@shikijs/markdown-it"
  dependency-version: 3.20.0
  dependency-type: direct:development
  update-type: version-update:semver-minor
  dependency-group: minor
- dependency-name: "@typescript-eslint/parser"
  dependency-version: 8.50.0
  dependency-type: direct:development
  update-type: version-update:semver-minor
  dependency-group: minor
- dependency-name: sass
  dependency-version: 1.97.0
  dependency-type: direct:development
  update-type: version-update:semver-minor
  dependency-group: minor
- dependency-name: typescript-eslint
  dependency-version: 8.50.0
  dependency-type: direct:development
  update-type: version-update:semver-minor
  dependency-group: minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-12-17 08:18:19 +01:00
dependabot[bot]
f8107285c4 build(deps): bump the patch group in /ui with 5 updates (#13726)
Bumps the patch group in /ui with 5 updates:

| Package | From | To |
| --- | --- | --- |
| [vue-router](https://github.com/vuejs/router) | `4.6.3` | `4.6.4` |
| [@eslint/js](https://github.com/eslint/eslint/tree/HEAD/packages/js) | `9.39.1` | `9.39.2` |
| [@vitejs/plugin-vue](https://github.com/vitejs/vite-plugin-vue/tree/HEAD/packages/plugin-vue) | `6.0.2` | `6.0.3` |
| [eslint](https://github.com/eslint/eslint) | `9.39.1` | `9.39.2` |
| [rolldown-vite](https://github.com/vitejs/rolldown-vite/tree/HEAD/packages/vite) | `7.2.10` | `7.2.11` |


Updates `vue-router` from 4.6.3 to 4.6.4
- [Release notes](https://github.com/vuejs/router/releases)
- [Commits](https://github.com/vuejs/router/compare/v4.6.3...v4.6.4)

Updates `@eslint/js` from 9.39.1 to 9.39.2
- [Release notes](https://github.com/eslint/eslint/releases)
- [Commits](https://github.com/eslint/eslint/commits/v9.39.2/packages/js)

Updates `@vitejs/plugin-vue` from 6.0.2 to 6.0.3
- [Release notes](https://github.com/vitejs/vite-plugin-vue/releases)
- [Changelog](https://github.com/vitejs/vite-plugin-vue/blob/main/packages/plugin-vue/CHANGELOG.md)
- [Commits](https://github.com/vitejs/vite-plugin-vue/commits/plugin-vue@6.0.3/packages/plugin-vue)

Updates `eslint` from 9.39.1 to 9.39.2
- [Release notes](https://github.com/eslint/eslint/releases)
- [Commits](https://github.com/eslint/eslint/compare/v9.39.1...v9.39.2)

Updates `rolldown-vite` from 7.2.10 to 7.2.11
- [Release notes](https://github.com/vitejs/rolldown-vite/releases)
- [Changelog](https://github.com/vitejs/rolldown-vite/blob/rolldown-vite/packages/vite/CHANGELOG.md)
- [Commits](https://github.com/vitejs/rolldown-vite/commits/v7.2.11/packages/vite)

---
updated-dependencies:
- dependency-name: vue-router
  dependency-version: 4.6.4
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: patch
- dependency-name: "@eslint/js"
  dependency-version: 9.39.2
  dependency-type: direct:development
  update-type: version-update:semver-patch
  dependency-group: patch
- dependency-name: "@vitejs/plugin-vue"
  dependency-version: 6.0.3
  dependency-type: direct:development
  update-type: version-update:semver-patch
  dependency-group: patch
- dependency-name: eslint
  dependency-version: 9.39.2
  dependency-type: direct:development
  update-type: version-update:semver-patch
  dependency-group: patch
- dependency-name: rolldown-vite
  dependency-version: 7.2.11
  dependency-type: direct:development
  update-type: version-update:semver-patch
  dependency-group: patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-12-17 08:13:30 +01:00
dependabot[bot]
8dc8dc1796 build(deps): bump the build group in /ui with 9 updates (#13723)
Bumps the build group in /ui with 9 updates:

| Package | From | To |
| --- | --- | --- |
| [@esbuild/darwin-arm64](https://github.com/evanw/esbuild) | `0.27.1` | `0.27.2` |
| [@esbuild/darwin-x64](https://github.com/evanw/esbuild) | `0.27.1` | `0.27.2` |
| [@esbuild/linux-x64](https://github.com/evanw/esbuild) | `0.27.1` | `0.27.2` |
| [@rollup/rollup-darwin-arm64](https://github.com/rollup/rollup) | `4.53.3` | `4.53.5` |
| [@rollup/rollup-darwin-x64](https://github.com/rollup/rollup) | `4.53.3` | `4.53.5` |
| [@rollup/rollup-linux-x64-gnu](https://github.com/rollup/rollup) | `4.53.3` | `4.53.5` |
| [@swc/core-darwin-arm64](https://github.com/swc-project/swc) | `1.15.3` | `1.15.5` |
| [@swc/core-darwin-x64](https://github.com/swc-project/swc) | `1.15.3` | `1.15.5` |
| [@swc/core-linux-x64-gnu](https://github.com/swc-project/swc) | `1.15.3` | `1.15.5` |


Updates `@esbuild/darwin-arm64` from 0.27.1 to 0.27.2
- [Release notes](https://github.com/evanw/esbuild/releases)
- [Changelog](https://github.com/evanw/esbuild/blob/main/CHANGELOG.md)
- [Commits](https://github.com/evanw/esbuild/compare/v0.27.1...v0.27.2)

Updates `@esbuild/darwin-x64` from 0.27.1 to 0.27.2
- [Release notes](https://github.com/evanw/esbuild/releases)
- [Changelog](https://github.com/evanw/esbuild/blob/main/CHANGELOG.md)
- [Commits](https://github.com/evanw/esbuild/compare/v0.27.1...v0.27.2)

Updates `@esbuild/linux-x64` from 0.27.1 to 0.27.2
- [Release notes](https://github.com/evanw/esbuild/releases)
- [Changelog](https://github.com/evanw/esbuild/blob/main/CHANGELOG.md)
- [Commits](https://github.com/evanw/esbuild/compare/v0.27.1...v0.27.2)

Updates `@rollup/rollup-darwin-arm64` from 4.53.3 to 4.53.5
- [Release notes](https://github.com/rollup/rollup/releases)
- [Changelog](https://github.com/rollup/rollup/blob/master/CHANGELOG.md)
- [Commits](https://github.com/rollup/rollup/compare/v4.53.3...v4.53.5)

Updates `@rollup/rollup-darwin-x64` from 4.53.3 to 4.53.5
- [Release notes](https://github.com/rollup/rollup/releases)
- [Changelog](https://github.com/rollup/rollup/blob/master/CHANGELOG.md)
- [Commits](https://github.com/rollup/rollup/compare/v4.53.3...v4.53.5)

Updates `@rollup/rollup-linux-x64-gnu` from 4.53.3 to 4.53.5
- [Release notes](https://github.com/rollup/rollup/releases)
- [Changelog](https://github.com/rollup/rollup/blob/master/CHANGELOG.md)
- [Commits](https://github.com/rollup/rollup/compare/v4.53.3...v4.53.5)

Updates `@swc/core-darwin-arm64` from 1.15.3 to 1.15.5
- [Release notes](https://github.com/swc-project/swc/releases)
- [Changelog](https://github.com/swc-project/swc/blob/main/CHANGELOG.md)
- [Commits](https://github.com/swc-project/swc/compare/v1.15.3...v1.15.5)

Updates `@swc/core-darwin-x64` from 1.15.3 to 1.15.5
- [Release notes](https://github.com/swc-project/swc/releases)
- [Changelog](https://github.com/swc-project/swc/blob/main/CHANGELOG.md)
- [Commits](https://github.com/swc-project/swc/compare/v1.15.3...v1.15.5)

Updates `@swc/core-linux-x64-gnu` from 1.15.3 to 1.15.5
- [Release notes](https://github.com/swc-project/swc/releases)
- [Changelog](https://github.com/swc-project/swc/blob/main/CHANGELOG.md)
- [Commits](https://github.com/swc-project/swc/compare/v1.15.3...v1.15.5)

---
updated-dependencies:
- dependency-name: "@esbuild/darwin-arm64"
  dependency-version: 0.27.2
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: build
- dependency-name: "@esbuild/darwin-x64"
  dependency-version: 0.27.2
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: build
- dependency-name: "@esbuild/linux-x64"
  dependency-version: 0.27.2
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: build
- dependency-name: "@rollup/rollup-darwin-arm64"
  dependency-version: 4.53.5
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: build
- dependency-name: "@rollup/rollup-darwin-x64"
  dependency-version: 4.53.5
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: build
- dependency-name: "@rollup/rollup-linux-x64-gnu"
  dependency-version: 4.53.5
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: build
- dependency-name: "@swc/core-darwin-arm64"
  dependency-version: 1.15.5
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: build
- dependency-name: "@swc/core-darwin-x64"
  dependency-version: 1.15.5
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: build
- dependency-name: "@swc/core-linux-x64-gnu"
  dependency-version: 1.15.5
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: build
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-12-17 08:09:02 +01:00
dependabot[bot]
834dfd2947 build(deps-dev): bump @types/node in /ui in the types group (#13724)
Bumps the types group in /ui with 1 update: [@types/node](https://github.com/DefinitelyTyped/DefinitelyTyped/tree/HEAD/types/node).


Updates `@types/node` from 25.0.0 to 25.0.3
- [Release notes](https://github.com/DefinitelyTyped/DefinitelyTyped/releases)
- [Commits](https://github.com/DefinitelyTyped/DefinitelyTyped/commits/HEAD/types/node)

---
updated-dependencies:
- dependency-name: "@types/node"
  dependency-version: 25.0.3
  dependency-type: direct:development
  update-type: version-update:semver-patch
  dependency-group: types
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-12-17 08:08:41 +01:00
YannC
6edb88841f feat(jdbc): method without auditlog for setting repository (#13676)
* feat(jdbc): method without auditlog for setting repository

* test: add flaky annotation
2025-12-16 16:38:50 +01:00
Loïc Mathieu
5653531628 fix(test): avoid killing an already killed execution 2025-12-16 14:39:00 +01:00
github-actions[bot]
ee61276106 chore(core): localize to languages other than english (#13698)
Co-authored-by: GitHub Action <actions@github.com>
2025-12-16 14:28:22 +01:00
Barthélémy Ledoux
abcf76f7b4 fix: avoid blocking creation of flow when edition is restricted to a namespace (#13694) 2025-12-16 14:24:16 +01:00
YannC
67ada7f61b fix: remove JsonIgnore annotation from FlowWithSource and add schema(hidden=true) to Flow (#13681) 2025-12-16 14:23:56 +01:00
Florian Hussonnois
0c13633f77 fix(trigger): ScheduleOnDates should work with backfill
Changes:
* ScheduleOnDates must not be re-scheduled when trigger is updated
* ScheduleOnDates must not be scheduled on previous dates when created
* ScheduleOnDates should properly support backfill

Create new SchedulableExecutionFactory class to hold all methods related
to Schedulable trigger and which are only used by core triggers

Related-to: #13673
2025-12-16 13:47:47 +01:00
Loïc Mathieu
a6cf2015ff fix(tests): concurrency test restarted 2025-12-16 13:42:42 +01:00
Sumit Shandillya
2f9216c70b fix(triggers): improve layout of action buttons in trigger table (#13658) 2025-12-16 17:50:46 +05:30
Piyush Bhaskar
1903e6fac5 fix(plugins): avoid list flash when opening plugin (#13690) 2025-12-16 17:38:09 +05:30
Loïc Mathieu
2d2cb00cab feat(execution): bring support for input and output processing in the run context
Part-of: https://github.com/kestra-io/kestra-ee/issues/4228

Encapsulate access to the FlowInputOutput from the RunContext in a new InputAndOutput component with a curated list of supported methods used by plugins.
2025-12-16 12:19:48 +01:00
Loïc Mathieu
01b5441d16 feat(trigger): refactor Schedule to not use the application context
Part-of:  https://github.com/kestra-io/kestra-ee/issues/4228
2025-12-16 12:19:30 +01:00
Loïc Mathieu
efc778e294 feat(system): save the edition in settings
This would allow to detect OSS -> EE migration.

Closes https://github.com/kestra-io/kestra-ee/issues/5106
2025-12-16 11:06:01 +01:00
Will Russell
60235a4e73 docs(task-runner): remove deprecated runner from example (#13654) 2025-12-16 10:01:27 +00:00
Piyush Bhaskar
b167c52e76 fix(core): properly sync default namespace filters from settings with default filter (#13685) 2025-12-16 15:30:55 +05:30
Florian Hussonnois
216b124294 feat(trigger): add support for concurrent trigger execution (#311)
Fixes: #311
2025-12-16 09:50:48 +01:00
vamsi172323
b6e4df8de2 refactor(core): remove usage of unnecessary i18n composable (#13683)
Closes https://github.com/kestra-io/kestra/issues/13649.

Co-authored-by: MilosPaunovic <paun992@hotmail.com>
2025-12-16 08:26:27 +01:00
Loïc Mathieu
429e7c7945 feat(execution): allow listing the internal storage from the run context
Part-of: https://github.com/kestra-io/kestra-ee/issues/4228
2025-12-15 18:06:49 +01:00
mustafatarek
e302b4be4a chore(tests): update namings of evaluation tests 2025-12-15 16:11:25 +01:00
mustafatarek
8e7ad9ae25 chore(scheduler): revert back changes at handle() and create failed execution with emitting it directly from the catch block 2025-12-15 16:11:25 +01:00
mustafatarek
41a11abf16 chore(tests): add small notes to tests 2025-12-15 16:11:25 +01:00
mustafatarek
1be16d5e9d feat(tests): add more test coverage for trigger evaluation failure
- This covers failures propagated to evaluateScheduleTrigger() in AbstractScheduler class related to ScheduleTrigger such as (Invalid Expressions, Inputs resolving Issues...)
2025-12-15 16:11:25 +01:00
mustafatarek
e263224d7b fix(core): return backfill check at handleFailedEvaluatedTrigger() 2025-12-15 16:11:25 +01:00
mustafatarek
12b89588a6 fix(core): add failed execution with logs when scheduled triggers fail during evaluation 2025-12-15 16:11:25 +01:00
Loïc Mathieu
eae5eb80cb fix(test): use a separate tenant for each test 2025-12-15 15:41:21 +01:00
Loïc Mathieu
c0f6298484 feat(system)!: change logger name and disable flow logger by default
Change system logger name:
- execution -> executor
- trigger -> scheduler
- task -> worker

Add tenant and namespace in the name of loggers.

Disable by default the flow execution logger.
2025-12-15 15:41:09 +01:00
Barthélémy Ledoux
ba1d6b2232 fix: executing validation twice should display 2 errors (#13670) 2025-12-15 14:02:37 +01:00
Pratik Dey
048dcb80cc fix: Webhook-triggered executions do not generate system.correlationId label while direct API executions do 2025-12-15 12:37:23 +01:00
yuri
a81de811d7 feat(ui): make log buttons friendlier (#13404)
Co-authored-by: Miloš Paunović <paun992@hotmail.com>
Co-authored-by: Barthélémy Ledoux <bledoux@kestra.io>
2025-12-15 10:58:36 +01:00
Loïc Mathieu
a960a9f982 feat(plugin): bring cloneForPlugin to the RunContext
To replace the usage of the RunContextInitializer for that, as plugins using another plugin need it.

Part-of: https://github.com/kestra-io/kestra-ee/issues/4228
2025-12-15 09:58:54 +01:00
Miloš Paunović
c4d4fd935f chore(flows): make trigger icon not a button (#13666)
Closes https://github.com/kestra-io/kestra/issues/13634.
2025-12-15 09:31:22 +01:00
Suraj
f063a5a2d9 refactor(core): remove usage of unnecessary i18n composable (#13663)
Closes https://github.com/kestra-io/kestra/issues/13652.

Co-authored-by: MilosPaunovic <paun992@hotmail.com>
2025-12-15 08:06:22 +01:00
Mohd Toukir Khan
ac91d5605f refactor(core): remove usage of unnecessary i18n composable (#13662)
Closes https://github.com/kestra-io/kestra/issues/13650.

Co-authored-by: Mohd Toukir Khan <Toukir@MacBook-Air-2.local>
Co-authored-by: MilosPaunovic <paun992@hotmail.com>
2025-12-15 08:03:39 +01:00
Madhav Kaushik
e3d3c3651b refactor(core): remove usage of unnecessary i18n composable (#13661)
Closes https://github.com/kestra-io/kestra/issues/13651.

Co-authored-by: MilosPaunovic <paun992@hotmail.com>
2025-12-15 08:00:48 +01:00
Sumit Shandillya
5b6836237e refactor(core): remove usage of unnecessary i18n composable (#13643)
Closes https://github.com/kestra-io/kestra/issues/13639.

Signed-off-by: Sumitsh28 <sumit.off28@gmail.com>
Co-authored-by: MilosPaunovic <paun992@hotmail.com>
2025-12-12 15:38:39 +01:00
Lee Kyeong Joon
2f8284b133 refactor(core): remove usage of unnecessary i18n composable (#13645)
Closes https://github.com/kestra-io/kestra/issues/13590.

Co-authored-by: MilosPaunovic <paun992@hotmail.com>
2025-12-12 15:36:50 +01:00
Siva Sai
42992fd7c3 fix(tests): add multiselect input tests for default and provided values 2025-12-12 14:45:44 +01:00
Siva Sai
3a481f93d3 fix(triggers): resolve MULTISELECT input defaults failing on scheduled executions 2025-12-12 14:45:44 +01:00
pengpeng
7e964ae563 fix(core): align login inputs in center (#13532)
Co-authored-by: Piyush Bhaskar <102078527+Piyush-r-bhaskar@users.noreply.github.com>
2025-12-12 19:08:46 +05:30
Saif M
25e54edbc9 feat(core): always bundle the fonts into the build (#13624)
Closes https://github.com/kestra-io/kestra/issues/13599.

Co-authored-by: Miloš Paunović <paun992@hotmail.com>
2025-12-12 13:46:28 +01:00
Ameen PJ
e88dc7af76 fix(core): removed top border for the first log entry (#13621) 2025-12-12 18:16:20 +05:30
Miloš Paunović
b7a027f0dc feat(system): display concurrency limits page based on property from endpoint (#13633)
Closes https://github.com/kestra-io/kestra-ee/issues/5882.
2025-12-12 09:51:58 +01:00
Miloš Paunović
98141d6010 chore(core): amend preprocessor options in vite config (#13632) 2025-12-12 09:48:34 +01:00
Miloš Paunović
bf119ab6df chore(core): align font sizes with the new scss variables from ui-libs (#13598) 2025-12-12 09:01:35 +01:00
Barthélémy Ledoux
9bd6353b77 fix: clicking on a plugin should access the plugin page (#13628)
Co-authored-by: Piyush Bhaskar <102078527+Piyush-r-bhaskar@users.noreply.github.com>
2025-12-11 22:13:07 +01:00
Roman Acevedo
c0ab581cf1 ci: use npm ci instead of install 2025-12-11 18:53:15 +01:00
Loïc Mathieu
0f38e19663 chore(system): refactor NamespaceFilesUtils as a static class
Part-of: https://github.com/kestra-io/kestra-ee/issues/4228
2025-12-11 18:16:07 +01:00
Barthélémy Ledoux
0c14ea621c fix: if multiple definition of a task, call server (#13545) 2025-12-11 17:18:41 +01:00
Malay Dewangan
fb14e57a7c feat(plugin): add title and description to plugin cls 2025-12-11 21:26:11 +05:30
dependabot[bot]
09c707d865 build(deps): bump the minor group in /ui with 15 updates (#13616)
Bumps the minor group in /ui with 15 updates:

| Package | From | To |
| --- | --- | --- |
| [@vue-flow/core](https://github.com/bcakmakoglu/vue-flow/tree/HEAD/packages/core) | `1.47.0` | `1.48.0` |
| [@vueuse/core](https://github.com/vueuse/vueuse/tree/HEAD/packages/core) | `14.0.0` | `14.1.0` |
| [element-plus](https://github.com/element-plus/element-plus) | `2.11.8` | `2.12.0` |
| [posthog-js](https://github.com/PostHog/posthog-js) | `1.296.0` | `1.304.0` |
| [shiki](https://github.com/shikijs/shiki/tree/HEAD/packages/shiki) | `3.15.0` | `3.19.0` |
| [vue-sidebar-menu](https://github.com/yaminncco/vue-sidebar-menu) | `5.8.0` | `5.9.1` |
| [@playwright/test](https://github.com/microsoft/playwright) | `1.56.1` | `1.57.0` |
| [@shikijs/markdown-it](https://github.com/shikijs/shiki/tree/HEAD/packages/markdown-it) | `3.15.0` | `3.19.0` |
| [@typescript-eslint/parser](https://github.com/typescript-eslint/typescript-eslint/tree/HEAD/packages/parser) | `8.47.0` | `8.49.0` |
| [@vueuse/router](https://github.com/vueuse/vueuse/tree/HEAD/packages/router) | `14.0.0` | `14.1.0` |
| [jsdom](https://github.com/jsdom/jsdom) | `27.2.0` | `27.3.0` |
| [playwright](https://github.com/microsoft/playwright) | `1.56.1` | `1.57.0` |
| [prettier](https://github.com/prettier/prettier) | `3.6.2` | `3.7.4` |
| [sass](https://github.com/sass/dart-sass) | `1.94.1` | `1.96.0` |
| [typescript-eslint](https://github.com/typescript-eslint/typescript-eslint/tree/HEAD/packages/typescript-eslint) | `8.47.0` | `8.49.0` |


Updates `@vue-flow/core` from 1.47.0 to 1.48.0
- [Release notes](https://github.com/bcakmakoglu/vue-flow/releases)
- [Changelog](https://github.com/bcakmakoglu/vue-flow/blob/master/packages/core/CHANGELOG.md)
- [Commits](https://github.com/bcakmakoglu/vue-flow/commits/@vue-flow/core@1.48.0/packages/core)

Updates `@vueuse/core` from 14.0.0 to 14.1.0
- [Release notes](https://github.com/vueuse/vueuse/releases)
- [Commits](https://github.com/vueuse/vueuse/commits/v14.1.0/packages/core)

Updates `element-plus` from 2.11.8 to 2.12.0
- [Release notes](https://github.com/element-plus/element-plus/releases)
- [Changelog](https://github.com/element-plus/element-plus/blob/dev/CHANGELOG.en-US.md)
- [Commits](https://github.com/element-plus/element-plus/compare/2.11.8...2.12.0)

Updates `posthog-js` from 1.296.0 to 1.304.0
- [Release notes](https://github.com/PostHog/posthog-js/releases)
- [Changelog](https://github.com/PostHog/posthog-js/blob/main/CHANGELOG.md)
- [Commits](https://github.com/PostHog/posthog-js/compare/posthog-js@1.296.0...posthog-js@1.304.0)

Updates `shiki` from 3.15.0 to 3.19.0
- [Release notes](https://github.com/shikijs/shiki/releases)
- [Commits](https://github.com/shikijs/shiki/commits/v3.19.0/packages/shiki)

Updates `vue-sidebar-menu` from 5.8.0 to 5.9.1
- [Commits](https://github.com/yaminncco/vue-sidebar-menu/commits)

Updates `@playwright/test` from 1.56.1 to 1.57.0
- [Release notes](https://github.com/microsoft/playwright/releases)
- [Commits](https://github.com/microsoft/playwright/compare/v1.56.1...v1.57.0)

Updates `@shikijs/markdown-it` from 3.15.0 to 3.19.0
- [Release notes](https://github.com/shikijs/shiki/releases)
- [Commits](https://github.com/shikijs/shiki/commits/v3.19.0/packages/markdown-it)

Updates `@typescript-eslint/parser` from 8.47.0 to 8.49.0
- [Release notes](https://github.com/typescript-eslint/typescript-eslint/releases)
- [Changelog](https://github.com/typescript-eslint/typescript-eslint/blob/main/packages/parser/CHANGELOG.md)
- [Commits](https://github.com/typescript-eslint/typescript-eslint/commits/v8.49.0/packages/parser)

Updates `@vueuse/router` from 14.0.0 to 14.1.0
- [Release notes](https://github.com/vueuse/vueuse/releases)
- [Commits](https://github.com/vueuse/vueuse/commits/v14.1.0/packages/router)

Updates `jsdom` from 27.2.0 to 27.3.0
- [Release notes](https://github.com/jsdom/jsdom/releases)
- [Changelog](https://github.com/jsdom/jsdom/blob/main/Changelog.md)
- [Commits](https://github.com/jsdom/jsdom/compare/27.2.0...27.3.0)

Updates `playwright` from 1.56.1 to 1.57.0
- [Release notes](https://github.com/microsoft/playwright/releases)
- [Commits](https://github.com/microsoft/playwright/compare/v1.56.1...v1.57.0)

Updates `prettier` from 3.6.2 to 3.7.4
- [Release notes](https://github.com/prettier/prettier/releases)
- [Changelog](https://github.com/prettier/prettier/blob/main/CHANGELOG.md)
- [Commits](https://github.com/prettier/prettier/compare/3.6.2...3.7.4)

Updates `sass` from 1.94.1 to 1.96.0
- [Release notes](https://github.com/sass/dart-sass/releases)
- [Changelog](https://github.com/sass/dart-sass/blob/main/CHANGELOG.md)
- [Commits](https://github.com/sass/dart-sass/compare/1.94.1...1.96.0)

Updates `typescript-eslint` from 8.47.0 to 8.49.0
- [Release notes](https://github.com/typescript-eslint/typescript-eslint/releases)
- [Changelog](https://github.com/typescript-eslint/typescript-eslint/blob/main/packages/typescript-eslint/CHANGELOG.md)
- [Commits](https://github.com/typescript-eslint/typescript-eslint/commits/v8.49.0/packages/typescript-eslint)

---
updated-dependencies:
- dependency-name: "@vue-flow/core"
  dependency-version: 1.48.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: minor
- dependency-name: "@vueuse/core"
  dependency-version: 14.1.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: minor
- dependency-name: element-plus
  dependency-version: 2.12.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: minor
- dependency-name: posthog-js
  dependency-version: 1.304.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: minor
- dependency-name: shiki
  dependency-version: 3.19.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: minor
- dependency-name: vue-sidebar-menu
  dependency-version: 5.9.1
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: minor
- dependency-name: "@playwright/test"
  dependency-version: 1.57.0
  dependency-type: direct:development
  update-type: version-update:semver-minor
  dependency-group: minor
- dependency-name: "@shikijs/markdown-it"
  dependency-version: 3.19.0
  dependency-type: direct:development
  update-type: version-update:semver-minor
  dependency-group: minor
- dependency-name: "@typescript-eslint/parser"
  dependency-version: 8.49.0
  dependency-type: direct:development
  update-type: version-update:semver-minor
  dependency-group: minor
- dependency-name: "@vueuse/router"
  dependency-version: 14.1.0
  dependency-type: direct:development
  update-type: version-update:semver-minor
  dependency-group: minor
- dependency-name: jsdom
  dependency-version: 27.3.0
  dependency-type: direct:development
  update-type: version-update:semver-minor
  dependency-group: minor
- dependency-name: playwright
  dependency-version: 1.57.0
  dependency-type: direct:development
  update-type: version-update:semver-minor
  dependency-group: minor
- dependency-name: prettier
  dependency-version: 3.7.4
  dependency-type: direct:development
  update-type: version-update:semver-minor
  dependency-group: minor
- dependency-name: sass
  dependency-version: 1.96.0
  dependency-type: direct:development
  update-type: version-update:semver-minor
  dependency-group: minor
- dependency-name: typescript-eslint
  dependency-version: 8.49.0
  dependency-type: direct:development
  update-type: version-update:semver-minor
  dependency-group: minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-12-11 15:36:31 +01:00
Loïc Mathieu
86e08d71dd fix(test): makes TracesTest.runningAFlowShouldGenerateTraces more tolerant on the number of traces
Rarely, in the CI, only 6 traces are seen most probably due to the asynchronous nature of OpenTelemetry
2025-12-11 14:55:46 +01:00
Miloš Paunović
94c00cedeb build(deps): improve storybook related grouping of dependabot pull requests (#13618) 2025-12-11 14:54:37 +01:00
Loïc Mathieu
eb12832b1e feat(system): add a boolean in the config to know if the concurrency view is enabled
Part-of: https://github.com/kestra-io/kestra-ee/issues/5882
2025-12-11 14:54:09 +01:00
Loïc Mathieu
687cefdfb9 fix(tests): use a different tenant for each concurrency test 2025-12-11 14:34:04 +01:00
Loïc Mathieu
8eae8aba72 feat(executions): add a protection mechanism to avoid any potential concurrency overflow
Concurrency limit is based on a counter that increments and decrements the limit each time a flow is started and terminated.

This count should always be accurate.

But if some unexpected event occurs (bug or user manually do something wrong), the count may not be accurate anymore.

To avoid any potential issue, when we decrement the counter, we check that the concurrency count is below the limit before unqueuing an execution.

Fixes #12031
Closes  #13301
2025-12-11 14:33:30 +01:00
dependabot[bot]
abdbb8d364 build(deps-dev): bump @types/node in /ui in the types group (#13613)
Bumps the types group in /ui with 1 update: [@types/node](https://github.com/DefinitelyTyped/DefinitelyTyped/tree/HEAD/types/node).


Updates `@types/node` from 24.10.2 to 25.0.0
- [Release notes](https://github.com/DefinitelyTyped/DefinitelyTyped/releases)
- [Commits](https://github.com/DefinitelyTyped/DefinitelyTyped/commits/HEAD/types/node)

---
updated-dependencies:
- dependency-name: "@types/node"
  dependency-version: 25.0.0
  dependency-type: direct:development
  update-type: version-update:semver-major
  dependency-group: types
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-12-11 14:10:45 +01:00
Barthélémy Ledoux
8a55ab3af6 fix: always load the right version of plugin docs (#13571) 2025-12-11 14:06:10 +01:00
dependabot[bot]
b7cb933e1e build(deps): bump the patch group in /ui with 9 updates (#13568)
Bumps the patch group in /ui with 9 updates:

| Package | From | To |
| --- | --- | --- |
| @kestra-io/ui-libs | `0.0.264` | `0.0.266` |
| [humanize-duration](https://github.com/EvanHahn/HumanizeDuration.js) | `3.33.1` | `3.33.2` |
| [pdfjs-dist](https://github.com/mozilla/pdf.js) | `5.4.394` | `5.4.449` |
| [vue](https://github.com/vuejs/core) | `3.5.24` | `3.5.25` |
| [yaml](https://github.com/eemeli/yaml) | `2.8.1` | `2.8.2` |
| [lint-staged](https://github.com/lint-staged/lint-staged) | `16.2.6` | `16.2.7` |
| [rimraf](https://github.com/isaacs/rimraf) | `6.1.0` | `6.1.2` |
| [rolldown-vite](https://github.com/vitejs/rolldown-vite/tree/HEAD/packages/vite) | `7.2.6` | `7.2.10` |
| [vue-tsc](https://github.com/vuejs/language-tools/tree/HEAD/packages/tsc) | `3.1.4` | `3.1.8` |


Updates `@kestra-io/ui-libs` from 0.0.264 to 0.0.266

Updates `humanize-duration` from 3.33.1 to 3.33.2
- [Changelog](https://github.com/EvanHahn/HumanizeDuration.js/blob/main/HISTORY.md)
- [Commits](https://github.com/EvanHahn/HumanizeDuration.js/compare/v3.33.1...v3.33.2)

Updates `pdfjs-dist` from 5.4.394 to 5.4.449
- [Release notes](https://github.com/mozilla/pdf.js/releases)
- [Commits](https://github.com/mozilla/pdf.js/compare/v5.4.394...v5.4.449)

Updates `vue` from 3.5.24 to 3.5.25
- [Release notes](https://github.com/vuejs/core/releases)
- [Changelog](https://github.com/vuejs/core/blob/main/CHANGELOG.md)
- [Commits](https://github.com/vuejs/core/compare/v3.5.24...v3.5.25)

Updates `yaml` from 2.8.1 to 2.8.2
- [Release notes](https://github.com/eemeli/yaml/releases)
- [Commits](https://github.com/eemeli/yaml/compare/v2.8.1...v2.8.2)

Updates `lint-staged` from 16.2.6 to 16.2.7
- [Release notes](https://github.com/lint-staged/lint-staged/releases)
- [Changelog](https://github.com/lint-staged/lint-staged/blob/main/CHANGELOG.md)
- [Commits](https://github.com/lint-staged/lint-staged/compare/v16.2.6...v16.2.7)

Updates `rimraf` from 6.1.0 to 6.1.2
- [Changelog](https://github.com/isaacs/rimraf/blob/main/CHANGELOG.md)
- [Commits](https://github.com/isaacs/rimraf/compare/v6.1.0...v6.1.2)

Updates `rolldown-vite` from 7.2.6 to 7.2.10
- [Release notes](https://github.com/vitejs/rolldown-vite/releases)
- [Changelog](https://github.com/vitejs/rolldown-vite/blob/rolldown-vite/packages/vite/CHANGELOG.md)
- [Commits](https://github.com/vitejs/rolldown-vite/commits/v7.2.10/packages/vite)

Updates `vue-tsc` from 3.1.4 to 3.1.8
- [Release notes](https://github.com/vuejs/language-tools/releases)
- [Changelog](https://github.com/vuejs/language-tools/blob/master/CHANGELOG.md)
- [Commits](https://github.com/vuejs/language-tools/commits/v3.1.8/packages/tsc)

---
updated-dependencies:
- dependency-name: "@kestra-io/ui-libs"
  dependency-version: 0.0.266
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: patch
- dependency-name: humanize-duration
  dependency-version: 3.33.2
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: patch
- dependency-name: pdfjs-dist
  dependency-version: 5.4.449
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: patch
- dependency-name: vue
  dependency-version: 3.5.25
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: patch
- dependency-name: yaml
  dependency-version: 2.8.2
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: patch
- dependency-name: lint-staged
  dependency-version: 16.2.7
  dependency-type: direct:development
  update-type: version-update:semver-patch
  dependency-group: patch
- dependency-name: rimraf
  dependency-version: 6.1.2
  dependency-type: direct:development
  update-type: version-update:semver-patch
  dependency-group: patch
- dependency-name: rolldown-vite
  dependency-version: 7.2.10
  dependency-type: direct:development
  update-type: version-update:semver-patch
  dependency-group: patch
- dependency-name: vue-tsc
  dependency-version: 3.1.8
  dependency-type: direct:development
  update-type: version-update:semver-patch
  dependency-group: patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-12-11 13:47:09 +01:00
Shivansh Sharma
3af003e5e4 chore(executions): properly handle the label create/update sequence (#13500)
Closes https://github.com/kestra-io/kestra/issues/13498.

Co-authored-by: Miloš Paunović <paun992@hotmail.com>
2025-12-11 12:51:59 +01:00
Loïc Mathieu
c3861a5532 fix(system): merging collections should not duplicate items
Fixes https://github.com/kestra-io/kestra-ee/issues/6053
2025-12-11 12:14:48 +01:00
Piyush Bhaskar
ae1f10f45a refactor(core): remove the configuration details step (#13606) 2025-12-11 16:44:32 +05:30
Ashutosh Jha
612dccfb8c refactor(core): remove usage of unnecessary i18n composable (#13607)
Closes https://github.com/kestra-io/kestra/issues/13589.

Co-authored-by: MilosPaunovic <paun992@hotmail.com>
2025-12-11 12:04:46 +01:00
Ameen PJ
2ae8df2f5f refactor(core): remove usage of unnecessary i18n composable (#13604)
Closes https://github.com/kestra-io/kestra/issues/13588.

Co-authored-by: ameenpj <ameenjami9@gmail.com>
Co-authored-by: MilosPaunovic <paun992@hotmail.com>
2025-12-11 12:01:32 +01:00
Saif M
1abfa74a16 chore(flows): add links to executions on the flows listing table (#13540)
Closes https://github.com/kestra-io/kestra/issues/13536.

Co-authored-by: Miloš Paunović <paun992@hotmail.com>
2025-12-11 11:17:48 +01:00
Aditya Kumar Puri
69a793b227 chore(executions): amend breadcrumb on the single execution page (#13544)
Closes https://github.com/kestra-io/kestra/issues/13394.

Co-authored-by: Miloš Paunović <paun992@hotmail.com>
2025-12-11 11:02:52 +01:00
EM
35ccb3e39b refactor(core): remove usage of unnecessary i18n composable (#13580)
Closes https://github.com/kestra-io/kestra/issues/13351.

Co-authored-by: MilosPaunovic <paun992@hotmail.com>
2025-12-11 10:51:53 +01:00
Piyush Bhaskar
3a7fcb2aa1 fix(core): changing Filters should reset to page 1 (#13596) 2025-12-11 14:39:11 +05:30
Miloš Paunović
103c5b92e9 chore(executions): show all available date options for the chart on the overview page (#13595)
Related to https://github.com/kestra-io/kestra/issues/13361.
2025-12-11 09:43:33 +01:00
Piyush Bhaskar
5253eeef95 fix(core): add search where it was missed (#13594) 2025-12-11 14:08:14 +05:30
Miloš Paunović
848f835191 chore(core): properly load font weight 600 of public sans (#13593)
Closes https://github.com/kestra-io/kestra/issues/13592.
2025-12-11 09:33:23 +01:00
Malay Dewangan
3e55e67534 feat(plugin-metadata): add plugin metadata (#13539)
* docs(core-plugin-metadata): populate descriptions

---------

Co-authored-by: AJ Emerich <aj-emerich@proton.me>
2025-12-11 11:32:24 +05:30
Nicolas K.
7bca8b4924 fix(repositories): unwanted integer to string conversion (#13586)
* fix(repositories): unwanted integer to string conversion

* fix(repositories): clean code

---------

Co-authored-by: nKwiatkowski <nkwiatkowski@kestra.io>
2025-12-10 18:59:52 +01:00
Loïc Mathieu
56febfb415 fix(executions): don't remove worker task result for killed execution
As killing an execution is asynchronous, it is inherently racy.
So when we kill an execution it moves to the terminal state, which then removes any worker task result to eagerly purge the queue table.

But if such a worker task result arrives late and was not already processed by the executor, it will be purged before ever being able to be processed, so the task would never be updated as KILLED.

Note: this may un-flaky some flaky unit tests

Fixes https://github.com/kestra-io/kestra-ee/issues/6040
2025-12-10 17:27:11 +01:00
Nicolas K.
925b8c6954 fix(flows): deserialize expression without cache (#13576)
* fix(flows): deserialize expression without cache

* fix(flows): remove cache when deserializing expression

* fix(flows): remove cache when deserializing expression

---------

Co-authored-by: nKwiatkowski <nkwiatkowski@kestra.io>
2025-12-10 17:04:50 +01:00
mustafatarek
708816fe67 fix(core): return back to Collection check on yaml/json input values and add example of it in InputsTest 2025-12-10 16:31:14 +01:00
mustafatarek
5502473fa4 chore(core): change variable name to object 2025-12-10 16:31:14 +01:00
mustafatarek
c6cf0147a4 refactor(core): simplify changes by using variable 2025-12-10 16:31:14 +01:00
mustafatarek
2951f4b4bc feat(tests): add test coverage for json input type parsing as map 2025-12-10 16:31:14 +01:00
mustafatarek
4ea13e258b fix(core): fix parsing of json input type as for yaml 2025-12-10 16:31:14 +01:00
mustafatarek
3f8dcb47fd refactor(core): In case of yaml as map just return as it is, no need to serialize/deserialize again 2025-12-10 16:31:14 +01:00
mustafatarek
42dc3b930c fix(tests): pass previewInternalStorageFileFromExecution() test 2025-12-10 16:31:14 +01:00
mustafatarek
97a78abd28 refactor(core): update yaml test structure 2025-12-10 16:31:14 +01:00
mustafatarek
b3b2ef1b5a fix(tests): pass inputs() test 2025-12-10 16:31:14 +01:00
mustafatarek
596a26a137 fix(tests): pass all failed tests 2025-12-10 16:31:14 +01:00
mustafatarek
8a9a1df436 fix(tests): fix failed tests after adding another input 2025-12-10 16:31:14 +01:00
mustafatarek
55d0880ed3 refactor(tests): move test coverage to InputsTest instead of SubflowRunnerTest
- It is related to serializing inputs at resolving phase only
 - Added Inputs as Java Objects using yml should be serialized/deserialized properly to give the same structure at allValidInputs() test
2025-12-10 16:31:14 +01:00
mustafatarek
a74ebd5cd6 fix(tests): fix allValidTypedInputs() test by avoiding serialization of strings or other scalars, only serialize Maps and Collections 2025-12-10 16:31:14 +01:00
mustafatarek
f3aed38964 chore(tests): improve coding format at subflowInputTypeYmlSerialization() test 2025-12-10 16:31:14 +01:00
mustafatarek
2595e56199 feat(tests): add test coverage for subflow inputs serialized with type yaml 2025-12-10 16:31:14 +01:00
mustafatarek
e821bd7f65 refactor(core): use writeValueAsString() for all cases 2025-12-10 16:31:14 +01:00
mustafatarek
09762d2a8d fix(core): serialize subflow inputs with type yml properly 2025-12-10 16:31:14 +01:00
YannC
018c22918f Revert "feat(jdbc): method without auditlog registered for setting repository (#13543)" (#13581)
This reverts commit 3e9c8cf7da.
2025-12-10 16:00:55 +01:00
YannC
3e9c8cf7da feat(jdbc): method without auditlog registered for setting repository (#13543) 2025-12-10 15:52:42 +01:00
Nicolas K.
008404e442 feat(filters): add new operation to filters (#13424)
* feat(filters): add new operation to filters

* feat(filters): add flowId to flow search

* fix(test): fix unit test

---------

Co-authored-by: nKwiatkowski <nkwiatkowski@kestra.io>
2025-12-10 15:17:32 +01:00
Bhuvan C V
2b224bcde8 test(core): add regression test for illegal namespace updates
Adds a missing test case to FlowTest.java to verify that Flow.validateUpdate() correctly prevents namespace modification.
2025-12-10 14:36:00 +01:00
dependabot[bot]
1977b61693 build(deps): bump software.amazon.awssdk:bom from 2.40.0 to 2.40.5
Bumps software.amazon.awssdk:bom from 2.40.0 to 2.40.5.

---
updated-dependencies:
- dependency-name: software.amazon.awssdk:bom
  dependency-version: 2.40.5
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-10 14:31:32 +01:00
dependabot[bot]
8e2267f86c build(deps): bump com.fasterxml.uuid:java-uuid-generator
Bumps [com.fasterxml.uuid:java-uuid-generator](https://github.com/cowtowncoder/java-uuid-generator) from 5.1.1 to 5.2.0.
- [Commits](https://github.com/cowtowncoder/java-uuid-generator/compare/java-uuid-generator-5.1.1...java-uuid-generator-5.2.0)

---
updated-dependencies:
- dependency-name: com.fasterxml.uuid:java-uuid-generator
  dependency-version: 5.2.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-10 14:30:56 +01:00
Miloš Paunović
24355c2a88 refactor(executions): improve the trigger cascader on the overview page (#13524)
Closes https://github.com/kestra-io/kestra/issues/12942.
Closes https://github.com/kestra-io/kestra/issues/13283.
Closes https://github.com/kestra-io/kestra/issues/13290.
Closes https://github.com/kestra-io/kestra/issues/13294.
2025-12-10 13:08:16 +01:00
dependabot[bot]
51adcfa908 build(deps): bump flyingSaucerVersion from 10.0.5 to 10.0.6
Bumps `flyingSaucerVersion` from 10.0.5 to 10.0.6.

Updates `org.xhtmlrenderer:flying-saucer-core` from 10.0.5 to 10.0.6
- [Release notes](https://github.com/flyingsaucerproject/flyingsaucer/releases)
- [Changelog](https://github.com/flyingsaucerproject/flyingsaucer/blob/main/CHANGELOG.md)
- [Commits](https://github.com/flyingsaucerproject/flyingsaucer/compare/v10.0.5...v10.0.6)

Updates `org.xhtmlrenderer:flying-saucer-pdf` from 10.0.5 to 10.0.6
- [Release notes](https://github.com/flyingsaucerproject/flyingsaucer/releases)
- [Changelog](https://github.com/flyingsaucerproject/flyingsaucer/blob/main/CHANGELOG.md)
- [Commits](https://github.com/flyingsaucerproject/flyingsaucer/compare/v10.0.5...v10.0.6)

---
updated-dependencies:
- dependency-name: org.xhtmlrenderer:flying-saucer-core
  dependency-version: 10.0.6
  dependency-type: direct:production
  update-type: version-update:semver-patch
- dependency-name: org.xhtmlrenderer:flying-saucer-pdf
  dependency-version: 10.0.6
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-10 12:17:45 +01:00
dependabot[bot]
a55baa1f96 build(deps): bump software.amazon.awssdk.crt:aws-crt
Bumps [software.amazon.awssdk.crt:aws-crt](https://github.com/awslabs/aws-crt-java) from 0.40.1 to 0.40.3.
- [Release notes](https://github.com/awslabs/aws-crt-java/releases)
- [Commits](https://github.com/awslabs/aws-crt-java/compare/v0.40.1...v0.40.3)

---
updated-dependencies:
- dependency-name: software.amazon.awssdk.crt:aws-crt
  dependency-version: 0.40.3
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-10 12:17:21 +01:00
dependabot[bot]
32793fde18 build(deps): bump com.microsoft.playwright:playwright
Bumps [com.microsoft.playwright:playwright](https://github.com/microsoft/playwright-java) from 1.56.0 to 1.57.0.
- [Release notes](https://github.com/microsoft/playwright-java/releases)
- [Commits](https://github.com/microsoft/playwright-java/compare/v1.56.0...v1.57.0)

---
updated-dependencies:
- dependency-name: com.microsoft.playwright:playwright
  dependency-version: 1.57.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-10 12:15:37 +01:00
dependabot[bot]
4381d585ec build(deps): bump org.sonarqube from 7.1.0.6387 to 7.2.0.6526
Bumps org.sonarqube from 7.1.0.6387 to 7.2.0.6526.

---
updated-dependencies:
- dependency-name: org.sonarqube
  dependency-version: 7.2.0.6526
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-10 12:15:19 +01:00
dependabot[bot]
e595e26c45 build(deps): bump org.jooq:jooq from 3.20.9 to 3.20.10
Bumps org.jooq:jooq from 3.20.9 to 3.20.10.

---
updated-dependencies:
- dependency-name: org.jooq:jooq
  dependency-version: 3.20.10
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-10 12:15:00 +01:00
Miloš Paunović
b833cf28b5 chore(core): use the name property for tour flow element labels (#13569)
Closes https://github.com/kestra-io/kestra/issues/13546.
2025-12-10 10:46:36 +01:00
dependabot[bot]
ac11e9545c build(deps-dev): bump @types/node in /ui in the types group (#13564)
Bumps the types group in /ui with 1 update: [@types/node](https://github.com/DefinitelyTyped/DefinitelyTyped/tree/HEAD/types/node).


Updates `@types/node` from 24.10.1 to 24.10.2
- [Release notes](https://github.com/DefinitelyTyped/DefinitelyTyped/releases)
- [Commits](https://github.com/DefinitelyTyped/DefinitelyTyped/commits/HEAD/types/node)

---
updated-dependencies:
- dependency-name: "@types/node"
  dependency-version: 24.10.2
  dependency-type: direct:development
  update-type: version-update:semver-patch
  dependency-group: types
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-12-10 08:54:02 +01:00
dependabot[bot]
a07df5f6cd build(deps): bump the build group in /ui with 3 updates (#13563)
Bumps the build group in /ui with 3 updates: [@esbuild/darwin-arm64](https://github.com/evanw/esbuild), [@esbuild/darwin-x64](https://github.com/evanw/esbuild) and [@esbuild/linux-x64](https://github.com/evanw/esbuild).


Updates `@esbuild/darwin-arm64` from 0.27.0 to 0.27.1
- [Release notes](https://github.com/evanw/esbuild/releases)
- [Changelog](https://github.com/evanw/esbuild/blob/main/CHANGELOG.md)
- [Commits](https://github.com/evanw/esbuild/compare/v0.27.0...v0.27.1)

Updates `@esbuild/darwin-x64` from 0.27.0 to 0.27.1
- [Release notes](https://github.com/evanw/esbuild/releases)
- [Changelog](https://github.com/evanw/esbuild/blob/main/CHANGELOG.md)
- [Commits](https://github.com/evanw/esbuild/compare/v0.27.0...v0.27.1)

Updates `@esbuild/linux-x64` from 0.27.0 to 0.27.1
- [Release notes](https://github.com/evanw/esbuild/releases)
- [Changelog](https://github.com/evanw/esbuild/blob/main/CHANGELOG.md)
- [Commits](https://github.com/evanw/esbuild/compare/v0.27.0...v0.27.1)

---
updated-dependencies:
- dependency-name: "@esbuild/darwin-arm64"
  dependency-version: 0.27.1
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: build
- dependency-name: "@esbuild/darwin-x64"
  dependency-version: 0.27.1
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: build
- dependency-name: "@esbuild/linux-x64"
  dependency-version: 0.27.1
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: build
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-12-10 08:53:46 +01:00
Bikash Agarwala
f626c85346 fix: allow zero-byte file uploads in execution inputs (fixes #8218) 2025-12-09 12:52:25 +01:00
Roman Acevedo
e15b53ebb5 ci: add missing GH_PERSONAL_TOKEN in main-build.yml 2025-12-09 10:55:43 +01:00
Roman Acevedo
7edb6bc379 fix(blueprints): bring back up edit button
- fixes https://github.com/kestra-io/kestra-ee/issues/6070
2025-12-09 10:45:27 +01:00
Roman Acevedo
78c81f932b ci: add GH_PERSONAL_TOKEN in release-docker.yml CI for helm chart 2025-12-09 10:14:33 +01:00
Shankar
56bb3ca29c Fix week format in filter 2025-12-09 10:00:37 +01:00
Loïc Mathieu
14029e8c14 chore(tests): isolate concurrency related tests in their own class 2025-12-09 09:57:49 +01:00
char zheng
bea3d63d89 fix(executions): concurrency limit exceeded for KILLED execution
Fixes #13211
2025-12-09 09:57:49 +01:00
Shivansh Sharma
24a3bbd303 11229 : Ensure that a label key cannot contain spaces, special characters or encoded values (e.g. emojis) 2025-12-09 09:32:22 +01:00
Nirnay
f9932af2e8 fix(ui): Updated the trigger button (#13521) 2025-12-09 13:56:24 +05:30
Avirup Banik
e0410c8f24 made system overview page responsive (#13527) 2025-12-09 12:10:25 +05:30
Loïc Mathieu
424a6cb41a fix(execution): skip the render cache in flowable for properties used to compute next tasks
As when the flowable is itself in a flowable that processes tasks concurrently, like the ForEach when using a concurrency limit, it can be done multiple times with different values.

This can only occur if the expression is using `taskRun.value`.

Fixes https://github.com/kestra-io/kestra-ee/issues/6055
2025-12-08 15:03:06 +01:00
Debjyoti Shit
afde71e913 fix(core): skip login screen after initial setup and send to welcome (#13489)
Co-authored-by: Miloš Paunović <paun992@hotmail.com>
Co-authored-by: Piyush Bhaskar <impiyush0012@gmail.com>
2025-12-08 18:09:50 +05:30
Miloš Paunović
086c32e711 chore(flows): redirect with applied filters from the overview page (#13522)
Closes https://github.com/kestra-io/kestra/issues/13392.
2025-12-08 10:52:46 +01:00
github-actions[bot]
710abcfaac chore(core): localize to languages other than english (#13520)
Co-authored-by: GitHub Action <actions@github.com>
2025-12-08 14:26:03 +05:30
Kavyakapoor
be951d015c fix(core): make password requirement descriptive. (#13483) 2025-12-08 14:15:11 +05:30
Piyush Bhaskar
a07260bef4 fix(core): refine navigation for authentication and setup routes (#13517) 2025-12-08 14:13:28 +05:30
Piyush Bhaskar
dd19f8391d chore(version): bump ui-libs (#13518) 2025-12-08 14:11:59 +05:30
mustafatarek
354873e220 chore(core): remove unnecessary attempt list copying 2025-12-08 09:41:10 +01:00
luoxin
386d4a15f0 fix(system): enable parallel loading for namespace files. (#13375)
* perf(core): enable parallel loading for namespace files.

* refactor(core): extract thread count calculation to avoid duplication.

* resolve namespaceFilesWithNamespaces test error.

---------

Co-authored-by: luoxin5 <luoxin5@xiaomi.com>
2025-12-08 09:23:35 +01:00
Yaswanth B
1b75f15680 refactor(core): remove usage of unnecessary i18n composable (#13492)
Closes https://github.com/kestra-io/kestra/issues/13352.

Co-authored-by: MilosPaunovic <paun992@hotmail.com>
2025-12-08 08:42:56 +01:00
Richard-Mackey
957bf74d97 fix(core): make menuCollapsed = true on small screen (#13238)
Co-authored-by: Piyush Bhaskar <impiyush0012@gmail.com>
2025-12-06 02:58:37 +05:30
205 changed files with 3567 additions and 2352 deletions

View File

@@ -51,7 +51,7 @@ updates:
storybook:
applies-to: version-updates
patterns: ["storybook*", "@storybook/*"]
patterns: ["storybook*", "@storybook/*", "eslint-plugin-storybook"]
vitest:
applies-to: version-updates
@@ -67,10 +67,10 @@ updates:
"@types/*",
"storybook*",
"@storybook/*",
"eslint-plugin-storybook",
"vitest",
"@vitest/*",
# Temporary exclusion of these packages from major updates
"eslint-plugin-storybook",
"eslint-plugin-vue",
]
@@ -84,6 +84,7 @@ updates:
"@types/*",
"storybook*",
"@storybook/*",
"eslint-plugin-storybook",
"vitest",
"@vitest/*",
# Temporary exclusion of these packages from minor updates
@@ -102,6 +103,7 @@ updates:
"@types/*",
"storybook*",
"@storybook/*",
"eslint-plugin-storybook",
"vitest",
"@vitest/*",
]

View File

@@ -64,6 +64,7 @@ jobs:
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }}
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
GH_PERSONAL_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}
publish-develop-maven:

View File

@@ -32,3 +32,4 @@ jobs:
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }}
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
GH_PERSONAL_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}

View File

@@ -29,8 +29,8 @@ start_time2=$(date +%s)
echo "cd ./ui"
cd ./ui
echo "npm i"
npm i
echo "npm ci"
npm ci
echo 'sh ./run-e2e-tests.sh --kestra-docker-image-to-test "kestra/kestra:$LOCAL_IMAGE_VERSION"'
./run-e2e-tests.sh --kestra-docker-image-to-test "kestra/kestra:$LOCAL_IMAGE_VERSION"

View File

@@ -21,7 +21,7 @@ plugins {
// test
id "com.adarshr.test-logger" version "4.0.0"
id "org.sonarqube" version "7.1.0.6387"
id "org.sonarqube" version "7.2.1.6560"
id 'jacoco-report-aggregation'
// helper
@@ -331,7 +331,7 @@ subprojects {
}
dependencies {
agent "org.aspectj:aspectjweaver:1.9.25"
agent "org.aspectj:aspectjweaver:1.9.25.1"
}
test {

View File

@@ -82,8 +82,8 @@ dependencies {
testImplementation "io.micronaut:micronaut-http-server-netty"
testImplementation "io.micronaut:micronaut-management"
testImplementation "org.testcontainers:testcontainers:1.21.3"
testImplementation "org.testcontainers:junit-jupiter:1.21.3"
testImplementation "org.testcontainers:testcontainers:1.21.4"
testImplementation "org.testcontainers:junit-jupiter:1.21.4"
testImplementation "org.bouncycastle:bcpkix-jdk18on"
testImplementation "org.wiremock:wiremock-jetty12"

View File

@@ -3,6 +3,7 @@ package io.kestra.core.docs;
import io.kestra.core.models.annotations.PluginSubGroup;
import io.kestra.core.plugins.RegisteredPlugin;
import io.micronaut.core.annotation.Nullable;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
import lombok.NoArgsConstructor;
@@ -117,10 +118,17 @@ public class Plugin {
.filter(not(io.kestra.core.models.Plugin::isInternal))
.filter(clazzFilter)
.filter(c -> !c.getName().startsWith("org.kestra."))
.map(c -> new PluginElementMetadata(c.getName(), io.kestra.core.models.Plugin.isDeprecated(c) ? true : null))
.map(c -> {
Schema schema = c.getAnnotation(Schema.class);
var title = Optional.ofNullable(schema).map(Schema::title).filter(t -> !t.isEmpty()).orElse(null);
var description = Optional.ofNullable(schema).map(Schema::description).filter(d -> !d.isEmpty()).orElse(null);
var deprecated = io.kestra.core.models.Plugin.isDeprecated(c) ? true : null;
return new PluginElementMetadata(c.getName(), deprecated, title, description);
})
.toList();
}
public record PluginElementMetadata(String cls, Boolean deprecated) {
}
public record PluginElementMetadata(String cls, Boolean deprecated, String title, String description) {}
}

View File

@@ -4,13 +4,16 @@ import io.kestra.core.utils.MapUtils;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.annotation.Nullable;
import jakarta.validation.constraints.NotEmpty;
import jakarta.validation.constraints.Pattern;
import java.util.*;
import java.util.function.Predicate;
import java.util.stream.Collectors;
@Schema(description = "A key/value pair that can be attached to a Flow or Execution. Labels are often used to organize and categorize objects.")
public record Label(@NotEmpty String key, @NotEmpty String value) {
public record Label(
@NotEmpty @Pattern(regexp = "^[\\p{Ll}][\\p{L}0-9._-]*$", message = "Invalid label key. A valid key contains only lowercase letters numbers hyphens (-) underscores (_) or periods (.) and must begin with a lowercase letter.") String key,
@NotEmpty String value) {
public static final String SYSTEM_PREFIX = "system.";
// system labels
@@ -23,6 +26,7 @@ public record Label(@NotEmpty String key, @NotEmpty String value) {
public static final String REPLAYED = SYSTEM_PREFIX + "replayed";
public static final String SIMULATED_EXECUTION = SYSTEM_PREFIX + "simulatedExecution";
public static final String TEST = SYSTEM_PREFIX + "test";
public static final String FROM = SYSTEM_PREFIX + "from";
/**
* Static helper method for converting a list of labels to a nested map.

View File

@@ -94,7 +94,7 @@ public record QueryFilter(
KIND("kind") {
@Override
public List<Op> supportedOp() {
return List.of(Op.EQUALS,Op.NOT_EQUALS);
return List.of(Op.EQUALS,Op.NOT_EQUALS, Op.IN, Op.NOT_IN);
}
},
LABELS("labels") {
@@ -106,7 +106,7 @@ public record QueryFilter(
FLOW_ID("flowId") {
@Override
public List<Op> supportedOp() {
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.CONTAINS, Op.STARTS_WITH, Op.ENDS_WITH, Op.REGEX);
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.CONTAINS, Op.STARTS_WITH, Op.ENDS_WITH, Op.REGEX, Op.IN, Op.NOT_IN, Op.PREFIX);
}
},
UPDATED("updated") {
@@ -226,7 +226,7 @@ public record QueryFilter(
FLOW {
@Override
public List<Field> supportedField() {
return List.of(Field.LABELS, Field.NAMESPACE, Field.QUERY, Field.SCOPE);
return List.of(Field.LABELS, Field.NAMESPACE, Field.QUERY, Field.SCOPE, Field.FLOW_ID);
}
},
NAMESPACE {
@@ -241,7 +241,7 @@ public record QueryFilter(
return List.of(
Field.QUERY, Field.SCOPE, Field.FLOW_ID, Field.START_DATE, Field.END_DATE,
Field.STATE, Field.LABELS, Field.TRIGGER_EXECUTION_ID, Field.CHILD_FILTER,
Field.NAMESPACE,Field.KIND
Field.NAMESPACE, Field.KIND
);
}
},

View File

@@ -16,6 +16,7 @@ import jakarta.validation.constraints.NotNull;
public class Setting {
public static final String INSTANCE_UUID = "instance.uuid";
public static final String INSTANCE_VERSION = "instance.version";
public static final String INSTANCE_EDITION = "instance.edition";
@NotNull
private String key;

View File

@@ -1,6 +1,5 @@
package io.kestra.core.models.flows;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
@@ -130,7 +129,7 @@ public class Flow extends AbstractFlow implements HasUID {
@Valid
@PluginProperty
List<SLA> sla;
@Schema(
title = "Conditions evaluated before the flow is executed.",
description = "A list of conditions that are evaluated before the flow is executed. If no checks are defined, the flow executes normally."
@@ -355,7 +354,7 @@ public class Flow extends AbstractFlow implements HasUID {
* To be conservative a flow MUST not return any source.
*/
@Override
@JsonIgnore
@Schema(hidden = true)
public String getSource() {
return null;
}

View File

@@ -1,14 +1,12 @@
package io.kestra.core.models.flows;
import com.fasterxml.jackson.annotation.JsonIgnore;
import io.micronaut.core.annotation.Introspected;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.ToString;
import lombok.experimental.SuperBuilder;
import io.swagger.v3.oas.annotations.media.Schema;
import java.util.Objects;
import java.util.regex.Pattern;
@SuperBuilder(toBuilder = true)
@Getter
@@ -48,7 +46,7 @@ public class FlowWithSource extends Flow {
}
@Override
@JsonIgnore(value = false)
@Schema(hidden = false)
public String getSource() {
return this.source;
}

View File

@@ -267,6 +267,10 @@ public class State {
return this == Type.RUNNING || this == Type.KILLING;
}
public boolean onlyRunning() {
return this == Type.RUNNING;
}
public boolean isFailed() {
return this == Type.FAILED;
}

View File

@@ -93,7 +93,7 @@ public class Property<T> {
* @return a new {@link Property} without a pre-rendered value
*/
public Property<T> skipCache() {
return Property.ofExpression(expression);
return new Property<>(expression, true);
}
/**

View File

@@ -82,6 +82,12 @@ abstract public class AbstractTrigger implements TriggerInterface {
@PluginProperty(hidden = true, group = PluginProperty.CORE_GROUP)
private boolean failOnTriggerError = false;
@PluginProperty(group = PluginProperty.CORE_GROUP)
@Schema(
title = "Specifies whether a trigger is allowed to start a new execution even if a previous run is still in progress."
)
private boolean allowConcurrent = false;
/**
* For backward compatibility: we rename minLogLevel to logLevel.
* @deprecated use {@link #logLevel} instead

View File

@@ -1,22 +1,37 @@
package io.kestra.core.models.triggers;
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.models.annotations.PluginProperty;
import io.kestra.core.models.conditions.ConditionContext;
import io.kestra.core.runners.RunContext;
import io.swagger.v3.oas.annotations.media.Schema;
import java.time.ZonedDateTime;
import java.util.Map;
public interface Schedulable extends PollingTriggerInterface{
String PLUGIN_PROPERTY_RECOVER_MISSED_SCHEDULES = "recoverMissedSchedules";
@Schema(
title = "The inputs to pass to the scheduled flow"
)
@PluginProperty(dynamic = true)
Map<String, Object> getInputs();
@Schema(
title = "Action to take in the case of missed schedules",
description = "`ALL` will recover all missed schedules, `LAST` will only recovered the last missing one, `NONE` will not recover any missing schedule.\n" +
"The default is `ALL` unless a different value is configured using the global plugin configuration."
)
@PluginProperty
RecoverMissedSchedules getRecoverMissedSchedules();
/**
* Compute the previous evaluation of a trigger.
* This is used when a trigger misses some schedule to compute the next date to evaluate in the past.
*/
ZonedDateTime previousEvaluationDate(ConditionContext conditionContext) throws IllegalVariableEvaluationException;
RecoverMissedSchedules getRecoverMissedSchedules();
/**
* Load the default RecoverMissedSchedules from plugin property, or else ALL.
*/

View File

@@ -172,7 +172,7 @@ public class Trigger extends TriggerContext implements HasUID {
if (abstractTrigger instanceof PollingTriggerInterface pollingTriggerInterface) {
try {
nextDate = pollingTriggerInterface.nextEvaluationDate(conditionContext, Optional.empty());
nextDate = pollingTriggerInterface.nextEvaluationDate(conditionContext, lastTrigger);
} catch (InvalidTriggerConfigurationException e) {
disabled = true;
}

View File

@@ -6,12 +6,9 @@ import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.ExecutionTrigger;
import io.kestra.core.models.tasks.Output;
import io.kestra.core.models.flows.State;
import io.kestra.core.runners.DefaultRunContext;
import io.kestra.core.runners.FlowInputOutput;
import io.kestra.core.runners.RunContext;
import io.kestra.core.utils.IdUtils;
import io.kestra.core.utils.ListUtils;
import java.time.ZonedDateTime;
import java.util.*;
public abstract class TriggerService {
@@ -51,58 +48,6 @@ public abstract class TriggerService {
return generateExecution(IdUtils.create(), trigger, context, executionTrigger, conditionContext);
}
public static Execution generateScheduledExecution(
AbstractTrigger trigger,
ConditionContext conditionContext,
TriggerContext context,
List<Label> labels,
Map<String, Object> inputs,
Map<String, Object> variables,
Optional<ZonedDateTime> scheduleDate
) {
RunContext runContext = conditionContext.getRunContext();
ExecutionTrigger executionTrigger = ExecutionTrigger.of(trigger, variables);
List<Label> executionLabels = new ArrayList<>(ListUtils.emptyOnNull(labels));
if (executionLabels.stream().noneMatch(label -> Label.CORRELATION_ID.equals(label.key()))) {
// add a correlation ID if none exist
executionLabels.add(new Label(Label.CORRELATION_ID, runContext.getTriggerExecutionId()));
}
Execution execution = Execution.builder()
.id(runContext.getTriggerExecutionId())
.tenantId(context.getTenantId())
.namespace(context.getNamespace())
.flowId(context.getFlowId())
.flowRevision(conditionContext.getFlow().getRevision())
.variables(conditionContext.getFlow().getVariables())
.labels(executionLabels)
.state(new State())
.trigger(executionTrigger)
.scheduleDate(scheduleDate.map(date -> date.toInstant()).orElse(null))
.build();
Map<String, Object> allInputs = new HashMap<>();
// add flow inputs with default value
var flow = conditionContext.getFlow();
if (flow.getInputs() != null) {
flow.getInputs().stream()
.filter(input -> input.getDefaults() != null)
.forEach(input -> allInputs.put(input.getId(), input.getDefaults()));
}
if (inputs != null) {
allInputs.putAll(inputs);
}
// add inputs and inject defaults
if (!allInputs.isEmpty()) {
FlowInputOutput flowInputOutput = ((DefaultRunContext)runContext).getApplicationContext().getBean(FlowInputOutput.class);
execution = execution.withInputs(flowInputOutput.readExecutionInputs(conditionContext.getFlow(), execution, allInputs));
}
return execution;
}
private static Execution generateExecution(
String id,
AbstractTrigger trigger,
@@ -111,6 +56,7 @@ public abstract class TriggerService {
ConditionContext conditionContext
) {
List<Label> executionLabels = new ArrayList<>(ListUtils.emptyOnNull(trigger.getLabels()));
executionLabels.add(new Label(Label.FROM, "trigger"));
if (executionLabels.stream().noneMatch(label -> Label.CORRELATION_ID.equals(label.key()))) {
// add a correlation ID if none exist
executionLabels.add(new Label(Label.CORRELATION_ID, id));

View File

@@ -1,10 +1,10 @@
package io.kestra.core.repositories;
import io.kestra.core.models.Setting;
import jakarta.validation.ConstraintViolationException;
import java.util.List;
import java.util.Optional;
import jakarta.validation.ConstraintViolationException;
public interface SettingRepositoryInterface {
Optional<Setting> findByKey(String key);
@@ -13,5 +13,7 @@ public interface SettingRepositoryInterface {
Setting save(Setting setting) throws ConstraintViolationException;
Setting internalSave(Setting setting) throws ConstraintViolationException;
Setting delete(Setting setting);
}

View File

@@ -16,8 +16,8 @@ import java.util.function.Function;
public interface TriggerRepositoryInterface extends QueryBuilderInterface<Triggers.Fields> {
Optional<Trigger> findLast(TriggerContext trigger);
Optional<Trigger> findByExecution(Execution execution);
Optional<Trigger> findByUid(String uid);
List<Trigger> findAll(String tenantId);
List<Trigger> findAllForAllTenants();

View File

@@ -6,10 +6,12 @@ import com.google.common.base.CaseFormat;
import com.google.common.collect.ImmutableMap;
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.metrics.MetricRegistry;
import io.kestra.core.models.Plugin;
import io.kestra.core.models.executions.AbstractMetricEntry;
import io.kestra.core.models.property.Property;
import io.kestra.core.models.tasks.Task;
import io.kestra.core.models.triggers.AbstractTrigger;
import io.kestra.core.plugins.PluginConfigurations;
import io.kestra.core.services.KVStoreService;
import io.kestra.core.storages.Storage;
import io.kestra.core.storages.StorageInterface;
@@ -235,6 +237,14 @@ public class DefaultRunContext extends RunContext {
return runContext;
}
@Override
public RunContext cloneForPlugin(Plugin plugin) {
PluginConfigurations pluginConfigurations = applicationContext.getBean(PluginConfigurations.class);
DefaultRunContext runContext = clone();
runContext.pluginConfiguration = pluginConfigurations.getConfigurationByPluginTypeOrAliases(plugin.getType(), plugin.getClass());
return runContext;
}
/**
* {@inheritDoc}
*/
@@ -589,6 +599,11 @@ public class DefaultRunContext extends RunContext {
return localPath;
}
@Override
public InputAndOutput inputAndOutput() {
return new InputAndOutputImpl(this.applicationContext, this);
}
/**
* Builder class for constructing new {@link DefaultRunContext} objects.
*/

View File

@@ -189,12 +189,11 @@ public final class ExecutableUtils {
variables.put("taskRunIteration", currentTaskRun.getIteration());
}
FlowInputOutput flowInputOutput = ((DefaultRunContext)runContext).getApplicationContext().getBean(FlowInputOutput.class);
Instant scheduleOnDate = runContext.render(scheduleDate).as(ZonedDateTime.class).map(date -> date.toInstant()).orElse(null);
Execution execution = Execution
.newExecution(
flow,
(f, e) -> flowInputOutput.readExecutionInputs(f, e, inputs),
(f, e) -> runContext.inputAndOutput().readInputs(f, e, inputs),
newLabels,
Optional.empty())
.withTrigger(ExecutionTrigger.builder()

View File

@@ -3,13 +3,11 @@ package io.kestra.core.runners;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.kestra.core.encryption.EncryptionService;
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.exceptions.KestraRuntimeException;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Data;
import io.kestra.core.models.flows.DependsOn;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.Input;
import io.kestra.core.models.flows.Output;
import io.kestra.core.models.flows.RenderableInput;
import io.kestra.core.models.flows.Type;
import io.kestra.core.models.flows.input.FileInput;
@@ -158,11 +156,7 @@ public class FlowInputOutput {
File tempFile = File.createTempFile(prefix, fileExtension);
try (var inputStream = fileUpload.getInputStream();
var outputStream = new FileOutputStream(tempFile)) {
long transferredBytes = inputStream.transferTo(outputStream);
if (transferredBytes == 0) {
sink.error(new KestraRuntimeException("Can't upload file: " + fileUpload.getFilename()));
return;
}
inputStream.transferTo(outputStream);
URI from = storageInterface.from(execution, inputId, fileName, tempFile);
sink.next(Map.entry(inputId, from.toString()));
} finally {
@@ -382,11 +376,11 @@ public class FlowInputOutput {
@SuppressWarnings("unchecked")
private static <T> Object resolveDefaultPropertyAs(Input<?> input, PropertyContext renderer, Class<T> clazz) throws IllegalVariableEvaluationException {
return Property.as((Property<T>) input.getDefaults(), renderer, clazz);
return Property.as((Property<T>) input.getDefaults().skipCache(), renderer, clazz);
}
@SuppressWarnings("unchecked")
private static <T> Object resolveDefaultPropertyAsList(Input<?> input, PropertyContext renderer, Class<T> clazz) throws IllegalVariableEvaluationException {
return Property.asList((Property<List<T>>) input.getDefaults(), renderer, clazz);
return Property.asList((Property<List<T>>) input.getDefaults().skipCache(), renderer, clazz);
}
private RunContext buildRunContextForExecutionAndInputs(final FlowInterface flow, final Execution execution, Map<String, InputAndValue> dependencies, final boolean decryptSecrets) {
@@ -502,8 +496,8 @@ public class FlowInputOutput {
yield storageInterface.from(execution, id, current.toString().substring(current.toString().lastIndexOf("/") + 1), new File(current.toString()));
}
}
case JSON -> JacksonMapper.toObject(current.toString());
case YAML -> YAML_MAPPER.readValue(current.toString(), JacksonMapper.OBJECT_TYPE_REFERENCE);
case JSON -> (current instanceof Map || current instanceof Collection<?>) ? current : JacksonMapper.toObject(current.toString());
case YAML -> (current instanceof Map || current instanceof Collection<?>) ? current : YAML_MAPPER.readValue(current.toString(), JacksonMapper.OBJECT_TYPE_REFERENCE);
case URI -> {
Matcher matcher = URI_PATTERN.matcher(current.toString());
if (matcher.matches()) {
@@ -543,30 +537,6 @@ public class FlowInputOutput {
}
}
public static Map<String, Object> renderFlowOutputs(List<Output> outputs, RunContext runContext) throws IllegalVariableEvaluationException {
if (outputs == null) return Map.of();
// render required outputs
Map<String, Object> outputsById = outputs
.stream()
.filter(output -> output.getRequired() == null || output.getRequired())
.collect(HashMap::new, (map, entry) -> map.put(entry.getId(), entry.getValue()), Map::putAll);
outputsById = runContext.render(outputsById);
// render optional outputs one by one to catch, log, and skip any error.
for (io.kestra.core.models.flows.Output output : outputs) {
if (Boolean.FALSE.equals(output.getRequired())) {
try {
outputsById.putAll(runContext.render(Map.of(output.getId(), output.getValue())));
} catch (Exception e) {
runContext.logger().warn("Failed to render optional flow output '{}'. Output is ignored.", output.getId(), e);
outputsById.put(output.getId(), null);
}
}
}
return outputsById;
}
/**
* Mutable wrapper to hold a flow's input, and it's resolved value.
*/

View File

@@ -0,0 +1,29 @@
package io.kestra.core.runners;
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.Output;
import java.util.List;
import java.util.Map;
/**
 * InputAndOutput could be used to work with flow execution inputs and outputs.
 * Implementations are obtained via {@code RunContext#inputAndOutput()}.
 */
public interface InputAndOutput {
/**
 * Reads the inputs of a flow execution.
 *
 * @param flow the flow declaring the inputs
 * @param execution the execution the inputs belong to
 * @param inputs the raw input values to resolve
 * @return the resolved input values keyed by input id
 */
Map<String, Object> readInputs(FlowInterface flow, Execution execution, Map<String, Object> inputs);
/**
 * Processes the outputs of a flow execution (parse them based on their types).
 *
 * @param flow the flow declaring the outputs
 * @param execution the execution the outputs belong to
 * @param rOutputs the rendered output values to type
 * @return the typed output values keyed by output id
 */
Map<String, Object> typedOutputs(FlowInterface flow, Execution execution, Map<String, Object> rOutputs);
/**
 * Render flow execution outputs.
 *
 * @param outputs the output declarations to render; may be {@code null}, in which case an empty map is returned
 * @return the rendered output values keyed by output id; optional outputs that fail to render are set to {@code null}
 * @throws IllegalVariableEvaluationException if a required output fails to render
 */
Map<String, Object> renderOutputs(List<Output> outputs) throws IllegalVariableEvaluationException;
}

View File

@@ -0,0 +1,56 @@
package io.kestra.core.runners;
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.Output;
import io.micronaut.context.ApplicationContext;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Default {@link InputAndOutput} implementation backed by the application's {@link FlowInputOutput} bean.
 */
class InputAndOutputImpl implements InputAndOutput {
    private final FlowInputOutput flowInputOutput;
    private final RunContext runContext;

    InputAndOutputImpl(ApplicationContext applicationContext, RunContext runContext) {
        this.flowInputOutput = applicationContext.getBean(FlowInputOutput.class);
        this.runContext = runContext;
    }

    /** Delegates input resolution to {@link FlowInputOutput}. */
    @Override
    public Map<String, Object> readInputs(FlowInterface flow, Execution execution, Map<String, Object> inputs) {
        return flowInputOutput.readExecutionInputs(flow, execution, inputs);
    }

    /** Delegates output typing to {@link FlowInputOutput}. */
    @Override
    public Map<String, Object> typedOutputs(FlowInterface flow, Execution execution, Map<String, Object> rOutputs) {
        return flowInputOutput.typedOutputs(flow, execution, rOutputs);
    }

    /**
     * Renders flow outputs. Required outputs (required == null or true) are rendered together so any
     * rendering error propagates; optional outputs (required == false) are rendered one by one so a
     * failure only logs a warning and nulls that single output.
     */
    @Override
    public Map<String, Object> renderOutputs(List<Output> outputs) throws IllegalVariableEvaluationException {
        if (outputs == null) {
            return Map.of();
        }

        // Collect the required outputs and render them in a single pass: errors must propagate.
        Map<String, Object> rendered = new HashMap<>();
        for (Output output : outputs) {
            if (!Boolean.FALSE.equals(output.getRequired())) {
                rendered.put(output.getId(), output.getValue());
            }
        }
        rendered = runContext.render(rendered);

        // Render optional outputs individually so a single bad expression is skipped, not fatal.
        for (Output output : outputs) {
            if (Boolean.FALSE.equals(output.getRequired())) {
                try {
                    rendered.putAll(runContext.render(Map.of(output.getId(), output.getValue())));
                } catch (Exception e) {
                    runContext.logger().warn("Failed to render optional flow output '{}'. Output is ignored.", output.getId(), e);
                    rendered.put(output.getId(), null);
                }
            }
        }
        return rendered;
    }
}

View File

@@ -4,6 +4,7 @@ import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import io.kestra.core.encryption.EncryptionService;
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.models.Plugin;
import io.kestra.core.models.executions.AbstractMetricEntry;
import io.kestra.core.models.property.Property;
import io.kestra.core.models.property.PropertyContext;
@@ -204,4 +205,15 @@ public abstract class RunContext implements PropertyContext {
* when Namespace ACLs are used (EE).
*/
public abstract AclChecker acl();
/**
* Clone this run context for a specific plugin.
* @return a new run context with the plugin configuration of the given plugin.
*/
public abstract RunContext cloneForPlugin(Plugin plugin);
/**
* @return an InputAndOutput that can be used to work with inputs and outputs.
*/
public abstract InputAndOutput inputAndOutput();
}

View File

@@ -1,10 +1,8 @@
package io.kestra.core.runners;
import com.google.common.collect.Lists;
import io.kestra.core.models.Plugin;
import io.kestra.core.models.executions.TaskRun;
import io.kestra.core.models.tasks.Task;
import io.kestra.core.models.tasks.runners.TaskRunner;
import io.kestra.core.models.triggers.AbstractTrigger;
import io.kestra.core.models.triggers.TriggerContext;
import io.kestra.core.plugins.PluginConfigurations;
@@ -53,20 +51,6 @@ public class RunContextInitializer {
@Value("${kestra.encryption.secret-key}")
protected Optional<String> secretKey;
/**
* Initializes the given {@link RunContext} for the given {@link Plugin}.
*
* @param runContext The {@link RunContext} to initialize.
* @param plugin The {@link TaskRunner} used for initialization.
* @return The {@link RunContext} to initialize
*/
public DefaultRunContext forPlugin(final DefaultRunContext runContext,
final Plugin plugin) {
runContext.init(applicationContext);
runContext.setPluginConfiguration(pluginConfigurations.getConfigurationByPluginTypeOrAliases(plugin.getType(), plugin.getClass()));
return runContext;
}
/**
* Initializes the given {@link RunContext} for the given {@link WorkerTask} for executor.
*

View File

@@ -55,11 +55,11 @@ public class RunContextLogger implements Supplier<org.slf4j.Logger> {
public RunContextLogger(QueueInterface<LogEntry> logQueue, LogEntry logEntry, org.slf4j.event.Level loglevel, boolean logToFile) {
if (logEntry.getTaskId() != null) {
this.loggerName = "flow." + logEntry.getFlowId() + "." + logEntry.getTaskId();
this.loggerName = baseLoggerName(logEntry) + "." + logEntry.getTaskId();
} else if (logEntry.getTriggerId() != null) {
this.loggerName = "flow." + logEntry.getFlowId() + "." + logEntry.getTriggerId();
this.loggerName = baseLoggerName(logEntry) + "." + logEntry.getTriggerId();
} else {
this.loggerName = "flow." + logEntry.getFlowId();
this.loggerName = baseLoggerName(logEntry);
}
this.logQueue = logQueue;
@@ -68,6 +68,10 @@ public class RunContextLogger implements Supplier<org.slf4j.Logger> {
this.logToFile = logToFile;
}
private String baseLoggerName(LogEntry logEntry) {
return "flow." + logEntry.getTenantId() + "." + logEntry.getNamespace() + "." + logEntry.getFlowId();
}
private static List<LogEntry> logEntry(ILoggingEvent event, String message, org.slf4j.event.Level level, LogEntry logEntry) {
Iterable<String> split;

View File

@@ -81,7 +81,24 @@ public final class YamlParser {
throw toConstraintViolationException(input, resource, e);
}
}
private static String formatYamlErrorMessage(String originalMessage, JsonProcessingException e) {
StringBuilder friendlyMessage = new StringBuilder();
if (originalMessage.contains("Expected a field name")) {
friendlyMessage.append("YAML syntax error: Invalid structure. Check indentation and ensure all fields are properly formatted.");
} else if (originalMessage.contains("MappingStartEvent")) {
friendlyMessage.append("YAML syntax error: Unexpected mapping start. Verify that scalar values are properly quoted if needed.");
} else if (originalMessage.contains("Scalar value")) {
friendlyMessage.append("YAML syntax error: Expected a simple value but found complex structure. Check for unquoted special characters.");
} else {
friendlyMessage.append("YAML parsing error: ").append(originalMessage.replaceAll("org\\.yaml\\.snakeyaml.*", "").trim());
}
if (e.getLocation() != null) {
int line = e.getLocation().getLineNr();
friendlyMessage.append(String.format(" (at line %d)", line));
}
// Return a generic but cleaner message for other YAML errors
return friendlyMessage.toString();
}
@SuppressWarnings("unchecked")
public static <T> ConstraintViolationException toConstraintViolationException(T target, String resource, JsonProcessingException e) {
if (e.getCause() instanceof ConstraintViolationException constraintViolationException) {
@@ -121,11 +138,12 @@ public final class YamlParser {
)
));
} else {
String userFriendlyMessage = formatYamlErrorMessage(e.getMessage(), e);
return new ConstraintViolationException(
"Illegal " + resource + " source: " + e.getMessage(),
"Illegal " + resource + " source: " + userFriendlyMessage,
Collections.singleton(
ManualConstraintViolation.of(
e.getCause() == null ? e.getMessage() : e.getMessage() + "\nCaused by: " + e.getCause().getMessage(),
userFriendlyMessage,
target,
(Class<T>) target.getClass(),
"yaml",
@@ -136,4 +154,3 @@ public final class YamlParser {
}
}
}

View File

@@ -4,7 +4,6 @@ import com.cronutils.utils.VisibleForTesting;
import io.kestra.core.exceptions.InternalException;
import io.kestra.core.models.conditions.Condition;
import io.kestra.core.models.conditions.ConditionContext;
import io.kestra.core.models.conditions.ScheduleCondition;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
@@ -65,16 +64,6 @@ public class ConditionService {
return this.valid(flow, conditions, conditionContext);
}
/**
* Check that all conditions are valid.
* Warning, this method throws if a condition cannot be evaluated.
*/
public boolean isValid(List<ScheduleCondition> conditions, ConditionContext conditionContext) throws InternalException {
return conditions
.stream()
.allMatch(throwPredicate(condition -> condition.test(conditionContext)));
}
/**
* Check that all conditions are valid.
* Warning, this method throws if a condition cannot be evaluated.

View File

@@ -92,7 +92,14 @@ public class FlowService {
return flowRepository
.orElseThrow(() -> new IllegalStateException("Cannot perform operation on flow. Cause: No FlowRepository"));
}
private static String formatValidationError(String message) {
if (message.startsWith("Illegal flow source:")) {
// Already formatted by YamlParser, return as-is
return message;
}
// For other validation errors, provide context
return "Validation error: " + message;
}
/**
* Evaluates all checks defined in the given flow using the provided inputs.
* <p>
@@ -174,10 +181,12 @@ public class FlowService {
modelValidator.validate(pluginDefaultService.injectAllDefaults(flow, false));
} catch (ConstraintViolationException e) {
validateConstraintViolationBuilder.constraints(e.getMessage());
String friendlyMessage = formatValidationError(e.getMessage());
validateConstraintViolationBuilder.constraints(friendlyMessage);
} catch (FlowProcessingException e) {
if (e.getCause() instanceof ConstraintViolationException) {
validateConstraintViolationBuilder.constraints(e.getMessage());
if (e.getCause() instanceof ConstraintViolationException cve) {
String friendlyMessage = formatValidationError(cve.getMessage());
validateConstraintViolationBuilder.constraints(friendlyMessage);
} else {
Throwable cause = e.getCause() != null ? e.getCause() : e;
validateConstraintViolationBuilder.constraints("Unable to validate the flow: " + cause.getMessage());
@@ -579,4 +588,4 @@ public class FlowService {
private IllegalStateException noRepositoryException() {
return new IllegalStateException("No repository found. Make sure the `kestra.repository.type` property is set.");
}
}
}

View File

@@ -1,6 +1,5 @@
package io.kestra.core.storages;
import io.kestra.core.repositories.NamespaceFileMetadataRepositoryInterface;
import io.kestra.core.services.NamespaceService;
import jakarta.annotation.Nullable;
import org.slf4j.Logger;
@@ -272,7 +271,13 @@ public class InternalStorage implements Storage {
return this.storage.put(context.getTenantId(), context.getNamespace(), resolve, new BufferedInputStream(inputStream));
}
@Override
public Optional<StorageContext.Task> getTaskStorageContext() {
return Optional.ofNullable((context instanceof StorageContext.Task task) ? task : null);
}
@Override
public List<FileAttributes> list(URI uri) throws IOException {
return this.storage.list(context.getTenantId(), context.getNamespace(), uri);
}
}

View File

@@ -173,4 +173,6 @@ public interface Storage {
* @return the task storage context
*/
Optional<StorageContext.Task> getTaskStorageContext();
List<FileAttributes> list(URI uri) throws IOException;
}

View File

@@ -1,13 +1,39 @@
package io.kestra.core.utils;
import io.kestra.core.models.Setting;
import io.kestra.core.repositories.SettingRepositoryInterface;
import jakarta.annotation.PostConstruct;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import java.util.Optional;
@Singleton
public class EditionProvider {
public Edition get() {
return Edition.OSS;
}
@Inject
private Optional<SettingRepositoryInterface> settingRepository; // repositories are not always there on unit tests
@PostConstruct
void start() {
// check the edition in the settings and update it if needed; while unused today, it would allow us to detect incompatible updates later
settingRepository.ifPresent(settingRepositoryInterface -> persistEdition(settingRepositoryInterface, get()));
}
private void persistEdition(SettingRepositoryInterface settingRepositoryInterface, Edition edition) {
Optional<Setting> versionSetting = settingRepositoryInterface.findByKey(Setting.INSTANCE_EDITION);
if (versionSetting.isEmpty() || !versionSetting.get().getValue().equals(edition)) {
settingRepositoryInterface.save(Setting.builder()
.key(Setting.INSTANCE_EDITION)
.value(edition)
.build()
);
}
}
public enum Edition {
OSS,
EE

View File

@@ -11,6 +11,11 @@ import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import lombok.extern.slf4j.Slf4j;
/**
* Utility class to create {@link java.util.concurrent.ExecutorService} instances.
* WARNING: those instances will use the {@link ThreadUncaughtExceptionHandler} which terminates Kestra if an error occurs in any thread,
* so it should not be used inside plugins.
*/
@Singleton
@Slf4j
public class ExecutorsUtils {

View File

@@ -65,10 +65,9 @@ public class ListUtils {
}
public static List<String> convertToListString(Object object){
if (object instanceof List<?> list && (list.isEmpty() || list.getFirst() instanceof String)) {
return (List<String>) list;
} else {
throw new IllegalArgumentException("%s in not an instance of List of String".formatted(object));
}
return convertToList(object)
.stream()
.map(Object::toString)
.toList();
}
}

View File

@@ -10,7 +10,7 @@ import org.slf4j.LoggerFactory;
import org.slf4j.event.Level;
/**
* Utility class for logging
* Utility class for server logging
*/
public final class Logs {
@@ -18,7 +18,7 @@ public final class Logs {
private static final String EXECUTION_PREFIX_WITH_TENANT = FLOW_PREFIX_WITH_TENANT + "[execution: {}] ";
private static final String TRIGGER_PREFIX_WITH_TENANT = FLOW_PREFIX_WITH_TENANT + "[trigger: {}] ";
private static final String TASKRUN_PREFIX_WITH_TENANT = FLOW_PREFIX_WITH_TENANT + "[task: {}] [execution: {}] [taskrun: {}] ";
private Logs() {}
public static void logExecution(FlowId flow, Logger logger, Level level, String message, Object... args) {
@@ -29,7 +29,7 @@ public final class Logs {
}
/**
* Log an {@link Execution} via the execution logger named: 'execution.{flowId}'.
* Log an {@link Execution} via the executor logger named: 'executor.{tenantId}.{namespace}.{flowId}'.
*/
public static void logExecution(Execution execution, Level level, String message, Object... args) {
Logger logger = logger(execution);
@@ -43,7 +43,7 @@ public final class Logs {
}
/**
* Log a {@link TriggerContext} via the trigger logger named: 'trigger.{flowId}.{triggereId}'.
* Log a {@link TriggerContext} via the scheduler logger named: 'trigger.{tenantId}.{namespace}.{flowId}.{triggerId}'.
*/
public static void logTrigger(TriggerContext triggerContext, Level level, String message, Object... args) {
Logger logger = logger(triggerContext);
@@ -57,7 +57,7 @@ public final class Logs {
}
/**
* Log a {@link TaskRun} via the taskRun logger named: 'task.{flowId}.{taskId}'.
* Log a {@link TaskRun} via the worker logger named: 'worker.{tenantId}.{namespace}.{flowId}.{taskId}'.
*/
public static void logTaskRun(TaskRun taskRun, Level level, String message, Object... args) {
String prefix = TASKRUN_PREFIX_WITH_TENANT;
@@ -73,19 +73,19 @@ public final class Logs {
private static Logger logger(TaskRun taskRun) {
return LoggerFactory.getLogger(
"task." + taskRun.getFlowId() + "." + taskRun.getTaskId()
"worker." + taskRun.getTenantId() + "." + taskRun.getNamespace() + "." + taskRun.getFlowId() + "." + taskRun.getTaskId()
);
}
private static Logger logger(TriggerContext triggerContext) {
return LoggerFactory.getLogger(
"trigger." + triggerContext.getFlowId() + "." + triggerContext.getTriggerId()
"scheduler." + triggerContext.getTenantId() + "." + triggerContext.getNamespace() + "." + triggerContext.getFlowId() + "." + triggerContext.getTriggerId()
);
}
private static Logger logger(Execution execution) {
return LoggerFactory.getLogger(
"execution." + execution.getFlowId()
"executor." + execution.getTenantId() + "." + execution.getNamespace() + "." + execution.getFlowId()
);
}
}

View File

@@ -120,7 +120,10 @@ public class MapUtils {
private static Collection<?> mergeCollections(Collection<?> colA, Collection<?> colB) {
List<Object> merged = new ArrayList<>(colA.size() + colB.size());
merged.addAll(colA);
merged.addAll(colB);
if (!colB.isEmpty()) {
List<?> filtered = colB.stream().filter(it -> !colA.contains(it)).toList();
merged.addAll(filtered);
}
return merged;
}

View File

@@ -1,14 +1,12 @@
package io.kestra.core.utils;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import io.kestra.core.models.executions.metrics.Counter;
import io.kestra.core.models.executions.metrics.Timer;
import io.kestra.core.models.tasks.FileExistComportment;
import io.kestra.core.models.tasks.NamespaceFiles;
import io.kestra.core.runners.RunContext;
import io.kestra.core.storages.NamespaceFile;
import jakarta.annotation.PostConstruct;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DurationFormatUtils;
import org.apache.commons.lang3.time.StopWatch;
@@ -19,26 +17,27 @@ import java.io.InputStream;
import java.nio.file.Path;
import java.time.Duration;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.*;
import static io.kestra.core.utils.Rethrow.throwConsumer;
@Singleton
public class NamespaceFilesUtils {
@Inject
private ExecutorsUtils executorsUtils;
public final class NamespaceFilesUtils {
private static final int maxThreads = Math.max(Runtime.getRuntime().availableProcessors() * 4, 32);
private static final ExecutorService EXECUTOR_SERVICE = new ThreadPoolExecutor(
0,
maxThreads,
60L,
TimeUnit.SECONDS,
new SynchronousQueue<>(),
new ThreadFactoryBuilder().setNameFormat("namespace-files").build()
);;
private ExecutorService executorService;
@PostConstruct
public void postConstruct() {
this.executorService = executorsUtils.maxCachedThreadPool(Math.max(Runtime.getRuntime().availableProcessors() * 4, 32), "namespace-file");
private NamespaceFilesUtils() {
// utility class pattern
}
public void loadNamespaceFiles(
public static void loadNamespaceFiles(
RunContext runContext,
NamespaceFiles namespaceFiles
)
@@ -63,7 +62,11 @@ public class NamespaceFilesUtils {
matchedNamespaceFiles.addAll(files);
}
// Use half of the available threads to avoid impacting concurrent tasks
int parallelism = maxThreads / 2;
Flux.fromIterable(matchedNamespaceFiles)
.parallel(parallelism)
.runOn(Schedulers.fromExecutorService(EXECUTOR_SERVICE))
.doOnNext(throwConsumer(nsFile -> {
InputStream content = runContext.storage().getFile(nsFile.uri());
Path path = folderPerNamespace ?
@@ -71,7 +74,7 @@ public class NamespaceFilesUtils {
Path.of(nsFile.path());
runContext.workingDir().putFile(path, content, fileExistComportment);
}))
.publishOn(Schedulers.fromExecutorService(executorService))
.sequential()
.blockLast();
Duration duration = stopWatch.getDuration();

View File

@@ -23,7 +23,6 @@ import io.kestra.core.serializers.ListOrMapOfLabelSerializer;
import io.kestra.core.services.StorageService;
import io.kestra.core.storages.FileAttributes;
import io.kestra.core.storages.StorageContext;
import io.kestra.core.storages.StorageInterface;
import io.kestra.core.storages.StorageSplitInterface;
import io.kestra.core.utils.GraphUtils;
import io.kestra.core.validations.NoSystemLabelValidation;
@@ -540,7 +539,7 @@ public class ForEachItem extends Task implements FlowableTask<VoidOutput>, Child
.numberOfBatches((Integer) taskRun.getOutputs().get(ExecutableUtils.TASK_VARIABLE_NUMBER_OF_BATCHES));
try (ByteArrayOutputStream bos = new ByteArrayOutputStream()) {
FileSerde.write(bos, FlowInputOutput.renderFlowOutputs(flow.getOutputs(), runContext));
FileSerde.write(bos, runContext.inputAndOutput().renderOutputs(flow.getOutputs()));
URI uri = runContext.storage().putFile(
new ByteArrayInputStream(bos.toByteArray()),
URI.create((String) taskRun.getOutputs().get("uri"))
@@ -602,9 +601,8 @@ public class ForEachItem extends Task implements FlowableTask<VoidOutput>, Child
String subflowOutputsBase = (String) taskOutput.get(ExecutableUtils.TASK_VARIABLE_SUBFLOW_OUTPUTS_BASE_URI);
URI subflowOutputsBaseUri = URI.create(StorageContext.KESTRA_PROTOCOL + subflowOutputsBase + "/");
StorageInterface storage = ((DefaultRunContext) runContext).getApplicationContext().getBean(StorageInterface.class);
if (storage.exists(runContext.flowInfo().tenantId(), runContext.flowInfo().namespace(), subflowOutputsBaseUri)) {
List<FileAttributes> list = storage.list(runContext.flowInfo().tenantId(), runContext.flowInfo().namespace(), subflowOutputsBaseUri);
if (runContext.storage().isFileExist(subflowOutputsBaseUri)) {
List<FileAttributes> list = runContext.storage().list(subflowOutputsBaseUri);;
if (!list.isEmpty()) {
// Merge outputs from each sub-flow into a single stored in the internal storage.

View File

@@ -157,7 +157,7 @@ public class LoopUntil extends Task implements FlowableTask<LoopUntil.Output> {
public Instant nextExecutionDate(RunContext runContext, Execution execution, TaskRun parentTaskRun) throws IllegalVariableEvaluationException {
if (!this.reachedMaximums(runContext, execution, parentTaskRun, false)) {
String continueLoop = runContext.render(this.condition).as(String.class).orElse(null);
String continueLoop = runContext.render(this.condition).skipCache().as(String.class).orElse(null);
if (!TruthUtils.isTruthy(continueLoop)) {
return Instant.now().plus(runContext.render(this.getCheckFrequency().getInterval()).as(Duration.class).orElseThrow());
}

View File

@@ -63,7 +63,8 @@ import java.util.*;
- id: run_post_approval
type: io.kestra.plugin.scripts.shell.Commands
runner: PROCESS
taskRunner:
type: io.kestra.plugin.core.runner.Process
commands:
- echo "Manual approval received! Continuing the execution..."

View File

@@ -18,7 +18,6 @@ import io.kestra.core.models.tasks.ExecutableTask;
import io.kestra.core.models.tasks.Task;
import io.kestra.core.runners.DefaultRunContext;
import io.kestra.core.runners.ExecutableUtils;
import io.kestra.core.runners.FlowInputOutput;
import io.kestra.core.runners.FlowMetaStoreInterface;
import io.kestra.core.runners.RunContext;
import io.kestra.core.runners.SubflowExecution;
@@ -38,7 +37,6 @@ import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.ToString;
import lombok.experimental.SuperBuilder;
import org.slf4j.event.Level;
import java.time.ZonedDateTime;
import java.util.Collections;
@@ -246,11 +244,11 @@ public class Subflow extends Task implements ExecutableTask<Subflow.Output>, Chi
if (subflowOutputs != null && !subflowOutputs.isEmpty()) {
try {
Map<String, Object> rOutputs = FlowInputOutput.renderFlowOutputs(subflowOutputs, runContext);
var inputAndOutput = runContext.inputAndOutput();
Map<String, Object> rOutputs = inputAndOutput.renderOutputs(subflowOutputs);
FlowInputOutput flowInputOutput = ((DefaultRunContext)runContext).getApplicationContext().getBean(FlowInputOutput.class); // this is hacking
if (flow.getOutputs() != null && flowInputOutput != null) {
rOutputs = flowInputOutput.typedOutputs(flow, execution, rOutputs);
if (flow.getOutputs() != null) {
rOutputs = inputAndOutput.typedOutputs(flow, execution, rOutputs);
}
builder.outputs(rOutputs);
} catch (Exception e) {

View File

@@ -123,7 +123,7 @@ public class Switch extends Task implements FlowableTask<Switch.Output> {
}
private String rendererValue(RunContext runContext) throws IllegalVariableEvaluationException {
return runContext.render(this.value).as(String.class).orElseThrow();
return runContext.render(this.value).skipCache().as(String.class).orElseThrow();
}
@Override

View File

@@ -260,8 +260,7 @@ public class WorkingDirectory extends Sequential implements NamespaceFilesInterf
}
if (this.namespaceFiles != null && !Boolean.FALSE.equals(runContext.render(this.namespaceFiles.getEnabled()).as(Boolean.class).orElse(true))) {
NamespaceFilesUtils namespaceFilesUtils = ((DefaultRunContext) runContext).getApplicationContext().getBean(NamespaceFilesUtils.class);
namespaceFilesUtils.loadNamespaceFiles(runContext, this.namespaceFiles);
NamespaceFilesUtils.loadNamespaceFiles(runContext, this.namespaceFiles);
}
if (this.inputFiles != null) {

View File

@@ -26,25 +26,28 @@ import java.util.concurrent.atomic.AtomicLong;
@Getter
@NoArgsConstructor
@Schema(
title = "Delete expired keys globally for a specific namespace.",
description = "This task will delete expired keys from the Kestra KV store. By default, it will only delete expired keys, but you can choose to delete all keys by setting `expiredOnly` to false. You can also filter keys by a specific pattern and choose to include child namespaces."
title = "Purge namespace files for one or multiple namespaces.",
description = "This task purges namespace files (and their versions) stored in Kestra. You can restrict the purge to specific namespaces (or a namespace glob pattern), optionally include child namespaces, and filter files by a glob pattern. The purge strategy is controlled via `behavior` (e.g. keep the last N versions and/or delete versions older than a given date)."
)
@Plugin(
examples = {
@Example(
title = "Delete expired keys globally for a specific namespace, with or without including child namespaces.",
title = "Purge old versions of namespace files for a namespace tree.",
full = true,
code = """
id: purge_kv_store
id: purge_namespace_files
namespace: system
tasks:
- id: purge_kv
type: io.kestra.plugin.core.kv.PurgeKV
expiredOnly: true
- id: purge_files
type: io.kestra.plugin.core.namespace.PurgeFiles
namespaces:
- company
includeChildNamespaces: true
filePattern: "**/*.sql"
behavior:
type: version
before: "2025-01-01T00:00:00Z"
"""
)
}
@@ -116,7 +119,7 @@ public class PurgeFiles extends Task implements PurgeTask<NamespaceFile>, Runnab
@Getter
public static class Output implements io.kestra.core.models.tasks.Output {
@Schema(
title = "The number of purged KV pairs"
title = "The number of purged namespace file versions"
)
private Long size;
}

View File

@@ -0,0 +1,107 @@
package io.kestra.plugin.core.trigger;
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.models.Label;
import io.kestra.core.models.conditions.ConditionContext;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.ExecutionTrigger;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.State;
import io.kestra.core.models.triggers.AbstractTrigger;
import io.kestra.core.models.triggers.Backfill;
import io.kestra.core.models.triggers.Schedulable;
import io.kestra.core.models.triggers.TriggerContext;
import io.kestra.core.runners.RunContext;
import io.kestra.core.services.LabelService;
import io.kestra.core.utils.ListUtils;
import java.time.ZonedDateTime;
import java.time.chrono.ChronoZonedDateTime;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
/**
* Factory class for constructing a new {@link Execution} from a {@link Schedulable} trigger.
*
* @see io.kestra.plugin.core.trigger.Schedule
* @see io.kestra.plugin.core.trigger.ScheduleOnDates
*/
final class SchedulableExecutionFactory {
static Execution createFailedExecution(Schedulable trigger, ConditionContext conditionContext, TriggerContext triggerContext) throws IllegalVariableEvaluationException {
return Execution.builder()
.id(conditionContext.getRunContext().getTriggerExecutionId())
.tenantId(triggerContext.getTenantId())
.namespace(triggerContext.getNamespace())
.flowId(triggerContext.getFlowId())
.flowRevision(conditionContext.getFlow().getRevision())
.labels(SchedulableExecutionFactory.getLabels(trigger, conditionContext.getRunContext(), triggerContext.getBackfill(), conditionContext.getFlow()))
.state(new State().withState(State.Type.FAILED))
.build();
}
static Execution createExecution(Schedulable trigger, ConditionContext conditionContext, TriggerContext triggerContext, Map<String, Object> variables, ZonedDateTime scheduleDate) throws IllegalVariableEvaluationException {
RunContext runContext = conditionContext.getRunContext();
ExecutionTrigger executionTrigger = ExecutionTrigger.of((AbstractTrigger) trigger, variables);
List<Label> labels = getLabels(trigger, runContext, triggerContext.getBackfill(), conditionContext.getFlow());
List<Label> executionLabels = new ArrayList<>(ListUtils.emptyOnNull(labels));
executionLabels.add(new Label(Label.FROM, "trigger"));
if (executionLabels.stream().noneMatch(label -> Label.CORRELATION_ID.equals(label.key()))) {
// add a correlation ID if none exist
executionLabels.add(new Label(Label.CORRELATION_ID, runContext.getTriggerExecutionId()));
}
Execution execution = Execution.builder()
.id(runContext.getTriggerExecutionId())
.tenantId(triggerContext.getTenantId())
.namespace(triggerContext.getNamespace())
.flowId(triggerContext.getFlowId())
.flowRevision(conditionContext.getFlow().getRevision())
.variables(conditionContext.getFlow().getVariables())
.labels(executionLabels)
.state(new State())
.trigger(executionTrigger)
.scheduleDate(Optional.ofNullable(scheduleDate).map(ChronoZonedDateTime::toInstant).orElse(null))
.build();
Map<String, Object> allInputs = getInputs(trigger, runContext, triggerContext.getBackfill());
// add inputs and inject defaults (FlowInputOutput handles defaults internally)
execution = execution.withInputs(runContext.inputAndOutput().readInputs(conditionContext.getFlow(), execution, allInputs));
return execution;
}
private static Map<String, Object> getInputs(Schedulable trigger, RunContext runContext, Backfill backfill) throws IllegalVariableEvaluationException {
Map<String, Object> inputs = new HashMap<>();
if (trigger.getInputs() != null) {
inputs.putAll(runContext.render(trigger.getInputs()));
}
if (backfill != null && backfill.getInputs() != null) {
inputs.putAll(runContext.render(backfill.getInputs()));
}
return inputs;
}
private static List<Label> getLabels(Schedulable trigger, RunContext runContext, Backfill backfill, FlowInterface flow) throws IllegalVariableEvaluationException {
List<Label> labels = LabelService.fromTrigger(runContext, flow, (AbstractTrigger) trigger);
if (backfill != null && backfill.getLabels() != null) {
for (Label label : backfill.getLabels()) {
final var value = runContext.render(label.value());
if (value != null) {
labels.add(new Label(label.key(), value));
}
}
}
return labels;
}
}

View File

@@ -6,9 +6,7 @@ import com.cronutils.model.time.ExecutionTime;
import com.cronutils.parser.CronParser;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableMap;
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.exceptions.InternalException;
import io.kestra.core.models.Label;
import io.kestra.core.models.annotations.Example;
import io.kestra.core.models.annotations.Plugin;
import io.kestra.core.models.annotations.PluginProperty;
@@ -16,12 +14,8 @@ import io.kestra.core.models.conditions.Condition;
import io.kestra.core.models.conditions.ConditionContext;
import io.kestra.core.models.conditions.ScheduleCondition;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.State;
import io.kestra.core.models.triggers.*;
import io.kestra.core.runners.DefaultRunContext;
import io.kestra.core.runners.RunContext;
import io.kestra.core.services.ConditionService;
import io.kestra.core.services.LabelService;
import io.kestra.core.utils.ListUtils;
import io.kestra.core.validations.ScheduleValidation;
import io.kestra.core.validations.TimezoneId;
@@ -29,6 +23,7 @@ import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.validation.Valid;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Null;
import lombok.AccessLevel;
import lombok.*;
import lombok.experimental.SuperBuilder;
import lombok.extern.slf4j.Slf4j;
@@ -40,6 +35,8 @@ import java.time.temporal.ChronoUnit;
import java.util.*;
import java.util.stream.Stream;
import static io.kestra.core.utils.Rethrow.throwPredicate;
@Slf4j
@SuperBuilder
@ToString
@@ -224,11 +221,7 @@ public class Schedule extends AbstractTrigger implements Schedulable, TriggerOut
@PluginProperty
@Deprecated
private List<ScheduleCondition> scheduleConditions;
@Schema(
title = "The inputs to pass to the scheduled flow"
)
@PluginProperty(dynamic = true)
private Map<String, Object> inputs;
@Schema(
@@ -248,13 +241,7 @@ public class Schedule extends AbstractTrigger implements Schedulable, TriggerOut
@PluginProperty
@Deprecated
private Map<String, Object> backfill;
@Schema(
title = "Action to take in the case of missed schedules",
description = "`ALL` will recover all missed schedules, `LAST` will only recovered the last missing one, `NONE` will not recover any missing schedule.\n" +
"The default is `ALL` unless a different value is configured using the global plugin configuration."
)
@PluginProperty
private RecoverMissedSchedules recoverMissedSchedules;
@Override
@@ -403,20 +390,11 @@ public class Schedule extends AbstractTrigger implements Schedulable, TriggerOut
if (!conditionResults) {
return Optional.empty();
}
} catch(InternalException ie) {
} catch (InternalException ie) {
// validate schedule condition can fail to render variables
// in this case, we return a failed execution so the trigger is not evaluated each second
runContext.logger().error("Unable to evaluate the Schedule trigger '{}'", this.getId(), ie);
Execution execution = Execution.builder()
.id(runContext.getTriggerExecutionId())
.tenantId(triggerContext.getTenantId())
.namespace(triggerContext.getNamespace())
.flowId(triggerContext.getFlowId())
.flowRevision(conditionContext.getFlow().getRevision())
.labels(generateLabels(runContext, conditionContext, backfill))
.state(new State().withState(State.Type.FAILED))
.build();
return Optional.of(execution);
return Optional.of(SchedulableExecutionFactory.createFailedExecution(this, conditionContext, triggerContext));
}
// recalculate true output for previous and next based on conditions
@@ -430,14 +408,12 @@ public class Schedule extends AbstractTrigger implements Schedulable, TriggerOut
variables = scheduleDates.toMap();
}
Execution execution = TriggerService.generateScheduledExecution(
Execution execution = SchedulableExecutionFactory.createExecution(
this,
conditionContext,
triggerContext,
generateLabels(runContext, conditionContext, backfill),
generateInputs(runContext, backfill),
variables,
Optional.empty()
null
);
return Optional.of(execution);
@@ -448,34 +424,6 @@ public class Schedule extends AbstractTrigger implements Schedulable, TriggerOut
return parser.parse(this.cron);
}
private List<Label> generateLabels(RunContext runContext, ConditionContext conditionContext, Backfill backfill) throws IllegalVariableEvaluationException {
List<Label> labels = LabelService.fromTrigger(runContext, conditionContext.getFlow(), this);
if (backfill != null && backfill.getLabels() != null) {
for (Label label : backfill.getLabels()) {
final var value = runContext.render(label.value());
if (value != null) {
labels.add(new Label(label.key(), value));
}
}
}
return labels;
}
private Map<String, Object> generateInputs(RunContext runContext, Backfill backfill) throws IllegalVariableEvaluationException {
Map<String, Object> inputs = new HashMap<>();
if (this.inputs != null) {
inputs.putAll(runContext.render(this.inputs));
}
if (backfill != null && backfill.getInputs() != null) {
inputs.putAll(runContext.render(backfill.getInputs()));
}
return inputs;
}
private Optional<Output> scheduleDates(ExecutionTime executionTime, ZonedDateTime date) {
Optional<ZonedDateTime> next = executionTime.nextExecution(date.minus(Duration.ofSeconds(1)));
@@ -549,9 +497,9 @@ public class Schedule extends AbstractTrigger implements Schedulable, TriggerOut
Optional<ZonedDateTime> truePreviousNextDateWithCondition(ExecutionTime executionTime, ConditionContext conditionContext, ZonedDateTime toTestDate, boolean next) throws InternalException {
int upperYearBound = ZonedDateTime.now().getYear() + 10;
int lowerYearBound = ZonedDateTime.now().getYear() - 10;
while ((next && toTestDate.getYear() < upperYearBound) || (!next && toTestDate.getYear() > lowerYearBound)) {
Optional<ZonedDateTime> currentDate = next ?
executionTime.nextExecution(toTestDate) :
executionTime.lastExecution(toTestDate);
@@ -607,11 +555,10 @@ public class Schedule extends AbstractTrigger implements Schedulable, TriggerOut
private boolean validateScheduleCondition(ConditionContext conditionContext) throws InternalException {
if (conditions != null) {
ConditionService conditionService = ((DefaultRunContext)conditionContext.getRunContext()).getApplicationContext().getBean(ConditionService.class);
return conditionService.isValid(
conditions.stream().filter(c -> c instanceof ScheduleCondition).map(c -> (ScheduleCondition) c).toList(),
conditionContext
);
return conditions.stream()
.filter(c -> c instanceof ScheduleCondition)
.map(c -> (ScheduleCondition) c)
.allMatch(throwPredicate(condition -> condition.test(conditionContext)));
}
return true;

View File

@@ -10,7 +10,6 @@ import io.kestra.core.models.property.Property;
import io.kestra.core.models.tasks.VoidOutput;
import io.kestra.core.models.triggers.*;
import io.kestra.core.runners.RunContext;
import io.kestra.core.services.LabelService;
import io.kestra.core.validations.TimezoneId;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.validation.constraints.NotNull;
@@ -23,7 +22,10 @@ import java.time.Duration;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.temporal.ChronoUnit;
import java.util.*;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.Predicate;
import static io.kestra.core.utils.Rethrow.throwFunction;
@@ -45,11 +47,7 @@ public class ScheduleOnDates extends AbstractTrigger implements Schedulable, Tri
@Builder.Default
@Null
private final Duration interval = null;
@Schema(
title = "The inputs to pass to the scheduled flow"
)
@PluginProperty(dynamic = true)
private Map<String, Object> inputs;
@TimezoneId
@@ -63,31 +61,24 @@ public class ScheduleOnDates extends AbstractTrigger implements Schedulable, Tri
@NotNull
private Property<List<ZonedDateTime>> dates;
@Schema(
title = "Action to take in the case of missed schedules",
description = "`ALL` will recover all missed schedules, `LAST` will only recovered the last missing one, `NONE` will not recover any missing schedule.\n" +
"The default is `ALL` unless a different value is configured using the global plugin configuration."
)
@PluginProperty
private RecoverMissedSchedules recoverMissedSchedules;
@Override
public Optional<Execution> evaluate(ConditionContext conditionContext, TriggerContext triggerContext) throws Exception {
RunContext runContext = conditionContext.getRunContext();
ZonedDateTime lastEvaluation = triggerContext.getDate();
Optional<ZonedDateTime> nextDate = nextDate(runContext, date -> date.isEqual(lastEvaluation) || date.isAfter(lastEvaluation));
if (nextDate.isPresent()) {
log.info("Schedule execution on {}", nextDate.get());
Execution execution = TriggerService.generateScheduledExecution(
Execution execution = SchedulableExecutionFactory.createExecution(
this,
conditionContext,
triggerContext,
LabelService.fromTrigger(runContext, conditionContext.getFlow(), this),
this.inputs != null ? runContext.render(this.inputs) : Collections.emptyMap(),
Collections.emptyMap(),
nextDate
nextDate.orElse(null)
);
return Optional.of(execution);
@@ -97,29 +88,21 @@ public class ScheduleOnDates extends AbstractTrigger implements Schedulable, Tri
}
@Override
public ZonedDateTime nextEvaluationDate(ConditionContext conditionContext, Optional<? extends TriggerContext> last) {
try {
return last
.map(throwFunction(context ->
nextDate(conditionContext.getRunContext(), date -> date.isAfter(context.getDate()))
.orElse(ZonedDateTime.now().plusYears(1))
))
.orElse(conditionContext.getRunContext()
.render(dates)
.asList(ZonedDateTime.class)
.stream()
.sorted()
.findFirst()
.orElse(ZonedDateTime.now()))
.truncatedTo(ChronoUnit.SECONDS);
} catch (IllegalVariableEvaluationException e) {
log.warn("Failed to evaluate schedule dates for trigger '{}': {}", this.getId(), e.getMessage());
return ZonedDateTime.now().plusYears(1);
}
public ZonedDateTime nextEvaluationDate(ConditionContext conditionContext, Optional<? extends TriggerContext> triggerContext) {
return triggerContext
.map(ctx -> ctx.getBackfill() != null ? ctx.getBackfill().getCurrentDate() : ctx.getDate())
.map(this::withTimeZone)
.or(() -> Optional.of(ZonedDateTime.now()))
.flatMap(dt -> {
try {
return nextDate(conditionContext.getRunContext(), date -> date.isAfter(dt));
} catch (IllegalVariableEvaluationException e) {
log.warn("Failed to evaluate schedule dates for trigger '{}': {}", this.getId(), e.getMessage());
throw new InvalidTriggerConfigurationException("Failed to evaluate schedule 'dates'. Cause: " + e.getMessage());
}
}).orElseGet(() -> ZonedDateTime.now().plusYears(1));
}
@Override
public ZonedDateTime nextEvaluationDate() {
// TODO this may be the next date from now?
@@ -139,9 +122,17 @@ public class ScheduleOnDates extends AbstractTrigger implements Schedulable, Tri
return previousDates.isEmpty() ? ZonedDateTime.now() : previousDates.getFirst();
}
private Optional<ZonedDateTime> nextDate(RunContext runContext, Predicate<ZonedDateTime> filter) throws IllegalVariableEvaluationException {
return runContext.render(dates).asList(ZonedDateTime.class).stream().sorted()
.filter(date -> filter.test(date))
private ZonedDateTime withTimeZone(ZonedDateTime date) {
if (this.timezone == null) {
return date;
}
return date.withZoneSameInstant(ZoneId.of(this.timezone));
}
private Optional<ZonedDateTime> nextDate(RunContext runContext, Predicate<ZonedDateTime> predicate) throws IllegalVariableEvaluationException {
return runContext.render(dates)
.asList(ZonedDateTime.class).stream().sorted()
.filter(predicate)
.map(throwFunction(date -> timezone == null ? date : date.withZoneSameInstant(ZoneId.of(runContext.render(timezone)))))
.findFirst()
.map(date -> date.truncatedTo(ChronoUnit.SECONDS));

View File

@@ -9,10 +9,14 @@
<property name="pattern" value="%date{HH:mm:ss}.%ms %highlight(%-5.5level) %magenta(%-12.36thread) %cyan(%-12.36logger{36}) %msg%n" />
<logger name="io.kestra" level="INFO" />
<logger name="flow" level="INFO" />
<logger name="task" level="INFO" />
<logger name="execution" level="INFO" />
<logger name="trigger" level="INFO" />
<!-- Flow execution logs - disabled by default -->
<logger name="flow" level="OFF" />
<!-- Server loggers -->
<logger name="worker" level="INFO" />
<logger name="executor" level="INFO" />
<logger name="scheduler" level="INFO" />
<logger name="io.kestra.ee.runner.kafka.services.KafkaConsumerService" level="WARN" />
<logger name="io.kestra.ee.runner.kafka.services.KafkaProducerService" level="WARN" />

View File

@@ -0,0 +1,8 @@
group: io.kestra.plugin.core.chart
name: "chart"
title: "Chart"
description: "Tasks that render dashboard charts from Kestra data sources."
body: "Use these chart widgets to visualize metrics, executions, or flow trends in dashboards; pair them with dashboard data queries and configure aggregations, groupings, and chart options for Bar, Pie, Time Series, KPI, or Table outputs."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -0,0 +1,8 @@
group: io.kestra.plugin.core.condition
name: "condition"
title: "Condition"
description: "Tasks that evaluate conditions to control flow execution or triggers."
body: "Use these predicates to gate tasks or triggers based on time windows, calendars, execution metadata, labels, namespaces, retries, or custom expressions; configure required parameters such as allowed states, namespaces, date ranges, or JEXL expressions to return a true/false result."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -0,0 +1,8 @@
group: io.kestra.plugin.core.data
name: "data"
title: "Data"
description: "Tasks that fetch Kestra executions, flows, logs, metrics, and triggers as datasets for dashboards."
body: "These data providers query Kestra repositories with filters and aggregations to feed dashboard charts; configure columns and fields (such as namespace, state, timestamp, or labels) plus any filters to shape the returned dataset for visualization."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -0,0 +1,8 @@
group: io.kestra.plugin.core.debug
name: "debug"
title: "Debug"
description: "Tasks that emit debug output while you develop a flow."
body: "Echo and Return help inspect variables and payloads or short-circuit execution during testing; provide the message or value to output so downstream tasks can see exactly what is being passed around."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -0,0 +1,8 @@
group: io.kestra.plugin.core.execution
name: "execution"
title: "Execution"
description: "Tasks that manage the lifecycle and context of a running execution."
body: "Use these tasks to assert expectations, set or unset variables, add labels, fail, exit, resume, or purge executions; supply required properties such as variable maps, label key/values, or retention rules before altering execution state."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -0,0 +1,8 @@
group: io.kestra.plugin.core.flow
name: "flow"
title: "Flow"
description: "Tasks that orchestrate control flow within a Kestra pipeline."
body: "Sequence, branch, loop, parallelize, or nest subflows/templates using these primitives; define embedded task lists, values for switches, iteration collections, working directories, and loop exit criteria to structure complex workflows cleanly."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -0,0 +1,8 @@
group: io.kestra.plugin.core.http
name: "http"
title: "HTTP"
description: "Tasks that interact with HTTP endpoints."
body: "Perform requests, downloads, or webhook triggers with configurable methods, headers, authentication, and payloads; provide the target URI plus any body or query parameters, and use response handling options to store results for downstream tasks."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -0,0 +1,8 @@
group: io.kestra.plugin.core
name: "core"
title: "Core Plugins and Tasks"
description: "Tasks that provide Kestra's built-in orchestration, I/O, and observability capabilities."
body: "Core plugins cover control-flow, execution management, triggers, storage, HTTP, metrics, logging, templating, and dashboard widgets; combine these foundational tasks to build reliable workflows without adding external dependencies."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -0,0 +1,8 @@
group: io.kestra.plugin.core.kv
name: "kv"
title: "KV"
description: "Tasks that manage key-value pairs in Kestra's KV store."
body: "Set, get, list, version, and delete namespaced keys to share state across flows; specify the key path, value for writes, and optional namespace or TTL to control how data is stored, retrieved, and purged."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -0,0 +1,8 @@
group: io.kestra.plugin.core.log
name: "log"
title: "Log"
description: "Tasks that write, fetch, or purge Kestra logs."
body: "Emit structured log messages, retrieve stored logs, or clean up log storage; provide message content or log query filters and consider namespace or execution scoping when purging."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -0,0 +1,8 @@
group: io.kestra.plugin.core.metric
name: "metric"
title: "Metric"
description: "Tasks that publish custom metrics from flows."
body: "Send counters, gauges, and timing metrics to Kestra's metric store for dashboards and alerts; define the metric name, type, value, labels, and optional timestamp to record meaningful telemetry."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -0,0 +1,8 @@
group: io.kestra.plugin.core.namespace
name: "namespace"
title: "Namespace"
description: "Tasks that manage namespace files and versions."
body: "Upload, download, delete, purge, or version files stored in a namespace—useful for shipping assets or configs with flows; set the target namespace, paths or glob patterns, and purge behavior to control stored artifacts."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -0,0 +1,8 @@
group: io.kestra.plugin.core.output
name: "output"
title: "Output"
description: "Tasks that expose outputs from a flow."
body: "Use OutputValues to publish key-value outputs for downstream tasks or subflows; declare the output map and data types that consuming tasks should read."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -0,0 +1,8 @@
group: io.kestra.plugin.core.runner
name: "runner"
title: "Runner"
description: "Tasks that execute commands on the Kestra worker."
body: "Run shell processes with configurable command, environment, working directory, and input/output handling; ensure commands are idempotent and set expected exit codes or resource needs when invoking external binaries."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -0,0 +1,8 @@
group: io.kestra.plugin.core.storage
name: "storage"
title: "Storage"
description: "Tasks that manipulate files in Kestra's internal storage."
body: "Write, delete, concatenate, split, deduplicate, filter, reverse, size, or list files used by executions; provide source and target storage URIs and any encoding or line-handling options to transform stored data safely."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -0,0 +1,8 @@
group: io.kestra.plugin.core.templating
name: "templating"
title: "Templating"
description: "Tasks that render dynamic task specifications from templates."
body: "TemplatedTask lets you supply a Pebble-rendered YAML spec that is parsed and executed at runtime; provide the `spec` property with a valid runnable task definition and avoid recursive templating when composing dynamic tasks."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -0,0 +1,8 @@
group: io.kestra.plugin.core.trigger
name: "trigger"
title: "Trigger"
description: "Tasks that start flows from schedules or events."
body: "Define cron-based schedules, specific date triggers, webhooks, namespace flow triggers, or toggles; set required properties like cron expressions, webhook secrets, and target flow references to control when executions fire."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -170,10 +170,11 @@ class JsonSchemaGeneratorTest {
Map<String, Object> jsonSchema = jsonSchemaGenerator.generate(AbstractTrigger.class, AbstractTrigger.class);
assertThat((Map<String, Object>) jsonSchema.get("properties"), allOf(
Matchers.aMapWithSize(3),
Matchers.aMapWithSize(4),
hasKey("conditions"),
hasKey("stopAfter"),
hasKey("type")
hasKey("type"),
hasKey("allowConcurrent")
));
});
}

View File

@@ -134,4 +134,47 @@ class LabelTest {
Optional<ConstraintViolationException> emptyKeyLabelResult = modelValidator.isValid(new Label("", "bar"));
assertThat(emptyKeyLabelResult.isPresent()).isTrue();
}
@Test
void shouldValidateValidLabelKeys() {
// Valid keys: start with lowercase; may contain letters, numbers, hyphens, underscores, periods
assertThat(modelValidator.isValid(new Label("foo", "bar")).isPresent()).isFalse();
assertThat(modelValidator.isValid(new Label("foo-bar", "value")).isPresent()).isFalse();
assertThat(modelValidator.isValid(new Label("foo_bar", "value")).isPresent()).isFalse();
assertThat(modelValidator.isValid(new Label("foo123", "value")).isPresent()).isFalse();
assertThat(modelValidator.isValid(new Label("foo-bar_baz123", "value")).isPresent()).isFalse();
assertThat(modelValidator.isValid(new Label("a", "value")).isPresent()).isFalse();
assertThat(modelValidator.isValid(new Label("foo.bar", "value")).isPresent()).isFalse(); // dot is allowed
}
@Test
void shouldRejectInvalidLabelKeys() {
Optional<ConstraintViolationException> spaceResult = modelValidator.isValid(new Label("foo bar", "value"));
assertThat(spaceResult.isPresent()).isTrue();
Optional<ConstraintViolationException> uppercaseResult = modelValidator.isValid(new Label("Foo", "value"));
assertThat(uppercaseResult.isPresent()).isTrue();
Optional<ConstraintViolationException> emojiResult = modelValidator.isValid(new Label("💩", "value"));
assertThat(emojiResult.isPresent()).isTrue();
Optional<ConstraintViolationException> atSignResult = modelValidator.isValid(new Label("foo@bar", "value"));
assertThat(atSignResult.isPresent()).isTrue();
Optional<ConstraintViolationException> colonResult = modelValidator.isValid(new Label("foo:bar", "value"));
assertThat(colonResult.isPresent()).isTrue();
Optional<ConstraintViolationException> hyphenStartResult = modelValidator.isValid(new Label("-foo", "value"));
assertThat(hyphenStartResult.isPresent()).isTrue();
Optional<ConstraintViolationException> underscoreStartResult = modelValidator.isValid(new Label("_foo", "value"));
assertThat(underscoreStartResult.isPresent()).isTrue();
Optional<ConstraintViolationException> zeroResult = modelValidator.isValid(new Label("0", "value"));
assertThat(zeroResult.isPresent()).isTrue();
Optional<ConstraintViolationException> digitStartResult = modelValidator.isValid(new Label("9test", "value"));
assertThat(digitStartResult.isPresent()).isTrue();
}
}

View File

@@ -61,6 +61,9 @@ public class QueryFilterTest {
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.ENDS_WITH).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.CONTAINS).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.REGEX).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.IN).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.NOT_IN).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.PREFIX).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.START_DATE).operation(Op.EQUALS).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.START_DATE).operation(Op.NOT_EQUALS).build(), Resource.EXECUTION),
@@ -168,9 +171,6 @@ public class QueryFilterTest {
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.LESS_THAN).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.GREATER_THAN_OR_EQUAL_TO).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.LESS_THAN_OR_EQUAL_TO).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.IN).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.NOT_IN).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.PREFIX).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.START_DATE).operation(Op.IN).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.START_DATE).operation(Op.NOT_IN).build(), Resource.EXECUTION),

View File

@@ -185,4 +185,21 @@ class FlowTest {
return YamlParser.parse(file, Flow.class);
}
@Test
void illegalNamespaceUpdate() {
Flow original = Flow.builder()
.id("my-flow")
.namespace("io.kestra.prod")
.tasks(List.of(Log.builder().id("log").type(Log.class.getName()).message("hello").build()))
.build();
Flow updated = original.toBuilder()
.namespace("io.kestra.dev")
.build();
Optional<ConstraintViolationException> validate = original.validateUpdate(updated);
assertThat(validate.isPresent()).isTrue();
assertThat(validate.get().getMessage()).contains("Illegal namespace update");
}
}

View File

@@ -60,6 +60,15 @@ class SystemInformationReportTest {
return setting;
}
@Override
public Setting internalSave(Setting setting) throws ConstraintViolationException {
if (setting.getKey().equals(Setting.INSTANCE_UUID)) {
UUID = setting.getValue();
}
return setting;
}
@Override
public Setting delete(Setting setting) {
return setting;

View File

@@ -1,9 +1,9 @@
package io.kestra.core.repositories;
import com.devskiller.friendly_id.FriendlyId;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.google.common.collect.ImmutableMap;
import io.kestra.core.exceptions.InvalidQueryFiltersException;
import io.kestra.core.junit.annotations.FlakyTest;
import io.kestra.core.junit.annotations.KestraTest;
import io.kestra.core.models.Label;
import io.kestra.core.models.QueryFilter;
@@ -24,7 +24,6 @@ import io.kestra.core.models.flows.State.Type;
import io.kestra.core.models.property.Property;
import io.kestra.core.models.tasks.ResolvedTask;
import io.kestra.core.repositories.ExecutionRepositoryInterface.ChildFilter;
import io.kestra.core.serializers.JacksonMapper;
import io.kestra.core.utils.IdUtils;
import io.kestra.core.utils.NamespaceUtils;
import io.kestra.core.utils.TestsUtils;
@@ -42,18 +41,17 @@ import org.junit.jupiter.params.provider.MethodSource;
import org.slf4j.event.Level;
import java.io.IOException;
import java.sql.Timestamp;
import java.time.*;
import java.time.format.DateTimeFormatter;
import java.time.temporal.ChronoUnit;
import java.time.Duration;
import java.time.Instant;
import java.time.ZonedDateTime;
import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static io.kestra.core.models.flows.FlowScope.SYSTEM;
import static io.kestra.core.models.flows.FlowScope.USER;
import static java.time.temporal.ChronoUnit.MINUTES;
import static java.time.temporal.ChronoUnit.SECONDS;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertThrows;
@@ -81,6 +79,7 @@ public abstract class AbstractExecutionRepositoryTest {
.tenantId(tenantId)
.flowId(flowId == null ? FLOW : flowId)
.flowRevision(1)
.kind(ExecutionKind.NORMAL)
.state(finalState);
@@ -184,6 +183,7 @@ public abstract class AbstractExecutionRepositoryTest {
@ParameterizedTest
@MethodSource("filterCombinations")
@FlakyTest(description = "Filtering tests are sometimes returning 0")
void should_find_all(QueryFilter filter, int expectedSize){
var tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
inject(tenant, "executionTriggerId");
@@ -196,15 +196,49 @@ public abstract class AbstractExecutionRepositoryTest {
static Stream<Arguments> filterCombinations() {
return Stream.of(
Arguments.of(QueryFilter.builder().field(Field.QUERY).value("unittest").operation(Op.EQUALS).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.QUERY).value("unused").operation(Op.NOT_EQUALS).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.SCOPE).value(List.of(USER)).operation(Op.EQUALS).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.SCOPE).value(List.of(SYSTEM)).operation(Op.NOT_EQUALS).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.NAMESPACE).value("io.kestra.unittest").operation(Op.EQUALS).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.NAMESPACE).value("not.this.one").operation(Op.NOT_EQUALS).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.NAMESPACE).value("o.kestra.unittes").operation(Op.CONTAINS).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.NAMESPACE).value("io.kestra.uni").operation(Op.STARTS_WITH).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.NAMESPACE).value("o.kestra.unittest").operation(Op.ENDS_WITH).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.NAMESPACE).value("io\\.kestra\\.unittest").operation(Op.REGEX).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.NAMESPACE).value(List.of("io.kestra.unittest", "unused")).operation(Op.IN).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.NAMESPACE).value(List.of("unused.first", "unused.second")).operation(Op.NOT_IN).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.NAMESPACE).value("io.kestra").operation(Op.PREFIX).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.KIND).value(ExecutionKind.NORMAL).operation(Op.EQUALS).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.KIND).value(ExecutionKind.TEST).operation(Op.NOT_EQUALS).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.KIND).value(List.of(ExecutionKind.NORMAL, ExecutionKind.PLAYGROUND)).operation(Op.IN).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.KIND).value(List.of(ExecutionKind.PLAYGROUND, ExecutionKind.TEST)).operation(Op.NOT_IN).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.LABELS).value(Map.of("key", "value")).operation(Op.EQUALS).build(), 1),
Arguments.of(QueryFilter.builder().field(Field.LABELS).value(Map.of("key", "unknown")).operation(Op.NOT_EQUALS).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.LABELS).value(Map.of("key", "value", "key2", "value2")).operation(Op.IN).build(), 1),
Arguments.of(QueryFilter.builder().field(Field.LABELS).value(Map.of("key1", "value1")).operation(Op.NOT_IN).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.LABELS).value("value").operation(Op.CONTAINS).build(), 1),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).value(FLOW).operation(Op.EQUALS).build(), 16),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).value(FLOW).operation(Op.NOT_EQUALS).build(), 13),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).value("ul").operation(Op.CONTAINS).build(), 16),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).value("ful").operation(Op.STARTS_WITH).build(), 16),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).value("ull").operation(Op.ENDS_WITH).build(), 16),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).value("[ful]{4}").operation(Op.REGEX).build(), 16),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).value(List.of(FLOW, "other")).operation(Op.IN).build(), 16),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).value(List.of(FLOW, "other2")).operation(Op.NOT_IN).build(), 13),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).value("ful").operation(Op.PREFIX).build(), 16),
Arguments.of(QueryFilter.builder().field(Field.START_DATE).value(ZonedDateTime.now().minusMinutes(1)).operation(Op.GREATER_THAN).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.END_DATE).value(ZonedDateTime.now().plusMinutes(1)).operation(Op.LESS_THAN).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.STATE).value(Type.RUNNING).operation(Op.EQUALS).build(), 5),
Arguments.of(QueryFilter.builder().field(Field.TRIGGER_EXECUTION_ID).value("executionTriggerId").operation(Op.EQUALS).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.CHILD_FILTER).value(ChildFilter.CHILD).operation(Op.EQUALS).build(), 29)
Arguments.of(QueryFilter.builder().field(Field.CHILD_FILTER).value(ChildFilter.CHILD).operation(Op.EQUALS).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.CHILD_FILTER).value(ChildFilter.CHILD).operation(Op.NOT_EQUALS).build(), 0)
);
}
@@ -656,6 +690,65 @@ public abstract class AbstractExecutionRepositoryTest {
assertThat(data).first().hasFieldOrPropertyWithValue("id", execution.getId());
}
@Test
void dashboard_fetchData_365Days_verifiesDateGrouping() throws IOException {
var tenantId = TestsUtils.randomTenant(this.getClass().getSimpleName());
var executionDuration = Duration.ofMinutes(220);
var executionCreateDate = Instant.now();
// Create an execution within the 365-day range
Execution execution = Execution.builder()
.tenantId(tenantId)
.id(IdUtils.create())
.namespace("io.kestra.unittest")
.flowId("some-execution")
.flowRevision(1)
.labels(Label.from(Map.of("country", "FR")))
.state(new State(Type.SUCCESS,
List.of(new State.History(State.Type.CREATED, executionCreateDate), new State.History(Type.SUCCESS, executionCreateDate.plus(executionDuration)))))
.taskRunList(List.of())
.build();
execution = executionRepository.save(execution);
// Create an execution BEYOND 365 days (400 days ago) - should be filtered out
var executionCreateDateOld = Instant.now().minus(Duration.ofDays(400));
Execution executionOld = Execution.builder()
.tenantId(tenantId)
.id(IdUtils.create())
.namespace("io.kestra.unittest")
.flowId("some-execution-old")
.flowRevision(1)
.labels(Label.from(Map.of("country", "US")))
.state(new State(Type.SUCCESS,
List.of(new State.History(State.Type.CREATED, executionCreateDateOld), new State.History(Type.SUCCESS, executionCreateDateOld.plus(executionDuration)))))
.taskRunList(List.of())
.build();
executionRepository.save(executionOld);
var now = ZonedDateTime.now();
ArrayListTotal<Map<String, Object>> data = executionRepository.fetchData(tenantId, Executions.builder()
.type(Executions.class.getName())
.columns(Map.of(
"count", ColumnDescriptor.<Executions.Fields>builder().field(Executions.Fields.ID).agg(AggregationType.COUNT).build(),
"id", ColumnDescriptor.<Executions.Fields>builder().field(Executions.Fields.ID).build(),
"date", ColumnDescriptor.<Executions.Fields>builder().field(Executions.Fields.START_DATE).build(),
"duration", ColumnDescriptor.<Executions.Fields>builder().field(Executions.Fields.DURATION).build()
)).build(),
now.minusDays(365),
now,
null
);
// Should only return 1 execution (the recent one), not the 400-day-old execution
assertThat(data.getTotal()).isGreaterThanOrEqualTo(1L);
assertThat(data).isNotEmpty();
assertThat(data).first().hasFieldOrProperty("count");
}
private static Execution buildWithCreatedDate(String tenant, Instant instant) {
return Execution.builder()
.id(IdUtils.create())

View File

@@ -121,7 +121,8 @@ public abstract class AbstractFlowRepositoryTest {
QueryFilter.builder().field(Field.QUERY).value("filterFlowId").operation(Op.EQUALS).build(),
QueryFilter.builder().field(Field.SCOPE).value(List.of(SYSTEM)).operation(Op.EQUALS).build(),
QueryFilter.builder().field(Field.NAMESPACE).value(SYSTEM_FLOWS_DEFAULT_NAMESPACE).operation(Op.EQUALS).build(),
QueryFilter.builder().field(Field.LABELS).value(Map.of("key", "value")).operation(Op.EQUALS).build()
QueryFilter.builder().field(Field.LABELS).value(Map.of("key", "value")).operation(Op.EQUALS).build(),
QueryFilter.builder().field(Field.FLOW_ID).value("filterFlowId").operation(Op.EQUALS).build()
);
}
@@ -145,7 +146,6 @@ public abstract class AbstractFlowRepositoryTest {
static Stream<QueryFilter> errorFilterCombinations() {
return Stream.of(
QueryFilter.builder().field(Field.FLOW_ID).value("sleep").operation(Op.EQUALS).build(),
QueryFilter.builder().field(Field.START_DATE).value(ZonedDateTime.now().minusMinutes(1)).operation(Op.GREATER_THAN).build(),
QueryFilter.builder().field(Field.END_DATE).value(ZonedDateTime.now().plusMinutes(1)).operation(Op.LESS_THAN).build(),
QueryFilter.builder().field(Field.STATE).value(State.Type.RUNNING).operation(Op.EQUALS).build(),

View File

@@ -0,0 +1,92 @@
package io.kestra.core.runners;
import io.kestra.core.junit.annotations.FlakyTest;
import io.kestra.core.junit.annotations.KestraTest;
import io.kestra.core.junit.annotations.LoadFlows;
import jakarta.inject.Inject;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInstance;
@KestraTest(startRunner = true)
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
public abstract class AbstractRunnerConcurrencyTest {
@Inject
protected FlowConcurrencyCaseTest flowConcurrencyCaseTest;
@Test
@LoadFlows(value = {"flows/valids/flow-concurrency-cancel.yml"}, tenantId = "concurrency-cancel")
void concurrencyCancel() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyCancel("concurrency-cancel");
}
@Test
@LoadFlows(value = {"flows/valids/flow-concurrency-fail.yml"}, tenantId = "concurrency-fail")
void concurrencyFail() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyFail("concurrency-fail");
}
@Test
@LoadFlows(value = {"flows/valids/flow-concurrency-queue.yml"}, tenantId = "concurrency-queue")
void concurrencyQueue() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyQueue("concurrency-queue");
}
@Test
@LoadFlows(value = {"flows/valids/flow-concurrency-queue-pause.yml"}, tenantId = "concurrency-queue-pause")
protected void concurrencyQueuePause() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyQueuePause("concurrency-queue-pause");
}
@Test
@LoadFlows(value = {"flows/valids/flow-concurrency-cancel-pause.yml"}, tenantId = "concurrency-cancel-pause")
protected void concurrencyCancelPause() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyCancelPause("concurrency-cancel-pause");
}
@Test
@LoadFlows(value = {"flows/valids/flow-concurrency-for-each-item.yaml", "flows/valids/flow-concurrency-queue.yml"}, tenantId = "flow-concurrency-with-for-each-item")
protected void flowConcurrencyWithForEachItem() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyWithForEachItem("flow-concurrency-with-for-each-item");
}
@Test
@LoadFlows(value = {"flows/valids/flow-concurrency-queue-fail.yml"}, tenantId = "concurrency-queue-restarted")
protected void concurrencyQueueRestarted() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyQueueRestarted("concurrency-queue-restarted");
}
@Test
@LoadFlows(value = {"flows/valids/flow-concurrency-queue-after-execution.yml"}, tenantId = "concurrency-queue-after-execution")
void concurrencyQueueAfterExecution() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyQueueAfterExecution("concurrency-queue-after-execution");
}
@Test
@LoadFlows(value = {"flows/valids/flow-concurrency-subflow.yml", "flows/valids/flow-concurrency-cancel.yml"}, tenantId = "flow-concurrency-subflow")
void flowConcurrencySubflow() throws Exception {
flowConcurrencyCaseTest.flowConcurrencySubflow("flow-concurrency-subflow");
}
@Test
@FlakyTest(description = "Only flaky in CI")
@LoadFlows(
value = {"flows/valids/flow-concurrency-parallel-subflow-kill.yaml", "flows/valids/flow-concurrency-parallel-subflow-kill-child.yaml", "flows/valids/flow-concurrency-parallel-subflow-kill-grandchild.yaml"},
tenantId = "flow-concurrency-parallel-subflow-kill"
)
protected void flowConcurrencyParallelSubflowKill() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyParallelSubflowKill("flow-concurrency-parallel-subflow-kill");
}
@Test
@LoadFlows(value = {"flows/valids/flow-concurrency-queue-killed.yml"}, tenantId = "flow-concurrency-killed")
void flowConcurrencyKilled() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyKilled("flow-concurrency-killed");
}
@Test
@FlakyTest(description = "Only flaky in CI")
@LoadFlows(value = {"flows/valids/flow-concurrency-queue-killed.yml"}, tenantId = "flow-concurrency-queue-killed")
void flowConcurrencyQueueKilled() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyQueueKilled("flow-concurrency-queue-killed");
}
}

View File

@@ -66,9 +66,6 @@ public abstract class AbstractRunnerTest {
@Inject
protected LoopUntilCaseTest loopUntilTestCaseTest;
@Inject
protected FlowConcurrencyCaseTest flowConcurrencyCaseTest;
@Inject
protected ScheduleDateCaseTest scheduleDateCaseTest;
@@ -422,66 +419,6 @@ public abstract class AbstractRunnerTest {
forEachItemCaseTest.forEachItemWithAfterExecution();
}
@Test
@LoadFlows({"flows/valids/flow-concurrency-cancel.yml"})
void concurrencyCancel() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyCancel();
}
@Test
@LoadFlows({"flows/valids/flow-concurrency-fail.yml"})
void concurrencyFail() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyFail();
}
@Test
@LoadFlows({"flows/valids/flow-concurrency-queue.yml"})
void concurrencyQueue() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyQueue();
}
@Test
@LoadFlows({"flows/valids/flow-concurrency-queue-pause.yml"})
protected void concurrencyQueuePause() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyQueuePause();
}
@Test
@LoadFlows({"flows/valids/flow-concurrency-cancel-pause.yml"})
protected void concurrencyCancelPause() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyCancelPause();
}
@Test
@LoadFlows(value = {"flows/valids/flow-concurrency-for-each-item.yaml", "flows/valids/flow-concurrency-queue.yml"}, tenantId = TENANT_1)
protected void flowConcurrencyWithForEachItem() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyWithForEachItem(TENANT_1);
}
@Test
@LoadFlows({"flows/valids/flow-concurrency-queue-fail.yml"})
protected void concurrencyQueueRestarted() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyQueueRestarted();
}
@Test
@LoadFlows({"flows/valids/flow-concurrency-queue-after-execution.yml"})
void concurrencyQueueAfterExecution() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyQueueAfterExecution();
}
@Test
@LoadFlows(value = {"flows/valids/flow-concurrency-subflow.yml", "flows/valids/flow-concurrency-cancel.yml"}, tenantId = TENANT_1)
void flowConcurrencySubflow() throws Exception {
flowConcurrencyCaseTest.flowConcurrencySubflow(TENANT_1);
}
@Test
@LoadFlows({"flows/valids/flow-concurrency-parallel-subflow-kill.yaml", "flows/valids/flow-concurrency-parallel-subflow-kill-child.yaml", "flows/valids/flow-concurrency-parallel-subflow-kill-grandchild.yaml"})
void flowConcurrencyParallelSubflowKill() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyParallelSubflowKill();
}
@Test
@ExecuteFlow("flows/valids/executable-fail.yml")
void badExecutable(Execution execution) {

View File

@@ -31,7 +31,6 @@ import java.util.Optional;
import java.util.concurrent.TimeoutException;
import java.util.stream.IntStream;
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
import static org.assertj.core.api.Assertions.assertThat;
@Singleton
@@ -57,40 +56,42 @@ public class FlowConcurrencyCaseTest {
@Named(QueueFactoryInterface.KILL_NAMED)
protected QueueInterface<ExecutionKilled> killQueue;
public void flowConcurrencyCancel() throws TimeoutException, QueueException {
Execution execution1 = runnerUtils.runOneUntilRunning(MAIN_TENANT, NAMESPACE, "flow-concurrency-cancel", null, null, Duration.ofSeconds(30));
public void flowConcurrencyCancel(String tenantId) throws TimeoutException, QueueException {
Execution execution1 = runnerUtils.runOneUntilRunning(tenantId, NAMESPACE, "flow-concurrency-cancel", null, null, Duration.ofSeconds(30));
try {
List<Execution> shouldFailExecutions = List.of(
runnerUtils.runOne(MAIN_TENANT, NAMESPACE, "flow-concurrency-cancel"),
runnerUtils.runOne(MAIN_TENANT, NAMESPACE, "flow-concurrency-cancel")
runnerUtils.runOne(tenantId, NAMESPACE, "flow-concurrency-cancel"),
runnerUtils.runOne(tenantId, NAMESPACE, "flow-concurrency-cancel")
);
assertThat(execution1.getState().isRunning()).isTrue();
assertThat(shouldFailExecutions.stream().map(Execution::getState).map(State::getCurrent)).allMatch(Type.CANCELLED::equals);
} finally {
runnerUtils.killExecution(execution1);
runnerUtils.awaitExecution(e -> e.getState().isTerminated(), execution1);
}
}
public void flowConcurrencyFail() throws TimeoutException, QueueException {
Execution execution1 = runnerUtils.runOneUntilRunning(MAIN_TENANT, NAMESPACE, "flow-concurrency-fail", null, null, Duration.ofSeconds(30));
public void flowConcurrencyFail(String tenantId) throws TimeoutException, QueueException {
Execution execution1 = runnerUtils.runOneUntilRunning(tenantId, NAMESPACE, "flow-concurrency-fail", null, null, Duration.ofSeconds(30));
try {
List<Execution> shouldFailExecutions = List.of(
runnerUtils.runOne(MAIN_TENANT, NAMESPACE, "flow-concurrency-fail"),
runnerUtils.runOne(MAIN_TENANT, NAMESPACE, "flow-concurrency-fail")
runnerUtils.runOne(tenantId, NAMESPACE, "flow-concurrency-fail"),
runnerUtils.runOne(tenantId, NAMESPACE, "flow-concurrency-fail")
);
assertThat(execution1.getState().isRunning()).isTrue();
assertThat(shouldFailExecutions.stream().map(Execution::getState).map(State::getCurrent)).allMatch(State.Type.FAILED::equals);
} finally {
runnerUtils.killExecution(execution1);
runnerUtils.awaitExecution(e -> e.getState().isTerminated(), execution1);
}
}
public void flowConcurrencyQueue() throws QueueException {
Execution execution1 = runnerUtils.runOneUntilRunning(MAIN_TENANT, NAMESPACE, "flow-concurrency-queue", null, null, Duration.ofSeconds(30));
public void flowConcurrencyQueue(String tenantId) throws QueueException {
Execution execution1 = runnerUtils.runOneUntilRunning(tenantId, NAMESPACE, "flow-concurrency-queue", null, null, Duration.ofSeconds(30));
Flow flow = flowRepository
.findById(MAIN_TENANT, NAMESPACE, "flow-concurrency-queue", Optional.empty())
.findById(tenantId, NAMESPACE, "flow-concurrency-queue", Optional.empty())
.orElseThrow();
Execution execution2 = Execution.newExecution(flow, null, null, Optional.empty());
Execution executionResult2 = runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.SUCCESS), execution2);
@@ -106,10 +107,10 @@ public class FlowConcurrencyCaseTest {
assertThat(executionResult2.getState().getHistories().get(2).getState()).isEqualTo(State.Type.RUNNING);
}
public void flowConcurrencyQueuePause() throws QueueException {
Execution execution1 = runnerUtils.runOneUntilPaused(MAIN_TENANT, NAMESPACE, "flow-concurrency-queue-pause");
public void flowConcurrencyQueuePause(String tenantId) throws QueueException {
Execution execution1 = runnerUtils.runOneUntilPaused(tenantId, NAMESPACE, "flow-concurrency-queue-pause");
Flow flow = flowRepository
.findById(MAIN_TENANT, NAMESPACE, "flow-concurrency-queue-pause", Optional.empty())
.findById(tenantId, NAMESPACE, "flow-concurrency-queue-pause", Optional.empty())
.orElseThrow();
Execution execution2 = Execution.newExecution(flow, null, null, Optional.empty());
Execution secondExecutionResult = runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.SUCCESS), execution2);
@@ -125,10 +126,10 @@ public class FlowConcurrencyCaseTest {
assertThat(secondExecutionResult.getState().getHistories().get(2).getState()).isEqualTo(State.Type.RUNNING);
}
public void flowConcurrencyCancelPause() throws QueueException {
Execution execution1 = runnerUtils.runOneUntilPaused(MAIN_TENANT, NAMESPACE, "flow-concurrency-cancel-pause");
public void flowConcurrencyCancelPause(String tenantId) throws QueueException {
Execution execution1 = runnerUtils.runOneUntilPaused(tenantId, NAMESPACE, "flow-concurrency-cancel-pause");
Flow flow = flowRepository
.findById(MAIN_TENANT, NAMESPACE, "flow-concurrency-cancel-pause", Optional.empty())
.findById(tenantId, NAMESPACE, "flow-concurrency-cancel-pause", Optional.empty())
.orElseThrow();
Execution execution2 = Execution.newExecution(flow, null, null, Optional.empty());
Execution secondExecutionResult = runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.CANCELLED), execution2);
@@ -164,11 +165,11 @@ public class FlowConcurrencyCaseTest {
.toList()).contains(Type.QUEUED);
}
public void flowConcurrencyQueueRestarted() throws Exception {
Execution execution1 = runnerUtils.runOneUntilRunning(MAIN_TENANT, NAMESPACE,
public void flowConcurrencyQueueRestarted(String tenantId) throws Exception {
Execution execution1 = runnerUtils.runOneUntilRunning(tenantId, NAMESPACE,
"flow-concurrency-queue-fail", null, null, Duration.ofSeconds(30));
Flow flow = flowRepository
.findById(MAIN_TENANT, NAMESPACE, "flow-concurrency-queue-fail", Optional.empty())
.findById(tenantId, NAMESPACE, "flow-concurrency-queue-fail", Optional.empty())
.orElseThrow();
Execution execution2 = Execution.newExecution(flow, null, null, Optional.empty());
runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.RUNNING), execution2);
@@ -177,7 +178,10 @@ public class FlowConcurrencyCaseTest {
// we restart the first one, it should be queued then fail again.
Execution failedExecution = runnerUtils.awaitExecution(e -> e.getState().getCurrent().equals(Type.FAILED), execution1);
Execution restarted = executionService.restart(failedExecution, null);
Execution executionResult1 = runnerUtils.restartExecution(e -> e.getState().getCurrent().equals(Type.FAILED), restarted);
Execution executionResult1 = runnerUtils.restartExecution(
e -> e.getState().getHistories().stream().anyMatch(history -> history.getState() == Type.RESTARTED) && e.getState().getCurrent().equals(Type.FAILED),
restarted
);
Execution executionResult2 = runnerUtils.awaitExecution(e -> e.getState().getCurrent().equals(Type.FAILED), execution2);
assertThat(executionResult1.getState().getCurrent()).isEqualTo(Type.FAILED);
@@ -191,10 +195,10 @@ public class FlowConcurrencyCaseTest {
assertThat(executionResult2.getState().getHistories().get(2).getState()).isEqualTo(State.Type.RUNNING);
}
public void flowConcurrencyQueueAfterExecution() throws QueueException {
Execution execution1 = runnerUtils.runOneUntilRunning(MAIN_TENANT, NAMESPACE, "flow-concurrency-queue-after-execution", null, null, Duration.ofSeconds(30));
public void flowConcurrencyQueueAfterExecution(String tenantId) throws QueueException {
Execution execution1 = runnerUtils.runOneUntilRunning(tenantId, NAMESPACE, "flow-concurrency-queue-after-execution", null, null, Duration.ofSeconds(30));
Flow flow = flowRepository
.findById(MAIN_TENANT, NAMESPACE, "flow-concurrency-queue-after-execution", Optional.empty())
.findById(tenantId, NAMESPACE, "flow-concurrency-queue-after-execution", Optional.empty())
.orElseThrow();
Execution execution2 = Execution.newExecution(flow, null, null, Optional.empty());
Execution executionResult2 = runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.SUCCESS), execution2);
@@ -214,15 +218,15 @@ public class FlowConcurrencyCaseTest {
List<Execution> subFlowExecs = runnerUtils.awaitFlowExecutionNumber(2, tenantId, NAMESPACE, "flow-concurrency-cancel");
assertThat(subFlowExecs).extracting(e -> e.getState().getCurrent()).containsExactlyInAnyOrder(Type.SUCCESS, Type.CANCELLED);
// run another execution to be sure that everything work (purge is correctly done)
// run another execution to be sure that everything works (purge is correctly done)
Execution execution3 = runnerUtils.runOne(tenantId, NAMESPACE, "flow-concurrency-subflow");
assertThat(execution3.getState().getCurrent()).isEqualTo(Type.SUCCESS);
runnerUtils.awaitFlowExecution(e -> e.getState().getCurrent().equals(Type.SUCCESS), tenantId, NAMESPACE, "flow-concurrency-cancel");
}
public void flowConcurrencyParallelSubflowKill() throws QueueException {
Execution parent = runnerUtils.runOneUntilRunning(MAIN_TENANT, NAMESPACE, "flow-concurrency-parallel-subflow-kill", null, null, Duration.ofSeconds(30));
Execution queued = runnerUtils.awaitFlowExecution(e -> e.getState().isQueued(), MAIN_TENANT, NAMESPACE, "flow-concurrency-parallel-subflow-kill-child");
public void flowConcurrencyParallelSubflowKill(String tenantId) throws QueueException {
Execution parent = runnerUtils.runOneUntilRunning(tenantId, NAMESPACE, "flow-concurrency-parallel-subflow-kill", null, null, Duration.ofSeconds(30));
Execution queued = runnerUtils.awaitFlowExecution(e -> e.getState().isQueued(), tenantId, NAMESPACE, "flow-concurrency-parallel-subflow-kill-child");
// Kill the parent
killQueue.emit(ExecutionKilledExecution
@@ -230,7 +234,7 @@ public class FlowConcurrencyCaseTest {
.state(ExecutionKilled.State.REQUESTED)
.executionId(parent.getId())
.isOnKillCascade(true)
.tenantId(MAIN_TENANT)
.tenantId(tenantId)
.build()
);
@@ -240,6 +244,92 @@ public class FlowConcurrencyCaseTest {
assertThat(terminated.getTaskRunList()).isNull();
}
public void flowConcurrencyKilled(String tenantId) throws QueueException, InterruptedException {
Flow flow = flowRepository
.findById(tenantId, NAMESPACE, "flow-concurrency-queue-killed", Optional.empty())
.orElseThrow();
Execution execution1 = runnerUtils.runOneUntilRunning(tenantId, NAMESPACE, "flow-concurrency-queue-killed", null, null, Duration.ofSeconds(30));
Execution execution2 = runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.QUEUED), Execution.newExecution(flow, null, null, Optional.empty()));
Execution execution3 = runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.QUEUED), Execution.newExecution(flow, null, null, Optional.empty()));
try {
assertThat(execution1.getState().isRunning()).isTrue();
assertThat(execution2.getState().getCurrent()).isEqualTo(Type.QUEUED);
assertThat(execution3.getState().getCurrent()).isEqualTo(Type.QUEUED);
// we kill execution 1, execution 2 should run but not execution 3
killQueue.emit(ExecutionKilledExecution
.builder()
.state(ExecutionKilled.State.REQUESTED)
.executionId(execution1.getId())
.isOnKillCascade(true)
.tenantId(tenantId)
.build()
);
Execution killed = runnerUtils.awaitExecution(e -> e.getState().getCurrent().equals(Type.KILLED), execution1);
assertThat(killed.getState().getCurrent()).isEqualTo(Type.KILLED);
assertThat(killed.getState().getHistories().stream().anyMatch(h -> h.getState() == Type.RUNNING)).isTrue();
// we now check that execution 2 is running
Execution running = runnerUtils.awaitExecution(e -> e.getState().getCurrent().equals(Type.RUNNING), execution2);
assertThat(running.getState().getCurrent()).isEqualTo(Type.RUNNING);
// we check that execution 3 is still queued
Thread.sleep(100); // wait a little to be 100% sure
Execution queued = runnerUtils.awaitExecution(e -> e.getState().isQueued(), execution3);
assertThat(queued.getState().getCurrent()).isEqualTo(Type.QUEUED);
} finally {
// kill everything to avoid dangling executions
runnerUtils.killExecution(execution2);
runnerUtils.killExecution(execution3);
// await that they are all terminated, note that as KILLED is received twice, some messages would still be pending, but this is the best we can do
runnerUtils.awaitFlowExecutionNumber(3, tenantId, NAMESPACE, "flow-concurrency-queue-killed");
}
}
public void flowConcurrencyQueueKilled(String tenantId) throws QueueException, InterruptedException {
Flow flow = flowRepository
.findById(tenantId, NAMESPACE, "flow-concurrency-queue-killed", Optional.empty())
.orElseThrow();
Execution execution1 = runnerUtils.runOneUntilRunning(tenantId, NAMESPACE, "flow-concurrency-queue-killed", null, null, Duration.ofSeconds(30));
Execution execution2 = runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.QUEUED), Execution.newExecution(flow, null, null, Optional.empty()));
Execution execution3 = runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.QUEUED), Execution.newExecution(flow, null, null, Optional.empty()));
try {
assertThat(execution1.getState().isRunning()).isTrue();
assertThat(execution2.getState().getCurrent()).isEqualTo(Type.QUEUED);
assertThat(execution3.getState().getCurrent()).isEqualTo(Type.QUEUED);
// we kill execution 2, execution 3 should not run
killQueue.emit(ExecutionKilledExecution
.builder()
.state(ExecutionKilled.State.REQUESTED)
.executionId(execution2.getId())
.isOnKillCascade(true)
.tenantId(tenantId)
.build()
);
Execution killed = runnerUtils.awaitExecution(e -> e.getState().getCurrent().equals(Type.KILLED), execution2);
assertThat(killed.getState().getCurrent()).isEqualTo(Type.KILLED);
assertThat(killed.getState().getHistories().stream().noneMatch(h -> h.getState() == Type.RUNNING)).isTrue();
// we now check that execution 3 is still queued
Thread.sleep(100); // wait a little to be 100% sure
Execution queued = runnerUtils.awaitExecution(e -> e.getState().isQueued(), execution3);
assertThat(queued.getState().getCurrent()).isEqualTo(Type.QUEUED);
} finally {
// kill everything to avoid dangling executions
runnerUtils.killExecution(execution1);
runnerUtils.killExecution(execution3);
// await that they are all terminated, note that as KILLED is received twice, some messages would still be pending, but this is the best we can do
runnerUtils.awaitFlowExecutionNumber(3, tenantId, NAMESPACE, "flow-concurrency-queue-killed");
}
}
private URI storageUpload(String tenantId) throws URISyntaxException, IOException {
File tempFile = File.createTempFile("file", ".txt");

View File

@@ -2,9 +2,7 @@ package io.kestra.core.runners;
import io.kestra.core.junit.annotations.KestraTest;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.DependsOn;
import io.kestra.core.models.flows.Input;
import io.kestra.core.models.flows.Type;
import io.kestra.core.models.flows.*;
import io.kestra.core.models.flows.input.FileInput;
import io.kestra.core.models.flows.input.InputAndValue;
import io.kestra.core.models.flows.input.IntInput;
@@ -32,6 +30,7 @@ import org.reactivestreams.Publisher;
import reactor.core.publisher.Mono;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.InputStream;
import java.net.URI;
import java.nio.ByteBuffer;
@@ -45,10 +44,10 @@ import static org.assertj.core.api.Assertions.assertThat;
@KestraTest
class FlowInputOutputTest {
private static final String TEST_SECRET_VALUE = "test-secret-value";
private static final String TEST_KV_VALUE = "test-kv-value";
static final Execution DEFAULT_TEST_EXECUTION = Execution.builder()
.id(IdUtils.create())
.flowId(IdUtils.create())
@@ -64,7 +63,7 @@ class FlowInputOutputTest {
@Inject
KvMetadataRepositoryInterface kvMetadataRepository;
@MockBean(SecretService.class)
SecretService testSecretService() {
return new SecretService() {
@@ -74,7 +73,7 @@ class FlowInputOutputTest {
}
};
}
@MockBean(KVStoreService.class)
KVStoreService testKVStoreService() {
return new KVStoreService() {
@@ -89,7 +88,7 @@ class FlowInputOutputTest {
}
};
}
@Test
void shouldResolveEnabledInputsGivenInputWithConditionalExpressionMatchingTrue() {
// Given
@@ -294,7 +293,7 @@ class FlowInputOutputTest {
values
);
}
@Test
void resolveInputsGivenDefaultExpressions() {
// Given
@@ -311,14 +310,14 @@ class FlowInputOutputTest {
.required(false)
.dependsOn(new DependsOn(List.of("input1"),null))
.build();
List<Input<?>> inputs = List.of(input1, input2);
Map<String, Object> data = Map.of("input42", "foo");
// When
List<InputAndValue> values = flowInputOutput.resolveInputs(inputs, null, DEFAULT_TEST_EXECUTION, data);
// Then
Assertions.assertEquals(
List.of(
@@ -327,7 +326,7 @@ class FlowInputOutputTest {
values
);
}
@Test
void shouldObfuscateSecretsWhenValidatingInputs() {
// Given
@@ -337,14 +336,14 @@ class FlowInputOutputTest {
.defaults(Property.ofExpression("{{ secret('???') }}"))
.required(false)
.build();
// When
List<InputAndValue> results = flowInputOutput.validateExecutionInputs(List.of(input), null, DEFAULT_TEST_EXECUTION, Mono.empty()).block();
// Then
Assertions.assertEquals("******", results.getFirst().value());
}
@Test
void shouldNotObfuscateSecretsInSelectWhenValidatingInputs() {
// Given
@@ -354,10 +353,10 @@ class FlowInputOutputTest {
.expression("{{ [secret('???')] }}")
.required(false)
.build();
// When
List<InputAndValue> results = flowInputOutput.validateExecutionInputs(List.of(input), null, DEFAULT_TEST_EXECUTION, Mono.empty()).block();
// Then
Assertions.assertEquals(TEST_SECRET_VALUE, ((MultiselectInput)results.getFirst().input()).getValues().getFirst());
}
@@ -371,14 +370,14 @@ class FlowInputOutputTest {
.defaults(Property.ofExpression("{{ secret('???') }}"))
.required(false)
.build();
// When
Map<String, Object> results = flowInputOutput.readExecutionInputs(List.of(input), null, DEFAULT_TEST_EXECUTION, Mono.empty()).block();
// Then
Assertions.assertEquals(TEST_SECRET_VALUE, results.get("input"));
}
@Test
void shouldEvaluateExpressionOnDefaultsUsingKVFunction() {
// Given
@@ -388,14 +387,14 @@ class FlowInputOutputTest {
.defaults(Property.ofExpression("{{ kv('???') }}"))
.required(false)
.build();
// When
Map<String, Object> results = flowInputOutput.readExecutionInputs(List.of(input), null, DEFAULT_TEST_EXECUTION, Mono.empty()).block();
// Then
assertThat(results.get("input")).isEqualTo(TEST_KV_VALUE);
}
@Test
void shouldGetDefaultWhenPassingNoDataForRequiredInput() {
// Given
@@ -404,50 +403,84 @@ class FlowInputOutputTest {
.type(Type.STRING)
.defaults(Property.ofValue("default"))
.build();
// When
Map<String, Object> results = flowInputOutput.readExecutionInputs(List.of(input), null, DEFAULT_TEST_EXECUTION, Mono.empty()).block();
// Then
assertThat(results.get("input")).isEqualTo("default");
}
@Test
void shouldResolveZeroByteFileUpload() throws java.io.IOException {
File tempFile = File.createTempFile("empty", ".txt");
tempFile.deleteOnExit();
io.micronaut.http.multipart.CompletedFileUpload fileUpload = org.mockito.Mockito.mock(io.micronaut.http.multipart.CompletedFileUpload.class);
org.mockito.Mockito.when(fileUpload.getInputStream()).thenReturn(new java.io.FileInputStream(tempFile));
org.mockito.Mockito.when(fileUpload.getFilename()).thenReturn("empty.txt");
org.mockito.Mockito.when(fileUpload.getName()).thenReturn("empty_file");
Execution execution = Execution.builder()
.id(IdUtils.create())
.tenantId("unit_test_tenant")
.namespace("io.kestra.unittest")
.flowId("unittest")
.flowRevision(1)
.state(new State())
.build();
reactor.core.publisher.Mono<Map<String, Object>> result = flowInputOutput.readExecutionInputs(
List.of(
io.kestra.core.models.flows.input.FileInput.builder().id("empty_file").type(Type.FILE).build()
),
Flow.builder().id("unittest").namespace("io.kestra.unittest").build(),
execution,
reactor.core.publisher.Flux.just(fileUpload)
);
Map<String, Object> outputs = result.block();
Assertions.assertNotNull(outputs);
Assertions.assertTrue(outputs.containsKey("empty_file"));
}
private static class MemoryCompletedPart implements CompletedPart {
protected final String name;
protected final byte[] content;
public MemoryCompletedPart(String name, byte[] content) {
this.name = name;
this.content = content;
}
@Override
public InputStream getInputStream() {
return new ByteArrayInputStream(content);
}
@Override
public byte[] getBytes() {
return content;
}
@Override
public ByteBuffer getByteBuffer() {
return ByteBuffer.wrap(content);
}
@Override
public Optional<MediaType> getContentType() {
return Optional.empty();
}
@Override
public String getName() {
return name;
}
}
private static final class MemoryCompletedFileUpload extends MemoryCompletedPart implements CompletedFileUpload {
private final String fileName;
@@ -456,7 +489,7 @@ class FlowInputOutputTest {
super(name, content);
this.fileName = fileName;
}
@Override
public String getFilename() {
return fileName;

View File

@@ -56,6 +56,18 @@ public class InputsTest {
@Inject
private NamespaceFactory namespaceFactory;
private static final Map<String , Object> object = Map.of(
"people", List.of(
Map.of(
"first", "Mustafa",
"last", "Tarek"
),
Map.of(
"first", "Ahmed",
"last", "Tarek"
)
)
);
public static Map<String, Object> inputs = ImmutableMap.<String, Object>builder()
.put("string", "myString")
.put("enum", "ENUM_VALUE")
@@ -67,7 +79,6 @@ public class InputsTest {
.put("time", "18:27:49")
.put("duration", "PT5M6S")
.put("file", Objects.requireNonNull(InputsTest.class.getClassLoader().getResource("application-test.yml")).getPath())
.put("json", "{\"a\": \"b\"}")
.put("uri", "https://www.google.com")
.put("nested.string", "a string")
.put("nested.more.int", "123")
@@ -81,11 +92,14 @@ public class InputsTest {
.put("validatedTime", "11:27:49")
.put("secret", "secret")
.put("array", "[1, 2, 3]")
.put("yaml", """
.put("json1", "{\"a\": \"b\"}")
.put("json2", object)
.put("yaml1", """
some: property
alist:
- of
- values""")
.put("yaml2", object)
.build();
@Inject
@@ -154,7 +168,6 @@ public class InputsTest {
assertThat(typeds.get("duration")).isEqualTo(Duration.parse("PT5M6S"));
assertThat((URI) typeds.get("file")).isEqualTo(new URI("kestra:///io/kestra/tests/inputs/executions/test/inputs/file/application-test.yml"));
assertThat(CharStreams.toString(new InputStreamReader(storageInterface.get("tenant1", null, (URI) typeds.get("file"))))).isEqualTo(CharStreams.toString(new InputStreamReader(new FileInputStream((String) inputs.get("file")))));
assertThat(typeds.get("json")).isEqualTo(Map.of("a", "b"));
assertThat(typeds.get("uri")).isEqualTo("https://www.google.com");
assertThat(((Map<String, Object>) typeds.get("nested")).get("string")).isEqualTo("a string");
assertThat((Boolean) ((Map<String, Object>) typeds.get("nested")).get("bool")).isTrue();
@@ -170,9 +183,12 @@ public class InputsTest {
assertThat(typeds.get("array")).isInstanceOf(List.class);
assertThat((List<Integer>) typeds.get("array")).hasSize(3);
assertThat((List<Integer>) typeds.get("array")).isEqualTo(List.of(1, 2, 3));
assertThat(typeds.get("yaml")).isEqualTo(Map.of(
assertThat(typeds.get("json1")).isEqualTo(Map.of("a", "b"));
assertThat(typeds.get("json2")).isEqualTo(object);
assertThat(typeds.get("yaml1")).isEqualTo(Map.of(
"some", "property",
"alist", List.of("of", "values")));
assertThat(typeds.get("yaml2")).isEqualTo(object);
}
@Test
@@ -201,7 +217,7 @@ public class InputsTest {
(flow, execution1) -> flowIO.readExecutionInputs(flow, execution1, inputs)
);
assertThat(execution.getTaskRunList()).hasSize(14);
assertThat(execution.getTaskRunList()).hasSize(16);
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat((String) execution.findTaskRunsByTaskId("file").getFirst().getOutputs().get("value")).matches("kestra:///io/kestra/tests/inputs/executions/.*/inputs/file/application-test.yml");
// secret inputs are decrypted to be used as task properties
@@ -354,19 +370,19 @@ public class InputsTest {
@LoadFlows(value = {"flows/valids/inputs.yaml"}, tenantId = "tenant14")
void inputEmptyJson() {
HashMap<String, Object> map = new HashMap<>(inputs);
map.put("json", "{}");
map.put("json1", "{}");
Map<String, Object> typeds = typedInputs(map, "tenant14");
assertThat(typeds.get("json")).isInstanceOf(Map.class);
assertThat(((Map<?, ?>) typeds.get("json")).size()).isZero();
assertThat(typeds.get("json1")).isInstanceOf(Map.class);
assertThat(((Map<?, ?>) typeds.get("json1")).size()).isZero();
}
@Test
@LoadFlows(value = {"flows/valids/inputs.yaml"}, tenantId = "tenant15")
void inputEmptyJsonFlow() throws TimeoutException, QueueException {
HashMap<String, Object> map = new HashMap<>(inputs);
map.put("json", "{}");
map.put("json1", "{}");
Execution execution = runnerUtils.runOne(
"tenant15",
@@ -376,11 +392,11 @@ public class InputsTest {
(flow, execution1) -> flowIO.readExecutionInputs(flow, execution1, map)
);
assertThat(execution.getTaskRunList()).hasSize(14);
assertThat(execution.getTaskRunList()).hasSize(16);
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(execution.getInputs().get("json")).isInstanceOf(Map.class);
assertThat(((Map<?, ?>) execution.getInputs().get("json")).size()).isZero();
assertThat(execution.getInputs().get("json1")).isInstanceOf(Map.class);
assertThat(((Map<?, ?>) execution.getInputs().get("json1")).size()).isZero();
assertThat((String) execution.findTaskRunsByTaskId("jsonOutput").getFirst().getOutputs().get("value")).isEqualTo("{}");
}

View File

@@ -122,10 +122,10 @@ class YamlParserTest {
void inputs() {
Flow flow = this.parse("flows/valids/inputs.yaml");
assertThat(flow.getInputs().size()).isEqualTo(29);
assertThat(flow.getInputs().stream().filter(Input::getRequired).count()).isEqualTo(11L);
assertThat(flow.getInputs().stream().filter(r -> !r.getRequired()).count()).isEqualTo(18L);
assertThat(flow.getInputs().stream().filter(r -> r.getDefaults() != null).count()).isEqualTo(3L);
assertThat(flow.getInputs().size()).isEqualTo(31);
assertThat(flow.getInputs().stream().filter(Input::getRequired).count()).isEqualTo(12L);
assertThat(flow.getInputs().stream().filter(r -> !r.getRequired()).count()).isEqualTo(19L);
assertThat(flow.getInputs().stream().filter(r -> r.getDefaults() != null).count()).isEqualTo(4L);
assertThat(flow.getInputs().stream().filter(r -> r instanceof StringInput stringInput && stringInput.getValidator() != null).count()).isEqualTo(1L);
}

View File

@@ -1,15 +1,24 @@
package io.kestra.core.utils;
import io.kestra.core.junit.annotations.KestraTest;
import io.kestra.core.models.Setting;
import io.kestra.core.repositories.SettingRepositoryInterface;
import io.micronaut.test.extensions.junit5.annotation.MicronautTest;
import jakarta.inject.Inject;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
@KestraTest
import java.util.Optional;
import static org.assertj.core.api.Assertions.assertThat;
@MicronautTest
public class EditionProviderTest {
@Inject
private EditionProvider editionProvider;
@Inject
private SettingRepositoryInterface settingRepository;
protected EditionProvider.Edition expectedEdition() {
return EditionProvider.Edition.OSS;
}
@@ -17,5 +26,10 @@ public class EditionProviderTest {
@Test
void shouldReturnCurrentEdition() {
Assertions.assertEquals(expectedEdition(), editionProvider.get());
// check that the edition is persisted in settings
Optional<Setting> editionSettings = settingRepository.findByKey(Setting.INSTANCE_EDITION);
assertThat(editionSettings).isPresent();
assertThat(editionSettings.get().getValue()).isEqualTo(expectedEdition().name());
}
}

View File

@@ -48,8 +48,8 @@ class ListUtilsTest {
void convertToListString(){
assertThat(ListUtils.convertToListString(List.of("string1", "string2"))).isEqualTo(List.of("string1", "string2"));
assertThat(ListUtils.convertToListString(List.of())).isEqualTo(List.of());
assertThat(ListUtils.convertToListString(List.of(1, 2, 3))).isEqualTo(List.of("1", "2", "3"));
assertThrows(IllegalArgumentException.class, () -> ListUtils.convertToListString("not a list"));
assertThrows(IllegalArgumentException.class, () -> ListUtils.convertToListString(List.of(1, 2, 3)));
}
}

View File

@@ -1,48 +1,107 @@
package io.kestra.core.utils;
import ch.qos.logback.classic.Logger;
import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.core.AppenderBase;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.TaskRun;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.triggers.TriggerContext;
import lombok.extern.slf4j.Slf4j;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.slf4j.LoggerFactory;
import org.slf4j.event.Level;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import static org.assertj.core.api.Assertions.assertThat;
@Slf4j
class LogsTest {
private static final InMemoryAppender MEMORY_APPENDER = new InMemoryAppender();
@BeforeAll
static void setupLogger() {
Logger logger = (Logger) LoggerFactory.getLogger(Logger.ROOT_LOGGER_NAME);
MEMORY_APPENDER.setContext((LoggerContext) LoggerFactory.getILoggerFactory());
MEMORY_APPENDER.start();
logger.addAppender(MEMORY_APPENDER);
}
@AfterEach
void clearLogs() {
MEMORY_APPENDER.clear();
}
@Test
void logFlow() {
var flow = Flow.builder().namespace("namespace").id("flow").build();
var flow = Flow.builder().tenantId("tenant").namespace("namespace").id("flow").build();
Logs.logExecution(flow, log, Level.INFO, "Some log");
Logs.logExecution(flow, log, Level.INFO, "Some log with an {}", "attribute");
Logs.logExecution(flow, log, Level.ERROR, "Some log with an {} and an error", "attribute", new RuntimeException("Test Exception"));
List<ILoggingEvent> logs = MEMORY_APPENDER.getLogs();
assertThat(logs).hasSize(3);
}
@Test
void logExecution() {
var execution = Execution.builder().namespace("namespace").flowId("flow").id("execution").build();
Logs.logExecution(execution, log, Level.INFO, "Some log");
Logs.logExecution(execution, log, Level.INFO, "Some log with an {}", "attribute");
var execution = Execution.builder().tenantId("tenant").namespace("namespace").flowId("flow").id("execution").build();
Logs.logExecution(execution, Level.INFO, "Some log");
Logs.logExecution(execution, Level.INFO, "Some log with an {}", "attribute");
Logs.logExecution(execution, Level.INFO, "Some log");
List<ILoggingEvent> logs = MEMORY_APPENDER.getLogs();
assertThat(logs).hasSize(3);
assertThat(logs.getFirst().getLoggerName()).isEqualTo("executor.tenant.namespace.flow");
}
@Test
void logTrigger() {
var trigger = TriggerContext.builder().namespace("namespace").flowId("flow").triggerId("trigger").build();
Logs.logTrigger(trigger, log, Level.INFO, "Some log");
Logs.logTrigger(trigger, log, Level.INFO, "Some log with an {}", "attribute");
var trigger = TriggerContext.builder().tenantId("tenant").namespace("namespace").flowId("flow").triggerId("trigger").build();
Logs.logTrigger(trigger, Level.INFO, "Some log");
Logs.logTrigger(trigger, Level.INFO, "Some log with an {}", "attribute");
Logs.logTrigger(trigger, Level.INFO, "Some log");
List<ILoggingEvent> logs = MEMORY_APPENDER.getLogs();
assertThat(logs).hasSize(3);
assertThat(logs.getFirst().getLoggerName()).isEqualTo("scheduler.tenant.namespace.flow.trigger");
}
@Test
void logTaskRun() {
var taskRun = TaskRun.builder().namespace("namespace").flowId("flow").executionId("execution").taskId("task").id("taskRun").build();
var taskRun = TaskRun.builder().tenantId("tenant").namespace("namespace").flowId("flow").executionId("execution").taskId("task").id("taskRun").build();
Logs.logTaskRun(taskRun, Level.INFO, "Some log");
Logs.logTaskRun(taskRun, Level.INFO, "Some log with an {}", "attribute");
taskRun = TaskRun.builder().namespace("namespace").flowId("flow").executionId("execution").taskId("task").id("taskRun").value("value").build();
Logs.logTaskRun(taskRun, Level.INFO, "Some log");
Logs.logTaskRun(taskRun, Level.INFO, "Some log with an {}", "attribute");
List<ILoggingEvent> logs = MEMORY_APPENDER.getLogs();
assertThat(logs).hasSize(4);
assertThat(logs.getFirst().getLoggerName()).isEqualTo("worker.tenant.namespace.flow.task");
}
private static class InMemoryAppender extends AppenderBase<ILoggingEvent> {
private final List<ILoggingEvent> logs = new CopyOnWriteArrayList<>();
@Override
protected void append(ILoggingEvent event) {
logs.add(event);
}
public List<ILoggingEvent> getLogs() {
return logs;
}
public void clear() {
logs.clear();
}
}
}

View File

@@ -216,4 +216,23 @@ class MapUtilsTest {
"k1.k4", "v2"
));
}
@Test
@SuppressWarnings("unchecked")
void mergeShouldNotDuplicateListElements() {
Map<String, Object> first = Map.of(
"key1", "value1",
"key2", List.of("something", "else")
);
Map<String, Object> second = Map.of(
"key2", List.of("something", "other"),
"key3", "value3"
);
Map<String, Object> results = MapUtils.merge(first, second);
assertThat(results).hasSize(3);
List<String> list = (List<String>) results.get("key2");
assertThat(list).hasSize(3);
}
}

View File

@@ -20,7 +20,6 @@ import org.junit.jupiter.api.parallel.ExecutionMode;
import reactor.core.publisher.Flux;
import java.io.ByteArrayInputStream;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.time.Duration;
@@ -45,9 +44,6 @@ class NamespaceFilesUtilsTest {
@Named(QueueFactoryInterface.WORKERTASKLOG_NAMED)
QueueInterface<LogEntry> workerTaskLogQueue;
@Inject
NamespaceFilesUtils namespaceFilesUtils;
@Inject
NamespaceFactory namespaceFactory;
@@ -66,7 +62,7 @@ class NamespaceFilesUtilsTest {
namespaceStorage.putFile(Path.of("/" + i + ".txt"), data);
}
namespaceFilesUtils.loadNamespaceFiles(runContext, NamespaceFiles.builder().build());
NamespaceFilesUtils.loadNamespaceFiles(runContext, NamespaceFiles.builder().build());
List<LogEntry> logEntry = TestsUtils.awaitLogs(logs, 1);
receive.blockLast();
@@ -91,7 +87,7 @@ class NamespaceFilesUtilsTest {
namespaceStorage.putFile(Path.of("/" + i + ".txt"), data);
}
namespaceFilesUtils.loadNamespaceFiles(runContext, NamespaceFiles.builder().namespaces(Property.ofValue(List.of(namespace))).build());
NamespaceFilesUtils.loadNamespaceFiles(runContext, NamespaceFiles.builder().namespaces(Property.ofValue(List.of(namespace))).build());
List<LogEntry> logEntry = TestsUtils.awaitLogs(logs, 1);
receive.blockLast();
@@ -116,7 +112,7 @@ class NamespaceFilesUtilsTest {
namespaceStorage.putFile(Path.of("/folder2/test.txt"), data);
namespaceStorage.putFile(Path.of("/test.txt"), data);
namespaceFilesUtils.loadNamespaceFiles(runContext, NamespaceFiles.builder().namespaces(Property.ofValue(List.of(namespace))).build());
NamespaceFilesUtils.loadNamespaceFiles(runContext, NamespaceFiles.builder().namespaces(Property.ofValue(List.of(namespace))).build());
List<LogEntry> logEntry = TestsUtils.awaitLogs(logs, 1);
receive.blockLast();
@@ -141,7 +137,7 @@ class NamespaceFilesUtilsTest {
namespaceFactory.of(MAIN_TENANT, ns1, storageInterface).putFile(Path.of("/test.txt"), data);
namespaceFactory.of(MAIN_TENANT, ns2, storageInterface).putFile(Path.of("/test.txt"), data);
namespaceFilesUtils.loadNamespaceFiles(runContext, NamespaceFiles.builder()
NamespaceFilesUtils.loadNamespaceFiles(runContext, NamespaceFiles.builder()
.namespaces(Property.ofValue(List.of(ns1, ns2)))
.folderPerNamespace(Property.ofValue(true))
.build());

View File

@@ -0,0 +1,30 @@
package io.kestra.core.utils;
import io.kestra.core.models.Setting;
import io.kestra.core.repositories.SettingRepositoryInterface;
import io.micronaut.test.extensions.junit5.annotation.MicronautTest;
import jakarta.inject.Inject;
import org.junit.jupiter.api.Test;
import java.util.Optional;
import static org.assertj.core.api.Assertions.assertThat;
@MicronautTest
class VersionProviderTest {
@Inject
private VersionProvider versionProvider;
@Inject
private SettingRepositoryInterface settingRepository;
@Test
void shouldResolveVersion() {
assertThat(versionProvider.getVersion()).endsWith("-SNAPSHOT");
// check that the version is persisted in settings
Optional<Setting> versionSettings = settingRepository.findByKey(Setting.INSTANCE_VERSION);
assertThat(versionSettings).isPresent();
assertThat(versionSettings.get().getValue()).isEqualTo(versionProvider.getVersion());
}
}

View File

@@ -9,9 +9,15 @@ import io.kestra.core.utils.TestsUtils;
import io.kestra.core.junit.annotations.KestraTest;
import jakarta.inject.Inject;
import org.junit.jupiter.api.Test;
import io.kestra.core.models.validations.ValidateConstraintViolation;
import io.kestra.core.services.FlowService;
import jakarta.validation.ConstraintViolationException;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.JsonLocation;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.List;
import java.io.File;
import java.net.URL;
import java.util.Optional;
@@ -23,6 +29,107 @@ class FlowValidationTest {
@Inject
private ModelValidator modelValidator;
@Inject
private FlowService flowService;
private static final ObjectMapper mapper = new ObjectMapper();
// Helper class to create JsonProcessingException with location
private static class TestJsonProcessingException extends JsonProcessingException {
public TestJsonProcessingException(String msg, JsonLocation location) {
super(msg, location);
}
public TestJsonProcessingException(String msg) {
super(msg);
}
}
@Test
void testFormatYamlErrorMessage_WithExpectedFieldName() throws JsonProcessingException {
JsonProcessingException e = new TestJsonProcessingException("Expected a field name", new JsonLocation(null, 100, 5, 10));
Object dummyTarget = new Object(); // Dummy target for toConstraintViolationException
ConstraintViolationException result = YamlParser.toConstraintViolationException(dummyTarget, "test resource", e);
assertThat(result.getMessage()).contains("YAML syntax error: Invalid structure").contains("(at line 5)");
}
@Test
void testFormatYamlErrorMessage_WithMappingStartEvent() throws JsonProcessingException {
JsonProcessingException e = new TestJsonProcessingException("MappingStartEvent", new JsonLocation(null, 200, 3, 5));
Object dummyTarget = new Object();
ConstraintViolationException result = YamlParser.toConstraintViolationException(dummyTarget, "test resource", e);
assertThat(result.getMessage()).contains("YAML syntax error: Unexpected mapping start").contains("(at line 3)");
}
@Test
void testFormatYamlErrorMessage_WithScalarValue() throws JsonProcessingException {
JsonProcessingException e = new TestJsonProcessingException("Scalar value", new JsonLocation(null, 150, 7, 12));
Object dummyTarget = new Object();
ConstraintViolationException result = YamlParser.toConstraintViolationException(dummyTarget, "test resource", e);
assertThat(result.getMessage()).contains("YAML syntax error: Expected a simple value").contains("(at line 7)");
}
@Test
void testFormatYamlErrorMessage_GenericError() throws JsonProcessingException {
JsonProcessingException e = new TestJsonProcessingException("Some other error", new JsonLocation(null, 50, 2, 8));
Object dummyTarget = new Object();
ConstraintViolationException result = YamlParser.toConstraintViolationException(dummyTarget, "test resource", e);
assertThat(result.getMessage()).contains("YAML parsing error: Some other error").contains("(at line 2)");
}
@Test
void testFormatYamlErrorMessage_NoLocation() throws JsonProcessingException {
JsonProcessingException e = new TestJsonProcessingException("Expected a field name");
Object dummyTarget = new Object();
ConstraintViolationException result = YamlParser.toConstraintViolationException(dummyTarget, "test resource", e);
assertThat(result.getMessage()).contains("YAML syntax error: Invalid structure").doesNotContain("at line");
}
@Test
void testValidateFlowWithYamlSyntaxError() {
String invalidYaml = """
id: test-flow
namespace: io.kestra.unittest
tasks:
- id:hello
type: io.kestra.plugin.core.log.Log
message: {{ abc }}
""";
List<ValidateConstraintViolation> results = flowService.validate("my-tenant", invalidYaml);
assertThat(results).hasSize(1);
assertThat(results.getFirst().getConstraints()).contains("YAML parsing error").contains("at line");
}
@Test
void testValidateFlowWithUndefinedVariable() {
String yamlWithUndefinedVar = """
id: test-flow
namespace: io.kestra.unittest
tasks:
- id: hello
type: io.kestra.plugin.core.log.Log
message: {{ undefinedVar }}
""";
List<ValidateConstraintViolation> results = flowService.validate("my-tenant", yamlWithUndefinedVar);
assertThat(results).hasSize(1);
assertThat(results.getFirst().getConstraints()).contains("Validation error");
}
@Test
void invalidRecursiveFlow() {
Flow flow = this.parse("flows/invalids/recursive-flow.yaml");
@@ -130,4 +237,4 @@ class FlowValidationTest {
return YamlParser.parse(file, Flow.class);
}
}
}

View File

@@ -8,6 +8,7 @@ import io.kestra.core.models.flows.Output;
import io.kestra.core.models.flows.State;
import io.kestra.core.models.flows.State.History;
import io.kestra.core.runners.DefaultRunContext;
import io.kestra.core.runners.InputAndOutput;
import io.kestra.core.runners.SubflowExecutionResult;
import io.kestra.core.services.VariablesService;
import io.micronaut.context.ApplicationContext;
@@ -46,11 +47,15 @@ class SubflowTest {
@Mock
private ApplicationContext applicationContext;
@Mock
private InputAndOutput inputAndOutput;
@BeforeEach
void beforeEach() {
Mockito.when(applicationContext.getBean(VariablesService.class)).thenReturn(new VariablesService());
Mockito.when(runContext.logger()).thenReturn(LOG);
Mockito.when(runContext.getApplicationContext()).thenReturn(applicationContext);
Mockito.when(runContext.inputAndOutput()).thenReturn(inputAndOutput);
}
@Test
@@ -118,7 +123,7 @@ class SubflowTest {
Map<String, Object> outputs = Map.of("key", "value");
Mockito.when(runContext.render(Mockito.anyMap())).thenReturn(outputs);
Mockito.when(inputAndOutput.renderOutputs(Mockito.anyList())).thenReturn(Map.of("key", "value"));
Subflow subflow = Subflow.builder()
.outputs(outputs)
@@ -159,6 +164,7 @@ class SubflowTest {
Output output = Output.builder().id("key").value("value").build();
Mockito.when(runContext.render(Mockito.anyMap())).thenReturn(Map.of(output.getId(), output.getValue()));
Mockito.when(inputAndOutput.typedOutputs(Mockito.any(), Mockito.any(), Mockito.anyMap())).thenReturn(Map.of("key", "value"));
Flow flow = Flow.builder()
.outputs(List.of(output))
.build();

View File

@@ -1,9 +1,11 @@
package io.kestra.plugin.core.flow;
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
import static org.assertj.core.api.Assertions.as;
import static org.assertj.core.api.Assertions.assertThat;
import com.google.common.collect.ImmutableMap;
import io.kestra.core.junit.annotations.ExecuteFlow;
import io.kestra.core.junit.annotations.KestraTest;
import io.kestra.core.junit.annotations.LoadFlows;
import io.kestra.core.models.executions.Execution;
@@ -100,4 +102,14 @@ class SwitchTest {
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.FAILED);
}
@Test
@ExecuteFlow("flows/valids/switch-in-concurrent-loop.yaml")
void switchInConcurrentLoop(Execution execution) {
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(execution.getTaskRunList()).hasSize(5);
// we check that OOMCRM_EB_DD_000 and OOMCRM_EB_DD_001 have been processed once
assertThat(execution.getTaskRunList().stream().filter(t -> t.getTaskId().equals("OOMCRM_EB_DD_000")).count()).isEqualTo(1);
assertThat(execution.getTaskRunList().stream().filter(t -> t.getTaskId().equals("OOMCRM_EB_DD_001")).count()).isEqualTo(1);
}
}

View File

@@ -57,7 +57,7 @@ class ScheduleOnDatesTest {
}
@Test
public void shouldReturnFirstDateWhenNextEvaluationDateAndNoExistingTriggerDate() throws Exception {
public void shouldReturnFirstDateWhenNextEvaluationDateAndNoExistingTriggerDate() {
// given
var now = ZonedDateTime.now();
var before = now.minusMinutes(1).truncatedTo(ChronoUnit.SECONDS);
@@ -75,7 +75,7 @@ class ScheduleOnDatesTest {
ZonedDateTime nextDate = scheduleOnDates.nextEvaluationDate(conditionContext, Optional.empty());
// then
assertThat(nextDate).isEqualTo(before);
assertThat(nextDate).isEqualTo(after);
}
@Test

View File

@@ -13,6 +13,7 @@ import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.Type;
import io.kestra.core.models.flows.input.StringInput;
import io.kestra.core.models.flows.input.MultiselectInput;
import io.kestra.core.models.triggers.AbstractTrigger;
import io.kestra.core.models.triggers.TriggerContext;
import io.kestra.core.runners.RunContextFactory;
@@ -103,8 +104,9 @@ class ScheduleTest {
);
assertThat(evaluate.isPresent()).isTrue();
assertThat(evaluate.get().getLabels()).hasSize(3);
assertThat(evaluate.get().getLabels()).hasSize(4);
assertTrue(evaluate.get().getLabels().stream().anyMatch(label -> label.key().equals(Label.CORRELATION_ID)));
assertTrue(evaluate.get().getLabels().stream().anyMatch(label -> label.equals(new Label(Label.FROM, "trigger"))));
assertThat(evaluate.get().getVariables()).containsEntry("custom_var", "VARIABLE VALUE");
var vars = evaluate.get().getTrigger().getVariables();
var inputs = evaluate.get().getInputs();
@@ -137,8 +139,9 @@ class ScheduleTest {
);
assertThat(evaluate.isPresent()).isTrue();
assertThat(evaluate.get().getLabels()).hasSize(3);
assertThat(evaluate.get().getLabels()).hasSize(4);
assertTrue(evaluate.get().getLabels().stream().anyMatch(label -> label.key().equals(Label.CORRELATION_ID)));
assertTrue(evaluate.get().getLabels().stream().anyMatch(label -> label.equals(new Label(Label.FROM, "trigger"))));
assertThat(evaluate.get().getVariables()).containsEntry("custom_var", "VARIABLE VALUE");
var inputs = evaluate.get().getInputs();
@@ -475,6 +478,81 @@ class ScheduleTest {
assertThat(result.get().getVariables()).containsEntry("custom_var", "VARIABLE VALUE");
}
@Test
void successWithMultiselectInputDefaults() throws Exception {
Schedule trigger = Schedule.builder().id("schedule").type(Schedule.class.getName()).cron("0 0 1 * *").build();
ZonedDateTime date = ZonedDateTime.now()
.withDayOfMonth(1)
.withHour(0)
.withMinute(0)
.withSecond(0)
.truncatedTo(ChronoUnit.SECONDS)
.minusMonths(1);
Optional<Execution> evaluate = trigger.evaluate(
conditionContextWithMultiselectInput(trigger),
triggerContext(date, trigger));
assertThat(evaluate.isPresent()).isTrue();
var inputs = evaluate.get().getInputs();
// Verify MULTISELECT input with explicit defaults works correctly
assertThat(inputs.get("multiselectInput")).isEqualTo(List.of("option1", "option2"));
}
@Test
void successWithMultiselectInputAutoSelectFirst() throws Exception {
Schedule trigger = Schedule.builder().id("schedule").type(Schedule.class.getName()).cron("0 0 1 * *").build();
ZonedDateTime date = ZonedDateTime.now()
.withDayOfMonth(1)
.withHour(0)
.withMinute(0)
.withSecond(0)
.truncatedTo(ChronoUnit.SECONDS)
.minusMonths(1);
Optional<Execution> evaluate = trigger.evaluate(
conditionContextWithMultiselectAutoSelectFirst(trigger),
triggerContext(date, trigger));
assertThat(evaluate.isPresent()).isTrue();
var inputs = evaluate.get().getInputs();
// Verify MULTISELECT input with autoSelectFirst defaults to first option
assertThat(inputs.get("multiselectAutoSelect")).isEqualTo(List.of("first"));
}
@Test
void successWithMultiselectInputProvidedValue() throws Exception {
// Test that provided values override defaults for MULTISELECT
Schedule trigger = Schedule.builder()
.id("schedule")
.type(Schedule.class.getName())
.cron("0 0 1 * *")
.inputs(Map.of("multiselectInput", List.of("option3")))
.build();
ZonedDateTime date = ZonedDateTime.now()
.withDayOfMonth(1)
.withHour(0)
.withMinute(0)
.withSecond(0)
.truncatedTo(ChronoUnit.SECONDS)
.minusMonths(1);
Optional<Execution> evaluate = trigger.evaluate(
conditionContextWithMultiselectInput(trigger),
triggerContext(date, trigger));
assertThat(evaluate.isPresent()).isTrue();
var inputs = evaluate.get().getInputs();
// Verify provided value overrides defaults
assertThat(inputs.get("multiselectInput")).isEqualTo(List.of("option3"));
}
private ConditionContext conditionContext(AbstractTrigger trigger) {
Flow flow = Flow.builder()
.id(IdUtils.create())
@@ -504,17 +582,79 @@ class ScheduleTest {
.build();
}
private ConditionContext conditionContextWithMultiselectInput(AbstractTrigger trigger) {
Flow flow = Flow.builder()
.id(IdUtils.create())
.namespace("io.kestra.tests")
.labels(
List.of(
new Label("flow-label-1", "flow-label-1"),
new Label("flow-label-2", "flow-label-2")))
.variables(Map.of("custom_var", "VARIABLE VALUE"))
.inputs(List.of(
MultiselectInput.builder()
.id("multiselectInput")
.type(Type.MULTISELECT)
.values(List.of("option1", "option2", "option3"))
.defaults(Property.ofValue(List.of("option1", "option2")))
.build()))
.build();
TriggerContext triggerContext = TriggerContext.builder()
.namespace(flow.getNamespace())
.flowId(flow.getId())
.triggerId(trigger.getId())
.build();
return ConditionContext.builder()
.runContext(runContextInitializer.forScheduler((DefaultRunContext) runContextFactory.of(),
triggerContext, trigger))
.flow(flow)
.build();
}
private ConditionContext conditionContextWithMultiselectAutoSelectFirst(AbstractTrigger trigger) {
Flow flow = Flow.builder()
.id(IdUtils.create())
.namespace("io.kestra.tests")
.labels(
List.of(
new Label("flow-label-1", "flow-label-1"),
new Label("flow-label-2", "flow-label-2")))
.variables(Map.of("custom_var", "VARIABLE VALUE"))
.inputs(List.of(
MultiselectInput.builder()
.id("multiselectAutoSelect")
.type(Type.MULTISELECT)
.values(List.of("first", "second", "third"))
.autoSelectFirst(true)
.build()))
.build();
TriggerContext triggerContext = TriggerContext.builder()
.namespace(flow.getNamespace())
.flowId(flow.getId())
.triggerId(trigger.getId())
.build();
return ConditionContext.builder()
.runContext(runContextInitializer.forScheduler((DefaultRunContext) runContextFactory.of(),
triggerContext, trigger))
.flow(flow)
.build();
}
private ZonedDateTime dateFromVars(String date, ZonedDateTime expexted) {
return ZonedDateTime.parse(date).withZoneSameInstant(expexted.getZone());
}
@Test
void shouldGetNextExecutionDateWithConditionMatchingFutureDate() throws InternalException {
ZonedDateTime now = ZonedDateTime.now().withZoneSameLocal(ZoneId.of("Europe/Paris"));
OffsetTime before = now.minusHours(1).toOffsetDateTime().toOffsetTime().withMinute(0).withSecond(0).withNano(0);
OffsetTime after = now.minusHours(4).toOffsetDateTime().toOffsetTime().withMinute(0).withSecond(0).withNano(0);
Schedule trigger = Schedule.builder()
.id("schedule").type(Schedule.class.getName())
.cron("0 * * * *") // every hour
@@ -527,25 +667,25 @@ class ScheduleTest {
.build()
))
.build();
TriggerContext triggerContext = triggerContext(now, trigger).toBuilder().build();
ConditionContext conditionContext = ConditionContext.builder()
.runContext(runContextInitializer.forScheduler((DefaultRunContext) runContextFactory.of(), triggerContext, trigger))
.build();
Optional<ZonedDateTime> result = trigger.truePreviousNextDateWithCondition(trigger.executionTime(), conditionContext, now, true);
assertThat(result).isNotEmpty();
}
@Test
void shouldGetNextExecutionDateWithConditionMatchingCurrentDate() throws InternalException {
ZonedDateTime now = ZonedDateTime.now().withZoneSameLocal(ZoneId.of("Europe/Paris"));
OffsetTime before = now.plusHours(2).toOffsetDateTime().toOffsetTime().withMinute(0).withSecond(0).withNano(0);
OffsetTime after = now.minusHours(2).toOffsetDateTime().toOffsetTime().withMinute(0).withSecond(0).withNano(0);
Schedule trigger = Schedule.builder()
.id("schedule").type(Schedule.class.getName())
.cron("*/30 * * * * *")
@@ -558,13 +698,13 @@ class ScheduleTest {
.build()
))
.build();
TriggerContext triggerContext = triggerContext(now, trigger).toBuilder().build();
ConditionContext conditionContext = ConditionContext.builder()
.runContext(runContextInitializer.forScheduler((DefaultRunContext) runContextFactory.of(), triggerContext, trigger))
.build();
Optional<ZonedDateTime> result = trigger.truePreviousNextDateWithCondition(trigger.executionTime(), conditionContext, now, true);
assertThat(result).isNotEmpty();
}

View File

@@ -8,4 +8,4 @@ concurrency:
tasks:
- id: sleep
type: io.kestra.plugin.core.flow.Sleep
duration: PT10S
duration: PT2S

View File

@@ -0,0 +1,11 @@
id: flow-concurrency-queue-killed
namespace: io.kestra.tests
concurrency:
behavior: QUEUE
limit: 1
tasks:
- id: sleep
type: io.kestra.plugin.core.flow.Sleep
duration: PT1M

View File

@@ -41,7 +41,10 @@ inputs:
- id: instantDefaults
type: DATETIME
defaults: "2013-08-09T14:19:00Z"
- id: json
- id: json1
type: JSON
required: false
- id: json2
type: JSON
required: false
- id: uri
@@ -95,7 +98,7 @@ inputs:
- name: array
type: ARRAY
itemType: INT
- name: yaml
- name: yaml1
type: YAML
defaults:
property: something
@@ -104,6 +107,15 @@ inputs:
value: value1
- key: key2
value: value2
- name: yaml2
type: YAML
defaults:
property: something
list:
- key: key1
value: value1
- key: key2
value: value2
# required true and an empty default value will only work if we correctly serialize default values which is what this input is about to test.
- name: empty
type: STRING
@@ -140,12 +152,18 @@ tasks:
type: io.kestra.plugin.core.debug.Return
format: "{{taskrun.value}}"
- id: json
- id: json1
type: io.kestra.plugin.core.debug.Return
format: "{{inputs.json}}"
format: "{{inputs.json1}}"
- id: json2
type: io.kestra.plugin.core.debug.Return
format: "{{inputs.json2}}"
- id: jsonOutput
type: io.kestra.plugin.core.debug.Return
format: "{{outputs.json.value}}"
- id: yamlOutput
format: "{{outputs.json1.value}}"
- id: yamlOutput1
type: io.kestra.plugin.core.debug.Return
format: "{{inputs.yaml}}"
format: "{{inputs.yaml1}}"
- id: yamlOutput2
type: io.kestra.plugin.core.debug.Return
format: "{{inputs.yaml2}}"

View File

@@ -0,0 +1,23 @@
id: switch-in-concurrent-loop
namespace: io.kestra.tests
tasks:
- id: iterate_and_check_name
type: io.kestra.plugin.core.flow.ForEach
tasks:
- id: switch
type: io.kestra.plugin.core.flow.Switch
value: "{{ taskrun.value }}"
cases:
"Alice":
- id: OOMCRM_EB_DD_000
type: io.kestra.plugin.core.log.Log
message: Alice
"Bob":
- id: OOMCRM_EB_DD_001
type: io.kestra.plugin.core.log.Log
message: Bob
values: ["Alice", "Bob"]
concurrencyLimit: 0

View File

@@ -13,18 +13,19 @@ tasks:
- io.test.second
- io.test.third
enabled: true
folderPerNamespace: true
exclude:
- /ignore/**
tasks:
- id: t1
type: io.kestra.core.tasks.test.Read
path: "/test/a/b/c/1.txt"
path: "/io.test.third/test/a/b/c/1.txt"
- id: t2
type: io.kestra.core.tasks.test.Read
path: "/a/b/c/2.txt"
path: "/io.test.second/a/b/c/2.txt"
- id: t3
type: io.kestra.core.tasks.test.Read
path: "/a/b/3.txt"
path: "/io.test.first/a/b/3.txt"
- id: t4
type: io.kestra.core.tasks.test.Read
path: "/ignore/4.txt"

View File

@@ -402,10 +402,11 @@ public class ExecutorService {
if (flow.getOutputs() != null) {
RunContext runContext = runContextFactory.of(executor.getFlow(), executor.getExecution());
var inputAndOutput = runContext.inputAndOutput();
try {
Map<String, Object> outputs = FlowInputOutput.renderFlowOutputs(flow.getOutputs(), runContext);
outputs = flowInputOutput.typedOutputs(flow, executor.getExecution(), outputs);
Map<String, Object> outputs = inputAndOutput.renderOutputs(flow.getOutputs());
outputs = inputAndOutput.typedOutputs(flow, executor.getExecution(), outputs);
newExecution = newExecution.withOutputs(outputs);
} catch (Exception e) {
Logs.logExecution(

View File

@@ -16,7 +16,7 @@ public final class H2RepositoryUtils {
case MONTH:
return DSL.field("FORMATDATETIME(\"" + dateField + "\", 'yyyy-MM')", Date.class);
case WEEK:
return DSL.field("FORMATDATETIME(\"" + dateField + "\", 'YYYY-ww')", Date.class);
return DSL.field("DATE_TRUNC('WEEK', \"" + dateField + "\")", Date.class);
case DAY:
return DSL.field("FORMATDATETIME(\"" + dateField + "\", 'yyyy-MM-dd')", Date.class);
case HOUR:

View File

@@ -3,5 +3,5 @@ package io.kestra.repository.h2;
import io.kestra.jdbc.repository.AbstractJdbcFlowRepositoryTest;
public class H2FlowRepositoryTest extends AbstractJdbcFlowRepositoryTest {
}

View File

@@ -0,0 +1,6 @@
package io.kestra.runner.h2;
import io.kestra.jdbc.runner.JdbcConcurrencyRunnerTest;
public class H2RunnerConcurrencyTest extends JdbcConcurrencyRunnerTest {
}

Some files were not shown because too many files have changed in this diff Show More