Compare commits

...

38 Commits

Author SHA1 Message Date
AJ Emerich
42c8334e2e Merge branch 'develop' into docs/purgeFiles 2025-12-18 04:02:10 -06:00
Loïc Mathieu
7ea95f393e feat(execution): add a system.from label
Closes https://github.com/kestra-io/kestra-ee/issues/4699
2025-12-17 15:49:33 +01:00
Piyush Bhaskar
6935900699 fix(core): add a no-op update function to oss store to initialize update (#13732) 2025-12-17 19:47:40 +05:30
AJ Emerich
123d7fb426 Update core/src/main/java/io/kestra/plugin/core/namespace/PurgeFiles.java 2025-12-17 15:13:09 +01:00
Saif M
0bc8e8d74a fix(flow): Improve exception handling with a clear error message (#13674)
* fix: improved error handling

* including the line

* added tests

* added unit tests
2025-12-17 14:26:53 +01:00
AJ Emerich
e0c3cfa1f9 docs(PurgeFiles): update documentation and example 2025-12-17 12:26:57 +01:00
dependabot[bot]
7f77b24ae0 build(deps): bump com.google.cloud:libraries-bom from 26.72.0 to 26.73.0
Bumps [com.google.cloud:libraries-bom](https://github.com/googleapis/java-cloud-bom) from 26.72.0 to 26.73.0.
- [Release notes](https://github.com/googleapis/java-cloud-bom/releases)
- [Changelog](https://github.com/googleapis/java-cloud-bom/blob/main/release-please-config.json)
- [Commits](https://github.com/googleapis/java-cloud-bom/compare/v26.72.0...v26.73.0)

---
updated-dependencies:
- dependency-name: com.google.cloud:libraries-bom
  dependency-version: 26.73.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-17 10:14:55 +01:00
dependabot[bot]
ec6820dc25 build(deps): bump org.aspectj:aspectjweaver from 1.9.25 to 1.9.25.1
Bumps [org.aspectj:aspectjweaver](https://github.com/eclipse/org.aspectj) from 1.9.25 to 1.9.25.1.
- [Release notes](https://github.com/eclipse/org.aspectj/releases)
- [Commits](https://github.com/eclipse/org.aspectj/commits)

---
updated-dependencies:
- dependency-name: org.aspectj:aspectjweaver
  dependency-version: 1.9.25.1
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-17 09:51:03 +01:00
dependabot[bot]
d94193c143 build(deps): bump software.amazon.awssdk.crt:aws-crt
Bumps [software.amazon.awssdk.crt:aws-crt](https://github.com/awslabs/aws-crt-java) from 0.40.3 to 0.41.0.
- [Release notes](https://github.com/awslabs/aws-crt-java/releases)
- [Commits](https://github.com/awslabs/aws-crt-java/compare/v0.40.3...v0.41.0)

---
updated-dependencies:
- dependency-name: software.amazon.awssdk.crt:aws-crt
  dependency-version: 0.41.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-17 09:50:42 +01:00
dependabot[bot]
c9628047fa build(deps): bump io.qameta.allure:allure-bom from 2.31.0 to 2.32.0
Bumps [io.qameta.allure:allure-bom](https://github.com/allure-framework/allure-java) from 2.31.0 to 2.32.0.
- [Release notes](https://github.com/allure-framework/allure-java/releases)
- [Commits](https://github.com/allure-framework/allure-java/compare/2.31.0...2.32.0)

---
updated-dependencies:
- dependency-name: io.qameta.allure:allure-bom
  dependency-version: 2.32.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-17 09:50:13 +01:00
dependabot[bot]
4cbc069af4 build(deps): bump nl.basjes.gitignore:gitignore-reader
Bumps [nl.basjes.gitignore:gitignore-reader](https://github.com/nielsbasjes/codeowners) from 1.13.0 to 1.14.1.
- [Release notes](https://github.com/nielsbasjes/codeowners/releases)
- [Changelog](https://github.com/nielsbasjes/codeowners/blob/main/CHANGELOG.md)
- [Commits](https://github.com/nielsbasjes/codeowners/compare/v1.13.0...v1.14.1)

---
updated-dependencies:
- dependency-name: nl.basjes.gitignore:gitignore-reader
  dependency-version: 1.14.1
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-17 09:49:11 +01:00
dependabot[bot]
eabe573fe6 build(deps): bump software.amazon.awssdk:bom from 2.40.5 to 2.40.10
Bumps software.amazon.awssdk:bom from 2.40.5 to 2.40.10.

---
updated-dependencies:
- dependency-name: software.amazon.awssdk:bom
  dependency-version: 2.40.10
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-17 09:46:17 +01:00
dependabot[bot]
ecd64617c3 build(deps): bump org.testcontainers:junit-jupiter from 1.21.3 to 1.21.4
Bumps [org.testcontainers:junit-jupiter](https://github.com/testcontainers/testcontainers-java) from 1.21.3 to 1.21.4.
- [Release notes](https://github.com/testcontainers/testcontainers-java/releases)
- [Changelog](https://github.com/testcontainers/testcontainers-java/blob/main/CHANGELOG.md)
- [Commits](https://github.com/testcontainers/testcontainers-java/compare/1.21.3...1.21.4)

---
updated-dependencies:
- dependency-name: org.testcontainers:junit-jupiter
  dependency-version: 1.21.4
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-17 09:45:26 +01:00
dependabot[bot]
a5650bca0f build(deps): bump org.sonarqube from 7.2.0.6526 to 7.2.1.6560
Bumps org.sonarqube from 7.2.0.6526 to 7.2.1.6560.

---
updated-dependencies:
- dependency-name: org.sonarqube
  dependency-version: 7.2.1.6560
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-17 09:44:58 +01:00
dependabot[bot]
ed59e262d4 build(deps): bump org.apache.logging.log4j:log4j-to-slf4j
Bumps org.apache.logging.log4j:log4j-to-slf4j from 2.25.2 to 2.25.3.

---
updated-dependencies:
- dependency-name: org.apache.logging.log4j:log4j-to-slf4j
  dependency-version: 2.25.3
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-17 09:44:30 +01:00
dependabot[bot]
a5f9d54f7d build(deps): bump io.pebbletemplates:pebble from 4.0.0 to 4.1.0
Bumps [io.pebbletemplates:pebble](https://github.com/PebbleTemplates/pebble) from 4.0.0 to 4.1.0.
- [Release notes](https://github.com/PebbleTemplates/pebble/releases)
- [Changelog](https://github.com/PebbleTemplates/pebble/blob/master/release.properties)
- [Commits](https://github.com/PebbleTemplates/pebble/compare/4.0.0...4.1.0)

---
updated-dependencies:
- dependency-name: io.pebbletemplates:pebble
  dependency-version: 4.1.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-17 09:38:13 +01:00
Aaryan meena
47f4f43198 refactor(core): remove usage of unnecessary i18n composable (#13686)
Closes https://github.com/kestra-io/kestra/issues/13640.

Co-authored-by: MilosPaunovic <paun992@hotmail.com>
2025-12-17 08:23:27 +01:00
dependabot[bot]
5d31c97f7f build(deps): bump the minor group in /ui with 6 updates (#13725)
Bumps the minor group in /ui with 6 updates:

| Package | From | To |
| --- | --- | --- |
| [posthog-js](https://github.com/PostHog/posthog-js) | `1.304.0` | `1.308.0` |
| [shiki](https://github.com/shikijs/shiki/tree/HEAD/packages/shiki) | `3.19.0` | `3.20.0` |
| [@shikijs/markdown-it](https://github.com/shikijs/shiki/tree/HEAD/packages/markdown-it) | `3.19.0` | `3.20.0` |
| [@typescript-eslint/parser](https://github.com/typescript-eslint/typescript-eslint/tree/HEAD/packages/parser) | `8.49.0` | `8.50.0` |
| [sass](https://github.com/sass/dart-sass) | `1.96.0` | `1.97.0` |
| [typescript-eslint](https://github.com/typescript-eslint/typescript-eslint/tree/HEAD/packages/typescript-eslint) | `8.49.0` | `8.50.0` |


Updates `posthog-js` from 1.304.0 to 1.308.0
- [Release notes](https://github.com/PostHog/posthog-js/releases)
- [Changelog](https://github.com/PostHog/posthog-js/blob/main/CHANGELOG.md)
- [Commits](https://github.com/PostHog/posthog-js/compare/posthog-js@1.304.0...posthog-js@1.308.0)

Updates `shiki` from 3.19.0 to 3.20.0
- [Release notes](https://github.com/shikijs/shiki/releases)
- [Commits](https://github.com/shikijs/shiki/commits/v3.20.0/packages/shiki)

Updates `@shikijs/markdown-it` from 3.19.0 to 3.20.0
- [Release notes](https://github.com/shikijs/shiki/releases)
- [Commits](https://github.com/shikijs/shiki/commits/v3.20.0/packages/markdown-it)

Updates `@typescript-eslint/parser` from 8.49.0 to 8.50.0
- [Release notes](https://github.com/typescript-eslint/typescript-eslint/releases)
- [Changelog](https://github.com/typescript-eslint/typescript-eslint/blob/main/packages/parser/CHANGELOG.md)
- [Commits](https://github.com/typescript-eslint/typescript-eslint/commits/v8.50.0/packages/parser)

Updates `sass` from 1.96.0 to 1.97.0
- [Release notes](https://github.com/sass/dart-sass/releases)
- [Changelog](https://github.com/sass/dart-sass/blob/main/CHANGELOG.md)
- [Commits](https://github.com/sass/dart-sass/compare/1.96.0...1.97.0)

Updates `typescript-eslint` from 8.49.0 to 8.50.0
- [Release notes](https://github.com/typescript-eslint/typescript-eslint/releases)
- [Changelog](https://github.com/typescript-eslint/typescript-eslint/blob/main/packages/typescript-eslint/CHANGELOG.md)
- [Commits](https://github.com/typescript-eslint/typescript-eslint/commits/v8.50.0/packages/typescript-eslint)

---
updated-dependencies:
- dependency-name: posthog-js
  dependency-version: 1.308.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: minor
- dependency-name: shiki
  dependency-version: 3.20.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: minor
- dependency-name: "@shikijs/markdown-it"
  dependency-version: 3.20.0
  dependency-type: direct:development
  update-type: version-update:semver-minor
  dependency-group: minor
- dependency-name: "@typescript-eslint/parser"
  dependency-version: 8.50.0
  dependency-type: direct:development
  update-type: version-update:semver-minor
  dependency-group: minor
- dependency-name: sass
  dependency-version: 1.97.0
  dependency-type: direct:development
  update-type: version-update:semver-minor
  dependency-group: minor
- dependency-name: typescript-eslint
  dependency-version: 8.50.0
  dependency-type: direct:development
  update-type: version-update:semver-minor
  dependency-group: minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-12-17 08:18:19 +01:00
dependabot[bot]
f8107285c4 build(deps): bump the patch group in /ui with 5 updates (#13726)
Bumps the patch group in /ui with 5 updates:

| Package | From | To |
| --- | --- | --- |
| [vue-router](https://github.com/vuejs/router) | `4.6.3` | `4.6.4` |
| [@eslint/js](https://github.com/eslint/eslint/tree/HEAD/packages/js) | `9.39.1` | `9.39.2` |
| [@vitejs/plugin-vue](https://github.com/vitejs/vite-plugin-vue/tree/HEAD/packages/plugin-vue) | `6.0.2` | `6.0.3` |
| [eslint](https://github.com/eslint/eslint) | `9.39.1` | `9.39.2` |
| [rolldown-vite](https://github.com/vitejs/rolldown-vite/tree/HEAD/packages/vite) | `7.2.10` | `7.2.11` |


Updates `vue-router` from 4.6.3 to 4.6.4
- [Release notes](https://github.com/vuejs/router/releases)
- [Commits](https://github.com/vuejs/router/compare/v4.6.3...v4.6.4)

Updates `@eslint/js` from 9.39.1 to 9.39.2
- [Release notes](https://github.com/eslint/eslint/releases)
- [Commits](https://github.com/eslint/eslint/commits/v9.39.2/packages/js)

Updates `@vitejs/plugin-vue` from 6.0.2 to 6.0.3
- [Release notes](https://github.com/vitejs/vite-plugin-vue/releases)
- [Changelog](https://github.com/vitejs/vite-plugin-vue/blob/main/packages/plugin-vue/CHANGELOG.md)
- [Commits](https://github.com/vitejs/vite-plugin-vue/commits/plugin-vue@6.0.3/packages/plugin-vue)

Updates `eslint` from 9.39.1 to 9.39.2
- [Release notes](https://github.com/eslint/eslint/releases)
- [Commits](https://github.com/eslint/eslint/compare/v9.39.1...v9.39.2)

Updates `rolldown-vite` from 7.2.10 to 7.2.11
- [Release notes](https://github.com/vitejs/rolldown-vite/releases)
- [Changelog](https://github.com/vitejs/rolldown-vite/blob/rolldown-vite/packages/vite/CHANGELOG.md)
- [Commits](https://github.com/vitejs/rolldown-vite/commits/v7.2.11/packages/vite)

---
updated-dependencies:
- dependency-name: vue-router
  dependency-version: 4.6.4
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: patch
- dependency-name: "@eslint/js"
  dependency-version: 9.39.2
  dependency-type: direct:development
  update-type: version-update:semver-patch
  dependency-group: patch
- dependency-name: "@vitejs/plugin-vue"
  dependency-version: 6.0.3
  dependency-type: direct:development
  update-type: version-update:semver-patch
  dependency-group: patch
- dependency-name: eslint
  dependency-version: 9.39.2
  dependency-type: direct:development
  update-type: version-update:semver-patch
  dependency-group: patch
- dependency-name: rolldown-vite
  dependency-version: 7.2.11
  dependency-type: direct:development
  update-type: version-update:semver-patch
  dependency-group: patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-12-17 08:13:30 +01:00
dependabot[bot]
8dc8dc1796 build(deps): bump the build group in /ui with 9 updates (#13723)
Bumps the build group in /ui with 9 updates:

| Package | From | To |
| --- | --- | --- |
| [@esbuild/darwin-arm64](https://github.com/evanw/esbuild) | `0.27.1` | `0.27.2` |
| [@esbuild/darwin-x64](https://github.com/evanw/esbuild) | `0.27.1` | `0.27.2` |
| [@esbuild/linux-x64](https://github.com/evanw/esbuild) | `0.27.1` | `0.27.2` |
| [@rollup/rollup-darwin-arm64](https://github.com/rollup/rollup) | `4.53.3` | `4.53.5` |
| [@rollup/rollup-darwin-x64](https://github.com/rollup/rollup) | `4.53.3` | `4.53.5` |
| [@rollup/rollup-linux-x64-gnu](https://github.com/rollup/rollup) | `4.53.3` | `4.53.5` |
| [@swc/core-darwin-arm64](https://github.com/swc-project/swc) | `1.15.3` | `1.15.5` |
| [@swc/core-darwin-x64](https://github.com/swc-project/swc) | `1.15.3` | `1.15.5` |
| [@swc/core-linux-x64-gnu](https://github.com/swc-project/swc) | `1.15.3` | `1.15.5` |


Updates `@esbuild/darwin-arm64` from 0.27.1 to 0.27.2
- [Release notes](https://github.com/evanw/esbuild/releases)
- [Changelog](https://github.com/evanw/esbuild/blob/main/CHANGELOG.md)
- [Commits](https://github.com/evanw/esbuild/compare/v0.27.1...v0.27.2)

Updates `@esbuild/darwin-x64` from 0.27.1 to 0.27.2
- [Release notes](https://github.com/evanw/esbuild/releases)
- [Changelog](https://github.com/evanw/esbuild/blob/main/CHANGELOG.md)
- [Commits](https://github.com/evanw/esbuild/compare/v0.27.1...v0.27.2)

Updates `@esbuild/linux-x64` from 0.27.1 to 0.27.2
- [Release notes](https://github.com/evanw/esbuild/releases)
- [Changelog](https://github.com/evanw/esbuild/blob/main/CHANGELOG.md)
- [Commits](https://github.com/evanw/esbuild/compare/v0.27.1...v0.27.2)

Updates `@rollup/rollup-darwin-arm64` from 4.53.3 to 4.53.5
- [Release notes](https://github.com/rollup/rollup/releases)
- [Changelog](https://github.com/rollup/rollup/blob/master/CHANGELOG.md)
- [Commits](https://github.com/rollup/rollup/compare/v4.53.3...v4.53.5)

Updates `@rollup/rollup-darwin-x64` from 4.53.3 to 4.53.5
- [Release notes](https://github.com/rollup/rollup/releases)
- [Changelog](https://github.com/rollup/rollup/blob/master/CHANGELOG.md)
- [Commits](https://github.com/rollup/rollup/compare/v4.53.3...v4.53.5)

Updates `@rollup/rollup-linux-x64-gnu` from 4.53.3 to 4.53.5
- [Release notes](https://github.com/rollup/rollup/releases)
- [Changelog](https://github.com/rollup/rollup/blob/master/CHANGELOG.md)
- [Commits](https://github.com/rollup/rollup/compare/v4.53.3...v4.53.5)

Updates `@swc/core-darwin-arm64` from 1.15.3 to 1.15.5
- [Release notes](https://github.com/swc-project/swc/releases)
- [Changelog](https://github.com/swc-project/swc/blob/main/CHANGELOG.md)
- [Commits](https://github.com/swc-project/swc/compare/v1.15.3...v1.15.5)

Updates `@swc/core-darwin-x64` from 1.15.3 to 1.15.5
- [Release notes](https://github.com/swc-project/swc/releases)
- [Changelog](https://github.com/swc-project/swc/blob/main/CHANGELOG.md)
- [Commits](https://github.com/swc-project/swc/compare/v1.15.3...v1.15.5)

Updates `@swc/core-linux-x64-gnu` from 1.15.3 to 1.15.5
- [Release notes](https://github.com/swc-project/swc/releases)
- [Changelog](https://github.com/swc-project/swc/blob/main/CHANGELOG.md)
- [Commits](https://github.com/swc-project/swc/compare/v1.15.3...v1.15.5)

---
updated-dependencies:
- dependency-name: "@esbuild/darwin-arm64"
  dependency-version: 0.27.2
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: build
- dependency-name: "@esbuild/darwin-x64"
  dependency-version: 0.27.2
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: build
- dependency-name: "@esbuild/linux-x64"
  dependency-version: 0.27.2
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: build
- dependency-name: "@rollup/rollup-darwin-arm64"
  dependency-version: 4.53.5
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: build
- dependency-name: "@rollup/rollup-darwin-x64"
  dependency-version: 4.53.5
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: build
- dependency-name: "@rollup/rollup-linux-x64-gnu"
  dependency-version: 4.53.5
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: build
- dependency-name: "@swc/core-darwin-arm64"
  dependency-version: 1.15.5
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: build
- dependency-name: "@swc/core-darwin-x64"
  dependency-version: 1.15.5
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: build
- dependency-name: "@swc/core-linux-x64-gnu"
  dependency-version: 1.15.5
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: build
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-12-17 08:09:02 +01:00
dependabot[bot]
834dfd2947 build(deps-dev): bump @types/node in /ui in the types group (#13724)
Bumps the types group in /ui with 1 update: [@types/node](https://github.com/DefinitelyTyped/DefinitelyTyped/tree/HEAD/types/node).


Updates `@types/node` from 25.0.0 to 25.0.3
- [Release notes](https://github.com/DefinitelyTyped/DefinitelyTyped/releases)
- [Commits](https://github.com/DefinitelyTyped/DefinitelyTyped/commits/HEAD/types/node)

---
updated-dependencies:
- dependency-name: "@types/node"
  dependency-version: 25.0.3
  dependency-type: direct:development
  update-type: version-update:semver-patch
  dependency-group: types
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-12-17 08:08:41 +01:00
YannC
6edb88841f feat(jdbc): method without auditlog for setting repository (#13676)
* feat(jdbc): method without auditlog for setting repository

* test: add flaky annotation
2025-12-16 16:38:50 +01:00
Loïc Mathieu
5653531628 fix(test): avoid killing an already killed execution 2025-12-16 14:39:00 +01:00
github-actions[bot]
ee61276106 chore(core): localize to languages other than english (#13698)
Co-authored-by: GitHub Action <actions@github.com>
2025-12-16 14:28:22 +01:00
Barthélémy Ledoux
abcf76f7b4 fix: avoid blocking creation of a flow when editing is restricted to a namespace (#13694) 2025-12-16 14:24:16 +01:00
YannC
67ada7f61b fix: remove JsonIgnore annotation from FlowWithSource and add schema(hidden=true) to Flow (#13681) 2025-12-16 14:23:56 +01:00
Florian Hussonnois
0c13633f77 fix(trigger): ScheduleOnDates should work with backfill
Changes:
* ScheduleOnDates must not be re-scheduled when the trigger is updated
* ScheduleOnDates must not be scheduled on previous dates when created
* ScheduleOnDates should properly support backfill

Create a new SchedulableExecutionFactory class to hold all methods related
to Schedulable triggers that are only used by core triggers

Related-to: #13673
2025-12-16 13:47:47 +01:00
Loïc Mathieu
a6cf2015ff fix(tests): concurrency test restarted 2025-12-16 13:42:42 +01:00
Sumit Shandillya
2f9216c70b fix(triggers): improve layout of action buttons in trigger table (#13658) 2025-12-16 17:50:46 +05:30
Piyush Bhaskar
1903e6fac5 fix(plugins): avoid list flash when opening plugin (#13690) 2025-12-16 17:38:09 +05:30
Loïc Mathieu
2d2cb00cab feat(execution): bring support for input and output processing in the run context
Part-of: https://github.com/kestra-io/kestra-ee/issues/4228

Encapsulate access to the FlowInputOutput from the RunContext in a new InputAndOutput component with a curated list of supported methods used by plugins.
2025-12-16 12:19:48 +01:00
Loïc Mathieu
01b5441d16 feat(trigger): refactor Schedule to not use the application context
Part-of:  https://github.com/kestra-io/kestra-ee/issues/4228
2025-12-16 12:19:30 +01:00
Loïc Mathieu
efc778e294 feat(system): save the edition in settings
This would allow detecting an OSS -> EE migration.

Closes https://github.com/kestra-io/kestra-ee/issues/5106
2025-12-16 11:06:01 +01:00
Will Russell
60235a4e73 docs(task-runner): remove deprecated runner from example (#13654) 2025-12-16 10:01:27 +00:00
Piyush Bhaskar
b167c52e76 fix(core): properly sync default namespace filters from settings with default filter (#13685) 2025-12-16 15:30:55 +05:30
Florian Hussonnois
216b124294 feat(trigger): add support for concurrent trigger execution (#311)
Fixes: #311
2025-12-16 09:50:48 +01:00
vamsi172323
b6e4df8de2 refactor(core): remove usage of unnecessary i18n composable (#13683)
Closes https://github.com/kestra-io/kestra/issues/13649.

Co-authored-by: MilosPaunovic <paun992@hotmail.com>
2025-12-16 08:26:27 +01:00
Loïc Mathieu
429e7c7945 feat(execution): allow listing the internal storage from the run context
Part-of: https://github.com/kestra-io/kestra-ee/issues/4228
2025-12-15 18:06:49 +01:00
80 changed files with 1036 additions and 647 deletions

View File

@@ -21,7 +21,7 @@ plugins {
// test
id "com.adarshr.test-logger" version "4.0.0"
id "org.sonarqube" version "7.2.0.6526"
id "org.sonarqube" version "7.2.1.6560"
id 'jacoco-report-aggregation'
// helper
@@ -331,7 +331,7 @@ subprojects {
}
dependencies {
agent "org.aspectj:aspectjweaver:1.9.25"
agent "org.aspectj:aspectjweaver:1.9.25.1"
}
test {

View File

@@ -82,8 +82,8 @@ dependencies {
testImplementation "io.micronaut:micronaut-http-server-netty"
testImplementation "io.micronaut:micronaut-management"
testImplementation "org.testcontainers:testcontainers:1.21.3"
testImplementation "org.testcontainers:junit-jupiter:1.21.3"
testImplementation "org.testcontainers:testcontainers:1.21.4"
testImplementation "org.testcontainers:junit-jupiter:1.21.4"
testImplementation "org.bouncycastle:bcpkix-jdk18on"
testImplementation "org.wiremock:wiremock-jetty12"

View File

@@ -26,6 +26,7 @@ public record Label(
public static final String REPLAYED = SYSTEM_PREFIX + "replayed";
public static final String SIMULATED_EXECUTION = SYSTEM_PREFIX + "simulatedExecution";
public static final String TEST = SYSTEM_PREFIX + "test";
public static final String FROM = SYSTEM_PREFIX + "from";
/**
* Static helper method for converting a list of labels to a nested map.

View File

@@ -16,6 +16,7 @@ import jakarta.validation.constraints.NotNull;
public class Setting {
public static final String INSTANCE_UUID = "instance.uuid";
public static final String INSTANCE_VERSION = "instance.version";
public static final String INSTANCE_EDITION = "instance.edition";
@NotNull
private String key;

View File

@@ -1,6 +1,5 @@
package io.kestra.core.models.flows;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
@@ -130,7 +129,7 @@ public class Flow extends AbstractFlow implements HasUID {
@Valid
@PluginProperty
List<SLA> sla;
@Schema(
title = "Conditions evaluated before the flow is executed.",
description = "A list of conditions that are evaluated before the flow is executed. If no checks are defined, the flow executes normally."
@@ -355,7 +354,7 @@ public class Flow extends AbstractFlow implements HasUID {
* To be conservative a flow MUST not return any source.
*/
@Override
@JsonIgnore
@Schema(hidden = true)
public String getSource() {
return null;
}

View File

@@ -1,14 +1,12 @@
package io.kestra.core.models.flows;
import com.fasterxml.jackson.annotation.JsonIgnore;
import io.micronaut.core.annotation.Introspected;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.ToString;
import lombok.experimental.SuperBuilder;
import io.swagger.v3.oas.annotations.media.Schema;
import java.util.Objects;
import java.util.regex.Pattern;
@SuperBuilder(toBuilder = true)
@Getter
@@ -48,7 +46,7 @@ public class FlowWithSource extends Flow {
}
@Override
@JsonIgnore(value = false)
@Schema(hidden = false)
public String getSource() {
return this.source;
}

View File

@@ -82,6 +82,12 @@ abstract public class AbstractTrigger implements TriggerInterface {
@PluginProperty(hidden = true, group = PluginProperty.CORE_GROUP)
private boolean failOnTriggerError = false;
@PluginProperty(group = PluginProperty.CORE_GROUP)
@Schema(
title = "Specifies whether a trigger is allowed to start a new execution even if a previous run is still in progress."
)
private boolean allowConcurrent = false;
/**
* For backward compatibility: we rename minLogLevel to logLevel.
* @deprecated use {@link #logLevel} instead

View File

@@ -1,22 +1,37 @@
package io.kestra.core.models.triggers;
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.models.annotations.PluginProperty;
import io.kestra.core.models.conditions.ConditionContext;
import io.kestra.core.runners.RunContext;
import io.swagger.v3.oas.annotations.media.Schema;
import java.time.ZonedDateTime;
import java.util.Map;
public interface Schedulable extends PollingTriggerInterface{
String PLUGIN_PROPERTY_RECOVER_MISSED_SCHEDULES = "recoverMissedSchedules";
@Schema(
title = "The inputs to pass to the scheduled flow"
)
@PluginProperty(dynamic = true)
Map<String, Object> getInputs();
@Schema(
title = "Action to take in the case of missed schedules",
description = "`ALL` will recover all missed schedules, `LAST` will only recovered the last missing one, `NONE` will not recover any missing schedule.\n" +
"The default is `ALL` unless a different value is configured using the global plugin configuration."
)
@PluginProperty
RecoverMissedSchedules getRecoverMissedSchedules();
/**
* Compute the previous evaluation of a trigger.
* This is used when a trigger misses some schedule to compute the next date to evaluate in the past.
*/
ZonedDateTime previousEvaluationDate(ConditionContext conditionContext) throws IllegalVariableEvaluationException;
RecoverMissedSchedules getRecoverMissedSchedules();
/**
* Load the default RecoverMissedSchedules from plugin property, or else ALL.
*/

View File

@@ -172,7 +172,7 @@ public class Trigger extends TriggerContext implements HasUID {
if (abstractTrigger instanceof PollingTriggerInterface pollingTriggerInterface) {
try {
nextDate = pollingTriggerInterface.nextEvaluationDate(conditionContext, Optional.empty());
nextDate = pollingTriggerInterface.nextEvaluationDate(conditionContext, lastTrigger);
} catch (InvalidTriggerConfigurationException e) {
disabled = true;
}

View File

@@ -6,12 +6,9 @@ import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.ExecutionTrigger;
import io.kestra.core.models.tasks.Output;
import io.kestra.core.models.flows.State;
import io.kestra.core.runners.DefaultRunContext;
import io.kestra.core.runners.FlowInputOutput;
import io.kestra.core.runners.RunContext;
import io.kestra.core.utils.IdUtils;
import io.kestra.core.utils.ListUtils;
import java.time.ZonedDateTime;
import java.util.*;
public abstract class TriggerService {
@@ -51,49 +48,6 @@ public abstract class TriggerService {
return generateExecution(IdUtils.create(), trigger, context, executionTrigger, conditionContext);
}
public static Execution generateScheduledExecution(
AbstractTrigger trigger,
ConditionContext conditionContext,
TriggerContext context,
List<Label> labels,
Map<String, Object> inputs,
Map<String, Object> variables,
Optional<ZonedDateTime> scheduleDate
) {
RunContext runContext = conditionContext.getRunContext();
ExecutionTrigger executionTrigger = ExecutionTrigger.of(trigger, variables);
List<Label> executionLabels = new ArrayList<>(ListUtils.emptyOnNull(labels));
if (executionLabels.stream().noneMatch(label -> Label.CORRELATION_ID.equals(label.key()))) {
// add a correlation ID if none exist
executionLabels.add(new Label(Label.CORRELATION_ID, runContext.getTriggerExecutionId()));
}
Execution execution = Execution.builder()
.id(runContext.getTriggerExecutionId())
.tenantId(context.getTenantId())
.namespace(context.getNamespace())
.flowId(context.getFlowId())
.flowRevision(conditionContext.getFlow().getRevision())
.variables(conditionContext.getFlow().getVariables())
.labels(executionLabels)
.state(new State())
.trigger(executionTrigger)
.scheduleDate(scheduleDate.map(date -> date.toInstant()).orElse(null))
.build();
Map<String, Object> allInputs = new HashMap<>();
if (inputs != null) {
allInputs.putAll(inputs);
}
// add inputs and inject defaults (FlowInputOutput handles defaults internally)
FlowInputOutput flowInputOutput = ((DefaultRunContext)runContext).getApplicationContext().getBean(FlowInputOutput.class);
execution = execution.withInputs(flowInputOutput.readExecutionInputs(conditionContext.getFlow(), execution, allInputs));
return execution;
}
private static Execution generateExecution(
String id,
AbstractTrigger trigger,
@@ -102,6 +56,7 @@ public abstract class TriggerService {
ConditionContext conditionContext
) {
List<Label> executionLabels = new ArrayList<>(ListUtils.emptyOnNull(trigger.getLabels()));
executionLabels.add(new Label(Label.FROM, "trigger"));
if (executionLabels.stream().noneMatch(label -> Label.CORRELATION_ID.equals(label.key()))) {
// add a correlation ID if none exist
executionLabels.add(new Label(Label.CORRELATION_ID, id));

View File

@@ -1,10 +1,10 @@
package io.kestra.core.repositories;
import io.kestra.core.models.Setting;
import jakarta.validation.ConstraintViolationException;
import java.util.List;
import java.util.Optional;
import jakarta.validation.ConstraintViolationException;
public interface SettingRepositoryInterface {
Optional<Setting> findByKey(String key);
@@ -13,5 +13,7 @@ public interface SettingRepositoryInterface {
Setting save(Setting setting) throws ConstraintViolationException;
Setting internalSave(Setting setting) throws ConstraintViolationException;
Setting delete(Setting setting);
}

View File

@@ -16,8 +16,8 @@ import java.util.function.Function;
public interface TriggerRepositoryInterface extends QueryBuilderInterface<Triggers.Fields> {
Optional<Trigger> findLast(TriggerContext trigger);
Optional<Trigger> findByExecution(Execution execution);
Optional<Trigger> findByUid(String uid);
List<Trigger> findAll(String tenantId);
List<Trigger> findAllForAllTenants();

View File

@@ -599,6 +599,11 @@ public class DefaultRunContext extends RunContext {
return localPath;
}
@Override
public InputAndOutput inputAndOutput() {
return new InputAndOutputImpl(this.applicationContext, this);
}
/**
* Builder class for constructing new {@link DefaultRunContext} objects.
*/

View File

@@ -189,12 +189,11 @@ public final class ExecutableUtils {
variables.put("taskRunIteration", currentTaskRun.getIteration());
}
FlowInputOutput flowInputOutput = ((DefaultRunContext)runContext).getApplicationContext().getBean(FlowInputOutput.class);
Instant scheduleOnDate = runContext.render(scheduleDate).as(ZonedDateTime.class).map(date -> date.toInstant()).orElse(null);
Execution execution = Execution
.newExecution(
flow,
(f, e) -> flowInputOutput.readExecutionInputs(f, e, inputs),
(f, e) -> runContext.inputAndOutput().readInputs(f, e, inputs),
newLabels,
Optional.empty())
.withTrigger(ExecutionTrigger.builder()

View File

@@ -3,13 +3,11 @@ package io.kestra.core.runners;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.kestra.core.encryption.EncryptionService;
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.exceptions.KestraRuntimeException;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Data;
import io.kestra.core.models.flows.DependsOn;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.Input;
import io.kestra.core.models.flows.Output;
import io.kestra.core.models.flows.RenderableInput;
import io.kestra.core.models.flows.Type;
import io.kestra.core.models.flows.input.FileInput;
@@ -539,30 +537,6 @@ public class FlowInputOutput {
}
}
public static Map<String, Object> renderFlowOutputs(List<Output> outputs, RunContext runContext) throws IllegalVariableEvaluationException {
if (outputs == null) return Map.of();
// render required outputs
Map<String, Object> outputsById = outputs
.stream()
.filter(output -> output.getRequired() == null || output.getRequired())
.collect(HashMap::new, (map, entry) -> map.put(entry.getId(), entry.getValue()), Map::putAll);
outputsById = runContext.render(outputsById);
// render optional outputs one by one to catch, log, and skip any error.
for (io.kestra.core.models.flows.Output output : outputs) {
if (Boolean.FALSE.equals(output.getRequired())) {
try {
outputsById.putAll(runContext.render(Map.of(output.getId(), output.getValue())));
} catch (Exception e) {
runContext.logger().warn("Failed to render optional flow output '{}'. Output is ignored.", output.getId(), e);
outputsById.put(output.getId(), null);
}
}
}
return outputsById;
}
/**
* Mutable wrapper to hold a flow's input and its resolved value.
*/

View File

@@ -0,0 +1,29 @@
package io.kestra.core.runners;
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.Output;
import java.util.List;
import java.util.Map;
/**
* InputAndOutput could be used to work with flow execution inputs and outputs.
*/
public interface InputAndOutput {
/**
* Reads the inputs of a flow execution.
*/
Map<String, Object> readInputs(FlowInterface flow, Execution execution, Map<String, Object> inputs);
/**
* Processes the outputs of a flow execution (parse them based on their types).
*/
Map<String, Object> typedOutputs(FlowInterface flow, Execution execution, Map<String, Object> rOutputs);
/**
* Render flow execution outputs.
*/
Map<String, Object> renderOutputs(List<Output> outputs) throws IllegalVariableEvaluationException;
}
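
For context, a rough sketch of how a plugin could resolve subflow outputs through this new accessor instead of fetching FlowInputOutput from the application context (mirroring the Subflow change further below); the class and variable names here are illustrative only, not part of this changeset:

import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.Output;
import io.kestra.core.runners.InputAndOutput;
import io.kestra.core.runners.RunContext;
import java.util.List;
import java.util.Map;

// Illustrative helper: render a subflow's declared outputs, then type them against the flow definition.
class SubflowOutputResolver {
    Map<String, Object> resolve(RunContext runContext, FlowInterface flow, Execution execution,
                                List<Output> declaredOutputs) throws IllegalVariableEvaluationException {
        InputAndOutput io = runContext.inputAndOutput();
        // renderOutputs renders required outputs strictly and skips optional outputs that fail to render
        Map<String, Object> rendered = io.renderOutputs(declaredOutputs);
        // typedOutputs parses the rendered values according to the output types declared on the flow
        return io.typedOutputs(flow, execution, rendered);
    }
}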

View File

@@ -0,0 +1,56 @@
package io.kestra.core.runners;
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.Output;
import io.micronaut.context.ApplicationContext;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
class InputAndOutputImpl implements InputAndOutput {
private final FlowInputOutput flowInputOutput;
private final RunContext runContext;
InputAndOutputImpl(ApplicationContext applicationContext, RunContext runContext) {
this.flowInputOutput = applicationContext.getBean(FlowInputOutput.class);
this.runContext = runContext;
}
@Override
public Map<String, Object> readInputs(FlowInterface flow, Execution execution, Map<String, Object> inputs) {
return flowInputOutput.readExecutionInputs(flow, execution, inputs);
}
@Override
public Map<String, Object> typedOutputs(FlowInterface flow, Execution execution, Map<String, Object> rOutputs) {
return flowInputOutput.typedOutputs(flow, execution, rOutputs);
}
@Override
public Map<String, Object> renderOutputs(List<Output> outputs) throws IllegalVariableEvaluationException {
if (outputs == null) return Map.of();
// render required outputs
Map<String, Object> outputsById = outputs
.stream()
.filter(output -> output.getRequired() == null || output.getRequired())
.collect(HashMap::new, (map, entry) -> map.put(entry.getId(), entry.getValue()), Map::putAll);
outputsById = runContext.render(outputsById);
// render optional outputs one by one to catch, log, and skip any error.
for (io.kestra.core.models.flows.Output output : outputs) {
if (Boolean.FALSE.equals(output.getRequired())) {
try {
outputsById.putAll(runContext.render(Map.of(output.getId(), output.getValue())));
} catch (Exception e) {
runContext.logger().warn("Failed to render optional flow output '{}'. Output is ignored.", output.getId(), e);
outputsById.put(output.getId(), null);
}
}
}
return outputsById;
}
}

View File

@@ -211,4 +211,9 @@ public abstract class RunContext implements PropertyContext {
* @return a new run context with the plugin configuration of the given plugin.
*/
public abstract RunContext cloneForPlugin(Plugin plugin);
/**
* @return an InputAndOutput that can be used to work with inputs and outputs.
*/
public abstract InputAndOutput inputAndOutput();
}

View File

@@ -81,7 +81,24 @@ public final class YamlParser {
throw toConstraintViolationException(input, resource, e);
}
}
private static String formatYamlErrorMessage(String originalMessage, JsonProcessingException e) {
StringBuilder friendlyMessage = new StringBuilder();
if (originalMessage.contains("Expected a field name")) {
friendlyMessage.append("YAML syntax error: Invalid structure. Check indentation and ensure all fields are properly formatted.");
} else if (originalMessage.contains("MappingStartEvent")) {
friendlyMessage.append("YAML syntax error: Unexpected mapping start. Verify that scalar values are properly quoted if needed.");
} else if (originalMessage.contains("Scalar value")) {
friendlyMessage.append("YAML syntax error: Expected a simple value but found complex structure. Check for unquoted special characters.");
} else {
friendlyMessage.append("YAML parsing error: ").append(originalMessage.replaceAll("org\\.yaml\\.snakeyaml.*", "").trim());
}
if (e.getLocation() != null) {
int line = e.getLocation().getLineNr();
friendlyMessage.append(String.format(" (at line %d)", line));
}
// Return a generic but cleaner message for other YAML errors
return friendlyMessage.toString();
}
@SuppressWarnings("unchecked")
public static <T> ConstraintViolationException toConstraintViolationException(T target, String resource, JsonProcessingException e) {
if (e.getCause() instanceof ConstraintViolationException constraintViolationException) {
@@ -121,11 +138,12 @@ public final class YamlParser {
)
));
} else {
String userFriendlyMessage = formatYamlErrorMessage(e.getMessage(), e);
return new ConstraintViolationException(
"Illegal " + resource + " source: " + e.getMessage(),
"Illegal " + resource + " source: " + userFriendlyMessage,
Collections.singleton(
ManualConstraintViolation.of(
e.getCause() == null ? e.getMessage() : e.getMessage() + "\nCaused by: " + e.getCause().getMessage(),
userFriendlyMessage,
target,
(Class<T>) target.getClass(),
"yaml",
@@ -136,4 +154,3 @@ public final class YamlParser {
}
}
}

View File

@@ -4,7 +4,6 @@ import com.cronutils.utils.VisibleForTesting;
import io.kestra.core.exceptions.InternalException;
import io.kestra.core.models.conditions.Condition;
import io.kestra.core.models.conditions.ConditionContext;
import io.kestra.core.models.conditions.ScheduleCondition;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
@@ -65,16 +64,6 @@ public class ConditionService {
return this.valid(flow, conditions, conditionContext);
}
/**
* Check that all conditions are valid.
* Warning, this method throws if a condition cannot be evaluated.
*/
public boolean isValid(List<ScheduleCondition> conditions, ConditionContext conditionContext) throws InternalException {
return conditions
.stream()
.allMatch(throwPredicate(condition -> condition.test(conditionContext)));
}
/**
* Check that all conditions are valid.
* Warning, this method throws if a condition cannot be evaluated.

View File

@@ -92,7 +92,14 @@ public class FlowService {
return flowRepository
.orElseThrow(() -> new IllegalStateException("Cannot perform operation on flow. Cause: No FlowRepository"));
}
private static String formatValidationError(String message) {
if (message.startsWith("Illegal flow source:")) {
// Already formatted by YamlParser, return as-is
return message;
}
// For other validation errors, provide context
return "Validation error: " + message;
}
/**
* Evaluates all checks defined in the given flow using the provided inputs.
* <p>
@@ -174,10 +181,12 @@ public class FlowService {
modelValidator.validate(pluginDefaultService.injectAllDefaults(flow, false));
} catch (ConstraintViolationException e) {
validateConstraintViolationBuilder.constraints(e.getMessage());
String friendlyMessage = formatValidationError(e.getMessage());
validateConstraintViolationBuilder.constraints(friendlyMessage);
} catch (FlowProcessingException e) {
if (e.getCause() instanceof ConstraintViolationException) {
validateConstraintViolationBuilder.constraints(e.getMessage());
if (e.getCause() instanceof ConstraintViolationException cve) {
String friendlyMessage = formatValidationError(cve.getMessage());
validateConstraintViolationBuilder.constraints(friendlyMessage);
} else {
Throwable cause = e.getCause() != null ? e.getCause() : e;
validateConstraintViolationBuilder.constraints("Unable to validate the flow: " + cause.getMessage());
@@ -579,4 +588,4 @@ public class FlowService {
private IllegalStateException noRepositoryException() {
return new IllegalStateException("No repository found. Make sure the `kestra.repository.type` property is set.");
}
}
}

View File

@@ -1,6 +1,5 @@
package io.kestra.core.storages;
import io.kestra.core.repositories.NamespaceFileMetadataRepositoryInterface;
import io.kestra.core.services.NamespaceService;
import jakarta.annotation.Nullable;
import org.slf4j.Logger;
@@ -272,7 +271,13 @@ public class InternalStorage implements Storage {
return this.storage.put(context.getTenantId(), context.getNamespace(), resolve, new BufferedInputStream(inputStream));
}
@Override
public Optional<StorageContext.Task> getTaskStorageContext() {
return Optional.ofNullable((context instanceof StorageContext.Task task) ? task : null);
}
@Override
public List<FileAttributes> list(URI uri) throws IOException {
return this.storage.list(context.getTenantId(), context.getNamespace(), uri);
}
}

View File

@@ -173,4 +173,6 @@ public interface Storage {
* @return the task storage context
*/
Optional<StorageContext.Task> getTaskStorageContext();
List<FileAttributes> list(URI uri) throws IOException;
}
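
A minimal sketch of how a task might use the new list method through its run context's storage, together with the existing isFileExist check (as done in the ForEachItem change below); the prefix URI and the countFiles helper are illustrative assumptions:

import io.kestra.core.runners.RunContext;
import io.kestra.core.storages.FileAttributes;
import java.io.IOException;
import java.net.URI;
import java.util.List;

// Illustrative only: count the entries stored under an internal storage prefix.
class StorageListingSketch {
    long countFiles(RunContext runContext, URI prefix) throws IOException {
        if (!runContext.storage().isFileExist(prefix)) {
            return 0L;
        }
        List<FileAttributes> files = runContext.storage().list(prefix);
        runContext.logger().info("Found {} entries under {}", files.size(), prefix);
        return files.size();
    }
}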

View File

@@ -1,13 +1,39 @@
package io.kestra.core.utils;
import io.kestra.core.models.Setting;
import io.kestra.core.repositories.SettingRepositoryInterface;
import jakarta.annotation.PostConstruct;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import java.util.Optional;
@Singleton
public class EditionProvider {
public Edition get() {
return Edition.OSS;
}
@Inject
private Optional<SettingRepositoryInterface> settingRepository; // repositories are not always there on unit tests
@PostConstruct
void start() {
// check the edition in the settings and update it if needed; we don't use it yet, but it would allow us to detect an incompatible update later if needed
settingRepository.ifPresent(settingRepositoryInterface -> persistEdition(settingRepositoryInterface, get()));
}
private void persistEdition(SettingRepositoryInterface settingRepositoryInterface, Edition edition) {
Optional<Setting> versionSetting = settingRepositoryInterface.findByKey(Setting.INSTANCE_EDITION);
if (versionSetting.isEmpty() || !versionSetting.get().getValue().equals(edition)) {
settingRepositoryInterface.save(Setting.builder()
.key(Setting.INSTANCE_EDITION)
.value(edition)
.build()
);
}
}
public enum Edition {
OSS,
EE

View File

@@ -23,7 +23,6 @@ import io.kestra.core.serializers.ListOrMapOfLabelSerializer;
import io.kestra.core.services.StorageService;
import io.kestra.core.storages.FileAttributes;
import io.kestra.core.storages.StorageContext;
import io.kestra.core.storages.StorageInterface;
import io.kestra.core.storages.StorageSplitInterface;
import io.kestra.core.utils.GraphUtils;
import io.kestra.core.validations.NoSystemLabelValidation;
@@ -540,7 +539,7 @@ public class ForEachItem extends Task implements FlowableTask<VoidOutput>, Child
.numberOfBatches((Integer) taskRun.getOutputs().get(ExecutableUtils.TASK_VARIABLE_NUMBER_OF_BATCHES));
try (ByteArrayOutputStream bos = new ByteArrayOutputStream()) {
FileSerde.write(bos, FlowInputOutput.renderFlowOutputs(flow.getOutputs(), runContext));
FileSerde.write(bos, runContext.inputAndOutput().renderOutputs(flow.getOutputs()));
URI uri = runContext.storage().putFile(
new ByteArrayInputStream(bos.toByteArray()),
URI.create((String) taskRun.getOutputs().get("uri"))
@@ -602,9 +601,8 @@ public class ForEachItem extends Task implements FlowableTask<VoidOutput>, Child
String subflowOutputsBase = (String) taskOutput.get(ExecutableUtils.TASK_VARIABLE_SUBFLOW_OUTPUTS_BASE_URI);
URI subflowOutputsBaseUri = URI.create(StorageContext.KESTRA_PROTOCOL + subflowOutputsBase + "/");
StorageInterface storage = ((DefaultRunContext) runContext).getApplicationContext().getBean(StorageInterface.class);
if (storage.exists(runContext.flowInfo().tenantId(), runContext.flowInfo().namespace(), subflowOutputsBaseUri)) {
List<FileAttributes> list = storage.list(runContext.flowInfo().tenantId(), runContext.flowInfo().namespace(), subflowOutputsBaseUri);
if (runContext.storage().isFileExist(subflowOutputsBaseUri)) {
List<FileAttributes> list = runContext.storage().list(subflowOutputsBaseUri);
if (!list.isEmpty()) {
// Merge outputs from each sub-flow into a single stored in the internal storage.

View File

@@ -63,7 +63,8 @@ import java.util.*;
- id: run_post_approval
type: io.kestra.plugin.scripts.shell.Commands
runner: PROCESS
taskRunner:
type: io.kestra.plugin.core.runner.Process
commands:
- echo "Manual approval received! Continuing the execution..."

View File

@@ -18,7 +18,6 @@ import io.kestra.core.models.tasks.ExecutableTask;
import io.kestra.core.models.tasks.Task;
import io.kestra.core.runners.DefaultRunContext;
import io.kestra.core.runners.ExecutableUtils;
import io.kestra.core.runners.FlowInputOutput;
import io.kestra.core.runners.FlowMetaStoreInterface;
import io.kestra.core.runners.RunContext;
import io.kestra.core.runners.SubflowExecution;
@@ -38,7 +37,6 @@ import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.ToString;
import lombok.experimental.SuperBuilder;
import org.slf4j.event.Level;
import java.time.ZonedDateTime;
import java.util.Collections;
@@ -246,11 +244,11 @@ public class Subflow extends Task implements ExecutableTask<Subflow.Output>, Chi
if (subflowOutputs != null && !subflowOutputs.isEmpty()) {
try {
Map<String, Object> rOutputs = FlowInputOutput.renderFlowOutputs(subflowOutputs, runContext);
var inputAndOutput = runContext.inputAndOutput();
Map<String, Object> rOutputs = inputAndOutput.renderOutputs(subflowOutputs);
FlowInputOutput flowInputOutput = ((DefaultRunContext)runContext).getApplicationContext().getBean(FlowInputOutput.class); // this is hacking
if (flow.getOutputs() != null && flowInputOutput != null) {
rOutputs = flowInputOutput.typedOutputs(flow, execution, rOutputs);
if (flow.getOutputs() != null) {
rOutputs = inputAndOutput.typedOutputs(flow, execution, rOutputs);
}
builder.outputs(rOutputs);
} catch (Exception e) {

View File

@@ -26,25 +26,28 @@ import java.util.concurrent.atomic.AtomicLong;
@Getter
@NoArgsConstructor
@Schema(
title = "Delete expired keys globally for a specific namespace.",
description = "This task will delete expired keys from the Kestra KV store. By default, it will only delete expired keys, but you can choose to delete all keys by setting `expiredOnly` to false. You can also filter keys by a specific pattern and choose to include child namespaces."
title = "Purge namespace files for one or multiple namespaces.",
description = "This task purges namespace files (and their versions) stored in Kestra. You can restrict the purge to specific namespaces (or a namespace glob pattern), optionally include child namespaces, and filter files by a glob pattern. The purge strategy is controlled via `behavior` (e.g. keep the last N versions and/or delete versions older than a given date)."
)
@Plugin(
examples = {
@Example(
title = "Delete expired keys globally for a specific namespace, with or without including child namespaces.",
title = "Purge old versions of namespace files for a namespace tree.",
full = true,
code = """
id: purge_kv_store
id: purge_namespace_files
namespace: system
tasks:
- id: purge_kv
type: io.kestra.plugin.core.kv.PurgeKV
expiredOnly: true
- id: purge_files
type: io.kestra.plugin.core.namespace.PurgeFiles
namespaces:
- company
includeChildNamespaces: true
filePattern: "**/*.sql"
behavior:
type: version
before: "2025-01-01T00:00:00Z"
"""
)
}
@@ -116,7 +119,7 @@ public class PurgeFiles extends Task implements PurgeTask<NamespaceFile>, Runnab
@Getter
public static class Output implements io.kestra.core.models.tasks.Output {
@Schema(
title = "The number of purged KV pairs"
title = "The number of purged namespace file versions"
)
private Long size;
}

View File

@@ -0,0 +1,107 @@
package io.kestra.plugin.core.trigger;
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.models.Label;
import io.kestra.core.models.conditions.ConditionContext;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.ExecutionTrigger;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.State;
import io.kestra.core.models.triggers.AbstractTrigger;
import io.kestra.core.models.triggers.Backfill;
import io.kestra.core.models.triggers.Schedulable;
import io.kestra.core.models.triggers.TriggerContext;
import io.kestra.core.runners.RunContext;
import io.kestra.core.services.LabelService;
import io.kestra.core.utils.ListUtils;
import java.time.ZonedDateTime;
import java.time.chrono.ChronoZonedDateTime;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
/**
* Factory class for constructing a new {@link Execution} from a {@link Schedulable} trigger.
*
* @see io.kestra.plugin.core.trigger.Schedule
* @see io.kestra.plugin.core.trigger.ScheduleOnDates
*/
final class SchedulableExecutionFactory {
static Execution createFailedExecution(Schedulable trigger, ConditionContext conditionContext, TriggerContext triggerContext) throws IllegalVariableEvaluationException {
return Execution.builder()
.id(conditionContext.getRunContext().getTriggerExecutionId())
.tenantId(triggerContext.getTenantId())
.namespace(triggerContext.getNamespace())
.flowId(triggerContext.getFlowId())
.flowRevision(conditionContext.getFlow().getRevision())
.labels(SchedulableExecutionFactory.getLabels(trigger, conditionContext.getRunContext(), triggerContext.getBackfill(), conditionContext.getFlow()))
.state(new State().withState(State.Type.FAILED))
.build();
}
static Execution createExecution(Schedulable trigger, ConditionContext conditionContext, TriggerContext triggerContext, Map<String, Object> variables, ZonedDateTime scheduleDate) throws IllegalVariableEvaluationException {
RunContext runContext = conditionContext.getRunContext();
ExecutionTrigger executionTrigger = ExecutionTrigger.of((AbstractTrigger) trigger, variables);
List<Label> labels = getLabels(trigger, runContext, triggerContext.getBackfill(), conditionContext.getFlow());
List<Label> executionLabels = new ArrayList<>(ListUtils.emptyOnNull(labels));
executionLabels.add(new Label(Label.FROM, "trigger"));
if (executionLabels.stream().noneMatch(label -> Label.CORRELATION_ID.equals(label.key()))) {
// add a correlation ID if none exist
executionLabels.add(new Label(Label.CORRELATION_ID, runContext.getTriggerExecutionId()));
}
Execution execution = Execution.builder()
.id(runContext.getTriggerExecutionId())
.tenantId(triggerContext.getTenantId())
.namespace(triggerContext.getNamespace())
.flowId(triggerContext.getFlowId())
.flowRevision(conditionContext.getFlow().getRevision())
.variables(conditionContext.getFlow().getVariables())
.labels(executionLabels)
.state(new State())
.trigger(executionTrigger)
.scheduleDate(Optional.ofNullable(scheduleDate).map(ChronoZonedDateTime::toInstant).orElse(null))
.build();
Map<String, Object> allInputs = getInputs(trigger, runContext, triggerContext.getBackfill());
// add inputs and inject defaults (FlowInputOutput handles defaults internally)
execution = execution.withInputs(runContext.inputAndOutput().readInputs(conditionContext.getFlow(), execution, allInputs));
return execution;
}
private static Map<String, Object> getInputs(Schedulable trigger, RunContext runContext, Backfill backfill) throws IllegalVariableEvaluationException {
Map<String, Object> inputs = new HashMap<>();
if (trigger.getInputs() != null) {
inputs.putAll(runContext.render(trigger.getInputs()));
}
if (backfill != null && backfill.getInputs() != null) {
inputs.putAll(runContext.render(backfill.getInputs()));
}
return inputs;
}
private static List<Label> getLabels(Schedulable trigger, RunContext runContext, Backfill backfill, FlowInterface flow) throws IllegalVariableEvaluationException {
List<Label> labels = LabelService.fromTrigger(runContext, flow, (AbstractTrigger) trigger);
if (backfill != null && backfill.getLabels() != null) {
for (Label label : backfill.getLabels()) {
final var value = runContext.render(label.value());
if (value != null) {
labels.add(new Label(label.key(), value));
}
}
}
return labels;
}
}

View File

@@ -6,9 +6,7 @@ import com.cronutils.model.time.ExecutionTime;
import com.cronutils.parser.CronParser;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableMap;
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.exceptions.InternalException;
import io.kestra.core.models.Label;
import io.kestra.core.models.annotations.Example;
import io.kestra.core.models.annotations.Plugin;
import io.kestra.core.models.annotations.PluginProperty;
@@ -16,12 +14,8 @@ import io.kestra.core.models.conditions.Condition;
import io.kestra.core.models.conditions.ConditionContext;
import io.kestra.core.models.conditions.ScheduleCondition;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.State;
import io.kestra.core.models.triggers.*;
import io.kestra.core.runners.DefaultRunContext;
import io.kestra.core.runners.RunContext;
import io.kestra.core.services.ConditionService;
import io.kestra.core.services.LabelService;
import io.kestra.core.utils.ListUtils;
import io.kestra.core.validations.ScheduleValidation;
import io.kestra.core.validations.TimezoneId;
@@ -29,6 +23,7 @@ import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.validation.Valid;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Null;
import lombok.AccessLevel;
import lombok.*;
import lombok.experimental.SuperBuilder;
import lombok.extern.slf4j.Slf4j;
@@ -40,6 +35,8 @@ import java.time.temporal.ChronoUnit;
import java.util.*;
import java.util.stream.Stream;
import static io.kestra.core.utils.Rethrow.throwPredicate;
@Slf4j
@SuperBuilder
@ToString
@@ -224,11 +221,7 @@ public class Schedule extends AbstractTrigger implements Schedulable, TriggerOut
@PluginProperty
@Deprecated
private List<ScheduleCondition> scheduleConditions;
@Schema(
title = "The inputs to pass to the scheduled flow"
)
@PluginProperty(dynamic = true)
private Map<String, Object> inputs;
@Schema(
@@ -248,13 +241,7 @@ public class Schedule extends AbstractTrigger implements Schedulable, TriggerOut
@PluginProperty
@Deprecated
private Map<String, Object> backfill;
@Schema(
title = "Action to take in the case of missed schedules",
description = "`ALL` will recover all missed schedules, `LAST` will only recovered the last missing one, `NONE` will not recover any missing schedule.\n" +
"The default is `ALL` unless a different value is configured using the global plugin configuration."
)
@PluginProperty
private RecoverMissedSchedules recoverMissedSchedules;
@Override
@@ -403,20 +390,11 @@ public class Schedule extends AbstractTrigger implements Schedulable, TriggerOut
if (!conditionResults) {
return Optional.empty();
}
} catch(InternalException ie) {
} catch (InternalException ie) {
// validating the schedule condition can fail while rendering variables
// in that case, we return a failed execution so the trigger is not re-evaluated every second
runContext.logger().error("Unable to evaluate the Schedule trigger '{}'", this.getId(), ie);
Execution execution = Execution.builder()
.id(runContext.getTriggerExecutionId())
.tenantId(triggerContext.getTenantId())
.namespace(triggerContext.getNamespace())
.flowId(triggerContext.getFlowId())
.flowRevision(conditionContext.getFlow().getRevision())
.labels(generateLabels(runContext, conditionContext, backfill))
.state(new State().withState(State.Type.FAILED))
.build();
return Optional.of(execution);
return Optional.of(SchedulableExecutionFactory.createFailedExecution(this, conditionContext, triggerContext));
}
// recalculate true output for previous and next based on conditions
@@ -430,14 +408,12 @@ public class Schedule extends AbstractTrigger implements Schedulable, TriggerOut
variables = scheduleDates.toMap();
}
Execution execution = TriggerService.generateScheduledExecution(
Execution execution = SchedulableExecutionFactory.createExecution(
this,
conditionContext,
triggerContext,
generateLabels(runContext, conditionContext, backfill),
generateInputs(runContext, backfill),
variables,
Optional.empty()
null
);
return Optional.of(execution);
@@ -448,34 +424,6 @@ public class Schedule extends AbstractTrigger implements Schedulable, TriggerOut
return parser.parse(this.cron);
}
private List<Label> generateLabels(RunContext runContext, ConditionContext conditionContext, Backfill backfill) throws IllegalVariableEvaluationException {
List<Label> labels = LabelService.fromTrigger(runContext, conditionContext.getFlow(), this);
if (backfill != null && backfill.getLabels() != null) {
for (Label label : backfill.getLabels()) {
final var value = runContext.render(label.value());
if (value != null) {
labels.add(new Label(label.key(), value));
}
}
}
return labels;
}
private Map<String, Object> generateInputs(RunContext runContext, Backfill backfill) throws IllegalVariableEvaluationException {
Map<String, Object> inputs = new HashMap<>();
if (this.inputs != null) {
inputs.putAll(runContext.render(this.inputs));
}
if (backfill != null && backfill.getInputs() != null) {
inputs.putAll(runContext.render(backfill.getInputs()));
}
return inputs;
}
private Optional<Output> scheduleDates(ExecutionTime executionTime, ZonedDateTime date) {
Optional<ZonedDateTime> next = executionTime.nextExecution(date.minus(Duration.ofSeconds(1)));
@@ -549,9 +497,9 @@ public class Schedule extends AbstractTrigger implements Schedulable, TriggerOut
Optional<ZonedDateTime> truePreviousNextDateWithCondition(ExecutionTime executionTime, ConditionContext conditionContext, ZonedDateTime toTestDate, boolean next) throws InternalException {
int upperYearBound = ZonedDateTime.now().getYear() + 10;
int lowerYearBound = ZonedDateTime.now().getYear() - 10;
while ((next && toTestDate.getYear() < upperYearBound) || (!next && toTestDate.getYear() > lowerYearBound)) {
Optional<ZonedDateTime> currentDate = next ?
executionTime.nextExecution(toTestDate) :
executionTime.lastExecution(toTestDate);
@@ -607,11 +555,10 @@ public class Schedule extends AbstractTrigger implements Schedulable, TriggerOut
private boolean validateScheduleCondition(ConditionContext conditionContext) throws InternalException {
if (conditions != null) {
ConditionService conditionService = ((DefaultRunContext)conditionContext.getRunContext()).getApplicationContext().getBean(ConditionService.class);
return conditionService.isValid(
conditions.stream().filter(c -> c instanceof ScheduleCondition).map(c -> (ScheduleCondition) c).toList(),
conditionContext
);
return conditions.stream()
.filter(c -> c instanceof ScheduleCondition)
.map(c -> (ScheduleCondition) c)
.allMatch(throwPredicate(condition -> condition.test(conditionContext)));
}
return true;
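The rewritten validateScheduleCondition above drops the ConditionService bean lookup and tests each ScheduleCondition directly, relying on Rethrow.throwPredicate so a checked-exception-throwing test can be used inside Stream.allMatch. A simplified, self-contained stand-in for that pattern (an illustration only, not Kestra's actual Rethrow implementation):
import java.util.List;
import java.util.function.Predicate;

class RethrowSketch {
    @FunctionalInterface
    interface CheckedPredicate<T> {
        boolean test(T t) throws Exception;
    }

    // wraps a checked-exception-throwing predicate so it fits java.util.function.Predicate
    static <T> Predicate<T> throwPredicate(CheckedPredicate<T> predicate) {
        return t -> {
            try {
                return predicate.test(t);
            } catch (Exception e) {
                throw new RuntimeException(e); // surfaces the checked exception to the caller
            }
        };
    }

    public static void main(String[] args) {
        List<String> values = List.of("1", "2", "3");
        boolean allPositive = values.stream()
            .allMatch(throwPredicate(v -> Integer.parseInt(v) > 0));
        System.out.println(allPositive); // true
    }
}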

View File

@@ -10,7 +10,6 @@ import io.kestra.core.models.property.Property;
import io.kestra.core.models.tasks.VoidOutput;
import io.kestra.core.models.triggers.*;
import io.kestra.core.runners.RunContext;
import io.kestra.core.services.LabelService;
import io.kestra.core.validations.TimezoneId;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.validation.constraints.NotNull;
@@ -23,7 +22,10 @@ import java.time.Duration;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.temporal.ChronoUnit;
import java.util.*;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.Predicate;
import static io.kestra.core.utils.Rethrow.throwFunction;
@@ -45,11 +47,7 @@ public class ScheduleOnDates extends AbstractTrigger implements Schedulable, Tri
@Builder.Default
@Null
private final Duration interval = null;
@Schema(
title = "The inputs to pass to the scheduled flow"
)
@PluginProperty(dynamic = true)
private Map<String, Object> inputs;
@TimezoneId
@@ -63,31 +61,24 @@ public class ScheduleOnDates extends AbstractTrigger implements Schedulable, Tri
@NotNull
private Property<List<ZonedDateTime>> dates;
@Schema(
title = "Action to take in the case of missed schedules",
description = "`ALL` will recover all missed schedules, `LAST` will only recover the last missing one, `NONE` will not recover any missing schedule.\n" +
"The default is `ALL` unless a different value is configured using the global plugin configuration."
)
@PluginProperty
private RecoverMissedSchedules recoverMissedSchedules;
@Override
public Optional<Execution> evaluate(ConditionContext conditionContext, TriggerContext triggerContext) throws Exception {
RunContext runContext = conditionContext.getRunContext();
ZonedDateTime lastEvaluation = triggerContext.getDate();
Optional<ZonedDateTime> nextDate = nextDate(runContext, date -> date.isEqual(lastEvaluation) || date.isAfter(lastEvaluation));
if (nextDate.isPresent()) {
log.info("Schedule execution on {}", nextDate.get());
Execution execution = TriggerService.generateScheduledExecution(
Execution execution = SchedulableExecutionFactory.createExecution(
this,
conditionContext,
triggerContext,
LabelService.fromTrigger(runContext, conditionContext.getFlow(), this),
this.inputs != null ? runContext.render(this.inputs) : Collections.emptyMap(),
Collections.emptyMap(),
nextDate
nextDate.orElse(null)
);
return Optional.of(execution);
@@ -97,29 +88,21 @@ public class ScheduleOnDates extends AbstractTrigger implements Schedulable, Tri
}
@Override
public ZonedDateTime nextEvaluationDate(ConditionContext conditionContext, Optional<? extends TriggerContext> last) {
try {
return last
.map(throwFunction(context ->
nextDate(conditionContext.getRunContext(), date -> date.isAfter(context.getDate()))
.orElse(ZonedDateTime.now().plusYears(1))
))
.orElse(conditionContext.getRunContext()
.render(dates)
.asList(ZonedDateTime.class)
.stream()
.sorted()
.findFirst()
.orElse(ZonedDateTime.now()))
.truncatedTo(ChronoUnit.SECONDS);
} catch (IllegalVariableEvaluationException e) {
log.warn("Failed to evaluate schedule dates for trigger '{}': {}", this.getId(), e.getMessage());
return ZonedDateTime.now().plusYears(1);
}
public ZonedDateTime nextEvaluationDate(ConditionContext conditionContext, Optional<? extends TriggerContext> triggerContext) {
return triggerContext
.map(ctx -> ctx.getBackfill() != null ? ctx.getBackfill().getCurrentDate() : ctx.getDate())
.map(this::withTimeZone)
.or(() -> Optional.of(ZonedDateTime.now()))
.flatMap(dt -> {
try {
return nextDate(conditionContext.getRunContext(), date -> date.isAfter(dt));
} catch (IllegalVariableEvaluationException e) {
log.warn("Failed to evaluate schedule dates for trigger '{}': {}", this.getId(), e.getMessage());
throw new InvalidTriggerConfigurationException("Failed to evaluate schedule 'dates'. Cause: " + e.getMessage());
}
}).orElseGet(() -> ZonedDateTime.now().plusYears(1));
}
@Override
public ZonedDateTime nextEvaluationDate() {
// TODO this may be the next date from now?
@@ -139,9 +122,17 @@ public class ScheduleOnDates extends AbstractTrigger implements Schedulable, Tri
return previousDates.isEmpty() ? ZonedDateTime.now() : previousDates.getFirst();
}
private Optional<ZonedDateTime> nextDate(RunContext runContext, Predicate<ZonedDateTime> filter) throws IllegalVariableEvaluationException {
return runContext.render(dates).asList(ZonedDateTime.class).stream().sorted()
.filter(date -> filter.test(date))
private ZonedDateTime withTimeZone(ZonedDateTime date) {
if (this.timezone == null) {
return date;
}
return date.withZoneSameInstant(ZoneId.of(this.timezone));
}
private Optional<ZonedDateTime> nextDate(RunContext runContext, Predicate<ZonedDateTime> predicate) throws IllegalVariableEvaluationException {
return runContext.render(dates)
.asList(ZonedDateTime.class).stream().sorted()
.filter(predicate)
.map(throwFunction(date -> timezone == null ? date : date.withZoneSameInstant(ZoneId.of(runContext.render(timezone)))))
.findFirst()
.map(date -> date.truncatedTo(ChronoUnit.SECONDS));
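The reworked nextDate above sorts the rendered dates, keeps the first one matching the predicate, converts it to the configured timezone and truncates it to seconds. A self-contained sketch of that selection rule in plain Java, without Kestra types:
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.temporal.ChronoUnit;
import java.util.List;
import java.util.Optional;

class NextDateSketch {
    // earliest date strictly after the reference instant, normalised to a zone, truncated to seconds
    static Optional<ZonedDateTime> nextDate(List<ZonedDateTime> dates, ZonedDateTime after, ZoneId zone) {
        return dates.stream()
            .sorted()
            .filter(date -> date.isAfter(after))
            .map(date -> zone == null ? date : date.withZoneSameInstant(zone))
            .findFirst()
            .map(date -> date.truncatedTo(ChronoUnit.SECONDS));
    }

    public static void main(String[] args) {
        ZonedDateTime now = ZonedDateTime.now();
        List<ZonedDateTime> dates = List.of(now.plusDays(1), now.minusHours(1), now.plusHours(1));
        System.out.println(nextDate(dates, now, ZoneId.of("UTC"))); // the +1h date, in UTC, to the second
    }
}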

View File

@@ -170,10 +170,11 @@ class JsonSchemaGeneratorTest {
Map<String, Object> jsonSchema = jsonSchemaGenerator.generate(AbstractTrigger.class, AbstractTrigger.class);
assertThat((Map<String, Object>) jsonSchema.get("properties"), allOf(
Matchers.aMapWithSize(3),
Matchers.aMapWithSize(4),
hasKey("conditions"),
hasKey("stopAfter"),
hasKey("type")
hasKey("type"),
hasKey("allowConcurrent")
));
});
}

View File

@@ -60,6 +60,15 @@ class SystemInformationReportTest {
return setting;
}
@Override
public Setting internalSave(Setting setting) throws ConstraintViolationException {
if (setting.getKey().equals(Setting.INSTANCE_UUID)) {
UUID = setting.getValue();
}
return setting;
}
@Override
public Setting delete(Setting setting) {
return setting;

View File

@@ -1,9 +1,9 @@
package io.kestra.core.repositories;
import com.devskiller.friendly_id.FriendlyId;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.google.common.collect.ImmutableMap;
import io.kestra.core.exceptions.InvalidQueryFiltersException;
import io.kestra.core.junit.annotations.FlakyTest;
import io.kestra.core.junit.annotations.KestraTest;
import io.kestra.core.models.Label;
import io.kestra.core.models.QueryFilter;
@@ -24,7 +24,6 @@ import io.kestra.core.models.flows.State.Type;
import io.kestra.core.models.property.Property;
import io.kestra.core.models.tasks.ResolvedTask;
import io.kestra.core.repositories.ExecutionRepositoryInterface.ChildFilter;
import io.kestra.core.serializers.JacksonMapper;
import io.kestra.core.utils.IdUtils;
import io.kestra.core.utils.NamespaceUtils;
import io.kestra.core.utils.TestsUtils;
@@ -42,10 +41,9 @@ import org.junit.jupiter.params.provider.MethodSource;
import org.slf4j.event.Level;
import java.io.IOException;
import java.sql.Timestamp;
import java.time.*;
import java.time.format.DateTimeFormatter;
import java.time.temporal.ChronoUnit;
import java.time.Duration;
import java.time.Instant;
import java.time.ZonedDateTime;
import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Function;
@@ -185,6 +183,7 @@ public abstract class AbstractExecutionRepositoryTest {
@ParameterizedTest
@MethodSource("filterCombinations")
@FlakyTest(description = "Filtering tests sometimes return 0 results")
void should_find_all(QueryFilter filter, int expectedSize){
var tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
inject(tenant, "executionTriggerId");

View File

@@ -178,7 +178,10 @@ public class FlowConcurrencyCaseTest {
// we restart the first one, it should be queued then fail again.
Execution failedExecution = runnerUtils.awaitExecution(e -> e.getState().getCurrent().equals(Type.FAILED), execution1);
Execution restarted = executionService.restart(failedExecution, null);
Execution executionResult1 = runnerUtils.restartExecution(e -> e.getState().getCurrent().equals(Type.FAILED), restarted);
Execution executionResult1 = runnerUtils.restartExecution(
e -> e.getState().getHistories().stream().anyMatch(history -> history.getState() == Type.RESTARTED) && e.getState().getCurrent().equals(Type.FAILED),
restarted
);
Execution executionResult2 = runnerUtils.awaitExecution(e -> e.getState().getCurrent().equals(Type.FAILED), execution2);
assertThat(executionResult1.getState().getCurrent()).isEqualTo(Type.FAILED);
@@ -278,7 +281,6 @@ public class FlowConcurrencyCaseTest {
assertThat(queued.getState().getCurrent()).isEqualTo(Type.QUEUED);
} finally {
// kill everything to avoid dangling executions
runnerUtils.killExecution(execution1);
runnerUtils.killExecution(execution2);
runnerUtils.killExecution(execution3);
@@ -321,7 +323,6 @@ public class FlowConcurrencyCaseTest {
} finally {
// kill everything to avoid dangling executions
runnerUtils.killExecution(execution1);
runnerUtils.killExecution(execution2);
runnerUtils.killExecution(execution3);
// await until they are all terminated; note that since KILLED is received twice, some messages may still be pending, but this is the best we can do

View File

@@ -1,15 +1,24 @@
package io.kestra.core.utils;
import io.kestra.core.junit.annotations.KestraTest;
import io.kestra.core.models.Setting;
import io.kestra.core.repositories.SettingRepositoryInterface;
import io.micronaut.test.extensions.junit5.annotation.MicronautTest;
import jakarta.inject.Inject;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
@KestraTest
import java.util.Optional;
import static org.assertj.core.api.Assertions.assertThat;
@MicronautTest
public class EditionProviderTest {
@Inject
private EditionProvider editionProvider;
@Inject
private SettingRepositoryInterface settingRepository;
protected EditionProvider.Edition expectedEdition() {
return EditionProvider.Edition.OSS;
}
@@ -17,5 +26,10 @@ public class EditionProviderTest {
@Test
void shouldReturnCurrentEdition() {
Assertions.assertEquals(expectedEdition(), editionProvider.get());
// check that the edition is persisted in settings
Optional<Setting> editionSettings = settingRepository.findByKey(Setting.INSTANCE_EDITION);
assertThat(editionSettings).isPresent();
assertThat(editionSettings.get().getValue()).isEqualTo(expectedEdition().name());
}
}

View File

@@ -0,0 +1,30 @@
package io.kestra.core.utils;
import io.kestra.core.models.Setting;
import io.kestra.core.repositories.SettingRepositoryInterface;
import io.micronaut.test.extensions.junit5.annotation.MicronautTest;
import jakarta.inject.Inject;
import org.junit.jupiter.api.Test;
import java.util.Optional;
import static org.assertj.core.api.Assertions.assertThat;
@MicronautTest
class VersionProviderTest {
@Inject
private VersionProvider versionProvider;
@Inject
private SettingRepositoryInterface settingRepository;
@Test
void shouldResolveVersion() {
assertThat(versionProvider.getVersion()).endsWith("-SNAPSHOT");
// check that the version is persisted in settings
Optional<Setting> versionSettings = settingRepository.findByKey(Setting.INSTANCE_VERSION);
assertThat(versionSettings).isPresent();
assertThat(versionSettings.get().getValue()).isEqualTo(versionProvider.getVersion());
}
}

View File

@@ -9,9 +9,15 @@ import io.kestra.core.utils.TestsUtils;
import io.kestra.core.junit.annotations.KestraTest;
import jakarta.inject.Inject;
import org.junit.jupiter.api.Test;
import io.kestra.core.models.validations.ValidateConstraintViolation;
import io.kestra.core.services.FlowService;
import jakarta.validation.ConstraintViolationException;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.JsonLocation;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.List;
import java.io.File;
import java.net.URL;
import java.util.Optional;
@@ -23,6 +29,107 @@ class FlowValidationTest {
@Inject
private ModelValidator modelValidator;
@Inject
private FlowService flowService;
private static final ObjectMapper mapper = new ObjectMapper();
// Helper class to create JsonProcessingException with location
private static class TestJsonProcessingException extends JsonProcessingException {
public TestJsonProcessingException(String msg, JsonLocation location) {
super(msg, location);
}
public TestJsonProcessingException(String msg) {
super(msg);
}
}
@Test
void testFormatYamlErrorMessage_WithExpectedFieldName() throws JsonProcessingException {
JsonProcessingException e = new TestJsonProcessingException("Expected a field name", new JsonLocation(null, 100, 5, 10));
Object dummyTarget = new Object(); // Dummy target for toConstraintViolationException
ConstraintViolationException result = YamlParser.toConstraintViolationException(dummyTarget, "test resource", e);
assertThat(result.getMessage()).contains("YAML syntax error: Invalid structure").contains("(at line 5)");
}
@Test
void testFormatYamlErrorMessage_WithMappingStartEvent() throws JsonProcessingException {
JsonProcessingException e = new TestJsonProcessingException("MappingStartEvent", new JsonLocation(null, 200, 3, 5));
Object dummyTarget = new Object();
ConstraintViolationException result = YamlParser.toConstraintViolationException(dummyTarget, "test resource", e);
assertThat(result.getMessage()).contains("YAML syntax error: Unexpected mapping start").contains("(at line 3)");
}
@Test
void testFormatYamlErrorMessage_WithScalarValue() throws JsonProcessingException {
JsonProcessingException e = new TestJsonProcessingException("Scalar value", new JsonLocation(null, 150, 7, 12));
Object dummyTarget = new Object();
ConstraintViolationException result = YamlParser.toConstraintViolationException(dummyTarget, "test resource", e);
assertThat(result.getMessage()).contains("YAML syntax error: Expected a simple value").contains("(at line 7)");
}
@Test
void testFormatYamlErrorMessage_GenericError() throws JsonProcessingException {
JsonProcessingException e = new TestJsonProcessingException("Some other error", new JsonLocation(null, 50, 2, 8));
Object dummyTarget = new Object();
ConstraintViolationException result = YamlParser.toConstraintViolationException(dummyTarget, "test resource", e);
assertThat(result.getMessage()).contains("YAML parsing error: Some other error").contains("(at line 2)");
}
@Test
void testFormatYamlErrorMessage_NoLocation() throws JsonProcessingException {
JsonProcessingException e = new TestJsonProcessingException("Expected a field name");
Object dummyTarget = new Object();
ConstraintViolationException result = YamlParser.toConstraintViolationException(dummyTarget, "test resource", e);
assertThat(result.getMessage()).contains("YAML syntax error: Invalid structure").doesNotContain("at line");
}
@Test
void testValidateFlowWithYamlSyntaxError() {
String invalidYaml = """
id: test-flow
namespace: io.kestra.unittest
tasks:
- id:hello
type: io.kestra.plugin.core.log.Log
message: {{ abc }}
""";
List<ValidateConstraintViolation> results = flowService.validate("my-tenant", invalidYaml);
assertThat(results).hasSize(1);
assertThat(results.getFirst().getConstraints()).contains("YAML parsing error").contains("at line");
}
@Test
void testValidateFlowWithUndefinedVariable() {
String yamlWithUndefinedVar = """
id: test-flow
namespace: io.kestra.unittest
tasks:
- id: hello
type: io.kestra.plugin.core.log.Log
message: {{ undefinedVar }}
""";
List<ValidateConstraintViolation> results = flowService.validate("my-tenant", yamlWithUndefinedVar);
assertThat(results).hasSize(1);
assertThat(results.getFirst().getConstraints()).contains("Validation error");
}
@Test
void invalidRecursiveFlow() {
Flow flow = this.parse("flows/invalids/recursive-flow.yaml");
@@ -130,4 +237,4 @@ class FlowValidationTest {
return YamlParser.parse(file, Flow.class);
}
}
}
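The new tests above pin down how Jackson parse failures are surfaced to users. Read together, the assertions imply a message mapping roughly like the hypothetical helper below; the real logic lives in YamlParser.toConstraintViolationException and may be structured differently:
class YamlErrorMessageSketch {
    // hypothetical mapping reconstructed from the test assertions above
    static String format(String rawMessage, Integer line) {
        String message;
        if (rawMessage.contains("Expected a field name")) {
            message = "YAML syntax error: Invalid structure";
        } else if (rawMessage.contains("MappingStartEvent")) {
            message = "YAML syntax error: Unexpected mapping start";
        } else if (rawMessage.contains("Scalar value")) {
            message = "YAML syntax error: Expected a simple value";
        } else {
            message = "YAML parsing error: " + rawMessage;
        }
        return line == null ? message : message + " (at line " + line + ")";
    }

    public static void main(String[] args) {
        // mirrors testFormatYamlErrorMessage_WithExpectedFieldName
        System.out.println(format("Expected a field name", 5)); // YAML syntax error: Invalid structure (at line 5)
    }
}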

View File

@@ -8,6 +8,7 @@ import io.kestra.core.models.flows.Output;
import io.kestra.core.models.flows.State;
import io.kestra.core.models.flows.State.History;
import io.kestra.core.runners.DefaultRunContext;
import io.kestra.core.runners.InputAndOutput;
import io.kestra.core.runners.SubflowExecutionResult;
import io.kestra.core.services.VariablesService;
import io.micronaut.context.ApplicationContext;
@@ -46,11 +47,15 @@ class SubflowTest {
@Mock
private ApplicationContext applicationContext;
@Mock
private InputAndOutput inputAndOutput;
@BeforeEach
void beforeEach() {
Mockito.when(applicationContext.getBean(VariablesService.class)).thenReturn(new VariablesService());
Mockito.when(runContext.logger()).thenReturn(LOG);
Mockito.when(runContext.getApplicationContext()).thenReturn(applicationContext);
Mockito.when(runContext.inputAndOutput()).thenReturn(inputAndOutput);
}
@Test
@@ -118,7 +123,7 @@ class SubflowTest {
Map<String, Object> outputs = Map.of("key", "value");
Mockito.when(runContext.render(Mockito.anyMap())).thenReturn(outputs);
Mockito.when(inputAndOutput.renderOutputs(Mockito.anyList())).thenReturn(Map.of("key", "value"));
Subflow subflow = Subflow.builder()
.outputs(outputs)
@@ -159,6 +164,7 @@ class SubflowTest {
Output output = Output.builder().id("key").value("value").build();
Mockito.when(runContext.render(Mockito.anyMap())).thenReturn(Map.of(output.getId(), output.getValue()));
Mockito.when(inputAndOutput.typedOutputs(Mockito.any(), Mockito.any(), Mockito.anyMap())).thenReturn(Map.of("key", "value"));
Flow flow = Flow.builder()
.outputs(List.of(output))
.build();

View File

@@ -57,7 +57,7 @@ class ScheduleOnDatesTest {
}
@Test
public void shouldReturnFirstDateWhenNextEvaluationDateAndNoExistingTriggerDate() throws Exception {
public void shouldReturnFirstDateWhenNextEvaluationDateAndNoExistingTriggerDate() {
// given
var now = ZonedDateTime.now();
var before = now.minusMinutes(1).truncatedTo(ChronoUnit.SECONDS);
@@ -75,7 +75,7 @@ class ScheduleOnDatesTest {
ZonedDateTime nextDate = scheduleOnDates.nextEvaluationDate(conditionContext, Optional.empty());
// then
assertThat(nextDate).isEqualTo(before);
assertThat(nextDate).isEqualTo(after);
}
@Test

View File

@@ -104,8 +104,9 @@ class ScheduleTest {
);
assertThat(evaluate.isPresent()).isTrue();
assertThat(evaluate.get().getLabels()).hasSize(3);
assertThat(evaluate.get().getLabels()).hasSize(4);
assertTrue(evaluate.get().getLabels().stream().anyMatch(label -> label.key().equals(Label.CORRELATION_ID)));
assertTrue(evaluate.get().getLabels().stream().anyMatch(label -> label.equals(new Label(Label.FROM, "trigger"))));
assertThat(evaluate.get().getVariables()).containsEntry("custom_var", "VARIABLE VALUE");
var vars = evaluate.get().getTrigger().getVariables();
var inputs = evaluate.get().getInputs();
@@ -138,8 +139,9 @@ class ScheduleTest {
);
assertThat(evaluate.isPresent()).isTrue();
assertThat(evaluate.get().getLabels()).hasSize(3);
assertThat(evaluate.get().getLabels()).hasSize(4);
assertTrue(evaluate.get().getLabels().stream().anyMatch(label -> label.key().equals(Label.CORRELATION_ID)));
assertTrue(evaluate.get().getLabels().stream().anyMatch(label -> label.equals(new Label(Label.FROM, "trigger"))));
assertThat(evaluate.get().getVariables()).containsEntry("custom_var", "VARIABLE VALUE");
var inputs = evaluate.get().getInputs();
@@ -645,14 +647,14 @@ class ScheduleTest {
private ZonedDateTime dateFromVars(String date, ZonedDateTime expected) {
return ZonedDateTime.parse(date).withZoneSameInstant(expected.getZone());
}
@Test
void shouldGetNextExecutionDateWithConditionMatchingFutureDate() throws InternalException {
ZonedDateTime now = ZonedDateTime.now().withZoneSameLocal(ZoneId.of("Europe/Paris"));
OffsetTime before = now.minusHours(1).toOffsetDateTime().toOffsetTime().withMinute(0).withSecond(0).withNano(0);
OffsetTime after = now.minusHours(4).toOffsetDateTime().toOffsetTime().withMinute(0).withSecond(0).withNano(0);
Schedule trigger = Schedule.builder()
.id("schedule").type(Schedule.class.getName())
.cron("0 * * * *") // every hour
@@ -665,25 +667,25 @@ class ScheduleTest {
.build()
))
.build();
TriggerContext triggerContext = triggerContext(now, trigger).toBuilder().build();
ConditionContext conditionContext = ConditionContext.builder()
.runContext(runContextInitializer.forScheduler((DefaultRunContext) runContextFactory.of(), triggerContext, trigger))
.build();
Optional<ZonedDateTime> result = trigger.truePreviousNextDateWithCondition(trigger.executionTime(), conditionContext, now, true);
assertThat(result).isNotEmpty();
}
@Test
void shouldGetNextExecutionDateWithConditionMatchingCurrentDate() throws InternalException {
ZonedDateTime now = ZonedDateTime.now().withZoneSameLocal(ZoneId.of("Europe/Paris"));
OffsetTime before = now.plusHours(2).toOffsetDateTime().toOffsetTime().withMinute(0).withSecond(0).withNano(0);
OffsetTime after = now.minusHours(2).toOffsetDateTime().toOffsetTime().withMinute(0).withSecond(0).withNano(0);
Schedule trigger = Schedule.builder()
.id("schedule").type(Schedule.class.getName())
.cron("*/30 * * * * *")
@@ -696,13 +698,13 @@ class ScheduleTest {
.build()
))
.build();
TriggerContext triggerContext = triggerContext(now, trigger).toBuilder().build();
ConditionContext conditionContext = ConditionContext.builder()
.runContext(runContextInitializer.forScheduler((DefaultRunContext) runContextFactory.of(), triggerContext, trigger))
.build();
Optional<ZonedDateTime> result = trigger.truePreviousNextDateWithCondition(trigger.executionTime(), conditionContext, now, true);
assertThat(result).isNotEmpty();
}

View File

@@ -402,10 +402,11 @@ public class ExecutorService {
if (flow.getOutputs() != null) {
RunContext runContext = runContextFactory.of(executor.getFlow(), executor.getExecution());
var inputAndOutput = runContext.inputAndOutput();
try {
Map<String, Object> outputs = FlowInputOutput.renderFlowOutputs(flow.getOutputs(), runContext);
outputs = flowInputOutput.typedOutputs(flow, executor.getExecution(), outputs);
Map<String, Object> outputs = inputAndOutput.renderOutputs(flow.getOutputs());
outputs = inputAndOutput.typedOutputs(flow, executor.getExecution(), outputs);
newExecution = newExecution.withOutputs(outputs);
} catch (Exception e) {
Logs.logExecution(

View File

@@ -44,9 +44,15 @@ public abstract class AbstractJdbcSettingRepository extends AbstractJdbcCrudRepo
@Override
public Setting save(Setting setting) {
this.eventPublisher.publishEvent(new CrudEvent<>(setting, CrudEventType.UPDATE));
return internalSave(setting);
}
@Override
public Setting internalSave(Setting setting) {
Map<Field<Object>, Object> fields = this.jdbcRepository.persistFields(setting);
this.jdbcRepository.persist(setting, fields);
this.eventPublisher.publishEvent(new CrudEvent<>(setting, CrudEventType.UPDATE));
return setting;
}

View File

@@ -72,12 +72,12 @@ public abstract class AbstractJdbcTriggerRepository extends AbstractJdbcCrudRepo
@Override
public Optional<Trigger> findLast(TriggerContext trigger) {
return findOne(DSL.trueCondition(), field("key").eq(trigger.uid()));
return findByUid(trigger.uid());
}
@Override
public Optional<Trigger> findByExecution(Execution execution) {
return findOne(execution.getTenantId(), field("execution_id").eq(execution.getId()));
public Optional<Trigger> findByUid(String uid) {
return findOne(DSL.trueCondition(), field("key").eq(uid));
}
public List<Trigger> findByNextExecutionDateReadyForAllTenants(ZonedDateTime now, ScheduleContextInterface scheduleContextInterface) {
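findLast now delegates to the new findByUid, and the execution_id based findByExecution appears to be replaced by it, leaving a single lookup path keyed on the trigger uid (the "key" column). The JdbcExecutor hunks further down call it as shown in this diff, presumably because the uid already encodes the tenant (the tenant filter used by the old lookup is no longer applied):
// as used in JdbcExecutor below: resolve the scheduler trigger for an execution by its uid
triggerRepository
    .findByUid(Trigger.uid(execution))
    .ifPresent(trigger -> this.triggerState.update(executionService.resetExecution(flow, execution, trigger)));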

View File

@@ -13,6 +13,7 @@ import io.kestra.core.models.tasks.ExecutableTask;
import io.kestra.core.models.tasks.Task;
import io.kestra.core.models.tasks.WorkerGroup;
import io.kestra.core.models.topologies.FlowTopology;
import io.kestra.core.models.triggers.Trigger;
import io.kestra.core.models.triggers.multipleflows.MultipleCondition;
import io.kestra.core.models.triggers.multipleflows.MultipleConditionStorageInterface;
import io.kestra.core.queues.QueueException;
@@ -1138,9 +1139,7 @@ public class JdbcExecutor implements ExecutorInterface {
execution.getTrigger().getId()
);
} else {
triggerRepository
.findByExecution(execution)
.ifPresent(trigger -> this.triggerState.update(executionService.resetExecution(flow, execution, trigger)));
triggerRepository.findByUid(Trigger.uid(execution)).ifPresent(trigger -> this.triggerState.update(executionService.resetExecution(flow, execution, trigger)));
}
}
@@ -1241,11 +1240,7 @@ public class JdbcExecutor implements ExecutorInterface {
// purge the trigger: reset scheduler trigger at end
if (execution.getTrigger() != null) {
FlowWithSource flow = executor.getFlow();
triggerRepository
.findByExecution(execution)
.ifPresent(trigger -> {
this.triggerState.update(executionService.resetExecution(flow, execution, trigger));
});
triggerRepository.findByUid(Trigger.uid(execution)).ifPresent(trigger -> this.triggerState.update(executionService.resetExecution(flow, execution, trigger)));
}
// Purge the workerTaskResultQueue and the workerJobQueue

View File

@@ -31,11 +31,11 @@ dependencies {
api enforcedPlatform("com.fasterxml.jackson:jackson-bom:$jacksonVersion")
api enforcedPlatform("org.slf4j:slf4j-api:$slf4jVersion")
api platform("io.micronaut.platform:micronaut-platform:4.9.4")
api platform("io.qameta.allure:allure-bom:2.31.0")
api platform("io.qameta.allure:allure-bom:2.32.0")
// we define cloud bom here for GCP, Azure and AWS so they are aligned for all plugins that use them (secret, storage, oss and ee plugins)
api platform('com.google.cloud:libraries-bom:26.72.0')
api platform('com.google.cloud:libraries-bom:26.73.0')
api platform("com.azure:azure-sdk-bom:1.3.3")
api platform('software.amazon.awssdk:bom:2.40.5')
api platform('software.amazon.awssdk:bom:2.40.10')
api platform("dev.langchain4j:langchain4j-bom:$langchain4jVersion")
api platform("dev.langchain4j:langchain4j-community-bom:$langchain4jCommunityVersion")
@@ -77,12 +77,12 @@ dependencies {
api "org.apache.kafka:kafka-clients:$kafkaVersion"
api "org.apache.kafka:kafka-streams:$kafkaVersion"
// AWS CRT is not included in the AWS BOM but needed for the S3 Transfer manager
api 'software.amazon.awssdk.crt:aws-crt:0.40.3'
api 'software.amazon.awssdk.crt:aws-crt:0.41.0'
// Other libs
api("org.projectlombok:lombok:1.18.42")
api("org.codehaus.janino:janino:3.1.12")
api group: 'org.apache.logging.log4j', name: 'log4j-to-slf4j', version: '2.25.2'
api group: 'org.apache.logging.log4j', name: 'log4j-to-slf4j', version: '2.25.3'
api group: 'org.slf4j', name: 'jul-to-slf4j', version: slf4jVersion
api group: 'org.slf4j', name: 'jcl-over-slf4j', version: slf4jVersion
api group: 'org.fusesource.jansi', name: 'jansi', version: '2.4.2'
@@ -99,11 +99,11 @@ dependencies {
api group: 'org.apache.maven.resolver', name: 'maven-resolver-transport-file', version: mavenResolverVersion
api group: 'org.apache.maven.resolver', name: 'maven-resolver-transport-apache', version: mavenResolverVersion
api 'com.github.oshi:oshi-core:6.9.1'
api 'io.pebbletemplates:pebble:4.0.0'
api 'io.pebbletemplates:pebble:4.1.0'
api group: 'co.elastic.logging', name: 'logback-ecs-encoder', version: '1.7.0'
api group: 'de.focus-shift', name: 'jollyday-core', version: jollydayVersion
api group: 'de.focus-shift', name: 'jollyday-jaxb', version: jollydayVersion
api 'nl.basjes.gitignore:gitignore-reader:1.13.0'
api 'nl.basjes.gitignore:gitignore-reader:1.14.1'
api group: 'dev.failsafe', name: 'failsafe', version: '3.3.2'
api group: 'com.cronutils', name: 'cron-utils', version: '9.2.1'
api group: 'com.github.victools', name: 'jsonschema-generator', version: jsonschemaVersion

View File

@@ -288,7 +288,7 @@ public abstract class AbstractScheduler implements Scheduler {
disableInvalidTrigger(workerTriggerResult.getTriggerContext(), e);
return;
}
this.handleEvaluateWorkerTriggerResult(triggerExecution, nextExecutionDate);
this.handleEvaluateWorkerTriggerResult(triggerExecution, nextExecutionDate, workerTriggerResult.getTrigger());
} else {
ZonedDateTime nextExecutionDate;
try {
@@ -768,7 +768,7 @@ public abstract class AbstractScheduler implements Scheduler {
}
private void handleEvaluateWorkerTriggerResult(SchedulerExecutionWithTrigger result, ZonedDateTime
nextExecutionDate) {
nextExecutionDate, AbstractTrigger abstractTrigger) {
Optional.ofNullable(result)
.ifPresent(executionWithTrigger -> {
log(executionWithTrigger);
@@ -779,6 +779,12 @@ public abstract class AbstractScheduler implements Scheduler {
nextExecutionDate
);
// if the trigger is allowed to run concurrently, we do not attach the execution id to the trigger state
// i.e., the trigger will not be locked
if (abstractTrigger.isAllowConcurrent()) {
trigger = trigger.toBuilder().executionId(null).build();
}
// Worker trigger results are evaluated in another thread via the workerTriggerResultQueue.
// We can then update the trigger directly.
this.saveLastTriggerAndEmitExecution(executionWithTrigger.getExecution(), trigger, triggerToSave -> this.triggerState.update(triggerToSave));
@@ -800,6 +806,12 @@ public abstract class AbstractScheduler implements Scheduler {
if (result.getExecution().getState().getCurrent() == State.Type.FAILED) {
trigger = trigger.resetExecution(State.Type.FAILED);
}
// if the trigger is allowed to run concurrently, we do not attach the execution id to the trigger state
// i.e., the trigger will not be locked
if (((AbstractTrigger)schedule).isAllowConcurrent()) {
trigger = trigger.toBuilder().executionId(null).build();
}
// Schedule triggers are being executed directly from the handle method within the context where triggers are locked.
// So we must save them by passing the scheduleContext.
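Both branches above clear executionId on the trigger state when the trigger declares allowConcurrent, so the trigger is never locked on a single execution. A standalone illustration of that locking rule in plain Java (this is not Kestra's Trigger class, just a sketch of the idea):
class AllowConcurrentSketch {
    // a trigger is considered locked while it carries an execution id
    record TriggerState(String uid, String executionId) {
        boolean isLocked() {
            return executionId != null;
        }
        TriggerState withExecutionId(String executionId) {
            return new TriggerState(uid, executionId);
        }
    }

    public static void main(String[] args) {
        TriggerState locked = new TriggerState("ns_flow_schedule", "exec-1");
        TriggerState concurrent = locked.withExecutionId(null); // the allowConcurrent case
        System.out.println(locked.isLocked());     // true  -> next evaluation waits for exec-1
        System.out.println(concurrent.isLocked()); // false -> next evaluation may fire immediately
    }
}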

View File

@@ -91,6 +91,7 @@ public class SchedulerPollingTriggerTest extends AbstractSchedulerTest {
assertThat(queueCount.getCount()).isEqualTo(0L);
assertThat(last.get()).isNotNull();
assertTrue(last.get().getLabels().stream().anyMatch(label -> label.key().equals(Label.CORRELATION_ID)));
assertTrue(last.get().getLabels().stream().anyMatch(label -> label.equals(new Label(Label.FROM, "trigger"))));
}
}
@@ -136,6 +137,7 @@ public class SchedulerPollingTriggerTest extends AbstractSchedulerTest {
assertThat(queueCount.getCount()).isEqualTo(0L);
assertThat(last.get()).isNotNull();
assertTrue(last.get().getLabels().stream().anyMatch(label -> label.key().equals(Label.CORRELATION_ID)));
assertTrue(last.get().getLabels().stream().anyMatch(label -> label.equals(new Label(Label.FROM, "trigger"))));
// Assert that the trigger is now disabled.
// It needs to await on the assertion, as the trigger is disabled only AFTER we receive a successful execution.

View File

@@ -104,6 +104,7 @@ public class SchedulerStreamingTest extends AbstractSchedulerTest {
assertThat(SchedulerStreamingTest.startedEvaluate.get(false), is(1));
assertThat(last.getTrigger().getVariables().get("startedEvaluate"), is(1));
assertTrue(last.getLabels().stream().anyMatch(label -> label.key().equals(Label.CORRELATION_ID)));
assertTrue(last.getLabels().stream().anyMatch(label -> label.equals(new Label(Label.FROM, "trigger"))));
}
);
}

ui/package-lock.json (generated, 417 changed lines)
View File

@@ -45,17 +45,17 @@
"path-browserify": "^1.0.1",
"pdfjs-dist": "^5.4.449",
"pinia": "^3.0.4",
"posthog-js": "^1.304.0",
"posthog-js": "^1.308.0",
"rapidoc": "^9.3.8",
"semver": "^7.7.3",
"shiki": "^3.19.0",
"shiki": "^3.20.0",
"vue": "^3.5.25",
"vue-axios": "^3.5.2",
"vue-chartjs": "^5.3.3",
"vue-gtag": "^3.6.3",
"vue-i18n": "^11.2.2",
"vue-material-design-icons": "^5.3.1",
"vue-router": "^4.6.3",
"vue-router": "^4.6.4",
"vue-sidebar-menu": "^5.9.1",
"vue-virtual-scroller": "^2.0.0-beta.8",
"vue3-popper": "^1.5.0",
@@ -66,10 +66,10 @@
"devDependencies": {
"@codecov/vite-plugin": "^1.9.1",
"@esbuild-plugins/node-modules-polyfill": "^0.2.2",
"@eslint/js": "^9.39.1",
"@eslint/js": "^9.39.2",
"@playwright/test": "^1.57.0",
"@rushstack/eslint-patch": "^1.14.1",
"@shikijs/markdown-it": "^3.19.0",
"@shikijs/markdown-it": "^3.20.0",
"@storybook/addon-themes": "^9.1.16",
"@storybook/addon-vitest": "^9.1.16",
"@storybook/test-runner": "^0.23.0",
@@ -77,14 +77,14 @@
"@types/humanize-duration": "^3.27.4",
"@types/js-yaml": "^4.0.9",
"@types/moment": "^2.13.0",
"@types/node": "^25.0.0",
"@types/node": "^25.0.3",
"@types/nprogress": "^0.2.3",
"@types/path-browserify": "^1.0.3",
"@types/semver": "^7.7.1",
"@types/testing-library__jest-dom": "^6.0.0",
"@types/testing-library__user-event": "^4.2.0",
"@typescript-eslint/parser": "^8.49.0",
"@vitejs/plugin-vue": "^6.0.2",
"@typescript-eslint/parser": "^8.50.0",
"@vitejs/plugin-vue": "^6.0.3",
"@vitejs/plugin-vue-jsx": "^5.1.2",
"@vitest/browser": "^3.2.4",
"@vitest/coverage-v8": "^3.2.4",
@@ -93,7 +93,7 @@
"@vueuse/router": "^14.1.0",
"change-case": "5.4.4",
"cross-env": "^10.1.0",
"eslint": "^9.39.1",
"eslint": "^9.39.2",
"eslint-plugin-storybook": "^9.1.16",
"eslint-plugin-vue": "^9.33.0",
"globals": "^16.5.0",
@@ -106,29 +106,29 @@
"playwright": "^1.55.0",
"prettier": "^3.7.4",
"rimraf": "^6.1.2",
"rolldown-vite": "^7.2.10",
"rolldown-vite": "^7.2.11",
"rollup-plugin-copy": "^3.5.0",
"sass": "^1.96.0",
"sass": "^1.97.0",
"storybook": "^9.1.16",
"storybook-vue3-router": "^6.0.2",
"ts-node": "^10.9.2",
"typescript": "^5.9.3",
"typescript-eslint": "^8.49.0",
"typescript-eslint": "^8.50.0",
"uuid": "^13.0.0",
"vite": "npm:rolldown-vite@latest",
"vitest": "^3.2.4",
"vue-tsc": "^3.1.8"
},
"optionalDependencies": {
"@esbuild/darwin-arm64": "^0.27.1",
"@esbuild/darwin-x64": "^0.27.1",
"@esbuild/linux-x64": "^0.27.1",
"@rollup/rollup-darwin-arm64": "^4.53.3",
"@rollup/rollup-darwin-x64": "^4.53.3",
"@rollup/rollup-linux-x64-gnu": "^4.53.3",
"@swc/core-darwin-arm64": "^1.15.3",
"@swc/core-darwin-x64": "^1.15.3",
"@swc/core-linux-x64-gnu": "^1.15.3"
"@esbuild/darwin-arm64": "^0.27.2",
"@esbuild/darwin-x64": "^0.27.2",
"@esbuild/linux-x64": "^0.27.2",
"@rollup/rollup-darwin-arm64": "^4.53.5",
"@rollup/rollup-darwin-x64": "^4.53.5",
"@rollup/rollup-linux-x64-gnu": "^4.53.5",
"@swc/core-darwin-arm64": "^1.15.5",
"@swc/core-darwin-x64": "^1.15.5",
"@swc/core-linux-x64-gnu": "^1.15.5"
}
},
"node_modules/@acemir/cssom": {
@@ -1345,9 +1345,9 @@
}
},
"node_modules/@esbuild/darwin-arm64": {
"version": "0.27.1",
"resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.1.tgz",
"integrity": "sha512-veg7fL8eMSCVKL7IW4pxb54QERtedFDfY/ASrumK/SbFsXnRazxY4YykN/THYqFnFwJ0aVjiUrVG2PwcdAEqQQ==",
"version": "0.27.2",
"resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.2.tgz",
"integrity": "sha512-davCD2Zc80nzDVRwXTcQP/28fiJbcOwvdolL0sOiOsbwBa72kegmVU0Wrh1MYrbuCL98Omp5dVhQFWRKR2ZAlg==",
"cpu": [
"arm64"
],
@@ -1361,9 +1361,9 @@
}
},
"node_modules/@esbuild/darwin-x64": {
"version": "0.27.1",
"resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.1.tgz",
"integrity": "sha512-+3ELd+nTzhfWb07Vol7EZ+5PTbJ/u74nC6iv4/lwIU99Ip5uuY6QoIf0Hn4m2HoV0qcnRivN3KSqc+FyCHjoVQ==",
"version": "0.27.2",
"resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.2.tgz",
"integrity": "sha512-ZxtijOmlQCBWGwbVmwOF/UCzuGIbUkqB1faQRf5akQmxRJ1ujusWsb3CVfk/9iZKr2L5SMU5wPBi1UWbvL+VQA==",
"cpu": [
"x64"
],
@@ -1547,9 +1547,9 @@
}
},
"node_modules/@esbuild/linux-x64": {
"version": "0.27.1",
"resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.1.tgz",
"integrity": "sha512-z3H/HYI9MM0HTv3hQZ81f+AKb+yEoCRlUby1F80vbQ5XdzEMyY/9iNlAmhqiBKw4MJXwfgsh7ERGEOhrM1niMA==",
"version": "0.27.2",
"resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.2.tgz",
"integrity": "sha512-uwp2Tip5aPmH+NRUwTcfLb+W32WXjpFejTIOWZFw/v7/KnpCDKG66u4DLcurQpiYTiYwQ9B7KOeMJvLCu/OvbA==",
"cpu": [
"x64"
],
@@ -1894,9 +1894,9 @@
}
},
"node_modules/@eslint/js": {
"version": "9.39.1",
"resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.39.1.tgz",
"integrity": "sha512-S26Stp4zCy88tH94QbBv3XCuzRQiZ9yXofEILmglYTh/Ug/a9/umqvgFtYBAo3Lp0nsI/5/qH1CCrbdK3AP1Tw==",
"version": "9.39.2",
"resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.39.2.tgz",
"integrity": "sha512-q1mjIoW1VX4IvSocvM/vbTiveKC4k9eLrajNEuSsmjymSDEbpGddtpfOoN7YGAqBK3NG+uqo8ia4PDTt8buCYA==",
"dev": true,
"license": "MIT",
"engines": {
@@ -3860,9 +3860,9 @@
}
},
"node_modules/@posthog/core": {
"version": "1.7.1",
"resolved": "https://registry.npmjs.org/@posthog/core/-/core-1.7.1.tgz",
"integrity": "sha512-kjK0eFMIpKo9GXIbts8VtAknsoZ18oZorANdtuTj1CbgS28t4ZVq//HAWhnxEuXRTrtkd+SUJ6Ux3j2Af8NCuA==",
"version": "1.8.0",
"resolved": "https://registry.npmjs.org/@posthog/core/-/core-1.8.0.tgz",
"integrity": "sha512-SfmG1EdbR+2zpQccgBUxM/snCROB9WGkY7VH1r9iaoTNqoaN9IkmIEA/07cZLY4DxVP8jt6Vdfe3s84xksac1g==",
"license": "MIT",
"dependencies": {
"cross-spawn": "^7.0.6"
@@ -4107,16 +4107,16 @@
}
},
"node_modules/@rolldown/pluginutils": {
"version": "1.0.0-beta.50",
"resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-beta.50.tgz",
"integrity": "sha512-5e76wQiQVeL1ICOZVUg4LSOVYg9jyhGCin+icYozhsUzM+fHE7kddi1bdiE0jwVqTfkjba3jUFbEkoC9WkdvyA==",
"version": "1.0.0-beta.53",
"resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-beta.53.tgz",
"integrity": "sha512-vENRlFU4YbrwVqNDZ7fLvy+JR1CRkyr01jhSiDpE1u6py3OMzQfztQU2jxykW3ALNxO4kSlqIDeYyD0Y9RcQeQ==",
"dev": true,
"license": "MIT"
},
"node_modules/@rollup/rollup-darwin-arm64": {
"version": "4.53.3",
"resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.53.3.tgz",
"integrity": "sha512-Nr7SlQeqIBpOV6BHHGZgYBuSdanCXuw09hon14MGOLGmXAFYjx1wNvquVPmpZnl0tLjg25dEdr4IQ6GgyToCUA==",
"version": "4.53.5",
"resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.53.5.tgz",
"integrity": "sha512-S87zZPBmRO6u1YXQLwpveZm4JfPpAa6oHBX7/ghSiGH3rz/KDgAu1rKdGutV+WUI6tKDMbaBJomhnT30Y2t4VQ==",
"cpu": [
"arm64"
],
@@ -4127,9 +4127,9 @@
]
},
"node_modules/@rollup/rollup-darwin-x64": {
"version": "4.53.3",
"resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.53.3.tgz",
"integrity": "sha512-DZ8N4CSNfl965CmPktJ8oBnfYr3F8dTTNBQkRlffnUarJ2ohudQD17sZBa097J8xhQ26AwhHJ5mvUyQW8ddTsQ==",
"version": "4.53.5",
"resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.53.5.tgz",
"integrity": "sha512-YTbnsAaHo6VrAczISxgpTva8EkfQus0VPEVJCEaboHtZRIb6h6j0BNxRBOwnDciFTZLDPW5r+ZBmhL/+YpTZgA==",
"cpu": [
"x64"
],
@@ -4140,9 +4140,9 @@
]
},
"node_modules/@rollup/rollup-linux-x64-gnu": {
"version": "4.53.3",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.53.3.tgz",
"integrity": "sha512-3EhFi1FU6YL8HTUJZ51imGJWEX//ajQPfqWLI3BQq4TlvHy4X0MOr5q3D2Zof/ka0d5FNdPwZXm3Yyib/UEd+w==",
"version": "4.53.5",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.53.5.tgz",
"integrity": "sha512-Pg6E+oP7GvZ4XwgRJBuSXZjcqpIW3yCBhK4BcsANvb47qMvAbCjR6E+1a/U2WXz1JJxp9/4Dno3/iSJLcm5auw==",
"cpu": [
"x64"
],
@@ -4179,20 +4179,20 @@
}
},
"node_modules/@shikijs/engine-javascript": {
"version": "3.19.0",
"resolved": "https://registry.npmjs.org/@shikijs/engine-javascript/-/engine-javascript-3.19.0.tgz",
"integrity": "sha512-ZfWJNm2VMhKkQIKT9qXbs76RRcT0SF/CAvEz0+RkpUDAoDaCx0uFdCGzSRiD9gSlhm6AHkjdieOBJMaO2eC1rQ==",
"version": "3.20.0",
"resolved": "https://registry.npmjs.org/@shikijs/engine-javascript/-/engine-javascript-3.20.0.tgz",
"integrity": "sha512-OFx8fHAZuk7I42Z9YAdZ95To6jDePQ9Rnfbw9uSRTSbBhYBp1kEOKv/3jOimcj3VRUKusDYM6DswLauwfhboLg==",
"license": "MIT",
"dependencies": {
"@shikijs/types": "3.19.0",
"@shikijs/types": "3.20.0",
"@shikijs/vscode-textmate": "^10.0.2",
"oniguruma-to-es": "^4.3.4"
}
},
"node_modules/@shikijs/engine-javascript/node_modules/@shikijs/types": {
"version": "3.19.0",
"resolved": "https://registry.npmjs.org/@shikijs/types/-/types-3.19.0.tgz",
"integrity": "sha512-Z2hdeEQlzuntf/BZpFG8a+Fsw9UVXdML7w0o3TgSXV3yNESGon+bs9ITkQb3Ki7zxoXOOu5oJWqZ2uto06V9iQ==",
"version": "3.20.0",
"resolved": "https://registry.npmjs.org/@shikijs/types/-/types-3.20.0.tgz",
"integrity": "sha512-lhYAATn10nkZcBQ0BlzSbJA3wcmL5MXUUF8d2Zzon6saZDlToKaiRX60n2+ZaHJCmXEcZRWNzn+k9vplr8Jhsw==",
"license": "MIT",
"dependencies": {
"@shikijs/vscode-textmate": "^10.0.2",
@@ -4200,16 +4200,35 @@
}
},
"node_modules/@shikijs/engine-oniguruma": {
"version": "3.19.0",
"resolved": "https://registry.npmjs.org/@shikijs/engine-oniguruma/-/engine-oniguruma-3.19.0.tgz",
"integrity": "sha512-1hRxtYIJfJSZeM5ivbUXv9hcJP3PWRo5prG/V2sWwiubUKTa+7P62d2qxCW8jiVFX4pgRHhnHNp+qeR7Xl+6kg==",
"version": "3.20.0",
"resolved": "https://registry.npmjs.org/@shikijs/engine-oniguruma/-/engine-oniguruma-3.20.0.tgz",
"integrity": "sha512-Yx3gy7xLzM0ZOjqoxciHjA7dAt5tyzJE3L4uQoM83agahy+PlW244XJSrmJRSBvGYELDhYXPacD4R/cauV5bzQ==",
"license": "MIT",
"dependencies": {
"@shikijs/types": "3.19.0",
"@shikijs/types": "3.20.0",
"@shikijs/vscode-textmate": "^10.0.2"
}
},
"node_modules/@shikijs/engine-oniguruma/node_modules/@shikijs/types": {
"version": "3.20.0",
"resolved": "https://registry.npmjs.org/@shikijs/types/-/types-3.20.0.tgz",
"integrity": "sha512-lhYAATn10nkZcBQ0BlzSbJA3wcmL5MXUUF8d2Zzon6saZDlToKaiRX60n2+ZaHJCmXEcZRWNzn+k9vplr8Jhsw==",
"license": "MIT",
"dependencies": {
"@shikijs/vscode-textmate": "^10.0.2",
"@types/hast": "^3.0.4"
}
},
"node_modules/@shikijs/langs": {
"version": "3.19.0",
"resolved": "https://registry.npmjs.org/@shikijs/langs/-/langs-3.19.0.tgz",
"integrity": "sha512-dBMFzzg1QiXqCVQ5ONc0z2ebyoi5BKz+MtfByLm0o5/nbUu3Iz8uaTCa5uzGiscQKm7lVShfZHU1+OG3t5hgwg==",
"license": "MIT",
"dependencies": {
"@shikijs/types": "3.19.0"
}
},
"node_modules/@shikijs/langs/node_modules/@shikijs/types": {
"version": "3.19.0",
"resolved": "https://registry.npmjs.org/@shikijs/types/-/types-3.19.0.tgz",
"integrity": "sha512-Z2hdeEQlzuntf/BZpFG8a+Fsw9UVXdML7w0o3TgSXV3yNESGon+bs9ITkQb3Ki7zxoXOOu5oJWqZ2uto06V9iQ==",
@@ -4219,24 +4238,15 @@
"@types/hast": "^3.0.4"
}
},
"node_modules/@shikijs/langs": {
"version": "3.15.0",
"resolved": "https://registry.npmjs.org/@shikijs/langs/-/langs-3.15.0.tgz",
"integrity": "sha512-WpRvEFvkVvO65uKYW4Rzxs+IG0gToyM8SARQMtGGsH4GDMNZrr60qdggXrFOsdfOVssG/QQGEl3FnJ3EZ+8w8A==",
"license": "MIT",
"dependencies": {
"@shikijs/types": "3.15.0"
}
},
"node_modules/@shikijs/markdown-it": {
"version": "3.19.0",
"resolved": "https://registry.npmjs.org/@shikijs/markdown-it/-/markdown-it-3.19.0.tgz",
"integrity": "sha512-T9Lt3pPZoK5uu7t2/jQ6OzhwZ9FPBtZhHKSk62IdPaJkj2gVPeRWSJb825dpliSFvURp4JQvcInBkrJZYmhynQ==",
"version": "3.20.0",
"resolved": "https://registry.npmjs.org/@shikijs/markdown-it/-/markdown-it-3.20.0.tgz",
"integrity": "sha512-2zX7wC0ow3zJsUr29tCoSYeKrGkI+RuOyAGeMiCf5FT4Qq36/cVAzhNBxjR8UQ49bhte5R1JKw/KLpAFEbbbkg==",
"dev": true,
"license": "MIT",
"dependencies": {
"markdown-it": "^14.1.0",
"shiki": "3.19.0"
"shiki": "3.20.0"
},
"peerDependencies": {
"markdown-it-async": "^2.2.0"
@@ -4248,12 +4258,22 @@
}
},
"node_modules/@shikijs/themes": {
"version": "3.15.0",
"resolved": "https://registry.npmjs.org/@shikijs/themes/-/themes-3.15.0.tgz",
"integrity": "sha512-8ow2zWb1IDvCKjYb0KiLNrK4offFdkfNVPXb1OZykpLCzRU6j+efkY+Y7VQjNlNFXonSw+4AOdGYtmqykDbRiQ==",
"version": "3.19.0",
"resolved": "https://registry.npmjs.org/@shikijs/themes/-/themes-3.19.0.tgz",
"integrity": "sha512-H36qw+oh91Y0s6OlFfdSuQ0Ld+5CgB/VE6gNPK+Hk4VRbVG/XQgkjnt4KzfnnoO6tZPtKJKHPjwebOCfjd6F8A==",
"license": "MIT",
"dependencies": {
"@shikijs/types": "3.15.0"
"@shikijs/types": "3.19.0"
}
},
"node_modules/@shikijs/themes/node_modules/@shikijs/types": {
"version": "3.19.0",
"resolved": "https://registry.npmjs.org/@shikijs/types/-/types-3.19.0.tgz",
"integrity": "sha512-Z2hdeEQlzuntf/BZpFG8a+Fsw9UVXdML7w0o3TgSXV3yNESGon+bs9ITkQb3Ki7zxoXOOu5oJWqZ2uto06V9iQ==",
"license": "MIT",
"dependencies": {
"@shikijs/vscode-textmate": "^10.0.2",
"@types/hast": "^3.0.4"
}
},
"node_modules/@shikijs/transformers": {
@@ -5164,9 +5184,9 @@
}
},
"node_modules/@swc/core-darwin-arm64": {
"version": "1.15.3",
"resolved": "https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.15.3.tgz",
"integrity": "sha512-AXfeQn0CvcQ4cndlIshETx6jrAM45oeUrK8YeEY6oUZU/qzz0Id0CyvlEywxkWVC81Ajpd8TQQ1fW5yx6zQWkQ==",
"version": "1.15.5",
"resolved": "https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.15.5.tgz",
"integrity": "sha512-RvdpUcXrIz12yONzOdQrJbEnq23cOc2IHOU1eB8kPxPNNInlm4YTzZEA3zf3PusNpZZLxwArPVLCg0QsFQoTYw==",
"cpu": [
"arm64"
],
@@ -5180,9 +5200,9 @@
}
},
"node_modules/@swc/core-darwin-x64": {
"version": "1.15.3",
"resolved": "https://registry.npmjs.org/@swc/core-darwin-x64/-/core-darwin-x64-1.15.3.tgz",
"integrity": "sha512-p68OeCz1ui+MZYG4wmfJGvcsAcFYb6Sl25H9TxWl+GkBgmNimIiRdnypK9nBGlqMZAcxngNPtnG3kEMNnvoJ2A==",
"version": "1.15.5",
"resolved": "https://registry.npmjs.org/@swc/core-darwin-x64/-/core-darwin-x64-1.15.5.tgz",
"integrity": "sha512-ufJnz3UAff/8G5OfqZZc5cTQfGtXyXVLTB8TGT0xjkvEbfFg8jZUMDBnZT/Cn0k214JhMjiLCNl0A8aY/OKsYQ==",
"cpu": [
"x64"
],
@@ -5247,9 +5267,9 @@
}
},
"node_modules/@swc/core-linux-x64-gnu": {
"version": "1.15.3",
"resolved": "https://registry.npmjs.org/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.15.3.tgz",
"integrity": "sha512-aKttAZnz8YB1VJwPQZtyU8Uk0BfMP63iDMkvjhJzRZVgySmqt/apWSdnoIcZlUoGheBrcqbMC17GGUmur7OT5A==",
"version": "1.15.5",
"resolved": "https://registry.npmjs.org/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.15.5.tgz",
"integrity": "sha512-98kuPS0lZVgjmc/2uTm39r1/OfwKM0PM13ZllOAWi5avJVjRd/j1xA9rKeUzHDWt+ocH9mTCQsAT1jjKSq45bg==",
"cpu": [
"x64"
],
@@ -6032,9 +6052,9 @@
"license": "MIT"
},
"node_modules/@types/node": {
"version": "25.0.0",
"resolved": "https://registry.npmjs.org/@types/node/-/node-25.0.0.tgz",
"integrity": "sha512-rl78HwuZlaDIUSeUKkmogkhebA+8K1Hy7tddZuJ3D0xV8pZSfsYGTsliGUol1JPzu9EKnTxPC4L1fiWouStRew==",
"version": "25.0.3",
"resolved": "https://registry.npmjs.org/@types/node/-/node-25.0.3.tgz",
"integrity": "sha512-W609buLVRVmeW693xKfzHeIV6nJGGz98uCPfeXI1ELMLXVeKYZ9m15fAMSaUPBHYLGFsVRcMmSCksQOrZV9BYA==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -6146,17 +6166,17 @@
"license": "MIT"
},
"node_modules/@typescript-eslint/eslint-plugin": {
"version": "8.49.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.49.0.tgz",
"integrity": "sha512-JXij0vzIaTtCwu6SxTh8qBc66kmf1xs7pI4UOiMDFVct6q86G0Zs7KRcEoJgY3Cav3x5Tq0MF5jwgpgLqgKG3A==",
"version": "8.50.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.50.0.tgz",
"integrity": "sha512-O7QnmOXYKVtPrfYzMolrCTfkezCJS9+ljLdKW/+DCvRsc3UAz+sbH6Xcsv7p30+0OwUbeWfUDAQE0vpabZ3QLg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@eslint-community/regexpp": "^4.10.0",
"@typescript-eslint/scope-manager": "8.49.0",
"@typescript-eslint/type-utils": "8.49.0",
"@typescript-eslint/utils": "8.49.0",
"@typescript-eslint/visitor-keys": "8.49.0",
"@typescript-eslint/scope-manager": "8.50.0",
"@typescript-eslint/type-utils": "8.50.0",
"@typescript-eslint/utils": "8.50.0",
"@typescript-eslint/visitor-keys": "8.50.0",
"ignore": "^7.0.0",
"natural-compare": "^1.4.0",
"ts-api-utils": "^2.1.0"
@@ -6169,22 +6189,22 @@
"url": "https://opencollective.com/typescript-eslint"
},
"peerDependencies": {
"@typescript-eslint/parser": "^8.49.0",
"@typescript-eslint/parser": "^8.50.0",
"eslint": "^8.57.0 || ^9.0.0",
"typescript": ">=4.8.4 <6.0.0"
}
},
"node_modules/@typescript-eslint/parser": {
"version": "8.49.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.49.0.tgz",
"integrity": "sha512-N9lBGA9o9aqb1hVMc9hzySbhKibHmB+N3IpoShyV6HyQYRGIhlrO5rQgttypi+yEeKsKI4idxC8Jw6gXKD4THA==",
"version": "8.50.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.50.0.tgz",
"integrity": "sha512-6/cmF2piao+f6wSxUsJLZjck7OQsYyRtcOZS02k7XINSNlz93v6emM8WutDQSXnroG2xwYlEVHJI+cPA7CPM3Q==",
"dev": true,
"license": "MIT",
"dependencies": {
"@typescript-eslint/scope-manager": "8.49.0",
"@typescript-eslint/types": "8.49.0",
"@typescript-eslint/typescript-estree": "8.49.0",
"@typescript-eslint/visitor-keys": "8.49.0",
"@typescript-eslint/scope-manager": "8.50.0",
"@typescript-eslint/types": "8.50.0",
"@typescript-eslint/typescript-estree": "8.50.0",
"@typescript-eslint/visitor-keys": "8.50.0",
"debug": "^4.3.4"
},
"engines": {
@@ -6200,14 +6220,14 @@
}
},
"node_modules/@typescript-eslint/project-service": {
"version": "8.49.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.49.0.tgz",
"integrity": "sha512-/wJN0/DKkmRUMXjZUXYZpD1NEQzQAAn9QWfGwo+Ai8gnzqH7tvqS7oNVdTjKqOcPyVIdZdyCMoqN66Ia789e7g==",
"version": "8.50.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.50.0.tgz",
"integrity": "sha512-Cg/nQcL1BcoTijEWyx4mkVC56r8dj44bFDvBdygifuS20f3OZCHmFbjF34DPSi07kwlFvqfv/xOLnJ5DquxSGQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@typescript-eslint/tsconfig-utils": "^8.49.0",
"@typescript-eslint/types": "^8.49.0",
"@typescript-eslint/tsconfig-utils": "^8.50.0",
"@typescript-eslint/types": "^8.50.0",
"debug": "^4.3.4"
},
"engines": {
@@ -6222,14 +6242,14 @@
}
},
"node_modules/@typescript-eslint/scope-manager": {
"version": "8.49.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.49.0.tgz",
"integrity": "sha512-npgS3zi+/30KSOkXNs0LQXtsg9ekZ8OISAOLGWA/ZOEn0ZH74Ginfl7foziV8DT+D98WfQ5Kopwqb/PZOaIJGg==",
"version": "8.50.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.50.0.tgz",
"integrity": "sha512-xCwfuCZjhIqy7+HKxBLrDVT5q/iq7XBVBXLn57RTIIpelLtEIZHXAF/Upa3+gaCpeV1NNS5Z9A+ID6jn50VD4A==",
"dev": true,
"license": "MIT",
"dependencies": {
"@typescript-eslint/types": "8.49.0",
"@typescript-eslint/visitor-keys": "8.49.0"
"@typescript-eslint/types": "8.50.0",
"@typescript-eslint/visitor-keys": "8.50.0"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -6240,9 +6260,9 @@
}
},
"node_modules/@typescript-eslint/tsconfig-utils": {
"version": "8.49.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.49.0.tgz",
"integrity": "sha512-8prixNi1/6nawsRYxet4YOhnbW+W9FK/bQPxsGB1D3ZrDzbJ5FXw5XmzxZv82X3B+ZccuSxo/X8q9nQ+mFecWA==",
"version": "8.50.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.50.0.tgz",
"integrity": "sha512-vxd3G/ybKTSlm31MOA96gqvrRGv9RJ7LGtZCn2Vrc5htA0zCDvcMqUkifcjrWNNKXHUU3WCkYOzzVSFBd0wa2w==",
"dev": true,
"license": "MIT",
"engines": {
@@ -6257,15 +6277,15 @@
}
},
"node_modules/@typescript-eslint/type-utils": {
"version": "8.49.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.49.0.tgz",
"integrity": "sha512-KTExJfQ+svY8I10P4HdxKzWsvtVnsuCifU5MvXrRwoP2KOlNZ9ADNEWWsQTJgMxLzS5VLQKDjkCT/YzgsnqmZg==",
"version": "8.50.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.50.0.tgz",
"integrity": "sha512-7OciHT2lKCewR0mFoBrvZJ4AXTMe/sYOe87289WAViOocEmDjjv8MvIOT2XESuKj9jp8u3SZYUSh89QA4S1kQw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@typescript-eslint/types": "8.49.0",
"@typescript-eslint/typescript-estree": "8.49.0",
"@typescript-eslint/utils": "8.49.0",
"@typescript-eslint/types": "8.50.0",
"@typescript-eslint/typescript-estree": "8.50.0",
"@typescript-eslint/utils": "8.50.0",
"debug": "^4.3.4",
"ts-api-utils": "^2.1.0"
},
@@ -6282,9 +6302,9 @@
}
},
"node_modules/@typescript-eslint/types": {
"version": "8.49.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.49.0.tgz",
"integrity": "sha512-e9k/fneezorUo6WShlQpMxXh8/8wfyc+biu6tnAqA81oWrEic0k21RHzP9uqqpyBBeBKu4T+Bsjy9/b8u7obXQ==",
"version": "8.50.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.50.0.tgz",
"integrity": "sha512-iX1mgmGrXdANhhITbpp2QQM2fGehBse9LbTf0sidWK6yg/NE+uhV5dfU1g6EYPlcReYmkE9QLPq/2irKAmtS9w==",
"dev": true,
"license": "MIT",
"engines": {
@@ -6296,16 +6316,16 @@
}
},
"node_modules/@typescript-eslint/typescript-estree": {
"version": "8.49.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.49.0.tgz",
"integrity": "sha512-jrLdRuAbPfPIdYNppHJ/D0wN+wwNfJ32YTAm10eJVsFmrVpXQnDWBn8niCSMlWjvml8jsce5E/O+86IQtTbJWA==",
"version": "8.50.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.50.0.tgz",
"integrity": "sha512-W7SVAGBR/IX7zm1t70Yujpbk+zdPq/u4soeFSknWFdXIFuWsBGBOUu/Tn/I6KHSKvSh91OiMuaSnYp3mtPt5IQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@typescript-eslint/project-service": "8.49.0",
"@typescript-eslint/tsconfig-utils": "8.49.0",
"@typescript-eslint/types": "8.49.0",
"@typescript-eslint/visitor-keys": "8.49.0",
"@typescript-eslint/project-service": "8.50.0",
"@typescript-eslint/tsconfig-utils": "8.50.0",
"@typescript-eslint/types": "8.50.0",
"@typescript-eslint/visitor-keys": "8.50.0",
"debug": "^4.3.4",
"minimatch": "^9.0.4",
"semver": "^7.6.0",
@@ -6324,16 +6344,16 @@
}
},
"node_modules/@typescript-eslint/utils": {
"version": "8.49.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.49.0.tgz",
"integrity": "sha512-N3W7rJw7Rw+z1tRsHZbK395TWSYvufBXumYtEGzypgMUthlg0/hmCImeA8hgO2d2G4pd7ftpxxul2J8OdtdaFA==",
"version": "8.50.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.50.0.tgz",
"integrity": "sha512-87KgUXET09CRjGCi2Ejxy3PULXna63/bMYv72tCAlDJC3Yqwln0HiFJ3VJMst2+mEtNtZu5oFvX4qJGjKsnAgg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@eslint-community/eslint-utils": "^4.7.0",
"@typescript-eslint/scope-manager": "8.49.0",
"@typescript-eslint/types": "8.49.0",
"@typescript-eslint/typescript-estree": "8.49.0"
"@typescript-eslint/scope-manager": "8.50.0",
"@typescript-eslint/types": "8.50.0",
"@typescript-eslint/typescript-estree": "8.50.0"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -6348,13 +6368,13 @@
}
},
"node_modules/@typescript-eslint/visitor-keys": {
"version": "8.49.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.49.0.tgz",
"integrity": "sha512-LlKaciDe3GmZFphXIc79THF/YYBugZ7FS1pO581E/edlVVNbZKDy93evqmrfQ9/Y4uN0vVhX4iuchq26mK/iiA==",
"version": "8.50.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.50.0.tgz",
"integrity": "sha512-Xzmnb58+Db78gT/CCj/PVCvK+zxbnsw6F+O1oheYszJbBSdEjVhQi3C/Xttzxgi/GLmpvOggRs1RFpiJ8+c34Q==",
"dev": true,
"license": "MIT",
"dependencies": {
"@typescript-eslint/types": "8.49.0",
"@typescript-eslint/types": "8.50.0",
"eslint-visitor-keys": "^4.2.1"
},
"engines": {
@@ -6372,19 +6392,19 @@
"license": "ISC"
},
"node_modules/@vitejs/plugin-vue": {
"version": "6.0.2",
"resolved": "https://registry.npmjs.org/@vitejs/plugin-vue/-/plugin-vue-6.0.2.tgz",
"integrity": "sha512-iHmwV3QcVGGvSC1BG5bZ4z6iwa1SOpAPWmnjOErd4Ske+lZua5K9TtAVdx0gMBClJ28DViCbSmZitjWZsWO3LA==",
"version": "6.0.3",
"resolved": "https://registry.npmjs.org/@vitejs/plugin-vue/-/plugin-vue-6.0.3.tgz",
"integrity": "sha512-TlGPkLFLVOY3T7fZrwdvKpjprR3s4fxRln0ORDo1VQ7HHyxJwTlrjKU3kpVWTlaAjIEuCTokmjkZnr8Tpc925w==",
"dev": true,
"license": "MIT",
"dependencies": {
"@rolldown/pluginutils": "1.0.0-beta.50"
"@rolldown/pluginutils": "1.0.0-beta.53"
},
"engines": {
"node": "^20.19.0 || >=22.12.0"
},
"peerDependencies": {
"vite": "^5.0.0 || ^6.0.0 || ^7.0.0",
"vite": "^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-0",
"vue": "^3.2.25"
}
},
@@ -9986,9 +10006,9 @@
}
},
"node_modules/eslint": {
"version": "9.39.1",
"resolved": "https://registry.npmjs.org/eslint/-/eslint-9.39.1.tgz",
"integrity": "sha512-BhHmn2yNOFA9H9JmmIVKJmd288g9hrVRDkdoIgRCRuSySRUHH7r/DI6aAXW9T1WwUuY3DFgrcaqB+deURBLR5g==",
"version": "9.39.2",
"resolved": "https://registry.npmjs.org/eslint/-/eslint-9.39.2.tgz",
"integrity": "sha512-LEyamqS7W5HB3ujJyvi0HQK/dtVINZvd5mAAp9eT5S/ujByGjiZLCzPcHVzuXbpJDJF/cxwHlfceVUDZ2lnSTw==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -9998,7 +10018,7 @@
"@eslint/config-helpers": "^0.4.2",
"@eslint/core": "^0.17.0",
"@eslint/eslintrc": "^3.3.1",
"@eslint/js": "9.39.1",
"@eslint/js": "9.39.2",
"@eslint/plugin-kit": "^0.4.1",
"@humanfs/node": "^0.16.6",
"@humanwhocodes/module-importer": "^1.0.1",
@@ -17070,12 +17090,12 @@
}
},
"node_modules/posthog-js": {
"version": "1.304.0",
"resolved": "https://registry.npmjs.org/posthog-js/-/posthog-js-1.304.0.tgz",
"integrity": "sha512-revqoppmJ5y1Oa9iRUb3P8w1htfxZdrSAe+elSNMxvl7wxY62qWN7Q0kE5Sk81o1qLHa6drPhVKa/dppWOfSUw==",
"version": "1.308.0",
"resolved": "https://registry.npmjs.org/posthog-js/-/posthog-js-1.308.0.tgz",
"integrity": "sha512-6zlYlltfEV8GTSJMbb6+TEIon6fTY/pO0G6HATWnUmBQ06oY9F8PCtGdWQrYgFtJKuXby5Quzz9caN/V8D35XQ==",
"license": "SEE LICENSE IN LICENSE",
"dependencies": {
"@posthog/core": "1.7.1",
"@posthog/core": "1.8.0",
"core-js": "^3.38.1",
"fflate": "^0.4.8",
"preact": "^10.19.3",
@@ -18179,9 +18199,9 @@
}
},
"node_modules/rolldown-vite": {
"version": "7.2.10",
"resolved": "https://registry.npmjs.org/rolldown-vite/-/rolldown-vite-7.2.10.tgz",
"integrity": "sha512-v2ekZjuVLfumjp1Cr7LSQM1n2oOo3+gMruhOgT0Q4/cQ2J3nkTDLTAWLQQ86UHMbFYyVIN1wGh8BEZbvjkyctg==",
"version": "7.2.11",
"resolved": "https://registry.npmjs.org/rolldown-vite/-/rolldown-vite-7.2.11.tgz",
"integrity": "sha512-WwCantGLbztBNipg+WwcA+a1c3Mo9LPY0VZ35IFXnUsQyZzsMHtzmy+H5PqELPj3AOauI9L/HMCjoJZp3i9eFg==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -18495,13 +18515,6 @@
"node": "^20.19.0 || >=22.12.0"
}
},
"node_modules/rolldown-vite/node_modules/@rolldown/pluginutils": {
"version": "1.0.0-beta.53",
"resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-beta.53.tgz",
"integrity": "sha512-vENRlFU4YbrwVqNDZ7fLvy+JR1CRkyr01jhSiDpE1u6py3OMzQfztQU2jxykW3ALNxO4kSlqIDeYyD0Y9RcQeQ==",
"dev": true,
"license": "MIT"
},
"node_modules/rolldown-vite/node_modules/fdir": {
"version": "6.5.0",
"resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz",
@@ -18768,9 +18781,9 @@
"license": "MIT"
},
"node_modules/sass": {
"version": "1.96.0",
"resolved": "https://registry.npmjs.org/sass/-/sass-1.96.0.tgz",
"integrity": "sha512-8u4xqqUeugGNCYwr9ARNtQKTOj4KmYiJAVKXf2CTIivTCR51j96htbMKWDru8H5SaQWpyVgTfOF8Ylyf5pun1Q==",
"version": "1.97.0",
"resolved": "https://registry.npmjs.org/sass/-/sass-1.97.0.tgz",
"integrity": "sha512-KR0igP1z4avUJetEuIeOdDlwaUDvkH8wSx7FdSjyYBS3dpyX3TzHfAMO0G1Q4/3cdjcmi3r7idh+KCmKqS+KeQ==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -18874,55 +18887,55 @@
}
},
"node_modules/shiki": {
"version": "3.19.0",
"resolved": "https://registry.npmjs.org/shiki/-/shiki-3.19.0.tgz",
"integrity": "sha512-77VJr3OR/VUZzPiStyRhADmO2jApMM0V2b1qf0RpfWya8Zr1PeZev5AEpPGAAKWdiYUtcZGBE4F5QvJml1PvWA==",
"version": "3.20.0",
"resolved": "https://registry.npmjs.org/shiki/-/shiki-3.20.0.tgz",
"integrity": "sha512-kgCOlsnyWb+p0WU+01RjkCH+eBVsjL1jOwUYWv0YDWkM2/A46+LDKVs5yZCUXjJG6bj4ndFoAg5iLIIue6dulg==",
"license": "MIT",
"dependencies": {
"@shikijs/core": "3.19.0",
"@shikijs/engine-javascript": "3.19.0",
"@shikijs/engine-oniguruma": "3.19.0",
"@shikijs/langs": "3.19.0",
"@shikijs/themes": "3.19.0",
"@shikijs/types": "3.19.0",
"@shikijs/core": "3.20.0",
"@shikijs/engine-javascript": "3.20.0",
"@shikijs/engine-oniguruma": "3.20.0",
"@shikijs/langs": "3.20.0",
"@shikijs/themes": "3.20.0",
"@shikijs/types": "3.20.0",
"@shikijs/vscode-textmate": "^10.0.2",
"@types/hast": "^3.0.4"
}
},
"node_modules/shiki/node_modules/@shikijs/core": {
"version": "3.19.0",
"resolved": "https://registry.npmjs.org/@shikijs/core/-/core-3.19.0.tgz",
"integrity": "sha512-L7SrRibU7ZoYi1/TrZsJOFAnnHyLTE1SwHG1yNWjZIVCqjOEmCSuK2ZO9thnRbJG6TOkPp+Z963JmpCNw5nzvA==",
"version": "3.20.0",
"resolved": "https://registry.npmjs.org/@shikijs/core/-/core-3.20.0.tgz",
"integrity": "sha512-f2ED7HYV4JEk827mtMDwe/yQ25pRiXZmtHjWF8uzZKuKiEsJR7Ce1nuQ+HhV9FzDcbIo4ObBCD9GPTzNuy9S1g==",
"license": "MIT",
"dependencies": {
"@shikijs/types": "3.19.0",
"@shikijs/types": "3.20.0",
"@shikijs/vscode-textmate": "^10.0.2",
"@types/hast": "^3.0.4",
"hast-util-to-html": "^9.0.5"
}
},
"node_modules/shiki/node_modules/@shikijs/langs": {
"version": "3.19.0",
"resolved": "https://registry.npmjs.org/@shikijs/langs/-/langs-3.19.0.tgz",
"integrity": "sha512-dBMFzzg1QiXqCVQ5ONc0z2ebyoi5BKz+MtfByLm0o5/nbUu3Iz8uaTCa5uzGiscQKm7lVShfZHU1+OG3t5hgwg==",
"version": "3.20.0",
"resolved": "https://registry.npmjs.org/@shikijs/langs/-/langs-3.20.0.tgz",
"integrity": "sha512-le+bssCxcSHrygCWuOrYJHvjus6zhQ2K7q/0mgjiffRbkhM4o1EWu2m+29l0yEsHDbWaWPNnDUTRVVBvBBeKaA==",
"license": "MIT",
"dependencies": {
"@shikijs/types": "3.19.0"
"@shikijs/types": "3.20.0"
}
},
"node_modules/shiki/node_modules/@shikijs/themes": {
"version": "3.19.0",
"resolved": "https://registry.npmjs.org/@shikijs/themes/-/themes-3.19.0.tgz",
"integrity": "sha512-H36qw+oh91Y0s6OlFfdSuQ0Ld+5CgB/VE6gNPK+Hk4VRbVG/XQgkjnt4KzfnnoO6tZPtKJKHPjwebOCfjd6F8A==",
"version": "3.20.0",
"resolved": "https://registry.npmjs.org/@shikijs/themes/-/themes-3.20.0.tgz",
"integrity": "sha512-U1NSU7Sl26Q7ErRvJUouArxfM2euWqq1xaSrbqMu2iqa+tSp0D1Yah8216sDYbdDHw4C8b75UpE65eWorm2erQ==",
"license": "MIT",
"dependencies": {
"@shikijs/types": "3.19.0"
"@shikijs/types": "3.20.0"
}
},
"node_modules/shiki/node_modules/@shikijs/types": {
"version": "3.19.0",
"resolved": "https://registry.npmjs.org/@shikijs/types/-/types-3.19.0.tgz",
"integrity": "sha512-Z2hdeEQlzuntf/BZpFG8a+Fsw9UVXdML7w0o3TgSXV3yNESGon+bs9ITkQb3Ki7zxoXOOu5oJWqZ2uto06V9iQ==",
"version": "3.20.0",
"resolved": "https://registry.npmjs.org/@shikijs/types/-/types-3.20.0.tgz",
"integrity": "sha512-lhYAATn10nkZcBQ0BlzSbJA3wcmL5MXUUF8d2Zzon6saZDlToKaiRX60n2+ZaHJCmXEcZRWNzn+k9vplr8Jhsw==",
"license": "MIT",
"dependencies": {
"@shikijs/vscode-textmate": "^10.0.2",
@@ -20112,16 +20125,16 @@
}
},
"node_modules/typescript-eslint": {
"version": "8.49.0",
"resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.49.0.tgz",
"integrity": "sha512-zRSVH1WXD0uXczCXw+nsdjGPUdx4dfrs5VQoHnUWmv1U3oNlAKv4FUNdLDhVUg+gYn+a5hUESqch//Rv5wVhrg==",
"version": "8.50.0",
"resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.50.0.tgz",
"integrity": "sha512-Q1/6yNUmCpH94fbgMUMg2/BSAr/6U7GBk61kZTv1/asghQOWOjTlp9K8mixS5NcJmm2creY+UFfGeW/+OcA64A==",
"dev": true,
"license": "MIT",
"dependencies": {
"@typescript-eslint/eslint-plugin": "8.49.0",
"@typescript-eslint/parser": "8.49.0",
"@typescript-eslint/typescript-estree": "8.49.0",
"@typescript-eslint/utils": "8.49.0"
"@typescript-eslint/eslint-plugin": "8.50.0",
"@typescript-eslint/parser": "8.50.0",
"@typescript-eslint/typescript-estree": "8.50.0",
"@typescript-eslint/utils": "8.50.0"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -21117,9 +21130,9 @@
}
},
"node_modules/vue-router": {
"version": "4.6.3",
"resolved": "https://registry.npmjs.org/vue-router/-/vue-router-4.6.3.tgz",
"integrity": "sha512-ARBedLm9YlbvQomnmq91Os7ck6efydTSpRP3nuOKCvgJOHNrhRoJDSKtee8kcL1Vf7nz6U+PMBL+hTvR3bTVQg==",
"version": "4.6.4",
"resolved": "https://registry.npmjs.org/vue-router/-/vue-router-4.6.4.tgz",
"integrity": "sha512-Hz9q5sa33Yhduglwz6g9skT8OBPii+4bFn88w6J+J4MfEo4KRRpmiNG/hHHkdbRFlLBOqxN8y8gf2Fb0MTUgVg==",
"license": "MIT",
"dependencies": {
"@vue/devtools-api": "^6.6.4"


@@ -59,17 +59,17 @@
"path-browserify": "^1.0.1",
"pdfjs-dist": "^5.4.449",
"pinia": "^3.0.4",
"posthog-js": "^1.304.0",
"posthog-js": "^1.308.0",
"rapidoc": "^9.3.8",
"semver": "^7.7.3",
"shiki": "^3.19.0",
"shiki": "^3.20.0",
"vue": "^3.5.25",
"vue-axios": "^3.5.2",
"vue-chartjs": "^5.3.3",
"vue-gtag": "^3.6.3",
"vue-i18n": "^11.2.2",
"vue-material-design-icons": "^5.3.1",
"vue-router": "^4.6.3",
"vue-router": "^4.6.4",
"vue-sidebar-menu": "^5.9.1",
"vue-virtual-scroller": "^2.0.0-beta.8",
"vue3-popper": "^1.5.0",
@@ -80,10 +80,10 @@
"devDependencies": {
"@codecov/vite-plugin": "^1.9.1",
"@esbuild-plugins/node-modules-polyfill": "^0.2.2",
"@eslint/js": "^9.39.1",
"@eslint/js": "^9.39.2",
"@playwright/test": "^1.57.0",
"@rushstack/eslint-patch": "^1.14.1",
"@shikijs/markdown-it": "^3.19.0",
"@shikijs/markdown-it": "^3.20.0",
"@storybook/addon-themes": "^9.1.16",
"@storybook/addon-vitest": "^9.1.16",
"@storybook/test-runner": "^0.23.0",
@@ -91,14 +91,14 @@
"@types/humanize-duration": "^3.27.4",
"@types/js-yaml": "^4.0.9",
"@types/moment": "^2.13.0",
"@types/node": "^25.0.0",
"@types/node": "^25.0.3",
"@types/nprogress": "^0.2.3",
"@types/path-browserify": "^1.0.3",
"@types/semver": "^7.7.1",
"@types/testing-library__jest-dom": "^6.0.0",
"@types/testing-library__user-event": "^4.2.0",
"@typescript-eslint/parser": "^8.49.0",
"@vitejs/plugin-vue": "^6.0.2",
"@typescript-eslint/parser": "^8.50.0",
"@vitejs/plugin-vue": "^6.0.3",
"@vitejs/plugin-vue-jsx": "^5.1.2",
"@vitest/browser": "^3.2.4",
"@vitest/coverage-v8": "^3.2.4",
@@ -107,7 +107,7 @@
"@vueuse/router": "^14.1.0",
"change-case": "5.4.4",
"cross-env": "^10.1.0",
"eslint": "^9.39.1",
"eslint": "^9.39.2",
"eslint-plugin-storybook": "^9.1.16",
"eslint-plugin-vue": "^9.33.0",
"globals": "^16.5.0",
@@ -120,29 +120,29 @@
"playwright": "^1.55.0",
"prettier": "^3.7.4",
"rimraf": "^6.1.2",
"rolldown-vite": "^7.2.10",
"rolldown-vite": "^7.2.11",
"rollup-plugin-copy": "^3.5.0",
"sass": "^1.96.0",
"sass": "^1.97.0",
"storybook": "^9.1.16",
"storybook-vue3-router": "^6.0.2",
"ts-node": "^10.9.2",
"typescript": "^5.9.3",
"typescript-eslint": "^8.49.0",
"typescript-eslint": "^8.50.0",
"uuid": "^13.0.0",
"vite": "npm:rolldown-vite@latest",
"vitest": "^3.2.4",
"vue-tsc": "^3.1.8"
},
"optionalDependencies": {
"@esbuild/darwin-arm64": "^0.27.1",
"@esbuild/darwin-x64": "^0.27.1",
"@esbuild/linux-x64": "^0.27.1",
"@rollup/rollup-darwin-arm64": "^4.53.3",
"@rollup/rollup-darwin-x64": "^4.53.3",
"@rollup/rollup-linux-x64-gnu": "^4.53.3",
"@swc/core-darwin-arm64": "^1.15.3",
"@swc/core-darwin-x64": "^1.15.3",
"@swc/core-linux-x64-gnu": "^1.15.3"
"@esbuild/darwin-arm64": "^0.27.2",
"@esbuild/darwin-x64": "^0.27.2",
"@esbuild/linux-x64": "^0.27.2",
"@rollup/rollup-darwin-arm64": "^4.53.5",
"@rollup/rollup-darwin-x64": "^4.53.5",
"@rollup/rollup-linux-x64-gnu": "^4.53.5",
"@swc/core-darwin-arm64": "^1.15.5",
"@swc/core-darwin-x64": "^1.15.5",
"@swc/core-linux-x64-gnu": "^1.15.5"
},
"overrides": {
"bootstrap": {


@@ -204,24 +204,26 @@
className="row-action"
>
<template #default="scope">
<el-button v-if="scope.row.executionId || scope.row.evaluateRunningDate">
<Kicon
:tooltip="$t(`unlock trigger.tooltip.${scope.row.executionId ? 'execution' : 'evaluation'}`)"
placement="left"
@click="triggerToUnlock = scope.row"
>
<LockOff />
</Kicon>
</el-button>
<el-button>
<Kicon
:tooltip="$t('delete trigger')"
placement="left"
@click="confirmDeleteTrigger(scope.row)"
>
<Delete />
</Kicon>
</el-button>
<div class="action-container">
<el-button v-if="scope.row.executionId || scope.row.evaluateRunningDate">
<Kicon
:tooltip="$t(`unlock trigger.tooltip.${scope.row.executionId ? 'execution' : 'evaluation'}`)"
placement="left"
@click="triggerToUnlock = scope.row"
>
<LockOff />
</Kicon>
</el-button>
<el-button>
<Kicon
:tooltip="$t('delete trigger')"
placement="left"
@click="confirmDeleteTrigger(scope.row)"
>
<Delete />
</Kicon>
</el-button>
</div>
</template>
</el-table-column>
<el-table-column :label="$t('backfill')" columnKey="backfill">
@@ -855,6 +857,12 @@
align-items: center;
}
.action-container {
display: flex;
align-items: center;
gap: 5px;
}
.statusIcon {
font-size: large;
}
@@ -927,4 +935,4 @@
}
}
}
</style>
</style>


@@ -25,22 +25,18 @@ export function applyDefaultFilters(
includeScope,
legacyQuery,
}: DefaultFilterOptions = {}): { query: LocationQuery, change: boolean } {
if(currentQuery && Object.keys(currentQuery).length > 0) {
return {
query: currentQuery,
change: false,
}
}
const query = {...currentQuery};
let change = false;
if (namespace === undefined && defaultNamespace() && !hasFilterKey(query, NAMESPACE_FILTER_PREFIX)) {
query[legacyQuery ? "namespace" : `${NAMESPACE_FILTER_PREFIX}[PREFIX]`] = defaultNamespace();
change = true;
}
if (includeScope && !hasFilterKey(query, SCOPE_FILTER_PREFIX)) {
query[legacyQuery ? "scope" : `${SCOPE_FILTER_PREFIX}[EQUALS]`] = "USER";
change = true;
}
const TIME_FILTER_KEYS = /startDate|endDate|timeRange/;
@@ -48,9 +44,10 @@ export function applyDefaultFilters(
if (includeTimeRange && !Object.keys(query).some(key => TIME_FILTER_KEYS.test(key))) {
const defaultDuration = useMiscStore().configs?.chartDefaultDuration ?? "P30D";
query[legacyQuery ? "timeRange" : `${TIME_RANGE_FILTER_PREFIX}[EQUALS]`] = defaultDuration;
change = true;
}
return {query, change: true};
return {query, change};
}
export function useDefaultFilter(


@@ -20,6 +20,9 @@
import {useVueTour} from "../../composables/useVueTour";
import type {BlueprintType} from "../../stores/blueprints"
import {useAuthStore} from "../../override/stores/auth";
import permission from "../../models/permission";
import action from "../../models/action";
const route = useRoute();
const {t} = useI18n();
@@ -29,13 +32,21 @@
const blueprintsStore = useBlueprintsStore();
const coreStore = useCoreStore();
const flowStore = useFlowStore();
const authStore = useAuthStore();
const setupFlow = async () => {
const blueprintId = route.query.blueprintId as string;
const blueprintSource = route.query.blueprintSource as BlueprintType;
const implicitDefaultNamespace = authStore.user.getNamespacesForAction(
permission.FLOW,
action.CREATE,
)[0];
let flowYaml = "";
const id = getRandomID();
const selectedNamespace = (route.query.namespace as string) || defaultNamespace() || "company.team";
const selectedNamespace = (route.query.namespace as string)
?? defaultNamespace()
?? implicitDefaultNamespace
?? "company.team";
if (route.query.copy && flowStore.flow) {
flowYaml = flowStore.flow.source;


@@ -10,15 +10,15 @@
</el-alert>
</div>
<el-form labelPosition="top" :model="inputs" ref="form" @submit.prevent="false">
<InputsForm
:initialInputs="flow.inputs"
:selectedTrigger="selectedTrigger"
:flow="flow"
<InputsForm
:initialInputs="flow.inputs"
:selectedTrigger="selectedTrigger"
:flow="flow"
v-model="inputs"
:executeClicked="executeClicked"
@confirm="onSubmit($refs.form)"
@update:model-value-no-default="values => inputsNoDefaults=values"
@update:checks="values => checks=values"
@update:model-value-no-default="values => inputsNoDefaults=values"
@update:checks="values => checks=values"
/>
<el-collapse v-model="collapseName">
@@ -208,7 +208,7 @@
this.executionLabels
.filter(label => label.key && label.value)
.map(label => `${label.key}:${label.value}`)
)],
), "system.from:ui"],
scheduleDate: this.scheduleDate
});
} else {
@@ -221,7 +221,7 @@
this.executionLabels
.filter(label => label.key && label.value)
.map(label => `${label.key}:${label.value}`)
)],
), "system.from:ui"],
scheduleDate: this.$moment(this.scheduleDate).tz(localStorage.getItem(storageKeys.TIMEZONE_STORAGE_KEY) ?? moment.tz.guess()).toISOString(true),
nextStep: true,
});


@@ -1,7 +1,7 @@
<template>
<span ref="rootContainer">
<!-- Valid -->
<el-button v-if="!errors && !warnings &&!infos" v-bind="$attrs" :link="link" :size="size" type="default" class="success square" disabled>
<el-button v-if="!errors && !warnings && !infos" v-bind="$attrs" :link="link" :size="size" type="default" class="success square" disabled>
<CheckBoldIcon class="text-success" />
</el-button>
@@ -157,6 +157,7 @@
}
&.success {
cursor: default;
border-color: var(--ks-border-success);
}


@@ -5,19 +5,19 @@
<ValidationError
class="validation"
tooltipPlacement="bottom-start"
:errors="flowErrors"
:errors="flowStore.flowErrors"
:warnings="flowWarnings"
:infos="flowInfos"
:infos="flowStore.flowInfos"
/>
<EditorButtons
:isCreating="flowStore.isCreating"
:isReadOnly="isReadOnly"
:isReadOnly="flowStore.isReadOnly"
:canDelete="true"
:isAllowedEdit="isAllowedEdit"
:isAllowedEdit="flowStore.isAllowedEdit"
:haveChange="haveChange"
:flowHaveTasks="Boolean(flowHaveTasks)"
:errors="flowErrors"
:flowHaveTasks="Boolean(flowStore.flowHaveTasks)"
:errors="flowStore.flowErrors"
:warnings="flowWarnings"
@save="save"
@copy="
@@ -49,7 +49,6 @@
import ValidationError from "../flows/ValidationError.vue";
import localUtils from "../../utils/utils";
import {useFlowOutdatedErrors} from "./flowOutdatedErrors";
import {useFlowStore} from "../../stores/flow";
import {useToast} from "../../utils/toast";
@@ -73,22 +72,14 @@
const route = useRoute()
const routeParams = computed(() => route.params)
const {translateError, translateErrorWithKey} = useFlowOutdatedErrors();
// If playground is not defined, enable it by default
const isSettingsPlaygroundEnabled = computed(() => localStorage.getItem("editorPlayground") === "false" ? false : true);
const isReadOnly = computed(() => flowStore.isReadOnly)
const isAllowedEdit = computed(() => flowStore.isAllowedEdit)
const flowHaveTasks = computed(() => flowStore.flowHaveTasks)
const flowErrors = computed(() => flowStore.flowErrors?.map(translateError));
const flowInfos = computed(() => flowStore.flowInfos)
const toast = useToast();
const flowWarnings = computed(() => {
const outdatedWarning =
flowStore.flowValidation?.outdated && !flowStore.isCreating
? [translateErrorWithKey(flowStore.flowValidation?.constraints ?? "")]
? flowStore.flowValidation?.constraints?.split(", ") ?? []
: [];
const deprecationWarnings =


@@ -1,22 +0,0 @@
import {useI18n} from "vue-i18n";
export function useFlowOutdatedErrors(){
const {t} = useI18n();
function translateError(error: string): string {
if(error.startsWith(">>>>")){
const key = error.substring(4).trim();
return translateErrorWithKey(key);
} else {
return error;
}
}
function translateErrorWithKey(key: string): string {
return `${t(key + ".description")} ${t(key + ".details")}`
}
return {
translateError,
translateErrorWithKey
}
}


@@ -1,4 +1,3 @@
<template>
<el-form labelPosition="top" class="w-100">
<template v-if="sortedProperties">
@@ -11,7 +10,7 @@
</template>
<el-collapse v-model="activeNames" v-if="requiredProperties.length && (optionalProperties?.length || deprecatedProperties?.length || connectionProperties?.length)" class="collapse">
<el-collapse-item name="connection" v-if="connectionProperties?.length" :title="t('no_code.sections.connection')">
<el-collapse-item name="connection" v-if="connectionProperties?.length" :title="$t('no_code.sections.connection')">
<template v-for="[fieldKey, fieldSchema] in connectionProperties" :key="fieldKey">
<Wrapper>
<template #tasks>
@@ -20,7 +19,7 @@
</Wrapper>
</template>
</el-collapse-item>
<el-collapse-item name="optional" v-if="optionalProperties?.length" :title="t('no_code.sections.optional')">
<el-collapse-item name="optional" v-if="optionalProperties?.length" :title="$t('no_code.sections.optional')">
<template v-for="[fieldKey, fieldSchema] in optionalProperties" :key="fieldKey">
<Wrapper>
<template #tasks>
@@ -30,7 +29,7 @@
</template>
</el-collapse-item>
<el-collapse-item name="deprecated" v-if="deprecatedProperties?.length" :title="t('no_code.sections.deprecated')">
<el-collapse-item name="deprecated" v-if="deprecatedProperties?.length" :title="$t('no_code.sections.deprecated')">
<template v-for="[fieldKey, fieldSchema] in deprecatedProperties" :key="fieldKey">
<Wrapper>
<template #tasks>
@@ -58,7 +57,6 @@
<script setup lang="ts">
import {computed, inject, ref} from "vue";
import {useI18n} from "vue-i18n";
import TaskDict from "./TaskDict.vue";
import Wrapper from "./Wrapper.vue";
import TaskObjectField from "./TaskObjectField.vue";
@@ -86,8 +84,6 @@
(e: "update:modelValue", value: Model): void;
}>();
const {t} = useI18n();
const activeNames = ref<string[]>([]);
const FIRST_FIELDS = ["id", "forced", "on", "field", "type"];


@@ -2,14 +2,13 @@
<InputText
:modelValue="modelValue"
:disabled
:placeholder="disabled ? t('no_code.version_oss_placeholder') : undefined"
:placeholder="disabled ? $t('no_code.version_oss_placeholder') : undefined"
class="w-100"
/>
</template>
<script setup lang="ts">
import {computed} from "vue";
import {useI18n} from "vue-i18n";
import InputText from "../inputs/InputText.vue";
import {useMiscStore} from "override/stores/misc";
@@ -17,8 +16,6 @@
const disabled = computed(() => miscStore.configs?.edition === "OSS")
const {t} = useI18n()
const modelValue = defineModel<string>({default: ""})
defineOptions({


@@ -1,6 +1,6 @@
<template>
<TopNavBar :title="routeInfo.title" :breadcrumb="routeInfo?.breadcrumb" />
<template v-if="!pluginIsSelected">
<template v-if="isPluginList">
<PluginHome v-if="filteredPlugins" :plugins="filteredPlugins" />
</template>
<DocsLayout v-else>
@@ -133,8 +133,9 @@
const releaseNotesUrl = computed(() => getPluginReleaseUrl(pluginType.value));
const pluginIsSelected = computed(
() => pluginType.value !== undefined && pluginsStore.plugin !== undefined
const isPluginList = computed(
() => typeof route.name === "string" && route.name === "plugins/list"
);
function loadToc() {


@@ -648,6 +648,7 @@
},
async saveAllSettings() {
let refreshWhenSaved = false
const previousDefaultNamespace = localStorage.getItem("defaultNamespace")
for (const key in this.pendingSettings){
const storedKey = this.settingsKeyMapping[key]
switch(key) {
@@ -706,11 +707,36 @@
this.originalSettings = JSON.parse(JSON.stringify(this.pendingSettings));
this.hasUnsavedChanges = false;
this.checkDefaultStates();
// Clear namespace filters from sessionStorage if default namespace changed/cleared
if (previousDefaultNamespace !== this.pendingSettings.defaultNamespace) {
this.clearNamespaceFilters();
}
if(refreshWhenSaved){
document.location.assign(document.location.href)
}
this.$toast().saved(this.$t("settings.label"), undefined, {multiple: true});
},
clearNamespaceFilters() {
Object.keys(sessionStorage)
.filter(key => key.includes("_restore_url"))
.forEach(key => {
const value = sessionStorage.getItem(key);
if (!value) return;
const filters = JSON.parse(value);
const updated = Object.fromEntries(
Object.entries(filters).filter(([k]) => k !== "namespace" && !k.startsWith("filters[namespace]"))
);
if (Object.keys(updated).length) {
sessionStorage.setItem(key, JSON.stringify(updated));
} else {
sessionStorage.removeItem(key);
}
});
},
updateThemeBasedOnSystem() {
if (this.theme === "syncWithSystem") {
Utils.switchTheme(this.miscStore, "syncWithSystem");


@@ -61,6 +61,10 @@ export const useBaseNamespacesStore = () => {
return response.data;
}
async function update(this: any, _: {route: any, payload: any}) {
// NOOP IN OSS
}
async function loadDependencies(this: any, options: {namespace: string}) {
return await axios.get(`${apiUrl()}/namespaces/${options.namespace}/dependencies`);
}
@@ -283,6 +287,7 @@ export const useBaseNamespacesStore = () => {
search,
total,
load,
update,
loadDependencies,
existing,
namespace,


@@ -28,6 +28,10 @@ export class Me {
hasAnyRole() {
return true;
}
getNamespacesForAction(_permission: any, _action: any): string[] {
return [];
}
}
export const useAuthStore = defineStore("auth", {


@@ -195,7 +195,7 @@ export const useFlowStore = defineStore("flow", () => {
return validateFlow({
flow: (isCreating.value ? flowYaml.value : yamlWithNextRevision.value) ?? ""
})
.then((value: {constraints?: any}) => {
.then((value: {constraints?: string}) => {
if (
topologyVisible &&
flowHaveTasks.value &&
@@ -566,7 +566,7 @@ function deleteFlowAndDependencies() {
coreStore.message = {
title: "Couldn't expand subflow",
message: error.response.data.message,
variant: "danger"
variant: "error"
};
}
@@ -644,19 +644,37 @@ function deleteFlowAndDependencies() {
function enableFlowByQuery(options: { namespace: string, id: string }) {
return axios.post(`${apiUrl()}/flows/enable/by-query`, options, {params: options})
}
function deleteFlowByIds(options: { ids: {id: string, namespace: string}[] }) {
return axios.delete(`${apiUrl()}/flows/delete/by-ids`, {data: options.ids})
}
function deleteFlowByQuery(options: { namespace: string, id: string }) {
return axios.delete(`${apiUrl()}/flows/delete/by-query`, {params: options})
}
function validateFlow(options: { flow: string }) {
const flowValidationIssues: FlowValidations = {};
if(isCreating.value) {
const {namespace} = YAML_UTILS.getMetadata(options.flow);
if(authStore.user && !authStore.user.isAllowed(
permission.FLOW,
action.CREATE,
namespace,
)) {
flowValidationIssues.constraints = t("flow creation denied in namespace", {namespace});
}
}
return axios.post(`${apiUrl()}/flows/validate`, options.flow, {...textYamlHeader, withCredentials: true})
.then(response => {
flowValidation.value = response.data[0]
return response.data[0]
const constraintsArray = [response?.data[0]?.constraints, flowValidationIssues.constraints].filter(Boolean)
flowValidation.value = constraintsArray.length === 0 ? {} : {
constraints: constraintsArray.join(", ")
};
return flowValidation.value
})
}
function validateTask(options: { task: string, section: string }) {
return axios.post(`${apiUrl()}/flows/validate/task`, options.task, {...textYamlHeader, withCredentials: true, params: {section: options.section}})
.then(response => {
@@ -752,7 +770,8 @@ function deleteFlowAndDependencies() {
return false;
}
return authStore.user.isAllowed(
return (isCreating.value && authStore.user.hasAnyAction(permission.FLOW, action.UPDATE))
|| authStore.user.isAllowed(
permission.FLOW,
action.UPDATE,
flow.value?.namespace,
@@ -777,9 +796,10 @@ function deleteFlowAndDependencies() {
})
const flowErrors = computed((): string[] | undefined => {
const key = baseOutdatedTranslationKey.value;
const flowExistsError =
flowValidation.value?.outdated && isCreating.value
? [`>>>>${baseOutdatedTranslationKey.value}`] // because translating is impossible here
? [`${t(key + ".description")} ${t(key + ".details")}`]
: [];
const constraintsError =
@@ -794,8 +814,6 @@ function deleteFlowAndDependencies() {
const infos = flowValidation.value?.infos ?? [];
return infos.length === 0 ? undefined : infos;
return undefined;
})
const flowHaveTasks = computed((): boolean => {


@@ -958,6 +958,7 @@
"flow": "Flow",
"flow already exists": "Flow existiert bereits",
"flow creation": "Flow-Erstellung",
"flow creation denied in namespace": "Sie haben keine Berechtigung, Flows im Namespace `{namespace}` zu erstellen.",
"flow delete": "Sind Sie sicher, dass Sie <code>{flowCount}</code> Flow(s) löschen möchten?",
"flow deleted, you can restore it": "Der Flow wurde gelöscht und dies ist eine schreibgeschützte Ansicht. Sie können ihn dennoch wiederherstellen.",
"flow disable": "Sind Sie sicher, dass Sie <code>{flowCount}</code> Flow(s) deaktivieren möchten?",


@@ -574,6 +574,7 @@
"can not save": "Can not save",
"flow must not be empty": "Flow must not be empty",
"flow must have id and namespace": "Flow must have an id and a namespace.",
"flow creation denied in namespace": "You don't have permission to create flows in the namespace `{namespace}`.",
"readonly property": "Read-only property",
"namespace and id readonly": "The properties `namespace` and `id` cannot be changed — they are now set to their initial values. If you want to rename a flow or change its namespace, you can create a new flow and remove the old one.",
"avg": "Average",


@@ -958,6 +958,7 @@
"flow": "Flujo",
"flow already exists": "Flow ya existe",
"flow creation": "Creación de flujo",
"flow creation denied in namespace": "No tienes permiso para crear flows en el namespace `{namespace}`.",
"flow delete": "¿Estás seguro de que quieres eliminar <code>{flowCount}</code> flow(s)?",
"flow deleted, you can restore it": "El flow ha sido eliminado y esta es una vista de solo lectura. Aún puedes restaurarlo.",
"flow disable": "¿Estás seguro de que quieres deshabilitar <code>{flowCount}</code> flow(s)?",


@@ -958,6 +958,7 @@
"flow": "Flow",
"flow already exists": "Flow déjà existant",
"flow creation": "Création de flow",
"flow creation denied in namespace": "Vous n'avez pas la permission de créer des flows dans le namespace `{namespace}`.",
"flow delete": "Êtes vous sûr de vouloir supprimer <code>{flowCount}</code> flow(s)?",
"flow deleted, you can restore it": "Le flow a été supprimé et ceci est une vue en lecture seule, mais vous pouvez toujours le restaurer.",
"flow disable": "Êtes vous sûr de vouloir désactiver <code>{flowCount}</code> flow(s)?",


@@ -958,6 +958,7 @@
"flow": "Flow",
"flow already exists": "Flow पहले से मौजूद है",
"flow creation": "Flow निर्माण",
"flow creation denied in namespace": "आपके पास namespace `{namespace}` में flows बनाने की अनुमति नहीं है।",
"flow delete": "क्या आप वाकई <code>{flowCount}</code> flow(s) को हटाना चाहते हैं?",
"flow deleted, you can restore it": "Flow हटा दिया गया है और यह एक केवल-पढ़ने का दृश्य है। आप इसे अभी भी पुनर्स्थापित कर सकते हैं।",
"flow disable": "क्या आप वाकई <code>{flowCount}</code> flow(s) को अक्षम करना चाहते हैं?",


@@ -958,6 +958,7 @@
"flow": "Flow",
"flow already exists": "Flow già esistente",
"flow creation": "Creazione del flow",
"flow creation denied in namespace": "Non hai il permesso di creare flow nel namespace `{namespace}`.",
"flow delete": "Sei sicuro di voler eliminare <code>{flowCount}</code> flow?",
"flow deleted, you can restore it": "Il flow è stato eliminato e questa è una vista di sola lettura. Puoi ancora ripristinarlo.",
"flow disable": "Sei sicuro di voler disabilitare <code>{flowCount}</code> flow?",


@@ -958,6 +958,7 @@
"flow": "Flow",
"flow already exists": "Flowはすでに存在します",
"flow creation": "Flow作成",
"flow creation denied in namespace": "`{namespace}`でflowを作成する権限がありません。",
"flow delete": "<code>{flowCount}</code>件のflowを削除してもよろしいですか",
"flow deleted, you can restore it": "Flowは削除されており、これは読み取り専用ビューです。復元することは可能です。",
"flow disable": "<code>{flowCount}</code>件のflowを無効化してもよろしいですか",


@@ -958,6 +958,7 @@
"flow": "Flow",
"flow already exists": "Flow가 이미 존재합니다",
"flow creation": "Flow 생성",
"flow creation denied in namespace": "`{namespace}` namespace에서 flow를 생성할 권한이 없습니다.",
"flow delete": "<code>{flowCount}</code> 개의 flow를 삭제하시겠습니까?",
"flow deleted, you can restore it": "Flow가 삭제되었으며 읽기 전용 보기입니다. 여전히 복원할 수 있습니다.",
"flow disable": "<code>{flowCount}</code> 개의 flow를 비활성화하시겠습니까?",


@@ -958,6 +958,7 @@
"flow": "Flow",
"flow already exists": "Flow już istnieje",
"flow creation": "Utworzenie flow",
"flow creation denied in namespace": "Nie masz uprawnień do tworzenia flow w namespace `{namespace}`.",
"flow delete": "Czy na pewno chcesz usunąć <code>{flowCount}</code> flow(s)?",
"flow deleted, you can restore it": "Flow został usunięty i jest to widok tylko do odczytu. Nadal możesz go przywrócić.",
"flow disable": "Czy na pewno chcesz wyłączyć <code>{flowCount}</code> flow(s)?",


@@ -958,6 +958,7 @@
"flow": "Flow",
"flow already exists": "Flow já existe",
"flow creation": "Criação de flow",
"flow creation denied in namespace": "Você não tem permissão para criar flows no namespace `{namespace}`.",
"flow delete": "Tem certeza de que deseja deletar <code>{flowCount}</code> flow(s)?",
"flow deleted, you can restore it": "O flow foi deletado e esta é uma visualização somente leitura. Você ainda pode restaurá-lo.",
"flow disable": "Tem certeza de que deseja desativar <code>{flowCount}</code> flow(s)?",


@@ -958,6 +958,7 @@
"flow": "Flow",
"flow already exists": "Flow já existe",
"flow creation": "Criação de flow",
"flow creation denied in namespace": "Você não tem permissão para criar flows no namespace `{namespace}`.",
"flow delete": "Tem certeza de que deseja excluir <code>{flowCount}</code> flow(s)?",
"flow deleted, you can restore it": "O flow foi deletado e esta é uma visualização somente leitura. Você ainda pode restaurá-lo.",
"flow disable": "Tem certeza de que deseja desativar <code>{flowCount}</code> flow(s)?",


@@ -958,6 +958,7 @@
"flow": "Flow",
"flow already exists": "Flow уже существует",
"flow creation": "Создание flow",
"flow creation denied in namespace": "У вас нет разрешения на создание flows в namespace `{namespace}`.",
"flow delete": "Вы уверены, что хотите удалить <code>{flowCount}</code> flow(ы)?",
"flow deleted, you can restore it": "Flow был удален, и это представление только для чтения. Вы все еще можете его восстановить.",
"flow disable": "Вы уверены, что хотите отключить <code>{flowCount}</code> flow(ы)?",


@@ -958,6 +958,7 @@
"flow": "流程",
"flow already exists": "流程已存在",
"flow creation": "流程创建",
"flow creation denied in namespace": "您没有权限在namespace `{namespace}`中创建flows。",
"flow delete": "确定要删除 <code>{flowCount}</code> 个流程吗?",
"flow deleted, you can restore it": "流程已删除,这是只读视图。你仍然可以恢复它。",
"flow disable": "确定要禁用 <code>{flowCount}</code> 个流程吗?",


@@ -38,9 +38,6 @@ import io.kestra.core.utils.ListUtils;
import io.kestra.core.utils.Logs;
import io.kestra.plugin.core.flow.Pause;
import io.kestra.plugin.core.trigger.Webhook;
import io.kestra.webserver.controllers.api.ExecutionController.ApiValidateExecutionInputsResponse;
import io.kestra.webserver.controllers.api.ExecutionController.ApiValidateExecutionInputsResponse.ApiCheckFailure;
import io.kestra.webserver.controllers.api.ExecutionController.ExecutionResponse;
import io.kestra.webserver.converters.QueryFilterFormat;
import io.kestra.webserver.responses.BulkErrorResponse;
import io.kestra.webserver.responses.BulkResponse;
@@ -590,21 +587,16 @@ public class ExecutionController {
throw new HttpStatusException(HttpStatus.NOT_FOUND, "No execution triggered");
}
var result = execution.get();
List<Label> labels = new ArrayList<>();
labels.add(new Label(Label.FROM, "trigger"));
if (flow.getLabels() != null) {
result = result.withLabels(LabelService.labelsExcludingSystem(flow));
labels.addAll(LabelService.labelsExcludingSystem(flow));
}
if (labels.stream().noneMatch(label -> label.key().equals(CORRELATION_ID))) {
labels.add(new Label(CORRELATION_ID, execution.get().getId()));
}
List<Label> labels = ListUtils.emptyOnNull(result.getLabels());
boolean hasCorrelationId = labels.stream()
.anyMatch(label -> label.key().equals(CORRELATION_ID));
if (!hasCorrelationId) {
List<Label> newLabels = new ArrayList<>(labels);
newLabels.add(new Label(CORRELATION_ID, result.getId()));
result = result.withLabels(newLabels);
}
var result = execution.get().withLabels(labels);
// we check conditions here as it's easier as the execution is created we have the body and headers available for the runContext
var conditionContext = conditionService.conditionContext(runContextFactory.of(flow, result), flow, result);
@@ -636,7 +628,7 @@ public class ExecutionController {
}
executionQueue.emit(result);
eventPublisher.publishEvent(new CrudEvent<>(result, CrudEventType.CREATE));
eventPublisher.publishEvent(CrudEvent.create(result));
if (webhook.getWait()) {
var subscriberId = UUID.randomUUID().toString();
@@ -864,15 +856,22 @@ public class ExecutionController {
}
protected List<Label> parseLabels(List<String> labels) {
List<Label> parsedLabels = labels == null ? Collections.emptyList() : RequestUtils.toMap(labels).entrySet().stream()
List<Label> parsedLabels = labels == null ? new ArrayList<>() : RequestUtils.toMap(labels).entrySet().stream()
.map(entry -> new Label(entry.getKey(), entry.getValue()))
.toList();
.collect(Collectors.toList());
// check for system labels: none can be passed at execution creation time except system.correlationId
Optional<Label> first = parsedLabels.stream().filter(label -> !label.key().equals(CORRELATION_ID) && label.key().startsWith(SYSTEM_PREFIX)).findFirst();
// check for system labels: none can be passed at execution creation time except system.correlationId and system.from
Optional<Label> first = parsedLabels.stream().filter(label -> !label.key().equals(CORRELATION_ID) && !label.key().equals(Label.FROM) && label.key().startsWith(SYSTEM_PREFIX)).findFirst();
if (first.isPresent()) {
throw new IllegalArgumentException("System labels can only be set by Kestra itself, offending label: " + first.get().key() + "=" + first.get().value());
}
// from can be passed by the UI, so we only add it if it is not already present
// if we want to be more restrictive, we could restrict it to only the `ui` value
if (parsedLabels.stream().noneMatch(l -> l.key().equals(Label.FROM))) {
parsedLabels.add(new Label(Label.FROM, "api"));
}
return parsedLabels;
}


@@ -53,7 +53,6 @@ import org.awaitility.Awaitility;
import org.hamcrest.Matcher;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Test;
import org.junitpioneer.jupiter.RetryingTest;
import reactor.core.publisher.Flux;
import java.io.ByteArrayInputStream;
@@ -170,12 +169,15 @@ class ExecutionControllerRunnerTest {
assertThat(result.getInputs().get("file").toString()).startsWith("kestra:///io/kestra/tests/inputs/executions/");
assertThat(result.getInputs().containsKey("bool")).isTrue();
assertThat(result.getInputs().get("bool")).isNull();
assertThat(result.getLabels().size()).isEqualTo(6);
assertThat(result.getLabels().getFirst()).isEqualTo(new Label("flow-label-1", "flow-label-1"));
assertThat(result.getLabels().get(1)).isEqualTo(new Label("flow-label-2", "flow-label-2"));
assertThat(result.getLabels().get(2)).isEqualTo(new Label("a", "label-1"));
assertThat(result.getLabels().get(3)).isEqualTo(new Label("b", "label-2"));
assertThat(result.getLabels().get(4)).isEqualTo(new Label("url", URL_LABEL_VALUE));
assertThat(result.getLabels()).containsExactlyInAnyOrder(
new Label("flow-label-1", "flow-label-1"),
new Label("flow-label-2", "flow-label-2"),
new Label("a", "label-1"),
new Label("b", "label-2"),
new Label("url", URL_LABEL_VALUE),
new Label(Label.CORRELATION_ID, result.getId()),
new Label(Label.FROM, "api")
);
var notFound = assertThrows(HttpClientResponseException.class, () -> client.toBlocking().exchange(
HttpRequest
@@ -203,6 +205,7 @@ class ExecutionControllerRunnerTest {
assertThat(execution.getLabels()).containsExactlyInAnyOrder(
new Label(Label.CORRELATION_ID, execution.getId()),
new Label(Label.FROM, "api"),
new Label("existing", "fromExecution")
);
}
@@ -908,8 +911,12 @@ class ExecutionControllerRunnerTest {
assertThat((Boolean) ((Map<String, Object>) execution.getTrigger().getVariables().get("body")).get("b")).isTrue();
assertThat(((Map<String, Object>) execution.getTrigger().getVariables().get("parameters")).get("name")).isEqualTo(List.of("john"));
assertThat(((Map<String, List<String>>) execution.getTrigger().getVariables().get("parameters")).get("age")).containsExactlyInAnyOrder("12", "13");
assertThat(execution.getLabels().getFirst()).isEqualTo(new Label("flow-label-1", "flow-label-1"));
assertThat(execution.getLabels().get(1)).isEqualTo(new Label("flow-label-2", "flow-label-2"));
assertThat(execution.getLabels()).containsExactlyInAnyOrder(
new Label(Label.CORRELATION_ID, execution.getId()),
new Label(Label.FROM, "trigger"),
new Label("flow-label-1", "flow-label-1"),
new Label("flow-label-2", "flow-label-2")
);
execution = client.toBlocking().retrieve(
HttpRequest