Compare commits

..

221 Commits

Author SHA1 Message Date
github-actions[bot]
bb54e3465a chore(version): update to version '0.23.23' 2025-11-10 14:31:49 +00:00
github-actions[bot]
add33850a0 chore(version): update to version '0.23.22' 2025-11-03 11:12:43 +00:00
Roman Acevedo
b8838f489e ci: add dry run and default plugin-version for release docker 2025-10-31 20:20:37 +01:00
Loïc Mathieu
f54f6e7c7e fix(executions): Flow triggered twice when there are two multiple conditions
Fixes #12560
2025-10-31 16:27:06 +01:00
Roman Acevedo
3efb95bae2 ci: add skip test param to pre-release.yml 2025-10-28 17:59:43 +01:00
github-actions[bot]
fcff4019d7 chore(version): update to version '0.23.21' 2025-10-16 13:42:24 +00:00
github-actions[bot]
9b89202fe7 chore(version): update to version '0.23.20' 2025-10-14 12:31:33 +00:00
brian-mulier-p
6cb3e6b9c4 fix(flows): pebble autocompletion performance optimization (#11981)
closes #11881
2025-10-14 11:41:12 +02:00
nKwiatkowski
0f1e3586d9 feat(tests): add flaky tests handling 2025-10-13 17:08:50 +02:00
github-actions[bot]
f6e098849e chore(version): update to version '0.23.19' 2025-10-07 13:20:33 +00:00
Roman Acevedo
2494b6f335 test: make listDistinctNamespaces more maintainable 2025-10-07 15:15:41 +02:00
Roman Acevedo
47d12959ae test: remove findByNamespace and findDistinctNamespace
they are too hard to maintain
2025-10-07 11:11:02 +02:00
Roman Acevedo
cc61df263d ci: migrate CI to kestra-io/actions #11769 2025-10-07 10:04:45 +02:00
Loïc Mathieu
5738acd10e fix: compilation issue 2025-10-06 14:39:38 +02:00
Loïc Mathieu
a8539bf579 fix(executions): purge executions by 100 by default
500 may be too much when executions are huge, since the whole batch is loaded in memory.
2025-10-03 17:00:03 +02:00
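To illustrate the batching idea behind the two purge commits above, here is a minimal, hypothetical Java sketch (not Kestra's actual `PurgeExecutions` code; `fetchBatch` and `deleteBatch` are stand-ins): only one batch of execution IDs is ever held in memory, so a smaller default batch size directly bounds memory usage.

```java
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Consumer;
import java.util.function.Function;

// Hypothetical sketch of batched purging: fetch at most `batchSize` IDs at a time,
// delete them, and repeat until nothing is returned.
public class BatchedPurge {
    static int purge(Function<Integer, List<String>> fetchBatch,
                     Consumer<List<String>> deleteBatch,
                     int batchSize) {
        int deleted = 0;
        List<String> batch;
        while (!(batch = fetchBatch.apply(batchSize)).isEmpty()) {
            deleteBatch.accept(batch); // delete the executions plus their logs and metrics
            deleted += batch.size();
        }
        return deleted;
    }

    public static void main(String[] args) {
        ArrayDeque<String> pending = new ArrayDeque<>(List.of("e1", "e2", "e3", "e4", "e5"));
        int purged = purge(size -> {
            List<String> out = new ArrayList<>();
            while (out.size() < size && !pending.isEmpty()) out.add(pending.poll());
            return out;
        }, batch -> System.out.println("deleting " + batch), 2); // small batch size for the demo
        System.out.println(purged + " executions purged");
    }
}
```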
Loïc Mathieu
081af2e488 fix(system): compilation issue 2025-10-03 16:13:38 +02:00
Loïc Mathieu
44bb0c2112 feat(executions): improve performance of PurgeExecutions by batch deleting executions, logs and metrics
Closes #11680
2025-10-03 16:04:29 +02:00
Loïc Mathieu
eebc4a3cf6 fix(executions): evaluate multiple conditions in a separate queue
By evaluating multiple conditions in a separate queue, we serialize their evaluation, which avoids races when we compute the outputs for flow triggers.
This is because evaluation is a multi-step process: first you get the existing condition, then you evaluate, then you store the result. As this is not guarded by a lock, it must not be done concurrently.

The race can still occur if multiple executors run, but this is less probable. A re-implementation, probably in 2.0, would be needed for that.

Fixes https://github.com/kestra-io/kestra-ee/issues/4602
2025-10-03 11:03:33 +02:00
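The serialization idea in the commit above can be shown with a generic sketch (assumed names, not Kestra classes): pushing the read-evaluate-store cycle onto a single-threaded queue makes the three steps effectively atomic with respect to each other, without an explicit lock.

```java
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

// Generic sketch: a single-threaded executor plays the role of the "separate queue",
// so the unguarded read-evaluate-store sequence never runs concurrently.
public class SerializedEvaluation {
    private final Map<String, Integer> conditionStore = new ConcurrentHashMap<>();
    private final ExecutorService queue = Executors.newSingleThreadExecutor();

    void submit(String flowId) {
        queue.execute(() -> {
            int existing = conditionStore.getOrDefault(flowId, 0); // 1. get the existing condition
            int evaluated = existing + 1;                          // 2. evaluate
            conditionStore.put(flowId, evaluated);                 // 3. store the result
        });
    }

    public static void main(String[] args) throws InterruptedException {
        SerializedEvaluation eval = new SerializedEvaluation();
        for (int i = 0; i < 100; i++) eval.submit("flow-a");
        eval.queue.shutdown();
        eval.queue.awaitTermination(5, TimeUnit.SECONDS);
        System.out.println(eval.conditionStore.get("flow-a")); // 100: no lost updates
    }
}
```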
brian-mulier-p
9e50a31ad4 fix(core): avoid crashing UI in case of multiline function autocomplete (#11684) 2025-10-03 09:37:56 +02:00
Loïc Mathieu
d73ad3b365 fix(system): compilation issue 2025-10-01 12:29:05 +02:00
Loïc Mathieu
a28bba8168 chore(system): move the SkipExecution service to the services package
It was there before, so moving it back makes the change easier to backport.
2025-10-01 11:34:13 +02:00
Loïc Mathieu
bf61b04038 feat(system): allow to skip an indexer record
Part-of: https://github.com/kestra-io/kestra-ee/issues/5263
2025-10-01 11:34:13 +02:00
github-actions[bot]
a016a58dda chore(version): update to version '0.23.18' 2025-09-30 08:26:56 +00:00
github-actions[bot]
7b431f6d71 chore(version): update to version '0.23.17' 2025-09-23 12:50:57 +00:00
nKwiatkowski
b7846f42c6 chore(version): update to version '0.23.16' 2025-09-23 14:50:31 +02:00
Bart Ledoux
2ded421c4f tests(core): make one test pass 2025-09-23 14:42:47 +02:00
github-actions[bot]
95f6faf557 chore(version): update to version '0.23.17' 2025-09-23 12:34:27 +00:00
nKwiatkowski
e23b0d1f28 chore(version): update to version '0.23.16' 2025-09-23 14:33:53 +02:00
github-actions[bot]
c630d548f6 chore(core): localize to languages other than english (#11462)
Co-authored-by: GitHub Action <actions@github.com>
2025-09-23 13:24:38 +02:00
Bart Ledoux
516bf76816 Revert "chore(i18n): 👿 add changes to translations for automation"
This reverts commit 495e480bde.
2025-09-23 13:21:50 +02:00
Bart Ledoux
495e480bde chore(i18n): 👿 add changes to translations for automation 2025-09-23 13:20:42 +02:00
Bart Ledoux
0a5509b884 chore: split ui unit test config file 2025-09-23 13:04:34 +02:00
github-actions[bot]
e12f51c386 chore(version): update to version '0.23.17' 2025-09-23 09:41:25 +00:00
brian.mulier
c969eff20a fix(system): avoid trigger locking after scheduler restart
closes #11434
2025-09-22 18:37:55 +02:00
brian.mulier
29db1efc40 fix(ci): same CI as develop 2025-09-22 18:37:50 +02:00
brian-mulier-p
21d8a71255 fix(tests): enforce closing consumers after each tests (#11399) 2025-09-19 16:32:11 +02:00
brian-mulier-p
44bcbe713b fix(core): avoid ClassCastException when doing secret decryption (#11393)
closes kestra-io/kestra-ee#5191
2025-09-19 13:51:01 +02:00
github-actions[bot]
708ffaab38 chore(version): update to version '0.23.16' 2025-09-16 09:20:56 +00:00
Loïc Mathieu
569fc10d48 chore(version): upgrade to 0.23.15 2025-09-09 10:08:19 +02:00
Miloš Paunović
7c23461efd fix(flow)*: properly handle tab closing by clicking the cross icon in the corner of the panel (#11090)
Relates to https://github.com/kestra-io/kestra/issues/10981.
2025-09-04 14:22:12 +02:00
github-actions[bot]
b09b1fdafe chore(version): update to version '0.23.14' 2025-09-02 12:21:34 +00:00
Roman Acevedo
adcab1893b ci: fix setversion-tag.yml not triggering a main.yml job on a pushed tag
the missing token: ${{ secrets.GH_PERSONAL_TOKEN }} is the only difference between this CI and EE CI, so it is probably the right fix

# Conflicts:
#	.github/workflows/setversion-tag.yml
2025-09-02 10:40:47 +02:00
Loïc Mathieu
5710c79954 fix(executions): clear errors/finally/afterExecution branches when changing the state of a taskrun
As changing the state of a taskrun will restart the flow, if we didn't clear those branches, the flow would not restart properly.

Fixes https://github.com/kestra-io/kestra-ee/issues/3211
2025-08-29 16:25:29 +02:00
github-actions[bot]
55a2384253 chore(version): update to version '0.23.13' 2025-08-26 10:35:39 +00:00
brian.mulier
4975c907a7 fix(logs): emitAsync is now keeping messages order 2025-08-25 16:54:13 +02:00
brian.mulier
87d508648d fix(logs): higher max message length to keep stacktraces in a single log 2025-08-25 16:53:34 +02:00
brian.mulier
85da1089ec chore(deps): bump Micronaut platform to 4.9.2
closes #10626
closes #10788
2025-08-25 16:53:34 +02:00
Loïc Mathieu
68e1b9c80f fix(system): properly close the ScheduledExecutorService tasks
This avoids having running threads while the component is supposed to be closed.
2025-08-20 15:52:48 +02:00
nKwiatkowski
21c24e0349 fix: retry flaky test 2025-08-20 11:51:48 +02:00
nKwiatkowski
ed8a908b22 fix: retry flaky test 2025-08-20 11:50:49 +02:00
Nicolas K.
86d97bed77 fix(test): disable kafka concurrency queue test (#10755)
Co-authored-by: nKwiatkowski <nkwiatkowski@kestra.io>
2025-08-19 18:12:59 +02:00
nKwiatkowski
f2c3cf4f8c chore: update version to 0.23.12 2025-08-19 15:17:56 +02:00
brian.mulier
b3c896eccb fix(core): change cache policy on files returned by the webserver that need to stay fresh
closes #7499
2025-08-19 13:13:53 +02:00
brian.mulier
58d36f5948 fix(dashboards): quick fix to avoid infinite refresh and restore refresh dashboard feature 2025-08-19 13:05:31 +02:00
Florian Hussonnois
922a655a4c fix(core): fix preconditions rendering for ExecutionOutputs (#10651)
Ensure that preconditions are always re-rendered for any
new executions

Changes:
* add new fluent skipCache methods on RunContextProperty and Property
  classes

Fix: #10651
2025-08-18 21:00:06 +02:00
YannC.
94f0e211ba fix: compilation issue 2025-08-14 15:45:19 +02:00
Loïc Mathieu
2b590bf955 fix(execution): parallel flowable may not end all child flowables
Parallel flowable tasks like `Parallel`, `Dag` and `ForEach` are racy. When a task fails in a branch, other concurrent branches that contain flowables may never end.
We make sure that all children are terminated when a flowable is itself terminated.

Fixes #6780
2025-08-14 12:25:56 +02:00
Loïc Mathieu
b61eeaff8c fix(execution): concurrency limit didn't work with afterExecutions
This is because the execution is never considered fully terminated, so the concurrency limit is not handled properly.
This should also affect SLA, trigger locks, and other cleanup.

The root issue is that, with a worker task from afterExecution, there is no other update on the execution itself (as it's already terminated), so no execution messages are processed again by the executor.

Because of that, the worker task result message from the afterExecution block is the last message, but unfortunately, as messages from the worker task result have no flow attached, the computation of the final termination is incorrect.
The solution is to load the flow inside the executor when it is null and the execution is terminated, which should only occur inside afterExecution.

Fixes #10657
Fixes #8459
Fixes #8609
2025-08-13 09:32:25 +02:00
Prayag
26d7fa47d3 fix(core): Enter key is now validating filter / refreshing data (#9630)
closes #9471

---------

Co-authored-by: brian.mulier <bmmulier@hotmail.fr>
2025-08-12 17:29:14 +02:00
Loïc Mathieu
bece420c9a fix(executions): SLA monitor should take into account restarted executions 2025-08-12 11:49:14 +02:00
Loïc Mathieu
081066888f fix(executions): concurrency limit exceeded when restarting an execution
Fixes #7880
2025-08-12 11:49:02 +02:00
YannC.
c721fe68a7 chore: update version to 0.23.11 2025-08-12 10:47:34 +02:00
Loïc Mathieu
88c77084f5 fix(executions): correctly fail the request when trying to resume an execution with the wrong inputs
Fixes #9959
2025-08-12 09:40:23 +02:00
brian.mulier
3846ac87e3 fix(dashboard): properly use time filters in queries
closes kestra-io/kestra-ee#4389
2025-08-11 22:29:56 +02:00
Nicolas K.
b9c843f01d feat(releases): add test jar to maven central deployment (#10675)
Co-authored-by: nKwiatkowski <nkwiatkowski@kestra.io>
2025-08-11 17:17:06 +02:00
brian.mulier
9686718130 tests(core): add a test to taskrunners to ensure it's working multiple times on the same working directory
part of kestra-io/plugin-ee-kubernetes#45
2025-08-11 15:00:32 +02:00
Loïc Mathieu
da40f46b4a fix(executions): properly fail the task if it contains unsupported unicode sequence
This occurs in Postgres with the `\u0000` Unicode escape sequence. Postgres refuses to store any JSONB containing this sequence, as it has no textual representation.
We now properly detect that and fail the task.

Fixes #10326
2025-08-11 11:54:35 +02:00
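A minimal sketch of the kind of check described above (assumed method names, not the actual Kestra code): detect the NUL character before the value reaches Postgres and fail with an explicit error, since a JSONB column rejects `\u0000`.

```java
// Sketch only: reject values containing U+0000 up front, because Postgres cannot
// store that character in JSONB/text and the insert would otherwise fail later.
public class NullCharCheck {
    static void ensureStorable(String json) {
        if (json.indexOf('\u0000') >= 0) {
            throw new IllegalArgumentException(
                "output contains \\u0000, which Postgres cannot store in JSONB");
        }
    }

    public static void main(String[] args) {
        ensureStorable("{\"ok\":true}"); // fine
        try {
            ensureStorable("{\"bad\":\"a\u0000b\"}"); // detected before touching the database
        } catch (IllegalArgumentException e) {
            System.out.println("task failed: " + e.getMessage());
        }
    }
}
```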
Piyush Bhaskar
4ae207ed81 fix(flows): copy trigger url properly. (#10645) 2025-08-08 13:14:49 +05:30
brian.mulier
0a2fa4d3b2 fix(core): ensure props with defaults are not marked as required in generated doc 2025-08-07 15:09:46 +02:00
brian.mulier
995d5c1ac2 fix(core): wrong @NotNull import leading to key not being marked as required
closes #9287
2025-08-07 15:09:46 +02:00
Loïc Mathieu
92bf135c02 fix(test): RestactCaseTest.restartFailedWithFinally() should use executionService.isTerminated() 2025-08-06 09:56:43 +02:00
brian.mulier
d318281342 chore(version): upgrade to version 0.23.10 2025-08-05 10:56:31 +02:00
Loïc Mathieu
3f68749276 fix(executions): Don't create outputs from the Subflow task when we didn't wait
If we didn't wait for the subflow execution, we cannot have access to its outputs.
2025-07-31 13:07:46 +02:00
brian.mulier
bc07dfbf1c fix(core): avoid follow execution from being discarded too early
closes #10472
closes #7623
2025-07-31 10:26:16 +02:00
Piyush Bhaskar
3b3aa495b0 fix(core): update running count and status of executions in concurrency (#10418) 2025-07-29 18:30:40 +05:30
Loïc Mathieu
a945780e4d chore(version): upgrade to version 0.23.9 2025-07-29 14:57:00 +02:00
Roman Acevedo
d512f86927 fix(cli): tenantService was injected directly, which does not work in the CLI 2025-07-29 14:42:33 +02:00
github-actions[bot]
7f355dd730 chore(core): localize to languages other than english (#10405)
Extended localization support by adding translations for multiple languages using English as the base. This enhances accessibility and usability for non-English-speaking users while keeping English as the source reference.

Co-authored-by: GitHub Action <actions@github.com>
2025-07-29 10:50:29 +02:00
Loïc Mathieu
ffa33b1a7a chore(version): version 0.23.8 2025-07-29 10:11:12 +02:00
Roman Acevedo
a5b4ec3b2e fix(triggers): bulk action on triggers did not take into account that this is async (#10307) 2025-07-29 09:31:43 +02:00
Miloš Paunović
5585e9df47 fix(namespaces): make sure the namespace parameter is properly passed when reading a file (#10384)
Relates to https://github.com/kestra-io/kestra/issues/10363.
Relates to https://github.com/kestra-io/kestra-ee/issues/4514.
2025-07-29 09:20:29 +02:00
YannC
f8cb335a16 fix: set postgres and mysql queue offset as a bigint (#10344) 2025-07-28 16:28:47 +02:00
Loïc Mathieu
af9129f900 fix(core): compilation issue 2025-07-28 15:08:57 +02:00
yuri
177ba35e15 fix(core): amend misc label-related issues (#10044)
* fix(core): amend misc label-related issues

* re-enabled bulk update of label value
* re-enabled merging flow-execution labels by key
* made duplicated keys rejection readable
* forced multiple validations within `RequestUtils`
* ensured existing labels can be overridden
* added multiple tests validating complex scenarios

BREAKING CHANGE: switched from first to last label value override
BREAKING CHANGE: preventing empty key/value labels
BREAKING CHANGE: preventing whitespace in key

* fix(core): reflect feedback

* Deduplicated a list inside the `Labels` task.
* Worked around label mutation at `Worker`.
* Attempted to deduplicate labels within `Execution` where possible.

* fix(core): remove irrelevant changes
2025-07-28 11:31:19 +02:00
Florian Hussonnois
b99946deff fix(system): avoid potential NPE in ServiceLivenessManager (#10338)
Avoid a potential NPE in ServiceLivenessManager when
a local service is unregistered during shutdown before the liveness probe completes

Fix: #10338
2025-07-25 12:34:51 +02:00
Florian Hussonnois
19428ad344 fix(system): ignore state transition failure for indexer
Fix: kestra-io/kestra-ee#4474
2025-07-25 12:34:44 +02:00
Loïc Mathieu
162764ff0d fix(executions): flow concurrency limit not honored when executions are created at a high rate
This is because we now process the execution queue concurrently, so there is a race when counting currently running executions. This is easy to reproduce with a ForEachItem, which can create tens or hundreds of executions almost instantly; nearly all of them start because they all see 0 executions running...

Using a dedicated execution-running queue, as done in EE, would serialize the messages and fix the issue.

However, when using multiple executor instances and a concurrency limit of 1, there is a theoretical race, as no lock is taken if no execution is running. The maximum surge of executions could be as high as the number of executors, but this race is less probable in real-world scenarios.

Fixes #10167
2025-07-25 12:06:26 +02:00
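The race in the commit above is the classic check-then-act problem. The sketch below (illustrative names, not Kestra's implementation) contrasts a racy count-then-start check with an atomic slot reservation, which is the same effect the dedicated execution queue achieves by serializing the decision.

```java
import java.util.concurrent.atomic.AtomicInteger;

// Illustrative sketch: with "count, then start", concurrent callers can all observe
// 0 running executions and all start; reserving the slot atomically (or making the
// decision on a single serialized queue) closes that window.
public class ConcurrencyLimit {
    private final int limit;
    private final AtomicInteger running = new AtomicInteger();

    ConcurrencyLimit(int limit) {
        this.limit = limit;
    }

    // Racy variant, shown for contrast: the read and the increment are separate steps.
    boolean tryStartRacy() {
        if (running.get() < limit) {
            running.incrementAndGet();
            return true;
        }
        return false;
    }

    // Safe variant: reserve the slot in one atomic compare-and-set.
    boolean tryStartAtomic() {
        while (true) {
            int current = running.get();
            if (current >= limit) {
                return false;
            }
            if (running.compareAndSet(current, current + 1)) {
                return true;
            }
        }
    }

    public static void main(String[] args) throws InterruptedException {
        ConcurrencyLimit limit = new ConcurrencyLimit(1);
        Runnable starter = () ->
            System.out.println(Thread.currentThread().getName() + " started=" + limit.tryStartAtomic());
        Thread a = new Thread(starter, "exec-1");
        Thread b = new Thread(starter, "exec-2");
        a.start(); b.start();
        a.join(); b.join(); // exactly one of the two acquires the single slot
    }
}
```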
Florian Hussonnois
ccd7b43b97 fix(core): fix search filtering based on endDate field (kestra-io/kestra-ee#4446)
Related-to: kestra-io/kestra-ee#4446
2025-07-25 11:14:35 +02:00
Loïc Mathieu
53f881ed60 fix(executions): race condition inside nested ForEach with concurrency
Fixes #10167
2025-07-25 09:46:46 +02:00
Piyush Bhaskar
0759aaeae8 fix(executions): update query parameter for state filtering (#10315) 2025-07-24 14:42:01 +05:30
Miloš Paunović
fc8b389d09 fix(executions): make sure outputs do not overflow over right drawer (#10238)
Closes https://github.com/kestra-io/kestra/issues/10232.
2025-07-24 10:40:22 +02:00
MilosPaunovic
8355eb191e chore(flows): show small execution charts on flow listing
Co-authored-by: YannC. <ycoornaert@kestra.io>
2025-07-24 10:34:32 +02:00
Piyush Bhaskar
50f72f8ea3 fix(core): check null uri (#10309) 2025-07-24 12:48:42 +05:30
Nicolas K.
ae14d980a4 fix(cli): #10062 add tenant to load flows properly at the startup (#10290)
* fix(cli): #10062 add tenant to load flows properly at the startup

* fix(cli): #10062 add fallback tenant to ee service

* fix(cli): #10062 use tenant id in all cli

---------

Co-authored-by: nKwiatkowski <nkwiatkowski@kestra.io>
2025-07-23 15:36:42 +02:00
Florian Hussonnois
bc1a08b418 chore(version): bump to version '0.23.7' 2025-07-22 15:50:22 +02:00
Nicolas K.
e264c0b75d fix(pebble): #8953 add more flexible day number conversion method (#10205)
Co-authored-by: nKwiatkowski <nkwiatkowski@kestra.io>
2025-07-22 14:59:24 +02:00
Piyush Bhaskar
fccbb6b648 fix(logs): update query filter to show logs ns and flowwise (#10248) 2025-07-22 18:04:27 +05:30
MilosPaunovic
a243c563d3 fix(namespaces)*: prevent overwriting namespace file content with undefined string
Relates to https://github.com/kestra-io/kestra-ee/issues/4439.
2025-07-22 14:28:28 +02:00
Loïc Mathieu
45ad1f6ee4 fix(tests): strengthen awaitExecution predicate
In some test situations, awaitExecution may receive old messages, so we strengthen the predicate to make sure we wait for the correct execution: the one that ends successfully.
2025-07-22 12:47:40 +02:00
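To make the predicate strengthening concrete, here is a tiny hedged sketch (the `Execution` record below is a stand-in, not Kestra's class): matching only on "terminated" can be satisfied by a stale message, while matching on the expected id plus a successful terminal state cannot.

```java
import java.util.function.Predicate;

// Stand-in types for illustration; the real test helper works on Kestra executions.
public class AwaitPredicate {
    record Execution(String id, String state) {
        boolean isTerminated() {
            return state.equals("SUCCESS") || state.equals("FAILED");
        }
    }

    // Weak predicate: any terminated execution matches, including old messages.
    static Predicate<Execution> weak() {
        return Execution::isTerminated;
    }

    // Strengthened predicate: the expected execution, ended successfully.
    static Predicate<Execution> strengthened(String expectedId) {
        return e -> e.id().equals(expectedId) && e.state().equals("SUCCESS");
    }

    public static void main(String[] args) {
        Execution stale = new Execution("old-1", "FAILED");
        Execution wanted = new Execution("new-2", "SUCCESS");
        System.out.println(weak().test(stale));                  // true: the weak check is fooled
        System.out.println(strengthened("new-2").test(stale));   // false
        System.out.println(strengthened("new-2").test(wanted));  // true
    }
}
```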
Piyush Bhaskar
8359bfc680 fix(triggers): ensure clearing the selection. (#10245) 2025-07-22 16:00:31 +05:30
YannC.
30a808188c fix: handle label filter with AND instead of OR for flows
close #4390
2025-07-22 09:42:06 +02:00
Loïc Mathieu
5121ceb63a fix(system): compilation issue 2025-07-21 12:36:03 +02:00
Loïc Mathieu
1dae994910 fix(executions)*: restart with finally or afterExecution
When a flow fails, is restarted, and contains either a finally or an afterExecution block, those blocks are not reset, so the restart will skip all tasks and terminate the flow.
The fix will reset the status of those tasks so they are restarted.

Fixes #10155
2025-07-21 12:27:54 +02:00
Loïc Mathieu
26a82fce95 fix(executions): support unicode file name inside the internal storage
Fixes #9550
2025-07-21 12:27:25 +02:00
Bart Ledoux
a8584a8a33 feat(flows): add setting to disable hovers in editor 2025-07-17 10:43:05 +02:00
Piyush Bhaskar
5737216b34 fix(triggers): only updates the trigger that matches both flowId and triggerId (#10156) 2025-07-17 14:10:44 +05:30
YannC.
747c424f1f chore(version): upgrade to v0.23.6 2025-07-15 14:52:54 +02:00
brian-mulier-p
33bfc979c5 fix(core): trim expressions in select & multiselect to be able to use '|' instead of '>-' (#10017)
closes #10016
2025-07-09 16:39:02 +02:00
nKwiatkowski
58ceb66cfb chore(version): upgrade to v0.23.5 2025-07-08 15:18:25 +02:00
Loïc Mathieu
a08266593f fix(webserver)*: bulk set labels remove existing labels
Fixes #9764
2025-07-07 15:26:09 +02:00
Loïc Mathieu
d5d5f457b4 fix(system): force running after execution tasks even if the execution is killed
Fixes #9852
2025-07-07 12:41:31 +02:00
François Delbrayelle
cacac2239d fix(taskrunner): abstract task runner (#9769) 2025-07-04 09:50:08 +02:00
nKwiatkowski
5c45bd5eb5 feat(cicd): #4006 add javadoc and sources to cli release 2025-07-03 14:58:12 +02:00
Miloš Paunović
fdf126202c fix(namespaces)*: take pagination into account when browsing namespace flows (#9849)
Closes https://github.com/kestra-io/kestra/issues/9805.
2025-07-02 11:48:53 +02:00
Nicolas K.
0f3c745bb9 feat(cicd): #4006 change signing method (#9854)
Co-authored-by: nKwiatkowski <nkwiatkowski@kestra.io>
2025-07-02 11:14:07 +02:00
YannC.
5a6a0ff3e3 chore(version): upgrade to v0.23.4 2025-07-01 17:56:42 +02:00
Loïc Mathieu
f5f88e18ce feat(cluster): persist maintenance mode in the database
Part-of: https://github.com/kestra-io/kestra-ee/issues/3735
2025-07-01 17:56:42 +02:00
Nicolas K.
12f521860e feat(cicd): #4006 migrate to maven central (#9807)
Co-authored-by: nKwiatkowski <nkwiatkowski@kestra.io>
2025-07-01 16:19:11 +02:00
Nicolas K.
b6cf3e1f93 feat(cicd): #4006 migrate sonatype to maven central (#9803)
Co-authored-by: nKwiatkowski <nkwiatkowski@kestra.io>
2025-07-01 15:02:05 +02:00
YannC.
7125885ea9 fix(triggers): correctly replace the update triggers when disabling 2025-07-01 14:21:22 +02:00
YannC.
0b29a4a247 feat(triggers): avoid clearing selection when refreshing in triggers list 2025-07-01 14:21:22 +02:00
Piyush Bhaskar
0377f87c66 feat(tenant): all routes on /main tenant 2025-07-01 11:57:13 +05:30
Loïc Mathieu
06bd0c6380 fix(system)*: mitigate possible deadlock for execution delay and SLA
When multiple instances of the executor are started, the execution delay loop and the SLA monitoring loop risk duplicating execution resumes or execution SLA violation computations.
This could create race conditions and duplicate execution updates.
It may also create deadlocks, as two instances of the executor may try to lock the same execution to restart it (or fail it due to SLA).
2025-06-30 14:33:54 +02:00
brian.mulier
cd39995f24 fix(core): use namespace prefix instead of equals
On the namespace/flows, namespace/executions pages and when having a default namespace on Logs page

closes kestra-io/kestra-ee#4200
2025-06-25 17:48:54 +02:00
Loïc Mathieu
938e156bd5 chore(system): call the close runnable later 2025-06-25 14:37:46 +02:00
brian.mulier
1fb7943738 chore(version): update to version '0.23.3' 2025-06-24 17:33:04 +02:00
brian-mulier-p
09d648cf86 fix(variables): put fixtures files with arbitrary key and extract it back as root level "files" variable (#9689) 2025-06-24 17:32:37 +02:00
brian.mulier
02a22faed4 chore(version): update to version '0.23.2' 2025-06-24 14:19:20 +02:00
Ludovic DEHON
169d6610f5 test(core): fix failing test on schedule 2025-06-24 14:19:20 +02:00
Loïc Mathieu
e253958cf4 fix(system): possible NPE on trigger when computing variables 2025-06-24 14:19:20 +02:00
brian-mulier-p
c75f06a036 fix: avoid failure to deserialize json objects that have unknown fields with http client (#9668)
closes #9667
2025-06-24 14:19:20 +02:00
Loïc Mathieu
b3b1b7a5cb feat(executions)*: add tasks to set and unset execution variables
Closes #9555
2025-06-24 14:19:20 +02:00
Loïc Mathieu
34e07b9e2b fix(execution): parent flow never ends when a subflow fails due to SLA
This is because the executor didn't have the flow inside it so the execution is not correctly terminated.
It may fix other issues (like flow triggers, purge, ...)

Fixes #9618
2025-06-20 18:04:12 +02:00
Loïc Mathieu
85b449c926 fix(system): flow graph fails to be created while editing a flow
Fixes #9551

It is not the validation per se that fails; it's the graph dependency computation, which is also done while editing a flow, that fails.
2025-06-20 12:09:18 +02:00
Loïc Mathieu
0017ead9b3 fix(system)*: runIf inside a WorkingDirectory can crash the Worker
Fixes #9639
2025-06-20 12:09:04 +02:00
Barthélémy Ledoux
b0292f02f7 fix(ui): default value for expression cannot be null (#9636) 2025-06-20 11:12:32 +02:00
Piyush Bhaskar
202dc7308d feat(namespaces): show ns description (#9610)
* feat(namespaces): show ns description

* add slot and data for description
2025-06-20 13:59:03 +05:30
François Delbrayelle
3273a9a40c fix(plugin-versioning): replace current JAR if more recent (#9629) 2025-06-20 09:51:21 +02:00
Loïc Mathieu
bd303f4529 fix(system): support allowFailure and allowWarning for the Pause task
Fixes #9416
2025-06-19 17:34:38 +02:00
Barthélémy Ledoux
db57326f0f tests: nocode editor (#9624) 2025-06-19 14:21:15 +02:00
github-actions[bot]
90a576490f chore(version): update to version '0.23.1' 2025-06-19 10:32:53 +00:00
Loïc Mathieu
2cdd968100 feat(system): store version in the settings 2025-06-19 12:23:20 +02:00
Barthélémy Ledoux
adfc3bf526 perf(ui): load a sample schema while waiting (#9558) 2025-06-19 11:34:15 +02:00
Nicolas K.
3a61f9b1ba Fix/tutorial flows with migration (#9620)
* fix(core): #9609 delete tutorial flows and triggers before migrating the database

* fix(core): #9609 delete tutorial flows and triggers before migrating the database for EE version

---------

Co-authored-by: nKwiatkowski <nkwiatkowski@kestra.io>
2025-06-19 10:58:29 +02:00
YannC
64e3014426 fix: correctly use default tenant when synchronizing file with local (#9605)
close #9568
2025-06-19 10:04:58 +02:00
François Delbrayelle
1f68e5f4ed fix(podman): do not pass the tag directly to pullImageCmd (withTag) (#9607) 2025-06-18 18:50:54 +02:00
François Delbrayelle
9bfa888e36 fix(plugin): FileSystems.newFileSystem caused a Path component should be / in plugins tests (#9570) 2025-06-18 16:03:45 +02:00
github-actions[bot]
691a77538a chore(version): update to version '0.23.0' 2025-06-17 09:35:23 +00:00
Bart Ledoux
b07086f553 chore: update ui-libs 2025-06-17 11:21:21 +02:00
Ludovic DEHON
ee12c884e9 fix(tasks): sleep example is a full one 2025-06-16 15:02:34 +02:00
Barthélémy Ledoux
712d6da84f fix(ui): make file panel appear beside main panel in namespace (#9546) 2025-06-16 14:45:05 +02:00
Bart Ledoux
fcc5fa2056 fix: package-lock 2025-06-16 14:44:01 +02:00
Loïc Mathieu
dace30ded7 fix(system): compilation issue 2025-06-16 14:18:55 +02:00
github-actions[bot]
2b578f0f94 chore(version): update to version '0.23.0-rc5-SNAPSHOT' 2025-06-16 12:05:27 +00:00
Florian Hussonnois
91f958b26b fix(executor): delete WorkerJobRunning for any terminated task (#9493)
Make ExecutorService responsible for deleting WorkerJobRunning
when a terminated TaskRun is added to an execution.

Changes:
 - Remove unnecessary read before delete on WorkerJobRunning table.

Close: #9493
2025-06-16 14:03:11 +02:00
Bart Ledoux
d7fc6894fe tests: fix storybook tests 2025-06-16 13:29:34 +02:00
Bart Ledoux
c286348d27 fix(ui): make array and KV Pairs work in nocode 2025-06-16 12:17:23 +02:00
brian.mulier
de4ec49721 fix(core): yaml utils migration 2025-06-16 11:18:47 +02:00
Barthélémy Ledoux
1966ac6012 fix: cleanup empty metadata to fix variable creation (#9529) 2025-06-16 11:17:52 +02:00
Barthélémy Ledoux
a293a37ec9 fix(ui): nocode API calls on EE needs tenant (#9527) 2025-06-16 11:17:43 +02:00
Barthélémy Ledoux
f295724bb6 fix: small tweaks on tabs (#9520) 2025-06-16 11:17:34 +02:00
Barthélémy Ledoux
06505ad977 fix(ui): snafu on duplicate input pair (#9514) 2025-06-16 11:15:30 +02:00
Barthélémy Ledoux
cb31ef642f fix(ui): [nocode] make dag tasks work (#9506) 2025-06-16 11:14:17 +02:00
Barthélémy Ledoux
c320323371 fix(ui): nocode updating inputs from yaml (#9430) 2025-06-16 11:12:35 +02:00
Barthélémy Ledoux
a190cdd0e7 fix(ui): add datepicker to nocode string field (#9351)
Co-authored-by: GitHub Action <actions@github.com>
Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
2025-06-16 11:12:27 +02:00
Barthélémy Ledoux
0678f7c5e9 fix(ui): rename namespace field (#9492) 2025-06-16 11:08:05 +02:00
Barthélémy Ledoux
f39ba5c95e fix(ui): prevent cursor change in Editor component when modelValue is updated from outside (#9371) 2025-06-16 11:07:55 +02:00
Karuna Tata
b4e334c5d8 feat(ui): drag and convert tabs to panels (#9198)
Co-authored-by: Barthélémy Ledoux <bledoux@kestra.io>
2025-06-16 11:07:37 +02:00
Bart Ledoux
561380c942 fix(ui): restore add button as a button 2025-06-16 11:07:25 +02:00
Satvik Kushwaha
68b4867b5a fix(ui): make download and preview visible for text outputs (#8348)
Co-authored-by: Barthélémy Ledoux <ledouxb@me.com>
2025-06-16 11:06:24 +02:00
Barthélémy Ledoux
cb7f99d107 fix(ui): variables should work with duplicated keys (#9425) 2025-06-16 11:05:17 +02:00
Barthélémy Ledoux
efac7146ff fix: properly detect condition fields (#9353) 2025-06-16 11:02:41 +02:00
Barthélémy Ledoux
11de42c0b8 fix(ui): nocode - open onPause in a new tab (#9366) 2025-06-16 11:02:31 +02:00
Barthélémy Ledoux
b58d9e10dd fix: initialize array fields without any value (#9367) 2025-06-16 11:00:04 +02:00
Barthélémy Ledoux
e25e70d37e refactor: load nocode root form from server schema (#9327) 2025-06-16 10:59:53 +02:00
Karuna Tata
f2dac28997 fix(ui): clear selection of retry form radio buttons (#9268)
Co-authored-by: Barthélémy Ledoux <ledouxb@me.com>
thank you so much for this great work! ❤️
2025-06-16 10:59:44 +02:00
Barthélémy Ledoux
0ac8819d95 fix(ui): allow key of sub-tasks to be other than tasks (#9333) 2025-06-16 10:59:24 +02:00
Ludovic DEHON
d261de0df3 fix(core): robots.txt was not served
close kestra-io/kestra#9015
2025-06-13 23:01:48 +02:00
brian.mulier
02cac65614 fix(core): filters were triggering endless refresh
closes #9508
2025-06-13 16:25:34 +02:00
MilosPaunovic
5064687b7e fix(core)*: make sure tour always opens with code & topology tabs visible (#9513)
Closes https://github.com/kestra-io/kestra-ee/issues/4073.
2025-06-13 08:55:20 +02:00
YannC
7c8419b266 fix(ui): Better duplicate key management in the pair component (#9431)
* fix(ui): Better duplicate key management in the pair component

close #9220

* fix(ui): add a have-error prop on inputText that shows a red shadow

* refactor: simplify inputpair component (#9491)

* fix: only show lock if disabled

* alertState define order

---------

Co-authored-by: Barthélémy Ledoux <bledoux@kestra.io>
2025-06-12 13:28:02 +02:00
Roman Acevedo
84e4c62c6d fix(tests): test editor was showing the previously shown plugin doc
fixes https://github.com/kestra-io/kestra-ee/issues/4066
2025-06-12 13:21:29 +02:00
Nicolas K.
9aa605e23b Feat/rework compatibility layer (#9490)
* feat(core): rework compatibility layer

* feat(core): #4062 rework compatibility layer

---------

Co-authored-by: nKwiatkowski <nkwiatkowski@kestra.io>
2025-06-12 10:42:49 +02:00
Roman Acevedo
faa77aed79 feat(tests): add execution url in test result 2025-06-12 10:03:05 +02:00
brian-mulier-p
fdce552528 feat(core): introduce tasksWithState autocompletion (#9485)
part of #8350
2025-06-12 09:55:57 +02:00
brian.mulier
a028a61792 fix(core): avoid infinite load upon route redirect (#9480)
closes #9479
2025-06-11 17:03:52 +02:00
brian.mulier
023a77a320 fix(core): properly map labels filters from query (#9480)
closes #9324
2025-06-11 17:03:52 +02:00
brian.mulier
bfee04bca2 fix(core): prevent incompatible timeRange & start/endDate filters + prevent multiple scope filters (#9480)
closes #9240
2025-06-11 17:03:52 +02:00
YannC
3756f01bdf fix(ui): base the required prop on the requiredProperties list (#9433)
close #9377
2025-06-11 13:09:27 +02:00
YannC
c1240d7391 feat(ui): allow to close a tab with mouse middle click like in a navigator/ide (#9434) 2025-06-11 08:55:13 +02:00
YannC
ac37ae6032 fix(core): use Min annotation instead of Positive (#9432)
close #9380
2025-06-10 17:15:11 +02:00
github-actions[bot]
9e51b100b0 chore(version): update to version '0.23.0-rc3-SNAPSHOT' 2025-06-10 12:51:54 +00:00
Miloš Paunović
bc81e01608 fix(core)*: properly display chart colors for logs (#9429) 2025-06-10 13:51:56 +02:00
YannC.
9f2162c942 feat(): add Kestra plugin in the list 2025-06-10 12:44:09 +02:00
brian-mulier-p
97992d99ee fix(core): properly handle dots in nested keys & commas in quoted filter values (#9410) 2025-06-10 11:55:30 +02:00
brian.mulier
f90f6b8429 chore(deps): bump vitest to 3.2.3 2025-06-10 11:55:30 +02:00
brian.mulier
0f7360ae81 build(tests): replace workspaces with proper storybook config + working aliases 2025-06-10 11:53:11 +02:00
Florian Hussonnois
938590f31f fix(plugins): check whether plugin registry support versioning (#9122) 2025-06-10 11:49:40 +02:00
YannC.
b2d1c84a86 fix(): display correctly doc/chart preview when editing custom dashboard
close #9411
2025-06-10 10:25:41 +02:00
Ludovic DEHON
d7ca302830 feat(system): add server_type as global metrics tags 2025-06-10 09:23:14 +02:00
Roman Acevedo
8656e852cc build(ci): fix setversion workflow not making tag push trigger main 2025-06-09 18:03:49 +02:00
brian-mulier-p
cc72336350 fix(core): avoid adding invalid keys from query parameters to filter (#9383)
closes #9364
2025-06-09 18:03:49 +02:00
Roman Acevedo
316d89764e tests(core): add storybook on executions filters (#9354) 2025-06-09 18:03:49 +02:00
Barthélémy Ledoux
4873bf4d36 chore: upgrade storybook (#9326) 2025-06-09 14:40:21 +02:00
Florian Hussonnois
204bf7f5e1 chore: add script to update gradle kestraVersion prop on plugins 2025-06-09 14:31:45 +02:00
Loïc Mathieu
1e0950fdf8 fix(system): import flow should set the tenantId 2025-06-09 13:51:53 +02:00
github-actions[bot]
4cddc704f4 chore(version): update to version '0.23.0-rc2-SNAPSHOT' 2025-06-09 10:48:43 +00:00
Miloš Paunović
f2f0e29f93 fix(namespaces): properly load flows when changing namespace (#9393)
Closes https://github.com/kestra-io/kestra/issues/9352.
2025-06-09 12:34:36 +02:00
Miloš Paunović
95011e022e fix(namespaces): reload namespace once the id parameter changes (#9372)
Closes https://github.com/kestra-io/kestra-ee/issues/3630.
2025-06-06 12:25:37 +02:00
brian.mulier
65503b708a fix(core): add DefaultFilterLanguage as default in KestraFilter
closes #9365
2025-06-05 17:42:34 +02:00
brian-mulier-p
876b8cb2e6 fix(core): avoid crashing in case of taskrun having too large value (#9359)
closes #9312
2025-06-05 14:11:37 +02:00
Nicolas K.
f3b7592dfa fix(flows): #9319 error when pause with timeout triggers an execution (#9334)
* fix(flows): #9319 error when pause with timeout triggers an execution even after it's terminated

* fix(flows): only skip paused flow when execution is terminated

---------

Co-authored-by: nKwiatkowski <nkwiatkowski@kestra.io>
2025-06-05 10:15:49 +02:00
brian.mulier
4dbeaf86bb fix(core): larger debounce for filter 2025-06-05 09:48:53 +02:00
brian.mulier
f98e78399d fix(core): handle whitespaces in label key and value 2025-06-05 09:48:43 +02:00
brian.mulier
71dac0f311 fix(core): smarter autocomplete order in editor 2025-06-05 09:48:00 +02:00
brian-mulier-p
3077d0ac7a fix(core): additional plugins are now properly shown in plugin docs (#9329)
closes kestra-io/plugin-langchain4j#61
2025-06-05 09:46:57 +02:00
YannC.
9504bbaffe fix(ci): put back bump helm chart and remove if condition 2025-06-05 08:48:56 +02:00
YannC.
159c9373ad fix(ci): checkout actions from main branch 2025-06-04 21:12:56 +02:00
YannC.
55b9088b55 fix(ci): modify actions order 2025-06-04 21:06:17 +02:00
YannC.
601d1a0abb fix(ci): Correctly pass all the secrets through all workflows 2025-06-04 15:10:33 +02:00
Florian Hussonnois
4a1cf98f26 chore(version): bump to version '0.23.0-rc1-SNAPSHOT' 2025-06-04 14:07:30 +02:00
1834 changed files with 76859 additions and 111747 deletions

View File

@@ -37,16 +37,16 @@ ARG OS_ARCHITECTURE
RUN mkdir -p /usr/java
RUN echo "Building on platform: $BUILDPLATFORM"
RUN case "$BUILDPLATFORM" in \
"linux/amd64") OS_ARCHITECTURE="x64_linux" ;; \
"linux/arm64") OS_ARCHITECTURE="aarch64_linux" ;; \
"darwin/amd64") OS_ARCHITECTURE="x64_mac" ;; \
"darwin/arm64") OS_ARCHITECTURE="aarch64_mac" ;; \
"linux/amd64") OS_ARCHITECTURE="linux-x64" ;; \
"linux/arm64") OS_ARCHITECTURE="linux-aarch64" ;; \
"darwin/amd64") OS_ARCHITECTURE="macos-x64" ;; \
"darwin/arm64") OS_ARCHITECTURE="macos-aarch64" ;; \
*) echo "Unsupported BUILDPLATFORM: $BUILDPLATFORM" && exit 1 ;; \
esac && \
wget "https://github.com/adoptium/temurin21-binaries/releases/download/jdk-21.0.7%2B6/OpenJDK21U-jdk_${OS_ARCHITECTURE}_hotspot_21.0.7_6.tar.gz" && \
mv OpenJDK21U-jdk_${OS_ARCHITECTURE}_hotspot_21.0.7_6.tar.gz openjdk-21.0.7.tar.gz
RUN tar -xzvf openjdk-21.0.7.tar.gz && \
mv jdk-21.0.7+6 jdk-21 && \
wget "https://aka.ms/download-jdk/microsoft-jdk-21.0.6-$OS_ARCHITECTURE.tar.gz" && \
mv "microsoft-jdk-21.0.6-$OS_ARCHITECTURE.tar.gz" microsoft-jdk-21.0.6.tar.gz
RUN tar -xzvf microsoft-jdk-21.0.6.tar.gz && \
mv jdk-21.0.6+7 jdk-21 && \
mv jdk-21 /usr/java/
ENV JAVA_HOME=/usr/java/jdk-21
ENV PATH="$PATH:$JAVA_HOME/bin"

View File

@@ -23,15 +23,15 @@ In the meantime, you can move onto the next step...
---
### Requirements
- Java 21 (LTS versions).
> ⚠️ Java 24 and above are not supported yet and will fail with `invalid source release: 21`.
- Gradle (comes with wrapper `./gradlew`)
- Docker (optional, for running Kestra in containers)
### Development:
- Create a `.env.development.local` file in the `ui` folder and paste the following:
```bash
# This lets the frontend know what the backend URL is but you are free to change this to your actual server URL e.g. hosted version of Kestra.
VITE_APP_API_URL=http://localhost:8080
```
- Navigate into the `ui` folder and run `npm install` to install the dependencies for the frontend project.
- Now go to the `cli/src/main/resources` folder and create a `application-override.yml` file.
@@ -74,6 +74,9 @@ kestra:
path: /tmp/kestra-wd/tmp
anonymous-usage-report:
enabled: false
server:
basic-auth:
enabled: false
datasources:
postgres:

View File

@@ -39,7 +39,7 @@
"yoavbls.pretty-ts-errors",
"github.vscode-github-actions",
"vscjava.vscode-java-pack",
"docker.docker"
"ms-azuretools.vscode-docker"
]
}
}

View File

@@ -32,7 +32,7 @@ Watch out for duplicates! If you are creating a new issue, please check existing
#### Requirements
The following dependencies are required to build Kestra locally:
- Java 21+
- Node 22+ and npm 10+
- Node 18+ and npm
- Python 3, pip and python venv
- Docker & Docker Compose
- an IDE (Intellij IDEA, Eclipse or VS Code)

View File

@@ -1,13 +1,10 @@
name: Bug report
description: Report a bug or unexpected behavior in the project
labels: ["bug", "area/backend", "area/frontend"]
description: File a bug report
body:
- type: markdown
attributes:
value: |
Thanks for reporting an issue! Please provide a [Minimal Reproducible Example](https://stackoverflow.com/help/minimal-reproducible-example) and share any additional information that may help reproduce, troubleshoot, and hopefully fix the issue, including screenshots, error traceback, and your Kestra server logs. For quick questions, you can contact us directly on [Slack](https://kestra.io/slack). Don't forget to give us a star! ⭐
Thanks for reporting an issue! Please provide a [Minimal Reproducible Example](https://stackoverflow.com/help/minimal-reproducible-example) and share any additional information that may help reproduce, troubleshoot, and hopefully fix the issue, including screenshots, error traceback, and your Kestra server logs. For quick questions, you can contact us directly on [Slack](https://kestra.io/slack).
- type: textarea
attributes:
label: Describe the issue
@@ -23,3 +20,7 @@ body:
- Kestra Version: develop
validations:
required: false
labels:
- bug
- area/backend
- area/frontend

View File

@@ -1,4 +1,4 @@
contact_links:
- name: Chat
url: https://kestra.io/slack
about: Chat with us on Slack
about: Chat with us on Slack.

View File

@@ -1,12 +1,13 @@
name: Feature request
description: Suggest a new feature or improvement to enhance the project
labels: ["enhancement", "area/backend", "area/frontend"]
description: Create a new feature request
body:
- type: textarea
attributes:
label: Feature description
placeholder: Tell us more about your feature request. Don't forget to give us a star! ⭐
placeholder: Tell us more about your feature request
validations:
required: true
labels:
- enhancement
- area/backend
- area/frontend

View File

@@ -26,10 +26,6 @@ updates:
open-pull-requests-limit: 50
labels:
- "dependency-upgrade"
ignore:
- dependency-name: "com.google.protobuf:*"
# Ignore versions of Protobuf that are equal to or greater than 4.0.0 as Orc still uses 3
versions: [ "[4,)" ]
# Maintain dependencies for NPM modules
- package-ecosystem: "npm"

View File

@@ -35,4 +35,4 @@ Remove this section if this change applies to all flows or to the documentation
If there are no setup requirements, you can remove this section.
Thank you for your contribution. ❤️ Don't forget to give us a star! ⭐ -->
Thank you for your contribution. ❤️ -->

View File

@@ -1,67 +0,0 @@
name: Auto-Translate UI keys and create PR
on:
schedule:
- cron: "0 9-21/3 * * 1-5" # Every 3 hours from 9 AM to 9 PM, Monday to Friday
workflow_dispatch:
inputs:
retranslate_modified_keys:
description: "Whether to re-translate modified keys even if they already have translations."
type: choice
options:
- "false"
- "true"
default: "false"
required: false
jobs:
translations:
name: Translations
runs-on: ubuntu-latest
timeout-minutes: 10
steps:
- uses: actions/checkout@v5
name: Checkout
with:
fetch-depth: 0
- name: Set up Python
uses: actions/setup-python@v6
with:
python-version: "3.x"
- name: Install Python dependencies
run: pip install gitpython openai
- name: Generate translations
run: python ui/src/translations/generate_translations.py ${{ github.event.inputs.retranslate_modified_keys }}
env:
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
- name: Set up Node
uses: actions/setup-node@v6
with:
node-version: "20.x"
- name: Set up Git
run: |
git config --global user.name "GitHub Action"
git config --global user.email "actions@github.com"
- name: Commit and create PR
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
BRANCH_NAME="chore/update-translations-$(date +%s)"
git checkout -b $BRANCH_NAME
git add ui/src/translations/*.json
if git diff --cached --quiet; then
echo "No changes to commit. Exiting with success."
exit 0
fi
git commit -m "chore(core): localize to languages other than english" -m "Extended localization support by adding translations for multiple languages using English as the base. This enhances accessibility and usability for non-English-speaking users while keeping English as the source reference."
git push -u origin $BRANCH_NAME || (git push origin --delete $BRANCH_NAME && git push -u origin $BRANCH_NAME)
gh pr create --title "Translations from en.json" --body $'This PR was created automatically by a GitHub Action.\n\nSomeone from the @kestra-io/frontend team needs to review and merge.' --base ${{ github.ref_name }} --head $BRANCH_NAME
- name: Check keys matching
run: node ui/src/translations/check.js

View File

@@ -1,85 +0,0 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
name: "CodeQL"
on:
schedule:
- cron: '0 5 * * 1'
workflow_dispatch: {}
jobs:
analyze:
name: Analyze
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
# Override automatic language detection by changing the below list
# Supported options are ['csharp', 'cpp', 'go', 'java', 'javascript', 'python']
language: ['java', 'javascript']
# Learn more...
# https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#overriding-automatic-language-detection
steps:
- name: Checkout repository
uses: actions/checkout@v5
with:
# We must fetch at least the immediate parents so that if this is
# a pull request then we can checkout the head.
fetch-depth: 2
# If this run was triggered by a pull request event, then checkout
# the head of the pull request instead of the merge commit.
- run: git checkout HEAD^2
if: ${{ github.event_name == 'pull_request' }}
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v4
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# queries: ./path/to/local/query, your-org/your-repo/queries@main
# Set up JDK
- name: Set up JDK
uses: actions/setup-java@v5
if: ${{ matrix.language == 'java' }}
with:
distribution: 'temurin'
java-version: 21
- name: Setup gradle
if: ${{ matrix.language == 'java' }}
uses: gradle/actions/setup-gradle@v5
- name: Build with Gradle
if: ${{ matrix.language == 'java' }}
run: ./gradlew testClasses -x :ui:assembleFrontend
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
if: ${{ matrix.language != 'java' }}
uses: github/codeql-action/autobuild@v4
# Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl
# ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
# and modify them (or add more) to build your code if your project
# uses a compiled language
#- run: |
# make bootstrap
# make release
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v4

View File

@@ -1,15 +0,0 @@
name: 'E2E tests scheduling'
# 'New E2E tests implementation started by Roman. Based on playwright in npm UI project, tests Kestra OSS develop docker image. These tests are written from zero, lets make them unflaky from the start!.'
on:
schedule:
- cron: "0 * * * *" # Every hour
workflow_dispatch:
inputs:
noInputYet:
description: 'not input yet.'
required: false
type: string
default: "no input"
jobs:
e2e:
uses: kestra-io/actions/.github/workflows/kestra-oss-e2e-tests.yml@main

View File

@@ -1,85 +0,0 @@
name: Create new release branch
run-name: "Create new release branch Kestra ${{ github.event.inputs.releaseVersion }} 🚀"
on:
workflow_dispatch:
inputs:
releaseVersion:
description: 'The release version (e.g., 0.21.0)'
required: true
type: string
nextVersion:
description: 'The next version (e.g., 0.22.0-SNAPSHOT)'
required: true
type: string
env:
RELEASE_VERSION: "${{ github.event.inputs.releaseVersion }}"
NEXT_VERSION: "${{ github.event.inputs.nextVersion }}"
jobs:
release:
name: Release Kestra
runs-on: ubuntu-latest
if: github.ref == 'refs/heads/develop'
steps:
# Checks
- name: Check Inputs
run: |
if ! [[ "$RELEASE_VERSION" =~ ^[0-9]+(\.[0-9]+)\.0$ ]]; then
echo "Invalid release version. Must match regex: ^[0-9]+(\.[0-9]+)\.0$"
exit 1
fi
if ! [[ "$NEXT_VERSION" =~ ^[0-9]+(\.[0-9]+)\.0-SNAPSHOT$ ]]; then
echo "Invalid next version. Must match regex: ^[0-9]+(\.[0-9]+)\.0-SNAPSHOT$"
exit 1;
fi
# Checkout
- uses: actions/checkout@v5
with:
fetch-depth: 0
path: kestra
# Setup build
- uses: kestra-io/actions/composite/setup-build@main
id: build
with:
java-enabled: true
node-enabled: true
python-enabled: true
caches-enabled: true
- name: Configure Git
run: |
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
git config --global user.name "github-actions[bot]"
# Execute
- name: Run Gradle Release
env:
GITHUB_PAT: ${{ secrets.GH_PERSONAL_TOKEN }}
run: |
# Extract the major and minor versions
BASE_VERSION=$(echo "$RELEASE_VERSION" | sed -E 's/^([0-9]+\.[0-9]+)\..*/\1/')
PUSH_RELEASE_BRANCH="releases/v${BASE_VERSION}.x"
cd kestra
# Create and push release branch
git checkout -B "$PUSH_RELEASE_BRANCH";
git pull origin "$PUSH_RELEASE_BRANCH" --rebase || echo "No existing branch to pull";
git push -u origin "$PUSH_RELEASE_BRANCH";
# Run gradle release
git checkout develop;
if [[ "$RELEASE_VERSION" == *"-SNAPSHOT" ]]; then
./gradlew release -Prelease.useAutomaticVersion=true \
-Prelease.releaseVersion="${RELEASE_VERSION}" \
-Prelease.newVersion="${NEXT_VERSION}" \
-Prelease.pushReleaseVersionBranch="${PUSH_RELEASE_BRANCH}" \
-Prelease.failOnSnapshotDependencies=false
else
./gradlew release -Prelease.useAutomaticVersion=true \
-Prelease.releaseVersion="${RELEASE_VERSION}" \
-Prelease.newVersion="${NEXT_VERSION}" \
-Prelease.pushReleaseVersionBranch="${PUSH_RELEASE_BRANCH}"
fi

View File

@@ -1,74 +0,0 @@
name: Run Gradle Release for Kestra Plugins
on:
workflow_dispatch:
inputs:
releaseVersion:
description: 'The release version (e.g., 0.21.0)'
required: true
type: string
nextVersion:
description: 'The next version (e.g., 0.22.0-SNAPSHOT)'
required: true
type: string
dryRun:
description: 'Use DRY_RUN mode'
required: false
default: 'false'
jobs:
release:
name: Release plugins
runs-on: ubuntu-latest
steps:
# Checkout
- uses: actions/checkout@v5
with:
fetch-depth: 0
# Setup build
- uses: kestra-io/actions/composite/setup-build@main
id: build
with:
java-enabled: true
node-enabled: true
python-enabled: true
# Get Plugins List
- name: Get Plugins List
uses: kestra-io/actions/composite/kestra-oss/kestra-oss-plugins-list@main
id: plugins-list
with:
plugin-version: 'LATEST'
- name: 'Configure Git'
run: |
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
git config --global user.name "github-actions[bot]"
# Execute
- name: Run Gradle Release
if: ${{ github.event.inputs.dryRun == 'false' }}
env:
GITHUB_PAT: ${{ secrets.GH_PERSONAL_TOKEN }}
run: |
chmod +x ./dev-tools/release-plugins.sh;
./dev-tools/release-plugins.sh \
--release-version=${{github.event.inputs.releaseVersion}} \
--next-version=${{github.event.inputs.nextVersion}} \
--yes \
${{ steps.plugins-list.outputs.repositories }}
- name: Run Gradle Release (DRY_RUN)
if: ${{ github.event.inputs.dryRun == 'true' }}
env:
GITHUB_PAT: ${{ secrets.GH_PERSONAL_TOKEN }}
run: |
chmod +x ./dev-tools/release-plugins.sh;
./dev-tools/release-plugins.sh \
--release-version=${{github.event.inputs.releaseVersion}} \
--next-version=${{github.event.inputs.nextVersion}} \
--dry-run \
--yes \
${{ steps.plugins-list.outputs.repositories }}

View File

@@ -1,60 +0,0 @@
name: Set Version and Tag Plugins
on:
workflow_dispatch:
inputs:
releaseVersion:
description: 'The release version (e.g., 0.21.0)'
required: true
type: string
dryRun:
description: 'Use DRY_RUN mode'
required: false
default: 'false'
jobs:
tag:
name: Release plugins
runs-on: ubuntu-latest
steps:
# Checkout
- uses: actions/checkout@v5
with:
fetch-depth: 0
# Get Plugins List
- name: Get Plugins List
uses: kestra-io/actions/composite/kestra-oss/kestra-oss-plugins-list@main
id: plugins-list
with:
plugin-version: 'LATEST'
- name: 'Configure Git'
run: |
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
git config --global user.name "github-actions[bot]"
# Execute
- name: Set Version and Tag Plugins
if: ${{ github.event.inputs.dryRun == 'false' }}
env:
GITHUB_PAT: ${{ secrets.GH_PERSONAL_TOKEN }}
run: |
chmod +x ./dev-tools/setversion-tag-plugins.sh;
./dev-tools/setversion-tag-plugins.sh \
--release-version=${{github.event.inputs.releaseVersion}} \
--yes \
${{ steps.plugins-list.outputs.repositories }}
- name: Set Version and Tag Plugins (DRY_RUN)
if: ${{ github.event.inputs.dryRun == 'true' }}
env:
GITHUB_PAT: ${{ secrets.GH_PERSONAL_TOKEN }}
run: |
chmod +x ./dev-tools/setversion-tag-plugins.sh;
./dev-tools/setversion-tag-plugins.sh \
--release-version=${{github.event.inputs.releaseVersion}} \
--dry-run \
--yes \
${{ steps.plugins-list.outputs.repositories }}

View File

@@ -1,65 +0,0 @@
name: Start release
run-name: "Start release of Kestra ${{ github.event.inputs.releaseVersion }} 🚀"
on:
workflow_dispatch:
inputs:
releaseVersion:
description: 'The release version (e.g., 0.21.1)'
required: true
type: string
permissions:
contents: write
env:
RELEASE_VERSION: "${{ github.event.inputs.releaseVersion }}"
jobs:
release:
name: Release Kestra
runs-on: ubuntu-latest
steps:
- name: Parse and Check Inputs
id: parse-and-check-inputs
run: |
CURRENT_BRANCH="${{ github.ref_name }}"
if ! [[ "$CURRENT_BRANCH" == "develop" ]]; then
echo "You can only run this workflow on develop, but you ran it on $CURRENT_BRANCH"
exit 1
fi
if ! [[ "$RELEASE_VERSION" =~ ^[0-9]+(\.[0-9]+)(\.[0-9]+)(-rc[0-9])?(-SNAPSHOT)?$ ]]; then
echo "Invalid release version. Must match regex: ^[0-9]+(\.[0-9]+)(\.[0-9]+)-(rc[0-9])?(-SNAPSHOT)?$"
exit 1
fi
# Extract the major and minor versions
BASE_VERSION=$(echo "$RELEASE_VERSION" | sed -E 's/^([0-9]+\.[0-9]+)\..*/\1/')
RELEASE_BRANCH="releases/v${BASE_VERSION}.x"
echo "release_branch=${RELEASE_BRANCH}" >> $GITHUB_OUTPUT
# Checkout
- name: Checkout
uses: actions/checkout@v5
with:
fetch-depth: 0
token: ${{ secrets.GH_PERSONAL_TOKEN }}
ref: ${{ steps.parse-and-check-inputs.outputs.release_branch }}
# Configure
- name: Git - Configure
run: |
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
git config --global user.name "github-actions[bot]"
# Execute
- name: Start release by updating version and pushing a new tag
env:
GITHUB_PAT: ${{ secrets.GH_PERSONAL_TOKEN }}
run: |
# Update version
sed -i "s/^version=.*/version=$RELEASE_VERSION/" ./gradle.properties
git add ./gradle.properties
git commit -m"chore(version): update to version '$RELEASE_VERSION'"
git push
git tag -a "v$RELEASE_VERSION" -m"v$RELEASE_VERSION"
git push --tags

View File

@@ -67,24 +67,20 @@ jobs:
end:
runs-on: ubuntu-latest
needs: [backend-tests, frontend-tests, publish-develop-docker, publish-develop-maven]
if: "always() && github.repository == 'kestra-io/kestra'"
needs: [publish-develop-docker, publish-develop-maven]
if: always()
steps:
- run: echo "end CI of failed or success"
- name: Trigger EE Workflow
uses: peter-evans/repository-dispatch@5fc4efd1a4797ddb68ffd0714a238564e4cc0e6f # v4
if: "!contains(needs.*.result, 'failure') && github.ref == 'refs/heads/develop'"
uses: peter-evans/repository-dispatch@v3
if: github.ref == 'refs/heads/develop' && needs.release.result == 'success'
with:
token: ${{ secrets.GH_PERSONAL_TOKEN }}
repository: kestra-io/kestra-ee
event-type: "oss-updated"
# Slack
- run: echo "mark job as failure to forward error to Slack action" && exit 1
if: ${{ contains(needs.*.result, 'failure') }}
- name: Slack - Notification
if: ${{ always() && contains(needs.*.result, 'failure') }}
if: ${{ failure() && env.SLACK_WEBHOOK_URL != 0 && (github.ref == 'refs/heads/master' || github.ref == 'refs/heads/main' || github.ref == 'refs/heads/develop') }}
uses: kestra-io/actions/composite/slack-status@main
with:
webhook-url: ${{ secrets.SLACK_WEBHOOK_URL }}
channel: 'C09FF36GKE1'

View File

@@ -13,6 +13,11 @@ on:
required: true
type: boolean
default: false
plugin-version:
description: '(deprecated) Plugin version window for old Kestra releases using .plugins file (0.22 to 0.24). If omitted, then plugin list will be fetched from the API compatible versions endpoint'
required: false
type: string
default: "[0.23,0.24)"
dry-run:
description: 'Dry run mode that will not write or release anything'
required: true
@@ -25,6 +30,7 @@ jobs:
if: startsWith(github.ref, 'refs/tags/v')
uses: kestra-io/actions/.github/workflows/kestra-oss-publish-docker.yml@main
with:
plugin-version: ${{ inputs.plugin-version }}
retag-latest: ${{ inputs.retag-latest }}
retag-lts: ${{ inputs.retag-lts }}
dry-run: ${{ inputs.dry-run }}

View File

@@ -43,82 +43,8 @@ jobs:
# Upload dependency check report
- name: Upload dependency check report
uses: actions/upload-artifact@v5
uses: actions/upload-artifact@v4
if: ${{ always() }}
with:
name: dependency-check-report
path: build/reports/dependency-check-report.html
develop-image-check:
name: Image Check (develop)
runs-on: ubuntu-latest
permissions:
contents: read
security-events: write
actions: read
steps:
# Checkout
- uses: actions/checkout@v5
with:
fetch-depth: 0
# Setup build
- uses: kestra-io/actions/composite/setup-build@main
id: build
with:
java-enabled: false
node-enabled: false
# Run Trivy image scan for Docker vulnerabilities, see https://github.com/aquasecurity/trivy-action
- name: Docker Vulnerabilities Check
uses: aquasecurity/trivy-action@b6643a29fecd7f34b3597bc6acb0a98b03d33ff8 # 0.33.1
with:
image-ref: kestra/kestra:develop
format: 'template'
template: '@/contrib/sarif.tpl'
severity: 'CRITICAL,HIGH'
output: 'trivy-results.sarif'
skip-dirs: /app/plugins
- name: Upload Trivy scan results to GitHub Security tab
uses: github/codeql-action/upload-sarif@v4
with:
sarif_file: 'trivy-results.sarif'
category: docker-
latest-image-check:
name: Image Check (latest)
runs-on: ubuntu-latest
permissions:
contents: read
security-events: write
actions: read
steps:
# Checkout
- uses: actions/checkout@v5
with:
fetch-depth: 0
# Setup build
- uses: kestra-io/actions/composite/setup-build@main
id: build
with:
java-enabled: false
node-enabled: false
# Run Trivy image scan for Docker vulnerabilities, see https://github.com/aquasecurity/trivy-action
- name: Docker Vulnerabilities Check
uses: aquasecurity/trivy-action@b6643a29fecd7f34b3597bc6acb0a98b03d33ff8 # 0.33.1
with:
image-ref: kestra/kestra:latest
format: table
skip-dirs: /app/plugins
scanners: vuln
severity: 'CRITICAL,HIGH'
output: 'trivy-results.sarif'
- name: Upload Trivy scan results to GitHub Security tab
uses: github/codeql-action/upload-sarif@v4
with:
sarif_file: 'trivy-results.sarif'
category: docker-

View File

@@ -3,12 +3,10 @@
# Format: <RepositoryName>:<GroupId>:<ArtifactId>:<Version>
#
# Uncomment the lines corresponding to the plugins to be installed:
#plugin-ai:io.kestra.plugin:plugin-ai:LATEST
#plugin-airbyte:io.kestra.plugin:plugin-airbyte:LATEST
#plugin-airflow:io.kestra.plugin:plugin-airflow:LATEST
#plugin-amqp:io.kestra.plugin:plugin-amqp:LATEST
#plugin-ansible:io.kestra.plugin:plugin-ansible:LATEST
#plugin-anthropic:io.kestra.plugin:plugin-anthropic:LATEST
#plugin-aws:io.kestra.plugin:plugin-aws:LATEST
#plugin-azure:io.kestra.plugin:plugin-azure:LATEST
#plugin-cassandra:io.kestra.plugin:plugin-cassandra:LATEST
@@ -19,7 +17,6 @@
#plugin-databricks:io.kestra.plugin:plugin-databricks:LATEST
#plugin-datahub:io.kestra.plugin:plugin-datahub:LATEST
#plugin-dataform:io.kestra.plugin:plugin-dataform:LATEST
#plugin-datagen:io.kestra.plugin:plugin-datagen:LATEST
#plugin-dbt:io.kestra.plugin:plugin-dbt:LATEST
#plugin-debezium:io.kestra.plugin:plugin-debezium-db2:LATEST
#plugin-debezium:io.kestra.plugin:plugin-debezium-mongodb:LATEST
@@ -27,16 +24,13 @@
#plugin-debezium:io.kestra.plugin:plugin-debezium-oracle:LATEST
#plugin-debezium:io.kestra.plugin:plugin-debezium-postgres:LATEST
#plugin-debezium:io.kestra.plugin:plugin-debezium-sqlserver:LATEST
#plugin-deepseek:io.kestra.plugin:plugin-deepseek:LATEST
#plugin-docker:io.kestra.plugin:plugin-docker:LATEST
#plugin-elasticsearch:io.kestra.plugin:plugin-elasticsearch:LATEST
#plugin-fivetran:io.kestra.plugin:plugin-fivetran:LATEST
#plugin-fs:io.kestra.plugin:plugin-fs:LATEST
#plugin-gcp:io.kestra.plugin:plugin-gcp:LATEST
#plugin-gemini:io.kestra.plugin:plugin-gemini:LATEST
#plugin-git:io.kestra.plugin:plugin-git:LATEST
#plugin-github:io.kestra.plugin:plugin-github:LATEST
#plugin-gitlab:io.kestra.plugin:plugin-gitlab:LATEST
#plugin-googleworkspace:io.kestra.plugin:plugin-googleworkspace:LATEST
#plugin-graalvm:io.kestra.plugin:plugin-graalvm:LATEST
#plugin-graphql:io.kestra.plugin:plugin-graphql:LATEST
@@ -66,42 +60,34 @@
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-sybase:LATEST
#plugin-jenkins:io.kestra.plugin:plugin-jenkins:LATEST
#plugin-jira:io.kestra.plugin:plugin-jira:LATEST
#plugin-jms:io.kestra.plugin:plugin-jms:LATEST
#plugin-kafka:io.kestra.plugin:plugin-kafka:LATEST
#plugin-kestra:io.kestra.plugin:plugin-kestra:LATEST
#plugin-kubernetes:io.kestra.plugin:plugin-kubernetes:LATEST
#plugin-langchain4j:io.kestra.plugin:plugin-langchain4j:LATEST
#plugin-ldap:io.kestra.plugin:plugin-ldap:LATEST
#plugin-linear:io.kestra.plugin:plugin-linear:LATEST
#plugin-malloy:io.kestra.plugin:plugin-malloy:LATEST
#plugin-meilisearch:io.kestra.plugin:plugin-meilisearch:LATEST
#plugin-minio:io.kestra.plugin:plugin-minio:LATEST
#plugin-mistral:io.kestra.plugin:plugin-mistral:LATEST
#plugin-modal:io.kestra.plugin:plugin-modal:LATEST
#plugin-mongodb:io.kestra.plugin:plugin-mongodb:LATEST
#plugin-mqtt:io.kestra.plugin:plugin-mqtt:LATEST
#plugin-nats:io.kestra.plugin:plugin-nats:LATEST
#plugin-neo4j:io.kestra.plugin:plugin-neo4j:LATEST
#plugin-notifications:io.kestra.plugin:plugin-notifications:LATEST
#plugin-notion:io.kestra.plugin:plugin-notion:LATEST
#plugin-ollama:io.kestra.plugin:plugin-ollama:LATEST
#plugin-openai:io.kestra.plugin:plugin-openai:LATEST
#plugin-opensearch:io.kestra.plugin:plugin-opensearch:LATEST
#plugin-perplexity:io.kestra.plugin:plugin-perplexity:LATEST
#plugin-powerbi:io.kestra.plugin:plugin-powerbi:LATEST
#plugin-pulsar:io.kestra.plugin:plugin-pulsar:LATEST
#plugin-redis:io.kestra.plugin:plugin-redis:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-bun:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-deno:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-go:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-groovy:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-jbang:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-julia:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-jython:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-lua:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-nashorn:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-node:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-perl:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-php:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-powershell:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-python:LATEST
#plugin-scripts:io.kestra.plugin:plugin-script-r:LATEST
@@ -109,18 +95,16 @@
#plugin-scripts:io.kestra.plugin:plugin-script-shell:LATEST
#plugin-serdes:io.kestra.plugin:plugin-serdes:LATEST
#plugin-servicenow:io.kestra.plugin:plugin-servicenow:LATEST
#plugin-sifflet:io.kestra.plugin:plugin-sifflet:LATEST
#plugin-singer:io.kestra.plugin:plugin-singer:LATEST
#plugin-soda:io.kestra.plugin:plugin-soda:LATEST
#plugin-solace:io.kestra.plugin:plugin-solace:LATEST
#plugin-spark:io.kestra.plugin:plugin-spark:LATEST
#plugin-sqlmesh:io.kestra.plugin:plugin-sqlmesh:LATEST
#plugin-supabase:io.kestra.plugin:plugin-supabase:LATEST
#plugin-surrealdb:io.kestra.plugin:plugin-surrealdb:LATEST
#plugin-terraform:io.kestra.plugin:plugin-terraform:LATEST
#plugin-transform:io.kestra.plugin:plugin-transform-grok:LATEST
#plugin-transform:io.kestra.plugin:plugin-transform-json:LATEST
#plugin-tika:io.kestra.plugin:plugin-tika:LATEST
#plugin-trivy:io.kestra.plugin:plugin-trivy:LATEST
#plugin-weaviate:io.kestra.plugin:plugin-weaviate:LATEST
#plugin-zendesk:io.kestra.plugin:plugin-zendesk:LATEST
#plugin-typesense:io.kestra.plugin:plugin-typesense:LATEST
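Each uncommented line above maps to a Maven coordinate that the Makefile's `install-plugins` target passes to the Kestra CLI. A minimal sketch of the equivalent manual call, assuming the `kestra` binary is on the PATH and using the `--plugins` and `--repositories` arguments that appear in the Makefile change further down:
```bash
# Sketch: install a single plugin coordinate from the list above.
# The leading "<RepositoryName>:" prefix in the .plugins file is only a grouping label;
# the CLI receives the Maven coordinate groupId:artifactId:version.
kestra plugins install io.kestra.plugin:plugin-kafka:LATEST \
  --plugins ./plugins \
  --repositories=https://central.sonatype.com/repository/maven-snapshots
```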

AGENTS.md (305 lines changed)
View File

@@ -1,305 +0,0 @@
# Kestra AGENTS.md
This file provides guidance for AI coding agents working on the Kestra project. Kestra is an open-source data orchestration and scheduling platform built with Java (Micronaut) and Vue.js.
## Repository Layout
- **`core/`**: Core Kestra framework and task definitions
- **`cli/`**: Command-line interface and server implementation
- **`webserver/`**: REST API server implementation
- **`ui/`**: Vue.js frontend application
- **`jdbc-*`**: Database connector modules (H2, MySQL, PostgreSQL)
- **`script/`**: Script execution engine
- **`storage-local/`**: Local file storage implementation
- **`repository-memory/`**: In-memory repository implementation
- **`runner-memory/`**: In-memory execution runner
- **`processor/`**: Task processing engine
- **`model/`**: Data models and Data Transfer Objects
- **`platform/`**: Platform-specific implementations
- **`tests/`**: Integration test framework
- **`e2e-tests/`**: End-to-end testing suite
## Development Environment
### Prerequisites
- Java 21+
- Node.js 22+ and npm
- Python 3, pip, and python venv
- Docker & Docker Compose
- Gradle (wrapper included)
### Quick Setup with Devcontainer
The easiest way to get started is using the provided devcontainer:
1. Install VSCode Remote Development extension
2. Run `Dev Containers: Open Folder in Container...` from command palette
3. Select the Kestra root folder
4. Wait for Gradle build to complete
### Manual Setup
1. Clone the repository
2. Run `./gradlew build` to build the backend
3. Navigate to `ui/` and run `npm install`
4. Create configuration files as described below
## Configuration Files
### Backend Configuration
Create `cli/src/main/resources/application-override.yml`:
**Local Mode (H2 database):**
```yaml
micronaut:
server:
cors:
enabled: true
configurations:
all:
allowedOrigins:
- http://localhost:5173
```
**Standalone Mode (PostgreSQL):**
```yaml
kestra:
repository:
type: postgres
storage:
type: local
local:
base-path: "/app/storage"
queue:
type: postgres
tasks:
tmp-dir:
path: /tmp/kestra-wd/tmp
anonymous-usage-report:
enabled: false
datasources:
postgres:
url: jdbc:postgresql://host.docker.internal:5432/kestra
driverClassName: org.postgresql.Driver
username: kestra
password: k3str4
flyway:
datasources:
postgres:
enabled: true
locations:
- classpath:migrations/postgres
ignore-migration-patterns: "*:missing,*:future"
out-of-order: true
micronaut:
server:
cors:
enabled: true
configurations:
all:
allowedOrigins:
- http://localhost:5173
```
### Frontend Configuration
Create `ui/.env.development.local` for environment variables.
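What goes into that file depends on your local setup; the variable below is purely illustrative (it is not taken from the repository) and only shows the mechanics of creating the file:
```bash
# Sketch: create the frontend env file with a hypothetical variable.
# Replace VITE_EXAMPLE_API_URL with whatever variables your local setup actually needs.
cat > ui/.env.development.local <<'EOF'
VITE_EXAMPLE_API_URL=http://localhost:8080
EOF
```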
## Running the Application
### Backend
- **Local mode**: `./gradlew runLocal` (uses H2 database)
- **Standalone mode**: Use VSCode Run and Debug with main class `io.kestra.cli.App` and args `server standalone`
### Frontend
- Navigate to `ui/` directory
- Run `npm run dev` for development server (port 5173)
- Run `npm run build` for production build
## Building and Testing
### Backend
```bash
# Build the project
./gradlew build
# Run tests
./gradlew test
# Run specific module tests
./gradlew :core:test
# Clean build
./gradlew clean build
```
### Frontend
```bash
cd ui
npm install
npm run test
npm run lint
npm run build
```
### End-to-End Tests
```bash
# Build and start E2E tests
./build-and-start-e2e-tests.sh
# Or use the Makefile
make install
make install-plugins
make start-standalone-postgres
```
## Development Guidelines
### Java Backend
- Use Java 21 features
- Follow Micronaut framework patterns
- Add Swagger annotations for API documentation
- Use annotation processors (enable in IDE)
- Set `MICRONAUT_ENVIRONMENTS=local,override` for custom config
- Set `KESTRA_PLUGINS_PATH` for custom plugin loading
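A minimal sketch of wiring the two variables above into a local run; the plugin path is an example value matching the default listed in the Environment Variables table at the end of this file, and `./gradlew runLocal` is the local-mode command documented earlier:
```bash
# Sketch: point Micronaut at the local + override config and load plugins from a custom folder.
export MICRONAUT_ENVIRONMENTS=local,override
export KESTRA_PLUGINS_PATH=/workspaces/kestra/local/plugins  # example path
./gradlew runLocal
```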
### Vue.js Frontend
- Vue 3 with Composition API
- TypeScript for type safety
- Vite for build tooling
- ESLint and Prettier for code quality
- Component-based architecture in `src/components/`
### Code Style
- Follow `.editorconfig` settings
- Use 4 spaces for Java, 2 spaces for YAML/JSON/CSS
- Enable format on save in VSCode
- Use Prettier for frontend code formatting
## Testing Strategy
### Backend Testing
- Unit tests in `src/test/java/`
- Integration tests in `tests/` module
- Use Micronaut test framework
- Test both local and standalone modes
### Frontend Testing
- Unit tests with Jest
- E2E tests with Playwright
- Component testing with Storybook
- Run `npm run test:unit` and `npm run test:e2e`
## Plugin Development
### Creating Plugins
- Follow the [Plugin Developer Guide](https://kestra.io/docs/plugin-developer-guide/)
- Place JAR files in `KESTRA_PLUGINS_PATH`
- Use the plugin template structure
- Test with both local and standalone modes
### Plugin Loading
- Set `KESTRA_PLUGINS_PATH` environment variable
- Use devcontainer mounts for local development
- Plugins are loaded at startup
## Common Issues and Solutions
### JavaScript Heap Out of Memory
Set `NODE_OPTIONS=--max-old-space-size=4096` environment variable.
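For example, before running the frontend build or dev server:
```bash
# Give Node a 4 GB heap for memory-hungry Vite builds.
export NODE_OPTIONS=--max-old-space-size=4096
cd ui && npm run build
```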
### CORS Issues
Ensure backend CORS is configured for `http://localhost:5173` when using frontend dev server.
### Database Connection Issues
- Use `host.docker.internal` instead of `localhost` when connecting from devcontainer
- Verify PostgreSQL is running and accessible
- Check database credentials and permissions
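A quick connectivity check from inside the devcontainer, using the credentials from the standalone configuration shown above (a sketch only; adjust host, database, and credentials if your settings differ, and note it requires the `psql` client):
```bash
# Verify the PostgreSQL instance is reachable with the configured credentials.
psql "postgresql://kestra:k3str4@host.docker.internal:5432/kestra" -c "SELECT 1;"
```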
### Gradle Build Issues
- Clear Gradle cache: `./gradlew clean`
- Check Java version compatibility
- Verify all dependencies are available
## Pull Request Guidelines
### Before Submitting
1. Run all tests: `./gradlew test` and `npm test`
2. Check code formatting: `./gradlew spotlessCheck`
3. Verify CORS configuration if changing API
4. Test both local and standalone modes
5. Update documentation for user-facing changes
### Commit Messages
- Follow conventional commit format
- Use present tense ("Add feature" not "Added feature")
- Reference issue numbers when applicable
- Keep commits focused and atomic
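For instance, a commit following these rules could look like the sketch below (scope, summary, and issue number are placeholders):
```bash
# Conventional commit: type(scope): present-tense summary, issue reference in the body.
git commit -m "fix(core): correct flaky log message formatting" \
           -m "Fixes #1234"
```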
### Review Checklist
- [ ] All tests pass
- [ ] Code follows project style guidelines
- [ ] Documentation is updated
- [ ] No breaking changes without migration guide
- [ ] CORS properly configured if API changes
- [ ] Both local and standalone modes tested
## Useful Commands
```bash
# Quick development commands
./gradlew runLocal # Start local backend
./gradlew :ui:build # Build frontend
./gradlew clean build # Clean rebuild
npm run dev # Start frontend dev server
make install # Install Kestra locally
make start-standalone-postgres # Start with PostgreSQL
# Testing commands
./gradlew test # Run all backend tests
./gradlew :core:test # Run specific module tests
npm run test # Run frontend tests
npm run lint # Lint frontend code
```
## Getting Help
- Open a [GitHub issue](https://github.com/kestra-io/kestra/issues)
- Join the [Kestra Slack community](https://kestra.io/slack)
- Check the [main documentation](https://kestra.io/docs)
## Environment Variables
| Variable | Description | Default |
|----------|-------------|---------|
| `MICRONAUT_ENVIRONMENTS` | Custom config environments | `local,override` |
| `KESTRA_PLUGINS_PATH` | Path to custom plugins | `/workspaces/kestra/local/plugins` |
| `NODE_OPTIONS` | Node.js options | `--max-old-space-size=4096` |
| `JAVA_HOME` | Java installation path | `/usr/java/jdk-21` |
Remember: Always test your changes in both local and standalone modes, and ensure CORS is properly configured for frontend development.

View File

@@ -77,7 +77,7 @@ install-plugins:
else \
${KESTRA_BASEDIR}/bin/kestra plugins install $$CURRENT_PLUGIN \
--plugins ${KESTRA_BASEDIR}/plugins \
--repositories=https://central.sonatype.com/repository/maven-snapshots || exit 1; \
--repositories=https://s01.oss.sonatype.org/content/repositories/snapshots || exit 1; \
fi \
done < $$PLUGIN_LIST
@@ -89,7 +89,7 @@ build-docker: build-exec
--compress \
--rm \
-f ./Dockerfile \
--build-arg="APT_PACKAGES=python3 python-is-python3 python3-pip curl jattach" \
--build-arg="APT_PACKAGES=python3 python3-venv python-is-python3 python3-pip nodejs npm curl zip unzip jattach" \
--build-arg="PYTHON_LIBRARIES=kestra" \
-t ${DOCKER_IMAGE}:${VERSION} ${DOCKER_PATH} || exit 1 ;
@@ -130,6 +130,9 @@ datasources:
username: kestra
password: k3str4
kestra:
server:
basic-auth:
enabled: false
encryption:
secret-key: 3ywuDa/Ec61VHkOX3RlI9gYq7CaD0mv0Pf3DHtAXA6U=
repository:

View File

@@ -19,12 +19,9 @@
<br />
<p align="center">
<a href="https://twitter.com/kestra_io" style="margin: 0 10px;">
<img height="25" src="https://kestra.io/twitter.svg" alt="twitter" width="35" height="25" /></a>
<a href="https://www.linkedin.com/company/kestra/" style="margin: 0 10px;">
<img height="25" src="https://kestra.io/linkedin.svg" alt="linkedin" width="35" height="25" /></a>
<a href="https://www.youtube.com/@kestra-io" style="margin: 0 10px;">
<img height="25" src="https://kestra.io/youtube.svg" alt="youtube" width="35" height="25" /></a>
<a href="https://x.com/kestra_io"><img height="25" src="https://kestra.io/twitter.svg" alt="X(formerly Twitter)" /></a> &nbsp;
<a href="https://www.linkedin.com/company/kestra/"><img height="25" src="https://kestra.io/linkedin.svg" alt="linkedin" /></a> &nbsp;
<a href="https://www.youtube.com/@kestra-io"><img height="25" src="https://kestra.io/youtube.svg" alt="youtube" /></a> &nbsp;
</p>
<p align="center">
@@ -36,10 +33,10 @@
<p align="center">
<a href="https://go.kestra.io/video/product-overview" target="_blank">
<img src="https://kestra.io/startvideo.png" alt="Get started in 3 minutes with Kestra" width="640px" />
<img src="https://kestra.io/startvideo.png" alt="Get started in 4 minutes with Kestra" width="640px" />
</a>
</p>
<p align="center" style="color:grey;"><i>Click on the image to learn how to get started with Kestra in 3 minutes.</i></p>
<p align="center" style="color:grey;"><i>Click on the image to learn how to get started with Kestra in 4 minutes.</i></p>
## 🌟 What is Kestra?
@@ -68,11 +65,9 @@ Kestra is an open-source, event-driven orchestration platform that makes both **
## 🚀 Quick Start
### Launch on AWS (CloudFormation)
### Try the Live Demo
Deploy Kestra on AWS using our CloudFormation template:
[![Launch Stack](https://cdn.rawgit.com/buildkite/cloudformation-launch-stack-button-svg/master/launch-stack.svg)](https://console.aws.amazon.com/cloudformation/home#/stacks/create/review?templateURL=https://kestra-deployment-templates.s3.eu-west-3.amazonaws.com/aws/cloudformation/ec2-rds-s3/kestra-oss.yaml&stackName=kestra-oss)
Try Kestra with our [**Live Demo**](https://demo.kestra.io/ui/login?auto). No installation required!
### Get Started Locally in 5 Minutes
@@ -104,7 +99,7 @@ If you're on Windows and use WSL (Linux-based environment in Windows):
```bash
docker run --pull=always --rm -it -p 8080:8080 --user=root \
-v "/var/run/docker.sock:/var/run/docker.sock" \
-v "/mnt/c/Temp:/tmp" kestra/kestra:latest server local
-v "C:/Temp:/tmp" kestra/kestra:latest server local
```
Check our [Installation Guide](https://kestra.io/docs/installation) for other deployment options (Docker Compose, Podman, Kubernetes, AWS, GCP, Azure, and more).

build-and-start-e2e-tests.sh (44 lines changed, Executable file → Normal file)
View File

@@ -1,47 +1,7 @@
#!/bin/bash
set -e
# E2E main script that can be run on a dev computer or in the CI
# it will build the backend of the current git repo and the frontend
# create a docker image out of it
# run tests on this image
LOCAL_IMAGE_VERSION="local-e2e-$(date +%s)"
echo "Running E2E"
echo "Start time: $(date '+%Y-%m-%d %H:%M:%S')"
start_time=$(date +%s)
echo ""
echo "Building the image for this current repository"
make clean
make build-docker VERSION=$LOCAL_IMAGE_VERSION
end_time=$(date +%s)
elapsed=$(( end_time - start_time ))
echo ""
echo "building elapsed time: ${elapsed} seconds"
echo ""
echo "Start time: $(date '+%Y-%m-%d %H:%M:%S')"
start_time2=$(date +%s)
echo "cd ./ui"
cd ./ui
echo "npm i"
npm i
echo 'sh ./run-e2e-tests.sh --kestra-docker-image-to-test "kestra/kestra:$LOCAL_IMAGE_VERSION"'
./run-e2e-tests.sh --kestra-docker-image-to-test "kestra/kestra:$LOCAL_IMAGE_VERSION"
end_time2=$(date +%s)
elapsed2=$(( end_time2 - start_time2 ))
echo ""
echo "Tests elapsed time: ${elapsed2} seconds"
echo ""
total_elapsed=$(( elapsed + elapsed2 ))
echo "Total elapsed time: ${total_elapsed} seconds"
echo ""
echo "There is not E2E tests on this release"
echo "This step will not run anything, it is just here to comply with centralized CI"
exit 0

View File

@@ -16,28 +16,28 @@ plugins {
id "java"
id 'java-library'
id "idea"
id "com.gradleup.shadow" version "8.3.9"
id "com.gradleup.shadow" version "8.3.6"
id "application"
// test
id "com.adarshr.test-logger" version "4.0.0"
id "org.sonarqube" version "7.0.1.6134"
id "org.sonarqube" version "6.2.0.5505"
id 'jacoco-report-aggregation'
// helper
id "com.github.ben-manes.versions" version "0.53.0"
id "com.github.ben-manes.versions" version "0.52.0"
// front
id 'com.github.node-gradle.node' version '7.1.0'
// release
id 'net.researchgate.release' version '3.1.0'
id "com.gorylenko.gradle-git-properties" version "2.5.3"
id "com.gorylenko.gradle-git-properties" version "2.5.0"
id 'signing'
id "com.vanniktech.maven.publish" version "0.34.0"
id "com.vanniktech.maven.publish" version "0.33.0"
// OWASP dependency check
id "org.owasp.dependencycheck" version "12.1.8" apply false
id "org.owasp.dependencycheck" version "12.1.1" apply false
}
idea {
@@ -71,11 +71,6 @@ dependencies {
* Dependencies
**********************************************************************************************************************/
allprojects {
tasks.withType(GenerateModuleMetadata).configureEach {
suppressedValidationErrors.add('enforced-platform')
}
if (it.name != 'platform') {
group = "io.kestra"
@@ -148,7 +143,6 @@ allprojects {
implementation group: 'com.fasterxml.jackson.module', name: 'jackson-module-parameter-names'
implementation group: 'com.fasterxml.jackson.datatype', name: 'jackson-datatype-guava'
implementation group: 'com.fasterxml.jackson.datatype', name: 'jackson-datatype-jsr310'
implementation group: 'com.fasterxml.uuid', name: 'java-uuid-generator'
// kestra
implementation group: 'com.devskiller.friendly-id', name: 'friendly-id'
@@ -168,9 +162,8 @@ allprojects {
/**********************************************************************************************************************\
* Test
**********************************************************************************************************************/
subprojects {subProj ->
if (subProj.name != 'platform' && subProj.name != 'jmh-benchmarks') {
subprojects {
if (it.name != 'platform' && it.name != 'jmh-benchmarks') {
apply plugin: "com.adarshr.test-logger"
java {
@@ -222,14 +215,6 @@ subprojects {subProj ->
t.environment 'ENV_TEST1', "true"
t.environment 'ENV_TEST2', "Pass by env"
if (subProj.name == 'core' || subProj.name == 'jdbc-h2' || subProj.name == 'jdbc-mysql' || subProj.name == 'jdbc-postgres') {
// JUnit 5 parallel settings
t.systemProperty 'junit.jupiter.execution.parallel.enabled', 'true'
t.systemProperty 'junit.jupiter.execution.parallel.mode.default', 'concurrent'
t.systemProperty 'junit.jupiter.execution.parallel.mode.classes.default', 'same_thread'
t.systemProperty 'junit.jupiter.execution.parallel.config.strategy', 'dynamic'
}
}
tasks.register('flakyTest', Test) { Test t ->
@@ -270,14 +255,14 @@ subprojects {subProj ->
}
testlogger {
theme = 'mocha-parallel'
showExceptions = true
showFullStackTraces = true
showCauses = true
slowThreshold = 2000
showStandardStreams = true
showPassedStandardStreams = false
showSkippedStandardStreams = true
theme 'mocha-parallel'
showExceptions true
showFullStackTraces true
showCauses true
slowThreshold 2000
showStandardStreams true
showPassedStandardStreams false
showSkippedStandardStreams true
}
}
}
@@ -372,7 +357,7 @@ tasks.named('testCodeCoverageReport') {
subprojects {
sonar {
properties {
property "sonar.coverage.jacoco.xmlReportPaths", "$projectDir.parentFile.path/build/reports/jacoco/testCodeCoverageReport/testCodeCoverageReport.xml,$projectDir.parentFile.path/build/reports/jacoco/test/testCodeCoverageReport.xml"
property "sonar.coverage.jacoco.xmlReportPaths", "$projectDir.parentFile.path/build/reports/jacoco/testCodeCoverageReport/testCodeCoverageReport.xml"
}
}
}
@@ -455,7 +440,7 @@ jar {
shadowJar {
archiveClassifier.set(null)
mergeServiceFiles()
zip64 = true
zip64 true
}
distZip.dependsOn shadowJar
@@ -472,8 +457,8 @@ def executableDir = layout.buildDirectory.dir("executable")
def executable = layout.buildDirectory.file("executable/${project.name}-${project.version}").get().asFile
tasks.register('writeExecutableJar') {
group = "build"
description = "Write an executable jar from shadow jar"
group "build"
description "Write an executable jar from shadow jar"
dependsOn = [shadowJar]
final shadowJarFile = tasks.shadowJar.outputs.files.singleFile
@@ -499,8 +484,8 @@ tasks.register('writeExecutableJar') {
}
tasks.register('executableJar', Zip) {
group = "build"
description = "Zip the executable jar"
group "build"
description "Zip the executable jar"
dependsOn = [writeExecutableJar]
archiveFileName = "${project.name}-${project.version}.zip"
@@ -687,6 +672,11 @@ subprojects {subProject ->
}
}
}
tasks.withType(GenerateModuleMetadata).configureEach {
// Suppression this validation error as we want to enforce the Kestra platform
suppressedValidationErrors.add('enforced-platform')
}
}
}

View File

@@ -33,13 +33,8 @@ dependencies {
implementation project(":storage-local")
// Kestra server components
implementation project(":executor")
implementation project(":scheduler")
implementation project(":webserver")
implementation project(":worker")
//test
testImplementation project(':tests')
testImplementation "org.wiremock:wiremock-jetty12"
}

View File

@@ -40,7 +40,7 @@ import picocli.CommandLine.Option;
)
@Slf4j
@Introspected
public abstract class AbstractCommand implements Callable<Integer> {
abstract public class AbstractCommand implements Callable<Integer> {
@Inject
private ApplicationContext applicationContext;
@@ -93,7 +93,7 @@ public abstract class AbstractCommand implements Callable<Integer> {
this.startupHook.start(this);
}
if (pluginRegistryProvider != null && this.pluginsPath != null && loadExternalPlugins()) {
if (this.pluginsPath != null && loadExternalPlugins()) {
pluginRegistry = pluginRegistryProvider.get();
pluginRegistry.registerIfAbsent(pluginsPath);

View File

@@ -117,7 +117,7 @@ public abstract class AbstractValidateCommand extends AbstractApiCommand {
try(DefaultHttpClient client = client()) {
MutableHttpRequest<String> request = HttpRequest
.POST(apiUri("/flows/validate", tenantService.getTenantIdAndAllowEETenants(tenantId)), body).contentType(MediaType.APPLICATION_YAML);
.POST(apiUri("/flows/validate", tenantService.getTenantId(tenantId)), body).contentType(MediaType.APPLICATION_YAML);
List<ValidateConstraintViolation> validations = client.toBlocking().retrieve(
this.requestOptions(request),

View File

@@ -7,6 +7,7 @@ import io.kestra.cli.commands.namespaces.NamespaceCommand;
import io.kestra.cli.commands.plugins.PluginCommand;
import io.kestra.cli.commands.servers.ServerCommand;
import io.kestra.cli.commands.sys.SysCommand;
import io.kestra.cli.commands.templates.TemplateCommand;
import io.micronaut.configuration.picocli.MicronautFactory;
import io.micronaut.configuration.picocli.PicocliRunner;
import io.micronaut.context.ApplicationContext;
@@ -38,16 +39,17 @@ import java.util.concurrent.Callable;
PluginCommand.class,
ServerCommand.class,
FlowCommand.class,
TemplateCommand.class,
SysCommand.class,
ConfigCommand.class,
NamespaceCommand.class,
MigrationCommand.class
MigrationCommand.class,
}
)
@Introspected
public class App implements Callable<Integer> {
public static void main(String[] args) {
execute(App.class, new String [] { Environment.CLI }, args);
execute(App.class, args);
}
@Override
@@ -55,30 +57,23 @@ public class App implements Callable<Integer> {
return PicocliRunner.call(App.class, "--help");
}
protected static void execute(Class<?> cls, String[] environments, String... args) {
protected static void execute(Class<?> cls, String... args) {
// Log Bridge
SLF4JBridgeHandler.removeHandlersForRootLogger();
SLF4JBridgeHandler.install();
// Init ApplicationContext
ApplicationContext applicationContext = App.applicationContext(cls, environments, args);
ApplicationContext applicationContext = App.applicationContext(cls, args);
// Call Picocli command
int exitCode = 0;
try {
exitCode = new CommandLine(cls, new MicronautFactory(applicationContext)).execute(args);
} catch (CommandLine.InitializationException e){
System.err.println("Could not initialize picoli ComandLine, err: " + e.getMessage());
e.printStackTrace();
exitCode = 1;
}
int exitCode = new CommandLine(cls, new MicronautFactory(applicationContext)).execute(args);
applicationContext.close();
// exit code
System.exit(Objects.requireNonNullElse(exitCode, 0));
}
/**
* Create an {@link ApplicationContext} with additional properties based on configuration files (--config) and
* forced Properties from current command.
@@ -87,13 +82,12 @@ public class App implements Callable<Integer> {
* @return the application context created
*/
protected static ApplicationContext applicationContext(Class<?> mainClass,
String[] environments,
String[] args) {
ApplicationContextBuilder builder = ApplicationContext
.builder()
.mainClass(mainClass)
.environments(environments);
.environments(Environment.CLI);
CommandLine cmd = new CommandLine(mainClass, CommandLine.defaultFactory());
continueOnParsingErrors(cmd);

View File

@@ -1,4 +1,4 @@
package io.kestra.core.validations;
package io.kestra.cli;
import io.micronaut.context.annotation.Context;
import io.micronaut.context.annotation.Requires;

View File

@@ -0,0 +1,36 @@
package io.kestra.cli.commands.flows;
import io.kestra.cli.AbstractCommand;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.validations.ModelValidator;
import io.kestra.core.serializers.YamlParser;
import jakarta.inject.Inject;
import picocli.CommandLine;
import java.nio.file.Files;
import java.nio.file.Path;
@CommandLine.Command(
name = "expand",
description = "Deprecated - expand a flow"
)
@Deprecated
public class FlowExpandCommand extends AbstractCommand {
@CommandLine.Parameters(index = "0", description = "The flow file to expand")
private Path file;
@Inject
private ModelValidator modelValidator;
@Override
public Integer call() throws Exception {
super.call();
stdErr("Warning, this functionality is deprecated and will be removed at some point.");
String content = IncludeHelperExpander.expand(Files.readString(file), file.getParent());
Flow flow = YamlParser.parse(content, Flow.class);
modelValidator.validate(flow);
stdOut(content);
return 0;
}
}

View File

@@ -8,7 +8,7 @@ import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.repositories.LocalFlowRepositoryLoader;
import io.kestra.core.runners.FlowInputOutput;
import io.kestra.core.runners.RunnerUtils;
import io.kestra.cli.StandAloneRunner;
import io.kestra.core.runners.StandAloneRunner;
import io.micronaut.context.ApplicationContext;
import jakarta.inject.Inject;
import jakarta.validation.ConstraintViolationException;
@@ -72,6 +72,7 @@ public class FlowTestCommand extends AbstractApiCommand {
public Integer call() throws Exception {
super.call();
StandAloneRunner runner = applicationContext.getBean(StandAloneRunner.class);
LocalFlowRepositoryLoader repositoryLoader = applicationContext.getBean(LocalFlowRepositoryLoader.class);
FlowRepositoryInterface flowRepository = applicationContext.getBean(FlowRepositoryInterface.class);
FlowInputOutput flowInputOutput = applicationContext.getBean(FlowInputOutput.class);
@@ -88,7 +89,7 @@ public class FlowTestCommand extends AbstractApiCommand {
inputs.put(this.inputs.get(i), this.inputs.get(i+1));
}
try (StandAloneRunner runner = applicationContext.createBean(StandAloneRunner.class);){
try {
runner.run();
repositoryLoader.load(tenantService.getTenantId(tenantId), file.toFile());
@@ -102,6 +103,8 @@ public class FlowTestCommand extends AbstractApiCommand {
(flow, execution) -> flowInputOutput.readExecutionInputs(flow, execution, inputs),
Duration.ofHours(1)
);
runner.close();
} catch (ConstraintViolationException e) {
throw new CommandLine.ParameterException(this.spec.commandLine(), e.getMessage());
} catch (IOException | TimeoutException e) {

View File

@@ -21,8 +21,6 @@ import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import static io.kestra.core.utils.Rethrow.throwFunction;
@CommandLine.Command(
name = "updates",
description = "Create or update flows from a folder, and optionally delete the ones not present",
@@ -43,6 +41,7 @@ public class FlowUpdatesCommand extends AbstractApiCommand {
@Inject
private TenantIdSelectorService tenantIdSelectorService;
@SuppressWarnings("deprecation")
@Override
public Integer call() throws Exception {
super.call();
@@ -51,7 +50,13 @@ public class FlowUpdatesCommand extends AbstractApiCommand {
List<String> flows = files
.filter(Files::isRegularFile)
.filter(YamlParser::isValidExtension)
.map(throwFunction(path -> Files.readString(path, Charset.defaultCharset())))
.map(path -> {
try {
return IncludeHelperExpander.expand(Files.readString(path, Charset.defaultCharset()), path.getParent());
} catch (IOException e) {
throw new RuntimeException(e);
}
})
.toList();
String body = "";

View File

@@ -24,8 +24,7 @@ public class FlowValidateCommand extends AbstractValidateCommand {
private FlowService flowService;
@Inject
private TenantIdSelectorService tenantIdSelectorService;
private TenantIdSelectorService tenantService;
@Override
public Integer call() throws Exception {
@@ -40,7 +39,7 @@ public class FlowValidateCommand extends AbstractValidateCommand {
FlowWithSource flow = (FlowWithSource) object;
List<String> warnings = new ArrayList<>();
warnings.addAll(flowService.deprecationPaths(flow).stream().map(deprecation -> deprecation + " is deprecated").toList());
warnings.addAll(flowService.warnings(flow, tenantIdSelectorService.getTenantIdAndAllowEETenants(tenantId)));
warnings.addAll(flowService.warnings(flow, tenantService.getTenantId(tenantId)));
return warnings;
},
(Object object) -> {

View File

@@ -0,0 +1,40 @@
package io.kestra.cli.commands.flows;
import com.google.common.io.Files;
import lombok.SneakyThrows;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.file.Path;
import java.util.List;
import java.util.stream.Collectors;
@Deprecated
public abstract class IncludeHelperExpander {
public static String expand(String value, Path directory) throws IOException {
return value.lines()
.map(line -> line.contains("[[>") && line.contains("]]") ? expandLine(line, directory) : line)
.collect(Collectors.joining("\n"));
}
@SneakyThrows
private static String expandLine(String line, Path directory) {
String prefix = line.substring(0, line.indexOf("[[>"));
String suffix = line.substring(line.indexOf("]]") + 2, line.length());
String file = line.substring(line.indexOf("[[>") + 3 , line.indexOf("]]")).strip();
Path includePath = directory.resolve(file);
List<String> include = Files.readLines(includePath.toFile(), Charset.defaultCharset());
// handle a single line directly with the suffix (should be between quotes or double-quotes)
if(include.size() == 1) {
String singleInclude = include.getFirst();
return prefix + singleInclude + suffix;
}
// multi-line will be expanded with the prefix but no suffix
return include.stream()
.map(includeLine -> prefix + includeLine)
.collect(Collectors.joining("\n"));
}
}

View File

@@ -2,6 +2,7 @@ package io.kestra.cli.commands.flows.namespaces;
import io.kestra.cli.AbstractValidateCommand;
import io.kestra.cli.commands.AbstractServiceNamespaceUpdateCommand;
import io.kestra.cli.commands.flows.IncludeHelperExpander;
import io.kestra.cli.services.TenantIdSelectorService;
import io.kestra.core.serializers.YamlParser;
import io.micronaut.core.type.Argument;
@@ -20,8 +21,6 @@ import java.nio.charset.Charset;
import java.nio.file.Files;
import java.util.List;
import static io.kestra.core.utils.Rethrow.throwFunction;
@CommandLine.Command(
name = "update",
description = "Update flows in namespace",
@@ -45,7 +44,13 @@ public class FlowNamespaceUpdateCommand extends AbstractServiceNamespaceUpdateCo
List<String> flows = files
.filter(Files::isRegularFile)
.filter(YamlParser::isValidExtension)
.map(throwFunction(path -> Files.readString(path, Charset.defaultCharset())))
.map(path -> {
try {
return IncludeHelperExpander.expand(Files.readString(path, Charset.defaultCharset()), path.getParent());
} catch (IOException e) {
throw new RuntimeException(e);
}
})
.toList();
String body = "";
@@ -59,7 +64,7 @@ public class FlowNamespaceUpdateCommand extends AbstractServiceNamespaceUpdateCo
}
try(DefaultHttpClient client = client()) {
MutableHttpRequest<String> request = HttpRequest
.POST(apiUri("/flows/", tenantService.getTenantIdAndAllowEETenants(tenantId)) + namespace + "?delete=" + delete, body).contentType(MediaType.APPLICATION_YAML);
.POST(apiUri("/flows/", tenantService.getTenantId(tenantId)) + namespace + "?delete=" + delete, body).contentType(MediaType.APPLICATION_YAML);
List<UpdateResult> updated = client.toBlocking().retrieve(
this.requestOptions(request),

View File

@@ -2,7 +2,6 @@ package io.kestra.cli.commands.migrations;
import io.kestra.cli.AbstractCommand;
import io.kestra.cli.App;
import io.kestra.cli.commands.migrations.metadata.MetadataMigrationCommand;
import io.micronaut.configuration.picocli.PicocliRunner;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
@@ -14,7 +13,6 @@ import picocli.CommandLine;
mixinStandardHelpOptions = true,
subcommands = {
TenantMigrationCommand.class,
MetadataMigrationCommand.class
}
)
@Slf4j

View File

@@ -1,30 +0,0 @@
package io.kestra.cli.commands.migrations.metadata;
import io.kestra.cli.AbstractCommand;
import jakarta.inject.Inject;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
@CommandLine.Command(
name = "kv",
description = "populate metadata for KV"
)
@Slf4j
public class KvMetadataMigrationCommand extends AbstractCommand {
@Inject
private MetadataMigrationService metadataMigrationService;
@Override
public Integer call() throws Exception {
super.call();
try {
metadataMigrationService.kvMigration();
} catch (Exception e) {
System.err.println("❌ KV Metadata migration failed: " + e.getMessage());
e.printStackTrace();
return 1;
}
System.out.println("✅ KV Metadata migration complete.");
return 0;
}
}

View File

@@ -1,23 +0,0 @@
package io.kestra.cli.commands.migrations.metadata;
import io.kestra.cli.AbstractCommand;
import jakarta.inject.Inject;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
@CommandLine.Command(
name = "metadata",
description = "populate metadata for entities",
subcommands = {
KvMetadataMigrationCommand.class,
SecretsMetadataMigrationCommand.class
}
)
@Slf4j
public class MetadataMigrationCommand extends AbstractCommand {
@Override
public Integer call() throws Exception {
super.call();
return 0;
}
}

View File

@@ -1,89 +0,0 @@
package io.kestra.cli.commands.migrations.metadata;
import io.kestra.core.models.kv.PersistedKvMetadata;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.repositories.KvMetadataRepositoryInterface;
import io.kestra.core.storages.FileAttributes;
import io.kestra.core.storages.StorageContext;
import io.kestra.core.storages.StorageInterface;
import io.kestra.core.storages.kv.InternalKVStore;
import io.kestra.core.storages.kv.KVEntry;
import io.kestra.core.tenant.TenantService;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URI;
import java.time.Instant;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;
import static io.kestra.core.utils.Rethrow.throwConsumer;
import static io.kestra.core.utils.Rethrow.throwFunction;
@Singleton
public class MetadataMigrationService {
@Inject
private TenantService tenantService;
@Inject
private FlowRepositoryInterface flowRepository;
@Inject
private KvMetadataRepositoryInterface kvMetadataRepository;
@Inject
private StorageInterface storageInterface;
protected Map<String, List<String>> namespacesPerTenant() {
String tenantId = tenantService.resolveTenant();
return Map.of(tenantId, flowRepository.findDistinctNamespace(tenantId));
}
public void kvMigration() throws IOException {
this.namespacesPerTenant().entrySet().stream()
.flatMap(namespacesForTenant -> namespacesForTenant.getValue().stream().map(namespace -> Map.entry(namespacesForTenant.getKey(), namespace)))
.flatMap(throwFunction(namespaceForTenant -> {
InternalKVStore kvStore = new InternalKVStore(namespaceForTenant.getKey(), namespaceForTenant.getValue(), storageInterface, kvMetadataRepository);
List<FileAttributes> list = listAllFromStorage(storageInterface, namespaceForTenant.getKey(), namespaceForTenant.getValue());
Map<Boolean, List<KVEntry>> entriesByIsExpired = list.stream()
.map(throwFunction(fileAttributes -> KVEntry.from(namespaceForTenant.getValue(), fileAttributes)))
.collect(Collectors.partitioningBy(kvEntry -> Optional.ofNullable(kvEntry.expirationDate()).map(expirationDate -> Instant.now().isAfter(expirationDate)).orElse(false)));
entriesByIsExpired.get(true).forEach(kvEntry -> {
try {
storageInterface.delete(
namespaceForTenant.getKey(),
namespaceForTenant.getValue(),
kvStore.storageUri(kvEntry.key())
);
} catch (IOException e) {
throw new RuntimeException(e);
}
});
return entriesByIsExpired.get(false).stream().map(kvEntry -> PersistedKvMetadata.from(namespaceForTenant.getKey(), kvEntry));
}))
.forEach(throwConsumer(kvMetadata -> {
if (kvMetadataRepository.findByName(kvMetadata.getTenantId(), kvMetadata.getNamespace(), kvMetadata.getName()).isEmpty()) {
kvMetadataRepository.save(kvMetadata);
}
}));
}
public void secretMigration() throws Exception {
throw new UnsupportedOperationException("Secret migration is not needed in the OSS version");
}
private static List<FileAttributes> listAllFromStorage(StorageInterface storage, String tenant, String namespace) throws IOException {
try {
return storage.list(tenant, namespace, URI.create(StorageContext.KESTRA_PROTOCOL + StorageContext.kvPrefix(namespace)));
} catch (FileNotFoundException e) {
return Collections.emptyList();
}
}
}

View File

@@ -1,30 +0,0 @@
package io.kestra.cli.commands.migrations.metadata;
import io.kestra.cli.AbstractCommand;
import jakarta.inject.Inject;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
@CommandLine.Command(
name = "secrets",
description = "populate metadata for secrets"
)
@Slf4j
public class SecretsMetadataMigrationCommand extends AbstractCommand {
@Inject
private MetadataMigrationService metadataMigrationService;
@Override
public Integer call() throws Exception {
super.call();
try {
metadataMigrationService.secretMigration();
} catch (Exception e) {
System.err.println("❌ Secrets Metadata migration failed: " + e.getMessage());
e.printStackTrace();
return 1;
}
System.out.println("✅ Secrets Metadata migration complete.");
return 0;
}
}

View File

@@ -49,7 +49,7 @@ public class NamespaceFilesUpdateCommand extends AbstractApiCommand {
try (var files = Files.walk(from); DefaultHttpClient client = client()) {
if (delete) {
client.toBlocking().exchange(this.requestOptions(HttpRequest.DELETE(apiUri("/namespaces/", tenantService.getTenantIdAndAllowEETenants(tenantId)) + namespace + "/files?path=" + to, null)));
client.toBlocking().exchange(this.requestOptions(HttpRequest.DELETE(apiUri("/namespaces/", tenantService.getTenantId(tenantId)) + namespace + "/files?path=" + to, null)));
}
KestraIgnore kestraIgnore = new KestraIgnore(from);
@@ -67,7 +67,7 @@ public class NamespaceFilesUpdateCommand extends AbstractApiCommand {
client.toBlocking().exchange(
this.requestOptions(
HttpRequest.POST(
apiUri("/namespaces/", tenantService.getTenantIdAndAllowEETenants(tenantId)) + namespace + "/files?path=" + destination,
apiUri("/namespaces/", tenantService.getTenantId(tenantId)) + namespace + "/files?path=" + destination,
body
).contentType(MediaType.MULTIPART_FORM_DATA)
)

View File

@@ -62,7 +62,7 @@ public class KvUpdateCommand extends AbstractApiCommand {
Duration ttl = expiration == null ? null : Duration.parse(expiration);
MutableHttpRequest<String> request = HttpRequest
.PUT(apiUri("/namespaces/", tenantService.getTenantId(tenantId)) + namespace + "/kv/" + key, value)
.contentType(MediaType.TEXT_PLAIN);
.contentType(MediaType.APPLICATION_JSON_TYPE);
if (ttl != null) {
request.header("ttl", ttl.toString());

View File

@@ -18,8 +18,6 @@ import java.nio.file.Paths;
import java.util.Base64;
import java.util.List;
import static io.kestra.core.models.Plugin.isDeprecated;
@CommandLine.Command(
name = "doc",
description = "Generate documentation for all plugins currently installed"
@@ -40,9 +38,6 @@ public class PluginDocCommand extends AbstractCommand {
@CommandLine.Option(names = {"--schema"}, description = "Also write JSON Schema for each task")
private boolean schema = false;
@CommandLine.Option(names = {"--skip-deprecated"},description = "Skip deprecated plugins when generating documentations")
private boolean skipDeprecated = false;
@Override
public Integer call() throws Exception {
super.call();
@@ -50,11 +45,6 @@ public class PluginDocCommand extends AbstractCommand {
PluginRegistry registry = pluginRegistryProvider.get();
List<RegisteredPlugin> plugins = core ? registry.plugins() : registry.externalPlugins();
if (skipDeprecated) {
plugins = plugins.stream()
.filter(plugin -> !isDeprecated(plugin.getClass()))
.toList();
}
boolean hasFailures = false;
for (RegisteredPlugin registeredPlugin : plugins) {

View File

@@ -2,28 +2,20 @@ package io.kestra.cli.commands.servers;
import io.kestra.cli.AbstractCommand;
import io.kestra.core.contexts.KestraContext;
import lombok.extern.slf4j.Slf4j;
import jakarta.annotation.PostConstruct;
import picocli.CommandLine;
@Slf4j
public abstract class AbstractServerCommand extends AbstractCommand implements ServerCommandInterface {
abstract public class AbstractServerCommand extends AbstractCommand implements ServerCommandInterface {
@CommandLine.Option(names = {"--port"}, description = "The port to bind")
Integer serverPort;
@Override
public Integer call() throws Exception {
log.info("Machine information: {} available cpu(s), {}MB max memory, Java version {}", Runtime.getRuntime().availableProcessors(), maxMemoryInMB(), Runtime.version());
this.shutdownHook(true, () -> KestraContext.getContext().shutdown());
return super.call();
}
private long maxMemoryInMB() {
return Runtime.getRuntime().maxMemory() / 1024 / 1024;
}
protected static int defaultWorkerThread() {
return Runtime.getRuntime().availableProcessors() * 8;
return Runtime.getRuntime().availableProcessors() * 4;
}
}

View File

@@ -2,7 +2,7 @@ package io.kestra.cli.commands.servers;
import com.google.common.collect.ImmutableMap;
import io.kestra.core.models.ServerType;
import io.kestra.core.runners.Executor;
import io.kestra.core.runners.ExecutorInterface;
import io.kestra.core.services.SkipExecutionService;
import io.kestra.core.services.StartExecutorService;
import io.kestra.core.utils.Await;
@@ -64,7 +64,7 @@ public class ExecutorCommand extends AbstractServerCommand {
super.call();
Executor executorService = applicationContext.getBean(Executor.class);
ExecutorInterface executorService = applicationContext.getBean(ExecutorInterface.class);
executorService.run();
Await.until(() -> !this.applicationContext.isRunning());

View File

@@ -2,7 +2,7 @@ package io.kestra.cli.commands.servers;
import com.google.common.collect.ImmutableMap;
import io.kestra.core.models.ServerType;
import io.kestra.core.runners.Indexer;
import io.kestra.core.runners.IndexerInterface;
import io.kestra.core.utils.Await;
import io.kestra.core.services.SkipExecutionService;
import io.micronaut.context.ApplicationContext;
@@ -39,7 +39,7 @@ public class IndexerCommand extends AbstractServerCommand {
super.call();
Indexer indexer = applicationContext.getBean(Indexer.class);
IndexerInterface indexer = applicationContext.getBean(IndexerInterface.class);
indexer.run();
Await.until(() -> !this.applicationContext.isRunning());

View File

@@ -2,7 +2,7 @@ package io.kestra.cli.commands.servers;
import com.google.common.collect.ImmutableMap;
import io.kestra.core.models.ServerType;
import io.kestra.scheduler.AbstractScheduler;
import io.kestra.core.schedulers.AbstractScheduler;
import io.kestra.core.utils.Await;
import io.micronaut.context.ApplicationContext;
import jakarta.inject.Inject;

View File

@@ -6,7 +6,7 @@ import io.kestra.cli.services.TenantIdSelectorService;
import io.kestra.core.contexts.KestraContext;
import io.kestra.core.models.ServerType;
import io.kestra.core.repositories.LocalFlowRepositoryLoader;
import io.kestra.cli.StandAloneRunner;
import io.kestra.core.runners.StandAloneRunner;
import io.kestra.core.services.SkipExecutionService;
import io.kestra.core.services.StartExecutorService;
import io.kestra.core.utils.Await;
@@ -48,7 +48,7 @@ public class StandAloneCommand extends AbstractServerCommand {
@CommandLine.Option(names = "--tenant", description = "Tenant identifier, Required to load flows from path with the enterprise edition")
private String tenantId;
@CommandLine.Option(names = {"--worker-thread"}, description = "the number of worker threads, defaults to eight times the number of available processors. Set it to 0 to avoid starting a worker.")
@CommandLine.Option(names = {"--worker-thread"}, description = "the number of worker threads, defaults to four times the number of available processors. Set it to 0 to avoid starting a worker.")
private int workerThread = defaultWorkerThread();
@CommandLine.Option(names = {"--skip-executions"}, split=",", description = "a list of execution identifiers to skip, separated by a coma; for troubleshooting purpose only")
@@ -113,27 +113,26 @@ public class StandAloneCommand extends AbstractServerCommand {
}
}
try (StandAloneRunner standAloneRunner = applicationContext.getBean(StandAloneRunner.class)) {
StandAloneRunner standAloneRunner = applicationContext.getBean(StandAloneRunner.class);
if (this.workerThread == 0) {
standAloneRunner.setWorkerEnabled(false);
} else {
standAloneRunner.setWorkerThread(this.workerThread);
}
if (this.indexerDisabled) {
standAloneRunner.setIndexerEnabled(false);
}
standAloneRunner.run();
if (fileWatcher != null) {
fileWatcher.startListeningFromConfig();
}
Await.until(() -> !this.applicationContext.isRunning());
if (this.workerThread == 0) {
standAloneRunner.setWorkerEnabled(false);
} else {
standAloneRunner.setWorkerThread(this.workerThread);
}
if (this.indexerDisabled) {
standAloneRunner.setIndexerEnabled(false);
}
standAloneRunner.run();
if (fileWatcher != null) {
fileWatcher.startListeningFromConfig();
}
Await.until(() -> !this.applicationContext.isRunning());
return 0;
}
}

View File

@@ -2,7 +2,7 @@ package io.kestra.cli.commands.servers;
import com.google.common.collect.ImmutableMap;
import io.kestra.core.models.ServerType;
import io.kestra.core.runners.Indexer;
import io.kestra.core.runners.IndexerInterface;
import io.kestra.core.utils.Await;
import io.kestra.core.utils.ExecutorsUtils;
import io.kestra.core.services.SkipExecutionService;
@@ -65,7 +65,7 @@ public class WebServerCommand extends AbstractServerCommand {
if (!indexerDisabled) {
log.info("Starting an embedded indexer, this can be disabled by using `--no-indexer`.");
poolExecutor = executorsUtils.cachedThreadPool("webserver-indexer");
poolExecutor.execute(applicationContext.getBean(Indexer.class));
poolExecutor.execute(applicationContext.getBean(IndexerInterface.class));
shutdownHook(false, () -> poolExecutor.shutdown());
}

View File

@@ -22,7 +22,7 @@ public class WorkerCommand extends AbstractServerCommand {
@Inject
private ApplicationContext applicationContext;
@Option(names = {"-t", "--thread"}, description = "The max number of worker threads, defaults to eight times the number of available processors")
@Option(names = {"-t", "--thread"}, description = "The max number of worker threads, defaults to four times the number of available processors")
private int thread = defaultWorkerThread();
@Option(names = {"-g", "--worker-group"}, description = "The worker group key, must match the regex [a-zA-Z0-9_-]+ (EE only)")

View File

@@ -6,8 +6,7 @@ import io.kestra.core.models.flows.State;
import io.kestra.core.queues.QueueFactoryInterface;
import io.kestra.core.queues.QueueInterface;
import io.kestra.core.runners.ExecutionQueued;
import io.kestra.core.services.ConcurrencyLimitService;
import io.kestra.jdbc.runner.AbstractJdbcExecutionQueuedStateStore;
import io.kestra.jdbc.runner.AbstractJdbcExecutionQueuedStorage;
import io.micronaut.context.ApplicationContext;
import jakarta.inject.Inject;
import jakarta.inject.Named;
@@ -16,6 +15,8 @@ import picocli.CommandLine;
import java.util.Optional;
import static io.kestra.core.utils.Rethrow.throwConsumer;
@CommandLine.Command(
name = "submit-queued-execution",
description = {"Submit all queued execution to the executor",
@@ -47,12 +48,10 @@ public class SubmitQueuedCommand extends AbstractCommand {
return 1;
}
else if (queueType.get().equals("postgres") || queueType.get().equals("mysql") || queueType.get().equals("h2")) {
var executionQueuedStorage = applicationContext.getBean(AbstractJdbcExecutionQueuedStateStore.class);
var concurrencyLimitService = applicationContext.getBean(ConcurrencyLimitService.class);
var executionQueuedStorage = applicationContext.getBean(AbstractJdbcExecutionQueuedStorage.class);
for (ExecutionQueued queued : executionQueuedStorage.getAllForAllTenants()) {
Execution restart = concurrencyLimitService.unqueue(queued.getExecution(), State.Type.RUNNING);
executionQueue.emit(restart);
executionQueuedStorage.pop(queued.getTenantId(), queued.getNamespace(), queued.getFlowId(), throwConsumer(execution -> executionQueue.emit(execution.withState(State.Type.CREATED))));
cpt++;
}
}

View File

@@ -1,6 +1,7 @@
package io.kestra.cli.commands.sys;
import io.kestra.cli.commands.sys.database.DatabaseCommand;
import io.kestra.cli.commands.sys.statestore.StateStoreCommand;
import io.micronaut.configuration.picocli.PicocliRunner;
import lombok.extern.slf4j.Slf4j;
import io.kestra.cli.AbstractCommand;
@@ -15,6 +16,7 @@ import picocli.CommandLine;
ReindexCommand.class,
DatabaseCommand.class,
SubmitQueuedCommand.class,
StateStoreCommand.class
}
)
@Slf4j

View File

@@ -0,0 +1,27 @@
package io.kestra.cli.commands.sys.statestore;
import io.kestra.cli.AbstractCommand;
import io.kestra.cli.App;
import io.micronaut.configuration.picocli.PicocliRunner;
import lombok.SneakyThrows;
import picocli.CommandLine;
@CommandLine.Command(
name = "state-store",
description = "Manage Kestra State Store",
mixinStandardHelpOptions = true,
subcommands = {
StateStoreMigrateCommand.class,
}
)
public class StateStoreCommand extends AbstractCommand {
@SneakyThrows
@Override
public Integer call() throws Exception {
super.call();
PicocliRunner.call(App.class, "sys", "state-store", "--help");
return 0;
}
}

View File

@@ -0,0 +1,81 @@
package io.kestra.cli.commands.sys.statestore;
import io.kestra.cli.AbstractCommand;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.runners.RunContext;
import io.kestra.core.runners.RunContextFactory;
import io.kestra.core.storages.StateStore;
import io.kestra.core.storages.StorageInterface;
import io.kestra.core.utils.Slugify;
import io.micronaut.context.ApplicationContext;
import jakarta.inject.Inject;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Stream;
@CommandLine.Command(
name = "migrate",
description = "Migrate old state store files to use the new KV Store implementation.",
mixinStandardHelpOptions = true
)
@Slf4j
public class StateStoreMigrateCommand extends AbstractCommand {
@Inject
private ApplicationContext applicationContext;
@Override
public Integer call() throws Exception {
super.call();
FlowRepositoryInterface flowRepository = this.applicationContext.getBean(FlowRepositoryInterface.class);
StorageInterface storageInterface = this.applicationContext.getBean(StorageInterface.class);
RunContextFactory runContextFactory = this.applicationContext.getBean(RunContextFactory.class);
flowRepository.findAllForAllTenants().stream().map(flow -> Map.entry(flow, List.of(
URI.create("/" + flow.getNamespace().replace(".", "/") + "/" + Slugify.of(flow.getId()) + "/states"),
URI.create("/" + flow.getNamespace().replace(".", "/") + "/states")
))).map(potentialStateStoreUrisForAFlow -> Map.entry(potentialStateStoreUrisForAFlow.getKey(), potentialStateStoreUrisForAFlow.getValue().stream().flatMap(uri -> {
try {
return storageInterface.allByPrefix(potentialStateStoreUrisForAFlow.getKey().getTenantId(), potentialStateStoreUrisForAFlow.getKey().getNamespace(), uri, false).stream();
} catch (IOException e) {
return Stream.empty();
}
}).toList())).forEach(stateStoreFileUrisForAFlow -> stateStoreFileUrisForAFlow.getValue().forEach(stateStoreFileUri -> {
Flow flow = stateStoreFileUrisForAFlow.getKey();
String[] flowQualifierWithStateQualifiers = stateStoreFileUri.getPath().split("/states/");
String[] statesUriPart = flowQualifierWithStateQualifiers[1].split("/");
String stateName = statesUriPart[0];
String taskRunValue = statesUriPart.length > 2 ? statesUriPart[1] : null;
String stateSubName = statesUriPart[statesUriPart.length - 1];
boolean flowScoped = flowQualifierWithStateQualifiers[0].endsWith("/" + flow.getId());
StateStore stateStore = new StateStore(runContext(runContextFactory, flow), false);
try (InputStream is = storageInterface.get(flow.getTenantId(), flow.getNamespace(), stateStoreFileUri)) {
stateStore.putState(flowScoped, stateName, stateSubName, taskRunValue, is.readAllBytes());
storageInterface.delete(flow.getTenantId(), flow.getNamespace(), stateStoreFileUri);
} catch (IOException e) {
throw new RuntimeException(e);
}
}));
stdOut("Successfully ran the state-store migration.");
return 0;
}
private RunContext runContext(RunContextFactory runContextFactory, Flow flow) {
Map<String, String> flowVariables = new HashMap<>();
flowVariables.put("tenantId", flow.getTenantId());
flowVariables.put("id", flow.getId());
flowVariables.put("namespace", flow.getNamespace());
return runContextFactory.of(flow, Map.of("flow", flowVariables));
}
}

View File

@@ -0,0 +1,34 @@
package io.kestra.cli.commands.templates;
import io.kestra.cli.AbstractCommand;
import io.kestra.cli.App;
import io.kestra.cli.commands.templates.namespaces.TemplateNamespaceCommand;
import io.kestra.core.models.templates.TemplateEnabled;
import io.micronaut.configuration.picocli.PicocliRunner;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
@CommandLine.Command(
name = "template",
description = "Manage templates",
mixinStandardHelpOptions = true,
subcommands = {
TemplateNamespaceCommand.class,
TemplateValidateCommand.class,
TemplateExportCommand.class,
}
)
@Slf4j
@TemplateEnabled
public class TemplateCommand extends AbstractCommand {
@SneakyThrows
@Override
public Integer call() throws Exception {
super.call();
PicocliRunner.call(App.class, "template", "--help");
return 0;
}
}

View File

@@ -0,0 +1,61 @@
package io.kestra.cli.commands.templates;
import io.kestra.cli.AbstractApiCommand;
import io.kestra.cli.AbstractValidateCommand;
import io.kestra.cli.services.TenantIdSelectorService;
import io.kestra.core.models.templates.TemplateEnabled;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.HttpResponse;
import io.micronaut.http.MediaType;
import io.micronaut.http.MutableHttpRequest;
import io.micronaut.http.client.exceptions.HttpClientResponseException;
import io.micronaut.http.client.netty.DefaultHttpClient;
import jakarta.inject.Inject;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
import java.nio.file.Files;
import java.nio.file.Path;
@CommandLine.Command(
name = "export",
description = "Export templates to a ZIP file",
mixinStandardHelpOptions = true
)
@Slf4j
@TemplateEnabled
public class TemplateExportCommand extends AbstractApiCommand {
private static final String DEFAULT_FILE_NAME = "templates.zip";
@Inject
private TenantIdSelectorService tenantService;
@CommandLine.Option(names = {"--namespace"}, description = "The namespace of templates to export")
public String namespace;
@CommandLine.Parameters(index = "0", description = "The directory to export the file to")
public Path directory;
@Override
public Integer call() throws Exception {
super.call();
try(DefaultHttpClient client = client()) {
MutableHttpRequest<Object> request = HttpRequest
.GET(apiUri("/templates/export/by-query", tenantService.getTenantId(tenantId)) + (namespace != null ? "?namespace=" + namespace : ""))
.accept(MediaType.APPLICATION_OCTET_STREAM);
HttpResponse<byte[]> response = client.toBlocking().exchange(this.requestOptions(request), byte[].class);
Path zipFile = Path.of(directory.toString(), DEFAULT_FILE_NAME);
zipFile.toFile().createNewFile();
Files.write(zipFile, response.body());
stdOut("Exporting template(s) for namespace '" + namespace + "' successfully done !");
} catch (HttpClientResponseException e) {
AbstractValidateCommand.handleHttpException(e, "template");
return 1;
}
return 0;
}
}

View File

@@ -0,0 +1,35 @@
package io.kestra.cli.commands.templates;
import io.kestra.cli.AbstractValidateCommand;
import io.kestra.core.models.templates.Template;
import io.kestra.core.models.templates.TemplateEnabled;
import io.kestra.core.models.validations.ModelValidator;
import jakarta.inject.Inject;
import picocli.CommandLine;
import java.util.Collections;
@CommandLine.Command(
name = "validate",
description = "Validate a template"
)
@TemplateEnabled
public class TemplateValidateCommand extends AbstractValidateCommand {
@Inject
private ModelValidator modelValidator;
@Override
public Integer call() throws Exception {
return this.call(
Template.class,
modelValidator,
(Object object) -> {
Template template = (Template) object;
return template.getNamespace() + " / " + template.getId();
},
(Object object) -> Collections.emptyList(),
(Object object) -> Collections.emptyList()
);
}
}

View File

@@ -0,0 +1,31 @@
package io.kestra.cli.commands.templates.namespaces;
import io.kestra.cli.AbstractCommand;
import io.kestra.cli.App;
import io.kestra.core.models.templates.TemplateEnabled;
import io.micronaut.configuration.picocli.PicocliRunner;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
@CommandLine.Command(
name = "namespace",
description = "Manage namespace templates",
mixinStandardHelpOptions = true,
subcommands = {
TemplateNamespaceUpdateCommand.class,
}
)
@Slf4j
@TemplateEnabled
public class TemplateNamespaceCommand extends AbstractCommand {
@SneakyThrows
@Override
public Integer call() throws Exception {
super.call();
PicocliRunner.call(App.class, "template", "namespace", "--help");
return 0;
}
}

View File

@@ -0,0 +1,74 @@
package io.kestra.cli.commands.templates.namespaces;
import io.kestra.cli.AbstractValidateCommand;
import io.kestra.cli.commands.AbstractServiceNamespaceUpdateCommand;
import io.kestra.cli.services.TenantIdSelectorService;
import io.kestra.core.models.templates.Template;
import io.kestra.core.models.templates.TemplateEnabled;
import io.kestra.core.serializers.YamlParser;
import io.micronaut.core.type.Argument;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.MutableHttpRequest;
import io.micronaut.http.client.exceptions.HttpClientResponseException;
import io.micronaut.http.client.netty.DefaultHttpClient;
import jakarta.inject.Inject;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
import java.nio.file.Files;
import java.util.List;
import jakarta.validation.ConstraintViolationException;
@CommandLine.Command(
name = "update",
description = "Update namespace templates",
mixinStandardHelpOptions = true
)
@Slf4j
@TemplateEnabled
public class TemplateNamespaceUpdateCommand extends AbstractServiceNamespaceUpdateCommand {
@Inject
private TenantIdSelectorService tenantService;
@Override
public Integer call() throws Exception {
super.call();
try (var files = Files.walk(directory)) {
List<Template> templates = files
.filter(Files::isRegularFile)
.filter(YamlParser::isValidExtension)
.map(path -> YamlParser.parse(path.toFile(), Template.class))
.toList();
if (templates.isEmpty()) {
stdOut("No template found on '{}'", directory.toFile().getAbsolutePath());
}
try (DefaultHttpClient client = client()) {
MutableHttpRequest<List<Template>> request = HttpRequest
.POST(apiUri("/templates/", tenantService.getTenantId(tenantId)) + namespace + "?delete=" + delete, templates);
List<UpdateResult> updated = client.toBlocking().retrieve(
this.requestOptions(request),
Argument.listOf(UpdateResult.class)
);
stdOut(updated.size() + " template(s) for namespace '" + namespace + "' successfully updated!");
updated.forEach(template -> stdOut("- " + template.getNamespace() + "." + template.getId()));
} catch (HttpClientResponseException e) {
AbstractValidateCommand.handleHttpException(e, "template");
return 1;
}
} catch (ConstraintViolationException e) {
AbstractValidateCommand.handleException(e, "template");
return 1;
}
return 0;
}
}

View File

@@ -1,69 +0,0 @@
package io.kestra.cli.listeners;
import io.kestra.core.server.LocalServiceState;
import io.kestra.core.server.Service;
import io.kestra.core.server.ServiceRegistry;
import io.micronaut.context.annotation.Requires;
import io.micronaut.context.event.ApplicationEventListener;
import io.micronaut.context.event.ShutdownEvent;
import io.micronaut.core.annotation.Order;
import io.micronaut.core.order.Ordered;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import lombok.extern.slf4j.Slf4j;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ForkJoinPool;
/**
* Global application shutdown handler.
* This handler gets effectively invoked before {@link jakarta.annotation.PreDestroy} does.
*/
@Singleton
@Slf4j
@Order(Ordered.LOWEST_PRECEDENCE)
@Requires(property = "kestra.server-type")
public class GracefulEmbeddedServiceShutdownListener implements ApplicationEventListener<ShutdownEvent> {
@Inject
ServiceRegistry serviceRegistry;
/**
* {@inheritDoc}
**/
@Override
public boolean supports(ShutdownEvent event) {
return ApplicationEventListener.super.supports(event);
}
/**
* Wait for services' close actions
*
* @param event the event to respond to
*/
@Override
public void onApplicationEvent(ShutdownEvent event) {
List<LocalServiceState> states = serviceRegistry.all();
if (states.isEmpty()) {
return;
}
log.debug("Shutdown event received");
List<CompletableFuture<Void>> futures = states.stream()
.map(state -> CompletableFuture.runAsync(() -> closeService(state), ForkJoinPool.commonPool()))
.toList();
// Wait for all services to close, before shutting down the embedded server
CompletableFuture.allOf(futures.toArray(new CompletableFuture[0])).join();
}
private void closeService(LocalServiceState state) {
final Service service = state.service();
try {
service.unwrap().close();
} catch (Exception e) {
log.error("[Service id={}, type={}] Unexpected error on close", service.getId(), service.getType(), e);
}
}
}
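The removed listener above fans out one close action per registered service and joins on all of them before letting the embedded server stop. A minimal standalone sketch of that fan-out/join pattern, using plain JDK types rather than the Kestra service interfaces, could look like this:

import java.util.List;
import java.util.concurrent.CompletableFuture;

public class ParallelCloseSketch {
    interface Closable {
        void close() throws Exception;
    }

    static void closeAll(List<Closable> services) {
        List<CompletableFuture<Void>> futures = services.stream()
            .map(service -> CompletableFuture.runAsync(() -> {
                try {
                    service.close();
                } catch (Exception e) {
                    System.err.println("Unexpected error on close: " + e);
                }
            }))
            .toList();
        // Only return once every close action has completed.
        CompletableFuture.allOf(futures.toArray(new CompletableFuture[0])).join();
    }

    public static void main(String[] args) {
        closeAll(List.of(
            () -> System.out.println("service 1 closed"),
            () -> System.out.println("service 2 closed")
        ));
    }
}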

View File

@@ -10,21 +10,24 @@ import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.services.FlowListenersInterface;
import io.kestra.core.services.PluginDefaultService;
import io.micronaut.context.annotation.Requires;
import io.micronaut.context.annotation.Value;
import io.micronaut.scheduling.io.watch.FileWatchConfiguration;
import jakarta.annotation.Nullable;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import jakarta.validation.ConstraintViolationException;
import java.util.concurrent.CopyOnWriteArrayList;
import lombok.extern.slf4j.Slf4j;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.file.*;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
@Singleton
@Slf4j
@Requires(property = "micronaut.io.watch.enabled", value = "true")
@@ -46,9 +49,13 @@ public class FileChangedEventListener {
@Inject
protected FlowListenersInterface flowListeners;
@Nullable
@Value("${micronaut.io.watch.tenantId}")
private String tenantId;
FlowFilesManager flowFilesManager;
private List<FlowWithPath> flows = new CopyOnWriteArrayList<>();
private List<FlowWithPath> flows = new ArrayList<>();
private boolean isStarted = false;
@@ -106,6 +113,8 @@ public class FileChangedEventListener {
}
public void startListening(List<Path> paths) throws IOException, InterruptedException {
String tenantId = this.tenantId != null ? this.tenantId : MAIN_TENANT;
for (Path path : paths) {
path.register(watchService, StandardWatchEventKinds.ENTRY_CREATE, StandardWatchEventKinds.ENTRY_DELETE, StandardWatchEventKinds.ENTRY_MODIFY);
}
@@ -148,20 +157,12 @@ public class FileChangedEventListener {
flows.add(FlowWithPath.of(flow.get(), filePath.toString()));
}
flowFilesManager.createOrUpdateFlow(GenericFlow.fromYaml(getTenantIdFromPath(filePath), content));
flowFilesManager.createOrUpdateFlow(GenericFlow.fromYaml(tenantId, content));
log.info("Flow {} from file {} has been created or modified", flow.get().getId(), entry);
}
} catch (NoSuchFileException e) {
log.warn("File not found: {}, deleting it", entry, e);
// the file might have been deleted while we were reading it, so if it's not found we try to delete the flow
flows.stream()
.filter(flow -> flow.getPath().equals(filePath.toString()))
.findFirst()
.ifPresent(flowWithPath -> {
flowFilesManager.deleteFlow(flowWithPath.getTenantId(), flowWithPath.getNamespace(), flowWithPath.getId());
this.flows.removeIf(fwp -> fwp.uidWithoutRevision().equals(flowWithPath.uidWithoutRevision()));
});
log.error("File not found: {}", entry, e);
} catch (IOException e) {
log.error("Error reading file: {}", entry, e);
}
@@ -192,6 +193,8 @@ public class FileChangedEventListener {
}
private void loadFlowsFromFolder(Path folder) {
String tenantId = this.tenantId != null ? this.tenantId : MAIN_TENANT;
try {
Files.walkFileTree(folder, new SimpleFileVisitor<Path>() {
@Override
@@ -211,7 +214,7 @@ public class FileChangedEventListener {
if (flow.isPresent() && flows.stream().noneMatch(flowWithPath -> flowWithPath.uidWithoutRevision().equals(flow.get().uidWithoutRevision()))) {
flows.add(FlowWithPath.of(flow.get(), file.toString()));
flowFilesManager.createOrUpdateFlow(GenericFlow.fromYaml(getTenantIdFromPath(file), content));
flowFilesManager.createOrUpdateFlow(GenericFlow.fromYaml(tenantId, content));
}
}
return FileVisitResult.CONTINUE;
@@ -235,8 +238,10 @@ public class FileChangedEventListener {
}
private Optional<FlowWithSource> parseFlow(String content, Path entry) {
String tenantId = this.tenantId != null ? this.tenantId : MAIN_TENANT;
try {
FlowWithSource flow = pluginDefaultService.parseFlowWithAllDefaults(getTenantIdFromPath(entry), content, false);
FlowWithSource flow = pluginDefaultService.parseFlowWithAllDefaults(tenantId, content, false);
modelValidator.validate(flow);
return Optional.of(flow);
} catch (ConstraintViolationException | FlowProcessingException e) {
@@ -260,10 +265,4 @@ public class FileChangedEventListener {
private Path buildPath(FlowInterface flow) {
return fileWatchConfiguration.getPaths().getFirst().resolve(flow.uidWithoutRevision() + ".yml");
}
private String getTenantIdFromPath(Path path) {
// FIXME there is probably a bug here when a tenant has '_' in its name,
// a valid tenant name is defined by the following regex: "^[a-z0-9][a-z0-9_-]*"
return path.getFileName().toString().split("_")[0];
}
}
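The FIXME removed above can be illustrated with a short sketch: splitting the file name on the first '_' truncates any tenant id that itself contains an underscore, which the tenant-id regex explicitly allows. The file name below is hypothetical.

public class TenantFromPathSketch {
    public static void main(String[] args) {
        String fileName = "my_tenant_somenamespace.someflow.yml";
        String parsedTenant = fileName.split("_")[0];
        System.out.println(parsedTenant); // prints "my" instead of the intended "my_tenant"
    }
}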

View File

@@ -16,11 +16,4 @@ public class TenantIdSelectorService {
}
return MAIN_TENANT;
}
public String getTenantIdAndAllowEETenants(String tenantId) {
if (StringUtils.isNotBlank(tenantId)){
return tenantId;
}
return MAIN_TENANT;
}
}

View File

@@ -18,10 +18,6 @@ micronaut:
root:
paths: classpath:root
mapping: /**
codec:
json:
additional-types:
- application/scim+json
server:
max-request-size: 10GB
multipart:
@@ -31,7 +27,6 @@ micronaut:
write-idle-timeout: 60m
idle-timeout: 60m
netty:
max-zstd-encode-size: 67108864 # increased to 64MB from the default of 32MB
max-chunk-size: 10MB
max-header-size: 32768 # increased from the default of 8k
responses:
@@ -49,8 +44,6 @@ micronaut:
- /ui/.+
- /health
- /health/.+
- /metrics
- /metrics/.+
- /prometheus
http-version: HTTP_1_1
caches:
@@ -84,19 +77,8 @@ micronaut:
type: scheduled
core-pool-size: 1
# Disable OpenTelemetry metrics by default, users that need it must enable it and configure the collector URL.
metrics:
binders:
retry:
enabled: true
netty:
queues:
enabled: true
bytebuf-allocators:
enabled: true
channels:
enabled: true
# Disable OpenTelemetry metrics by default, users that need it must enable it and configure the collector URL.
export:
otlp:
enabled: false
@@ -109,8 +91,6 @@ jackson:
serialization-inclusion: non_null
deserialization:
FAIL_ON_UNKNOWN_PROPERTIES: false
mapper:
ACCEPT_CASE_INSENSITIVE_ENUMS: true
endpoints:
all:
@@ -119,10 +99,6 @@ endpoints:
sensitive: false
health:
details-visible: ANONYMOUS
disk-space:
enabled: false
discovery-client:
enabled: false
loggers:
write-sensitive: false
env:
@@ -156,47 +132,12 @@ kestra:
tutorial-flows:
# Automatically loads all tutorial flows at startup.
enabled: true
retries:
attempts: 5
multiplier: 2.0
delay: 1s
maxDelay: ""
server:
basic-auth:
# These URLs will not be authenticated; by default we open some of the Micronaut default endpoints, but not all of them, for security reasons
open-urls:
- "/ping"
- "/api/v1/executions/webhook/"
- "/api/v1/main/executions/webhook/"
- "/api/v1/*/executions/webhook/"
- "/api/v1/basicAuthValidationErrors"
preview:
initial-rows: 100
max-rows: 5000
# The expected time for this server to complete all its tasks before initiating a graceful shutdown.
terminationGracePeriod: 5m
workerTaskRestartStrategy: AFTER_TERMINATION_GRACE_PERIOD
# Configuration for Liveness and Heartbeat mechanism between servers.
liveness:
enabled: true
# The expected time between liveness probes.
interval: 10s
# The timeout used to detect service failures.
timeout: 1m
# The time to wait before executing a liveness probe.
initialDelay: 1m
# The expected time between service heartbeats.
heartbeatInterval: 3s
service:
purge:
initial-delay: 1h
fixed-delay: 1d
retention: 30d
jdbc:
queues:
min-poll-interval: 25ms
@@ -208,7 +149,7 @@ kestra:
fixed-delay: 1h
retention: 7d
types:
- type: io.kestra.core.models.executions.LogEntry
- type : io.kestra.core.models.executions.LogEntry
retention: 1h
- type: io.kestra.core.models.executions.MetricEntry
retention: 1h
@@ -240,16 +181,38 @@ kestra:
traces:
root: DISABLED
ui-anonymous-usage-report:
enabled: true
ui:
charts:
default-duration: P30D
server:
basic-auth:
enabled: false
# These URLs will not be authenticated; by default we open some of the Micronaut default endpoints, but not all of them, for security reasons
open-urls:
- "/ping"
- "/api/v1/executions/webhook/"
preview:
initial-rows: 100
max-rows: 5000
# The expected time for this server to complete all its tasks before initiating a graceful shutdown.
terminationGracePeriod: 5m
workerTaskRestartStrategy: AFTER_TERMINATION_GRACE_PERIOD
# Configuration for Liveness and Heartbeat mechanism between servers.
liveness:
enabled: true
# The expected time between liveness probes.
interval: 10s
# The timeout used to detect service failures.
timeout: 1m
# The time to wait before executing a liveness probe.
initialDelay: 1m
# The expected time between service heartbeats.
heartbeatInterval: 3s
service:
purge:
initial-delay: 1h
fixed-delay: 1d
retention: 30d
anonymous-usage-report:
enabled: true
uri: https://api.kestra.io/v1/reports/server-events
uri: https://api.kestra.io/v1/reports/usages
initial-delay: 5m
fixed-delay: 1h
@@ -265,4 +228,4 @@ otel:
- /health
- /env
- /prometheus
propagators: tracecontext, baggage
propagators: tracecontext, baggage

View File

@@ -37,7 +37,7 @@ class AppTest {
final String[] args = new String[]{"server", serverType, "--help"};
try (ApplicationContext ctx = App.applicationContext(App.class, new String [] { Environment.CLI }, args)) {
try (ApplicationContext ctx = App.applicationContext(App.class, args)) {
new CommandLine(App.class, new MicronautFactory(ctx)).execute(args);
assertTrue(ctx.getProperty("kestra.server-type", ServerType.class).isEmpty());
@@ -52,7 +52,7 @@ class AppTest {
final String[] argsWithMissingParams = new String[]{"flow", "namespace", "update"};
try (ApplicationContext ctx = App.applicationContext(App.class, new String [] { Environment.CLI }, argsWithMissingParams)) {
try (ApplicationContext ctx = App.applicationContext(App.class, argsWithMissingParams)) {
new CommandLine(App.class, new MicronautFactory(ctx)).execute(argsWithMissingParams);
assertThat(out.toString()).startsWith("Missing required parameters: ");

View File

@@ -1,4 +1,4 @@
package io.kestra.core.validations;
package io.kestra.cli;
import io.micronaut.context.ApplicationContext;
import io.micronaut.context.exceptions.BeanInstantiationException;

View File

@@ -1,76 +0,0 @@
package io.kestra.cli.commands.configs.sys;
import io.kestra.cli.commands.flows.FlowCreateCommand;
import io.kestra.cli.commands.namespaces.kv.KvCommand;
import io.micronaut.configuration.picocli.PicocliRunner;
import io.micronaut.context.ApplicationContext;
import io.micronaut.runtime.server.EmbeddedServer;
import org.junit.jupiter.api.Test;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Objects;
import static org.assertj.core.api.Assertions.assertThat;
/**
* Verifies CLI behavior without repository configuration:
* - Repo-independent commands succeed (e.g. KV with no params).
* - Repo-dependent commands fail with a clear error.
*/
class NoConfigCommandTest {
@Test
void shouldSucceedWithNamespaceKVCommandWithoutParamsAndConfig() {
ByteArrayOutputStream out = new ByteArrayOutputStream();
System.setOut(new PrintStream(out));
try (ApplicationContext ctx = ApplicationContext.builder().deduceEnvironment(false).start()) {
String[] args = {};
Integer call = PicocliRunner.call(KvCommand.class, ctx, args);
assertThat(call).isZero();
assertThat(out.toString()).contains("Usage: kestra namespace kv");
}
}
@Test
void shouldFailWithCreateFlowCommandWithoutConfig() throws URISyntaxException {
URL flowUrl = NoConfigCommandTest.class.getClassLoader().getResource("crudFlow/date.yml");
Objects.requireNonNull(flowUrl, "Test flow resource not found");
Path flowPath = Paths.get(flowUrl.toURI());
ByteArrayOutputStream out = new ByteArrayOutputStream();
ByteArrayOutputStream err = new ByteArrayOutputStream();
System.setOut(new PrintStream(out));
System.setErr(new PrintStream(err));
try (ApplicationContext ctx = ApplicationContext.builder()
.deduceEnvironment(false)
.start()) {
EmbeddedServer embeddedServer = ctx.getBean(EmbeddedServer.class);
embeddedServer.start();
String[] createArgs = {
"--server",
embeddedServer.getURL().toString(),
"--user",
"myuser:pass:word",
flowPath.toString(),
};
Integer exitCode = PicocliRunner.call(FlowCreateCommand.class, ctx, createArgs);
assertThat(exitCode).isNotZero();
assertThat(out.toString()).isEmpty();
assertThat(err.toString()).contains("No bean of type [io.kestra.core.repositories.FlowRepositoryInterface] exists");
}
}
}

View File

@@ -14,7 +14,7 @@ import static org.assertj.core.api.Assertions.assertThat;
class FlowDotCommandTest {
@Test
void run() {
URL directory = FlowDotCommandTest.class.getClassLoader().getResource("flows/same/first.yaml");
URL directory = TemplateValidateCommandTest.class.getClassLoader().getResource("flows/same/first.yaml");
ByteArrayOutputStream out = new ByteArrayOutputStream();
System.setOut(new PrintStream(out));

View File

@@ -0,0 +1,41 @@
package io.kestra.cli.commands.flows;
import io.micronaut.configuration.picocli.PicocliRunner;
import io.micronaut.context.ApplicationContext;
import org.junit.jupiter.api.Test;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import static org.assertj.core.api.Assertions.assertThat;
class FlowExpandCommandTest {
@SuppressWarnings("deprecation")
@Test
void run() {
ByteArrayOutputStream out = new ByteArrayOutputStream();
System.setOut(new PrintStream(out));
try (ApplicationContext ctx = ApplicationContext.builder().deduceEnvironment(false).start()) {
String[] args = {
"src/test/resources/helper/include.yaml"
};
Integer call = PicocliRunner.call(FlowExpandCommand.class, ctx, args);
assertThat(call).isZero();
assertThat(out.toString()).isEqualTo("id: include\n" +
"namespace: io.kestra.cli\n" +
"\n" +
"# The list of tasks\n" +
"tasks:\n" +
"- id: t1\n" +
" type: io.kestra.plugin.core.debug.Return\n" +
" format: \"Lorem ipsum dolor sit amet\"\n" +
"- id: t2\n" +
" type: io.kestra.plugin.core.debug.Return\n" +
" format: |\n" +
" Lorem ipsum dolor sit amet\n" +
" Lorem ipsum dolor sit amet\n");
}
}
}

View File

@@ -27,26 +27,6 @@ class FlowValidateCommandTest {
}
}
@Test
// github action kestra-io/validate-action requires being able to validate Flows from OSS CLI against a remote EE instance
void runForEEInstance() {
ByteArrayOutputStream out = new ByteArrayOutputStream();
System.setOut(new PrintStream(out));
try (ApplicationContext ctx = ApplicationContext.builder().deduceEnvironment(false).start()) {
String[] args = {
"--tenant",
"some-ee-tenant",
"--local",
"src/test/resources/helper/include.yaml"
};
Integer call = PicocliRunner.call(FlowValidateCommand.class, ctx, args);
assertThat(call).isZero();
assertThat(out.toString()).contains("✓ - io.kestra.cli / include");
}
}
@Test
void warning() {
ByteArrayOutputStream out = new ByteArrayOutputStream();
@@ -61,6 +41,7 @@ class FlowValidateCommandTest {
assertThat(call).isZero();
assertThat(out.toString()).contains("✓ - system / warning");
assertThat(out.toString()).contains("⚠ - tasks[0] is deprecated");
assertThat(out.toString()).contains(" - io.kestra.core.tasks.log.Log is replaced by io.kestra.plugin.core.log.Log");
}
}

View File

@@ -0,0 +1,62 @@
package io.kestra.cli.commands.flows;
import io.micronaut.configuration.picocli.PicocliRunner;
import io.micronaut.context.ApplicationContext;
import io.micronaut.context.env.Environment;
import io.micronaut.runtime.server.EmbeddedServer;
import org.junit.jupiter.api.Test;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.net.URL;
import static org.assertj.core.api.Assertions.assertThat;
class TemplateValidateCommandTest {
@Test
void runLocal() {
URL directory = TemplateValidateCommandTest.class.getClassLoader().getResource("invalids/empty.yaml");
ByteArrayOutputStream out = new ByteArrayOutputStream();
System.setErr(new PrintStream(out));
try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, Environment.TEST)) {
String[] args = {
"--local",
directory.getPath()
};
Integer call = PicocliRunner.call(FlowValidateCommand.class, ctx, args);
assertThat(call).isEqualTo(1);
assertThat(out.toString()).contains("Unable to parse flow");
assertThat(out.toString()).contains("must not be empty");
}
}
@Test
void runServer() {
URL directory = TemplateValidateCommandTest.class.getClassLoader().getResource("invalids/empty.yaml");
ByteArrayOutputStream out = new ByteArrayOutputStream();
System.setErr(new PrintStream(out));
try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, Environment.TEST)) {
EmbeddedServer embeddedServer = ctx.getBean(EmbeddedServer.class);
embeddedServer.start();
String[] args = {
"--plugins",
"/tmp", // pass this arg because it can cause failure
"--server",
embeddedServer.getURL().toString(),
"--user",
"myuser:pass:word",
directory.getPath()
};
Integer call = PicocliRunner.call(FlowValidateCommand.class, ctx, args);
assertThat(call).isEqualTo(1);
assertThat(out.toString()).contains("Unable to parse flow");
assertThat(out.toString()).contains("must not be empty");
}
}
}

View File

@@ -1,147 +0,0 @@
package io.kestra.cli.commands.migrations.metadata;
import io.kestra.cli.App;
import io.kestra.core.exceptions.ResourceExpiredException;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.models.kv.PersistedKvMetadata;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.repositories.KvMetadataRepositoryInterface;
import io.kestra.core.serializers.JacksonMapper;
import io.kestra.core.storages.StorageContext;
import io.kestra.core.storages.StorageInterface;
import io.kestra.core.storages.StorageObject;
import io.kestra.core.storages.kv.*;
import io.kestra.core.tenant.TenantService;
import io.kestra.core.utils.TestsUtils;
import io.kestra.plugin.core.log.Log;
import io.micronaut.configuration.picocli.PicocliRunner;
import io.micronaut.context.ApplicationContext;
import io.micronaut.context.env.Environment;
import io.micronaut.core.annotation.NonNull;
import org.junit.jupiter.api.Test;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.net.URI;
import java.time.Duration;
import java.time.Instant;
import java.util.List;
import java.util.Optional;
import static org.assertj.core.api.Assertions.assertThat;
public class KvMetadataMigrationCommandTest {
@Test
void run() throws IOException, ResourceExpiredException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
System.setOut(new PrintStream(out));
ByteArrayOutputStream err = new ByteArrayOutputStream();
System.setErr(new PrintStream(err));
try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, Environment.TEST)) {
/* Initial setup:
* - namespace 1: key, description, value
* - namespace 1: expiredKey
* - namespace 2: anotherKey, anotherDescription
* - Nothing in database */
String namespace = TestsUtils.randomNamespace();
String key = "myKey";
StorageInterface storage = ctx.getBean(StorageInterface.class);
String description = "Some description";
String value = "someValue";
putOldKv(storage, namespace, key, description, value);
String anotherNamespace = TestsUtils.randomNamespace();
String anotherKey = "anotherKey";
String anotherDescription = "another description";
putOldKv(storage, anotherNamespace, anotherKey, anotherDescription, "anotherValue");
String tenantId = TenantService.MAIN_TENANT;
// Expired KVs should not be migrated and should be purged from the storage
String expiredKey = "expiredKey";
putOldKv(storage, namespace, expiredKey, Instant.now().minus(Duration.ofMinutes(5)), "some expired description", "expiredValue");
assertThat(storage.exists(tenantId, null, getKvStorageUri(namespace, expiredKey))).isTrue();
KvMetadataRepositoryInterface kvMetadataRepository = ctx.getBean(KvMetadataRepositoryInterface.class);
assertThat(kvMetadataRepository.findByName(tenantId, namespace, key).isPresent()).isFalse();
/* Expected outcome from the migration command:
* - no KV is migrated because no flow exists in those namespaces, so the KVs are not picked up (we don't know they exist) */
String[] kvMetadataMigrationCommand = {
"migrate", "metadata", "kv"
};
PicocliRunner.call(App.class, ctx, kvMetadataMigrationCommand);
assertThat(out.toString()).contains("✅ KV Metadata migration complete.");
// It's still not in the metadata repository because no flow exists yet to discover that KV
assertThat(kvMetadataRepository.findByName(tenantId, namespace, key).isPresent()).isFalse();
assertThat(kvMetadataRepository.findByName(tenantId, anotherNamespace, anotherKey).isPresent()).isFalse();
// A flow is created from namespace 1, so the KV in this namespace should be migrated
FlowRepositoryInterface flowRepository = ctx.getBean(FlowRepositoryInterface.class);
flowRepository.create(GenericFlow.of(Flow.builder()
.tenantId(tenantId)
.id("a-flow")
.namespace(namespace)
.tasks(List.of(Log.builder().id("log").type(Log.class.getName()).message("logging").build()))
.build()));
/* We run the migration again:
* - namespace 1 KV is seen and its metadata is migrated to the database
* - namespace 2 KV is not seen because no flow exists in this namespace
* - expiredKey is deleted from storage and not migrated */
out.reset();
PicocliRunner.call(App.class, ctx, kvMetadataMigrationCommand);
assertThat(out.toString()).contains("✅ KV Metadata migration complete.");
Optional<PersistedKvMetadata> foundKv = kvMetadataRepository.findByName(tenantId, namespace, key);
assertThat(foundKv.isPresent()).isTrue();
assertThat(foundKv.get().getDescription()).isEqualTo(description);
assertThat(kvMetadataRepository.findByName(tenantId, anotherNamespace, anotherKey).isPresent()).isFalse();
KVStore kvStore = new InternalKVStore(tenantId, namespace, storage, kvMetadataRepository);
Optional<KVEntry> actualKv = kvStore.get(key);
assertThat(actualKv.isPresent()).isTrue();
assertThat(actualKv.get().description()).isEqualTo(description);
Optional<KVValue> actualValue = kvStore.getValue(key);
assertThat(actualValue.isPresent()).isTrue();
assertThat(actualValue.get().value()).isEqualTo(value);
assertThat(kvMetadataRepository.findByName(tenantId, namespace, expiredKey).isPresent()).isFalse();
assertThat(storage.exists(tenantId, null, getKvStorageUri(namespace, expiredKey))).isFalse();
/* We run the migration one last time without any change to verify that we don't re-save existing metadata.
* It covers the case where the user hasn't run the migrate command yet but has already added some KVs from the UI (those will already be in the metadata database). */
out.reset();
PicocliRunner.call(App.class, ctx, kvMetadataMigrationCommand);
assertThat(out.toString()).contains("✅ KV Metadata migration complete.");
foundKv = kvMetadataRepository.findByName(tenantId, namespace, key);
assertThat(foundKv.get().getVersion()).isEqualTo(1);
}
}
private static void putOldKv(StorageInterface storage, String namespace, String key, String description, String value) throws IOException {
putOldKv(storage, namespace, key, Instant.now().plus(Duration.ofMinutes(5)), description, value);
}
private static void putOldKv(StorageInterface storage, String namespace, String key, Instant expirationDate, String description, String value) throws IOException {
URI kvStorageUri = getKvStorageUri(namespace, key);
KVValueAndMetadata kvValueAndMetadata = new KVValueAndMetadata(new KVMetadata(description, expirationDate), value);
storage.put(TenantService.MAIN_TENANT, namespace, kvStorageUri, new StorageObject(
kvValueAndMetadata.metadataAsMap(),
new ByteArrayInputStream(JacksonMapper.ofIon().writeValueAsBytes(kvValueAndMetadata.value()))
));
}
private static @NonNull URI getKvStorageUri(String namespace, String key) {
return URI.create(StorageContext.KESTRA_PROTOCOL + StorageContext.kvPrefix(namespace) + "/" + key + ".ion");
}
}
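The expired-key behaviour exercised above (expiredKey is purged instead of migrated) boils down to comparing the stored expiration instant with the current time. A minimal sketch of that check, under the assumption that a null expiration date means the KV never expires:

import java.time.Duration;
import java.time.Instant;

public class KvExpirationSketch {
    static boolean isExpired(Instant expirationDate) {
        return expirationDate != null && expirationDate.isBefore(Instant.now());
    }

    public static void main(String[] args) {
        System.out.println(isExpired(Instant.now().minus(Duration.ofMinutes(5)))); // true  -> purge, do not migrate
        System.out.println(isExpired(Instant.now().plus(Duration.ofMinutes(5))));  // false -> migrate the metadata
        System.out.println(isExpired(null));                                       // false -> never expires
    }
}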

View File

@@ -1,29 +0,0 @@
package io.kestra.cli.commands.migrations.metadata;
import io.kestra.cli.App;
import io.micronaut.configuration.picocli.PicocliRunner;
import io.micronaut.context.ApplicationContext;
import io.micronaut.context.env.Environment;
import org.junit.jupiter.api.Test;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import static org.assertj.core.api.Assertions.assertThat;
public class SecretsMetadataMigrationCommandTest {
@Test
void run() {
ByteArrayOutputStream err = new ByteArrayOutputStream();
System.setErr(new PrintStream(err));
try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, Environment.TEST)) {
String[] secretMetadataMigrationCommand = {
"migrate", "metadata", "secrets"
};
PicocliRunner.call(App.class, ctx, secretMetadataMigrationCommand);
assertThat(err.toString()).contains("❌ Secrets Metadata migration failed: Secret migration is not needed in the OSS version");
}
}
}

View File

@@ -0,0 +1,27 @@
package io.kestra.cli.commands.sys.statestore;
import io.kestra.cli.commands.sys.database.DatabaseCommand;
import io.micronaut.configuration.picocli.PicocliRunner;
import io.micronaut.context.ApplicationContext;
import org.junit.jupiter.api.Test;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import static org.assertj.core.api.Assertions.assertThat;
class StateStoreCommandTest {
@Test
void runWithNoParam() {
ByteArrayOutputStream out = new ByteArrayOutputStream();
System.setOut(new PrintStream(out));
try (ApplicationContext ctx = ApplicationContext.builder().deduceEnvironment(false).start()) {
String[] args = {};
Integer call = PicocliRunner.call(StateStoreCommand.class, ctx, args);
assertThat(call).isZero();
assertThat(out.toString()).contains("Usage: kestra sys state-store");
}
}
}

View File

@@ -0,0 +1,75 @@
package io.kestra.cli.commands.sys.statestore;
import io.kestra.core.exceptions.MigrationRequiredException;
import io.kestra.core.exceptions.ResourceExpiredException;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.runners.RunContext;
import io.kestra.core.runners.RunContextFactory;
import io.kestra.core.storages.StateStore;
import io.kestra.core.storages.StorageInterface;
import io.kestra.core.utils.Hashing;
import io.kestra.core.utils.Slugify;
import io.kestra.plugin.core.log.Log;
import io.micronaut.configuration.picocli.PicocliRunner;
import io.micronaut.context.ApplicationContext;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.net.URI;
import java.util.List;
import java.util.Map;
import static org.assertj.core.api.Assertions.assertThat;
class StateStoreMigrateCommandTest {
@Test
void runMigration() throws IOException, ResourceExpiredException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
System.setOut(new PrintStream(out));
try (ApplicationContext ctx = ApplicationContext.builder().deduceEnvironment(false).environments("test").start()) {
FlowRepositoryInterface flowRepository = ctx.getBean(FlowRepositoryInterface.class);
Flow flow = Flow.builder()
.tenantId("my-tenant")
.id("a-flow")
.namespace("some.valid.namespace." + ((int) (Math.random() * 1000000)))
.tasks(List.of(Log.builder().id("log").type(Log.class.getName()).message("logging").build()))
.build();
flowRepository.create(GenericFlow.of(flow));
StorageInterface storage = ctx.getBean(StorageInterface.class);
String tenantId = flow.getTenantId();
URI oldStateStoreUri = URI.create("/" + flow.getNamespace().replace(".", "/") + "/" + Slugify.of("a-flow") + "/states/my-state/" + Hashing.hashToString("my-taskrun-value") + "/sub-name");
storage.put(
tenantId,
flow.getNamespace(),
oldStateStoreUri,
new ByteArrayInputStream("my-value".getBytes())
);
assertThat(storage.exists(tenantId, flow.getNamespace(), oldStateStoreUri)).isTrue();
RunContext runContext = ctx.getBean(RunContextFactory.class).of(flow, Map.of("flow", Map.of(
"tenantId", tenantId,
"id", flow.getId(),
"namespace", flow.getNamespace()
)));
StateStore stateStore = new StateStore(runContext, true);
Assertions.assertThrows(MigrationRequiredException.class, () -> stateStore.getState(true, "my-state", "sub-name", "my-taskrun-value"));
String[] args = {};
Integer call = PicocliRunner.call(StateStoreMigrateCommand.class, ctx, args);
assertThat(new String(stateStore.getState(true, "my-state", "sub-name", "my-taskrun-value").readAllBytes())).isEqualTo("my-value");
assertThat(storage.exists(tenantId, flow.getNamespace(), oldStateStoreUri)).isFalse();
assertThat(call).isZero();
}
}
}

View File

@@ -0,0 +1,65 @@
package io.kestra.cli.commands.templates;
import io.kestra.cli.commands.templates.namespaces.TemplateNamespaceUpdateCommand;
import io.micronaut.configuration.picocli.PicocliRunner;
import io.micronaut.context.ApplicationContext;
import io.micronaut.context.env.Environment;
import io.micronaut.runtime.server.EmbeddedServer;
import org.junit.jupiter.api.Test;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
import java.net.URL;
import java.util.Map;
import java.util.zip.ZipFile;
import static org.assertj.core.api.Assertions.assertThat;
class TemplateExportCommandTest {
@Test
void run() throws IOException {
URL directory = TemplateExportCommandTest.class.getClassLoader().getResource("templates");
ByteArrayOutputStream out = new ByteArrayOutputStream();
System.setOut(new PrintStream(out));
try (ApplicationContext ctx = ApplicationContext.run(Map.of("kestra.templates.enabled", "true"), Environment.CLI, Environment.TEST)) {
EmbeddedServer embeddedServer = ctx.getBean(EmbeddedServer.class);
embeddedServer.start();
// we use the update command to add the templates that we will then export
String[] args = {
"--server",
embeddedServer.getURL().toString(),
"--user",
"myuser:pass:word",
"io.kestra.tests",
directory.getPath(),
};
PicocliRunner.call(TemplateNamespaceUpdateCommand.class, ctx, args);
assertThat(out.toString()).contains("3 template(s)");
// then we export them
String[] exportArgs = {
"--server",
embeddedServer.getURL().toString(),
"--user",
"myuser:pass:word",
"--namespace",
"io.kestra.tests",
"/tmp",
};
PicocliRunner.call(TemplateExportCommand.class, ctx, exportArgs);
File file = new File("/tmp/templates.zip");
assertThat(file.exists()).isTrue();
ZipFile zipFile = new ZipFile(file);
assertThat(zipFile.stream().count()).isEqualTo(3L);
file.delete();
}
}
}

View File

@@ -0,0 +1,61 @@
package io.kestra.cli.commands.templates;
import io.micronaut.configuration.picocli.PicocliRunner;
import io.micronaut.context.ApplicationContext;
import io.micronaut.context.env.Environment;
import io.micronaut.runtime.server.EmbeddedServer;
import org.junit.jupiter.api.Test;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.net.URL;
import java.util.Map;
import static org.assertj.core.api.Assertions.assertThat;
class TemplateValidateCommandTest {
@Test
void runLocal() {
URL directory = TemplateValidateCommandTest.class.getClassLoader().getResource("invalidsTemplates/template.yml");
ByteArrayOutputStream out = new ByteArrayOutputStream();
System.setErr(new PrintStream(out));
try (ApplicationContext ctx = ApplicationContext.run(Map.of("kestra.templates.enabled", "true"), Environment.CLI, Environment.TEST)) {
String[] args = {
"--local",
directory.getPath()
};
Integer call = PicocliRunner.call(TemplateValidateCommand.class, ctx, args);
assertThat(call).isEqualTo(1);
assertThat(out.toString()).contains("Unable to parse template");
assertThat(out.toString()).contains("must not be empty");
}
}
@Test
void runServer() {
URL directory = TemplateValidateCommandTest.class.getClassLoader().getResource("invalidsTemplates/template.yml");
ByteArrayOutputStream out = new ByteArrayOutputStream();
System.setErr(new PrintStream(out));
try (ApplicationContext ctx = ApplicationContext.run(Map.of("kestra.templates.enabled", "true"), Environment.CLI, Environment.TEST)) {
EmbeddedServer embeddedServer = ctx.getBean(EmbeddedServer.class);
embeddedServer.start();
String[] args = {
"--server",
embeddedServer.getURL().toString(),
"--user",
"myuser:pass:word",
directory.getPath()
};
Integer call = PicocliRunner.call(TemplateValidateCommand.class, ctx, args);
assertThat(call).isEqualTo(1);
assertThat(out.toString()).contains("Unable to parse template");
assertThat(out.toString()).contains("must not be empty");
}
}
}

View File

@@ -0,0 +1,26 @@
package io.kestra.cli.commands.templates.namespaces;
import io.micronaut.configuration.picocli.PicocliRunner;
import io.micronaut.context.ApplicationContext;
import org.junit.jupiter.api.Test;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import static org.assertj.core.api.Assertions.assertThat;
class TemplateNamespaceCommandTest {
@Test
void runWithNoParam() {
ByteArrayOutputStream out = new ByteArrayOutputStream();
System.setOut(new PrintStream(out));
try (ApplicationContext ctx = ApplicationContext.builder().deduceEnvironment(false).start()) {
String[] args = {};
Integer call = PicocliRunner.call(TemplateNamespaceCommand.class, ctx, args);
assertThat(call).isZero();
assertThat(out.toString()).contains("Usage: kestra template namespace");
}
}
}

View File

@@ -0,0 +1,112 @@
package io.kestra.cli.commands.templates.namespaces;
import io.micronaut.configuration.picocli.PicocliRunner;
import io.micronaut.context.ApplicationContext;
import io.micronaut.context.env.Environment;
import io.micronaut.runtime.server.EmbeddedServer;
import org.junit.jupiter.api.Test;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.net.URL;
import java.util.Map;
import static org.assertj.core.api.Assertions.assertThat;
class TemplateNamespaceUpdateCommandTest {
@Test
void run() {
URL directory = TemplateNamespaceUpdateCommandTest.class.getClassLoader().getResource("templates");
ByteArrayOutputStream out = new ByteArrayOutputStream();
System.setOut(new PrintStream(out));
try (ApplicationContext ctx = ApplicationContext.run(Map.of("kestra.templates.enabled", "true"), Environment.CLI, Environment.TEST)) {
EmbeddedServer embeddedServer = ctx.getBean(EmbeddedServer.class);
embeddedServer.start();
String[] args = {
"--server",
embeddedServer.getURL().toString(),
"--user",
"myuser:pass:word",
"io.kestra.tests",
directory.getPath(),
};
PicocliRunner.call(TemplateNamespaceUpdateCommand.class, ctx, args);
assertThat(out.toString()).contains("3 template(s)");
}
}
@Test
void invalid() {
URL directory = TemplateNamespaceUpdateCommandTest.class.getClassLoader().getResource("invalidsTemplates");
ByteArrayOutputStream out = new ByteArrayOutputStream();
System.setErr(new PrintStream(out));
try (ApplicationContext ctx = ApplicationContext.run(Map.of("kestra.templates.enabled", "true"), Environment.CLI, Environment.TEST)) {
EmbeddedServer embeddedServer = ctx.getBean(EmbeddedServer.class);
embeddedServer.start();
String[] args = {
"--server",
embeddedServer.getURL().toString(),
"--user",
"myuser:pass:word",
"io.kestra.tests",
directory.getPath(),
};
Integer call = PicocliRunner.call(TemplateNamespaceUpdateCommand.class, ctx, args);
// assertThat(call, is(1));
assertThat(out.toString()).contains("Unable to parse templates");
assertThat(out.toString()).contains("must not be empty");
}
}
@Test
void runNoDelete() {
URL directory = TemplateNamespaceUpdateCommandTest.class.getClassLoader().getResource("templates");
URL subDirectory = TemplateNamespaceUpdateCommandTest.class.getClassLoader().getResource("templates/templatesSubFolder");
ByteArrayOutputStream out = new ByteArrayOutputStream();
System.setOut(new PrintStream(out));
try (ApplicationContext ctx = ApplicationContext.run(Map.of("kestra.templates.enabled", "true"), Environment.CLI, Environment.TEST)) {
EmbeddedServer embeddedServer = ctx.getBean(EmbeddedServer.class);
embeddedServer.start();
String[] args = {
"--server",
embeddedServer.getURL().toString(),
"--user",
"myuser:pass:word",
"io.kestra.tests",
directory.getPath(),
};
PicocliRunner.call(TemplateNamespaceUpdateCommand.class, ctx, args);
assertThat(out.toString()).contains("3 template(s)");
String[] newArgs = {
"--server",
embeddedServer.getURL().toString(),
"--user",
"myuser:pass:word",
"io.kestra.tests",
subDirectory.getPath(),
"--no-delete"
};
PicocliRunner.call(TemplateNamespaceUpdateCommand.class, ctx, newArgs);
assertThat(out.toString()).contains("1 template(s)");
}
}
}

View File

@@ -1,15 +1,13 @@
package io.kestra.cli.services;
import io.kestra.core.junit.annotations.FlakyTest;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.utils.Await;
import io.kestra.core.utils.TestsUtils;
import io.micronaut.test.extensions.junit5.annotation.MicronautTest;
import jakarta.inject.Inject;
import org.apache.commons.io.FileUtils;
import org.junit.jupiter.api.*;
import org.junitpioneer.jupiter.RetryingTest;
import java.io.IOException;
import java.nio.file.Files;
@@ -19,8 +17,8 @@ import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import org.junitpioneer.jupiter.RetryingTest;
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
import static io.kestra.core.utils.Rethrow.throwRunnable;
import static org.assertj.core.api.Assertions.assertThat;
@@ -58,12 +56,10 @@ class FileChangedEventListenerTest {
}
}
@FlakyTest
@RetryingTest(2)
@RetryingTest(5) // Flaky on CI but always passes locally
void test() throws IOException, TimeoutException {
var tenant = TestsUtils.randomTenant(FileChangedEventListenerTest.class.getSimpleName(), "test");
// remove the flow if it already exists
flowRepository.findByIdWithSource(tenant, "io.kestra.tests.watch", "myflow").ifPresent(flow -> flowRepository.delete(flow));
flowRepository.findByIdWithSource(MAIN_TENANT, "io.kestra.tests.watch", "myflow").ifPresent(flow -> flowRepository.delete(flow));
// create a basic flow
String flow = """
@@ -75,34 +71,30 @@ class FileChangedEventListenerTest {
type: io.kestra.plugin.core.log.Log
message: Hello World! 🚀
""";
GenericFlow genericFlow = GenericFlow.fromYaml(tenant, flow);
Files.write(Path.of(FILE_WATCH + "/" + genericFlow.uidWithoutRevision() + ".yaml"), flow.getBytes());
Files.write(Path.of(FILE_WATCH + "/myflow.yaml"), flow.getBytes());
Await.until(
() -> flowRepository.findById(tenant, "io.kestra.tests.watch", "myflow").isPresent(),
() -> flowRepository.findById(MAIN_TENANT, "io.kestra.tests.watch", "myflow").isPresent(),
Duration.ofMillis(100),
Duration.ofSeconds(10)
);
Flow myflow = flowRepository.findById(tenant, "io.kestra.tests.watch", "myflow").orElseThrow();
Flow myflow = flowRepository.findById(MAIN_TENANT, "io.kestra.tests.watch", "myflow").orElseThrow();
assertThat(myflow.getTasks()).hasSize(1);
assertThat(myflow.getTasks().getFirst().getId()).isEqualTo("hello");
assertThat(myflow.getTasks().getFirst().getType()).isEqualTo("io.kestra.plugin.core.log.Log");
// delete the flow
Files.delete(Path.of(FILE_WATCH + "/" + genericFlow.uidWithoutRevision() + ".yaml"));
Files.delete(Path.of(FILE_WATCH + "/myflow.yaml"));
Await.until(
() -> flowRepository.findById(tenant, "io.kestra.tests.watch", "myflow").isEmpty(),
() -> flowRepository.findById(MAIN_TENANT, "io.kestra.tests.watch", "myflow").isEmpty(),
Duration.ofMillis(100),
Duration.ofSeconds(10)
);
}
@FlakyTest
@RetryingTest(2)
@RetryingTest(5) // Flaky on CI but always passes locally
void testWithPluginDefault() throws IOException, TimeoutException {
var tenant = TestsUtils.randomTenant(FileChangedEventListenerTest.class.getName(), "testWithPluginDefault");
// remove the flow if it already exists
flowRepository.findByIdWithSource(tenant, "io.kestra.tests.watch", "pluginDefault").ifPresent(flow -> flowRepository.delete(flow));
flowRepository.findByIdWithSource(MAIN_TENANT, "io.kestra.tests.watch", "pluginDefault").ifPresent(flow -> flowRepository.delete(flow));
// create a flow with plugin default
String pluginDefault = """
@@ -118,22 +110,21 @@ class FileChangedEventListenerTest {
values:
message: Hello World!
""";
GenericFlow genericFlow = GenericFlow.fromYaml(tenant, pluginDefault);
Files.write(Path.of(FILE_WATCH + "/" + genericFlow.uidWithoutRevision() + ".yaml"), pluginDefault.getBytes());
Files.write(Path.of(FILE_WATCH + "/plugin-default.yaml"), pluginDefault.getBytes());
Await.until(
() -> flowRepository.findById(tenant, "io.kestra.tests.watch", "pluginDefault").isPresent(),
() -> flowRepository.findById(MAIN_TENANT, "io.kestra.tests.watch", "pluginDefault").isPresent(),
Duration.ofMillis(100),
Duration.ofSeconds(10)
);
Flow pluginDefaultFlow = flowRepository.findById(tenant, "io.kestra.tests.watch", "pluginDefault").orElseThrow();
Flow pluginDefaultFlow = flowRepository.findById(MAIN_TENANT, "io.kestra.tests.watch", "pluginDefault").orElseThrow();
assertThat(pluginDefaultFlow.getTasks()).hasSize(1);
assertThat(pluginDefaultFlow.getTasks().getFirst().getId()).isEqualTo("helloWithDefault");
assertThat(pluginDefaultFlow.getTasks().getFirst().getType()).isEqualTo("io.kestra.plugin.core.log.Log");
// delete both files
Files.delete(Path.of(FILE_WATCH + "/" + genericFlow.uidWithoutRevision() + ".yaml"));
Files.delete(Path.of(FILE_WATCH + "/plugin-default.yaml"));
Await.until(
() -> flowRepository.findById(tenant, "io.kestra.tests.watch", "pluginDefault").isEmpty(),
() -> flowRepository.findById(MAIN_TENANT, "io.kestra.tests.watch", "pluginDefault").isEmpty(),
Duration.ofMillis(100),
Duration.ofSeconds(10)
);
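The Await.until calls in this test poll a condition at a fixed interval until it holds or a timeout elapses. A minimal standalone sketch of that polling pattern (not Kestra's Await utility):

import java.time.Duration;
import java.time.Instant;
import java.util.function.BooleanSupplier;

public class AwaitSketch {
    static void until(BooleanSupplier condition, Duration pollInterval, Duration timeout) throws InterruptedException {
        Instant deadline = Instant.now().plus(timeout);
        while (!condition.getAsBoolean()) {
            if (Instant.now().isAfter(deadline)) {
                throw new IllegalStateException("Condition not met within " + timeout);
            }
            Thread.sleep(pollInterval.toMillis());
        }
    }

    public static void main(String[] args) throws InterruptedException {
        Instant start = Instant.now();
        // Waits until at least 200ms have elapsed, checking every 50ms, giving up after 5s.
        until(() -> Duration.between(start, Instant.now()).toMillis() > 200, Duration.ofMillis(50), Duration.ofSeconds(5));
        System.out.println("condition met");
    }
}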

View File

@@ -17,7 +17,7 @@ kestra:
central:
url: https://repo.maven.apache.org/maven2/
sonatype:
url: https://central.sonatype.com/repository/maven-snapshots/
url: https://s01.oss.sonatype.org/content/repositories/snapshots/
server:
liveness:
enabled: false

View File

@@ -3,8 +3,8 @@ namespace: system
tasks:
- id: deprecated
type: io.kestra.plugin.core.log.Log
message: Hello World
type: io.kestra.plugin.core.debug.Echo
format: Hello World
- id: alias
type: io.kestra.core.tasks.log.Log
message: I'm an alias

View File

@@ -56,23 +56,21 @@ component_management:
name: Tests
paths:
- tests/**
- component_id: ui
name: Ui
paths:
- ui/**
- component_id: webserver
name: Webserver
paths:
- webserver/**
ignore:
- ui/**
# we are not yet mature enough to enforce UI code coverage
flag_management:
default_rules:
carryforward: true
statuses:
- type: project
target: 70%
threshold: 10%
target: 80%
threshold: 1%
- type: patch
target: 75%
threshold: 10%
target: 90%

View File

@@ -37,15 +37,14 @@ dependencies {
implementation 'nl.basjes.gitignore:gitignore-reader'
implementation group: 'dev.failsafe', name: 'failsafe'
implementation 'com.github.ben-manes.caffeine:caffeine'
implementation 'com.github.ksuid:ksuid:1.1.4'
api 'org.apache.httpcomponents.client5:httpclient5'
// plugins
implementation 'org.apache.maven.resolver:maven-resolver-impl'
implementation 'org.apache.maven.resolver:maven-resolver-supplier-mvn3'
implementation 'org.apache.maven.resolver:maven-resolver-supplier'
implementation 'org.apache.maven.resolver:maven-resolver-connector-basic'
implementation 'org.apache.maven.resolver:maven-resolver-transport-file'
implementation 'org.apache.maven.resolver:maven-resolver-transport-apache'
implementation 'org.apache.maven.resolver:maven-resolver-transport-http'
// scheduler
implementation group: 'com.cronutils', name: 'cron-utils'
@@ -63,10 +62,6 @@ dependencies {
exclude group: 'com.fasterxml.jackson.core'
}
// micrometer
implementation "io.micronaut.micrometer:micronaut-micrometer-observation"
implementation 'io.micrometer:micrometer-java21'
// test
testAnnotationProcessor project(':processor')
testImplementation project(':tests')
@@ -74,17 +69,12 @@ dependencies {
testImplementation project(':repository-memory')
testImplementation project(':runner-memory')
testImplementation project(':storage-local')
testImplementation project(':worker')
testImplementation project(':scheduler')
testImplementation project(':executor')
testImplementation "io.micronaut:micronaut-http-client"
testImplementation "io.micronaut:micronaut-http-server-netty"
testImplementation "io.micronaut:micronaut-management"
testImplementation "org.testcontainers:testcontainers:1.21.3"
testImplementation "org.testcontainers:junit-jupiter:1.21.3"
testImplementation "org.bouncycastle:bcpkix-jdk18on"
testImplementation "org.wiremock:wiremock-jetty12"
testImplementation "org.testcontainers:testcontainers:1.21.1"
testImplementation "org.testcontainers:junit-jupiter:1.21.1"
testImplementation "org.bouncycastle:bcpkix-jdk18on:1.80"
}

View File

@@ -7,8 +7,6 @@ import jakarta.validation.constraints.NotBlank;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Pattern;
import static io.kestra.core.utils.RegexPatterns.JAVA_IDENTIFIER_REGEX;
/**
* Top-level marker interface for Kestra's plugin of type App.
*/
@@ -20,6 +18,6 @@ public interface AppBlockInterface extends io.kestra.core.models.Plugin {
)
@NotNull
@NotBlank
@Pattern(regexp = JAVA_IDENTIFIER_REGEX)
@Pattern(regexp="\\p{javaJavaIdentifierStart}\\p{javaJavaIdentifierPart}*(\\.\\p{javaJavaIdentifierStart}\\p{javaJavaIdentifierPart}*)*")
String getType();
}
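Both forms of the annotation above express the same constraint: the type must be a dot-separated sequence of Java identifiers, such as a fully qualified class name. A minimal sketch of what that pattern accepts and rejects:

import java.util.regex.Pattern;

public class TypePatternSketch {
    public static void main(String[] args) {
        Pattern pattern = Pattern.compile(
            "\\p{javaJavaIdentifierStart}\\p{javaJavaIdentifierPart}*(\\.\\p{javaJavaIdentifierStart}\\p{javaJavaIdentifierPart}*)*");
        System.out.println(pattern.matcher("io.kestra.plugin.core.log.Log").matches()); // true
        System.out.println(pattern.matcher("io.kestra..log").matches());                // false: empty segment
        System.out.println(pattern.matcher("1bad.segment").matches());                  // false: a digit cannot start an identifier
    }
}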

View File

@@ -7,8 +7,6 @@ import jakarta.validation.constraints.NotBlank;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Pattern;
import static io.kestra.core.utils.RegexPatterns.JAVA_IDENTIFIER_REGEX;
/**
* Top-level marker interface for Kestra's plugin of type App.
*/
@@ -20,6 +18,6 @@ public interface AppPluginInterface extends io.kestra.core.models.Plugin {
)
@NotNull
@NotBlank
@Pattern(regexp = JAVA_IDENTIFIER_REGEX)
@Pattern(regexp="\\p{javaJavaIdentifierStart}\\p{javaJavaIdentifierPart}*(\\.\\p{javaJavaIdentifierStart}\\p{javaJavaIdentifierPart}*)*")
String getType();
}

View File

@@ -1,26 +0,0 @@
package io.kestra.core.debug;
import jakarta.annotation.Nullable;
import jakarta.validation.constraints.NotNull;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
@AllArgsConstructor
@NoArgsConstructor
@Getter
public class Breakpoint {
@NotNull
private String id;
@Nullable
private String value;
public static Breakpoint of(String breakpoint) {
if (breakpoint.indexOf('.') > 0) {
return new Breakpoint(breakpoint.substring(0, breakpoint.indexOf('.')), breakpoint.substring(breakpoint.indexOf('.') + 1));
} else {
return new Breakpoint(breakpoint, null);
}
}
}

View File

@@ -6,17 +6,14 @@ import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.ToString;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
@Getter
@EqualsAndHashCode
@ToString
public class ClassPluginDocumentation<T> extends AbstractClassDocumentation<T> {
private static final Map<PluginDocIdentifier, ClassPluginDocumentation<?>> CACHE = new ConcurrentHashMap<>();
private String icon;
private String group;
protected String docLicense;
@@ -81,12 +78,8 @@ public class ClassPluginDocumentation<T> extends AbstractClassDocumentation<T> {
}
}
public static <T> ClassPluginDocumentation<T> of(JsonSchemaGenerator jsonSchemaGenerator, PluginClassAndMetadata<T> plugin, String version, boolean allProperties) {
//noinspection unchecked
return (ClassPluginDocumentation<T>) CACHE.computeIfAbsent(
new PluginDocIdentifier(plugin.type(), version, allProperties),
(key) -> new ClassPluginDocumentation<>(jsonSchemaGenerator, plugin, allProperties)
);
public static <T> ClassPluginDocumentation<T> of(JsonSchemaGenerator jsonSchemaGenerator, PluginClassAndMetadata<T> plugin, boolean allProperties) {
return new ClassPluginDocumentation<>(jsonSchemaGenerator, plugin, allProperties);
}
@AllArgsConstructor
@@ -97,11 +90,5 @@ public class ClassPluginDocumentation<T> extends AbstractClassDocumentation<T> {
String unit;
String description;
}
private record PluginDocIdentifier(String pluginClassAndVersion, boolean allProperties) {
public PluginDocIdentifier(Class<?> pluginClass, String version, boolean allProperties) {
this(pluginClass.getName() + ":" + version, allProperties);
}
}
}
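One side of the change above memoizes documentation objects in a ConcurrentHashMap keyed by plugin type, version, and the allProperties flag, so each documentation object is built at most once per key. A minimal sketch of that computeIfAbsent memoization pattern, with hypothetical names rather than the Kestra types:

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class DocCacheSketch {
    record Key(String typeAndVersion, boolean allProperties) {}

    private static final Map<Key, String> CACHE = new ConcurrentHashMap<>();

    static String docFor(String type, String version, boolean allProperties) {
        // The expensive generation lambda runs only the first time a given key is requested.
        return CACHE.computeIfAbsent(new Key(type + ":" + version, allProperties),
            key -> "generated documentation for " + key.typeAndVersion());
    }

    public static void main(String[] args) {
        System.out.println(docFor("io.kestra.plugin.core.log.Log", "1.0.0", true));
        System.out.println(docFor("io.kestra.plugin.core.log.Log", "1.0.0", true)); // served from the cache
    }
}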

View File

@@ -227,7 +227,7 @@ public class DocumentationGenerator {
baseCls,
null
);
return ClassPluginDocumentation.of(jsonSchemaGenerator, metadata, registeredPlugin.version(), true);
return ClassPluginDocumentation.of(jsonSchemaGenerator, metadata, true);
})
.map(pluginDocumentation -> {
try {

View File

@@ -4,6 +4,7 @@ import io.kestra.core.models.dashboards.Dashboard;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.PluginDefault;
import io.kestra.core.models.tasks.Task;
import io.kestra.core.models.templates.Template;
import io.kestra.core.models.triggers.AbstractTrigger;
import jakarta.inject.Singleton;
@@ -23,7 +24,6 @@ public class JsonSchemaCache {
private final JsonSchemaGenerator jsonSchemaGenerator;
private final ConcurrentMap<CacheKey, Map<String, Object>> schemaCache = new ConcurrentHashMap<>();
private final ConcurrentMap<SchemaType, Map<String, Object>> propertiesCache = new ConcurrentHashMap<>();
private final Map<SchemaType, Class<?>> classesBySchemaType = new HashMap<>();
@@ -35,6 +35,7 @@ public class JsonSchemaCache {
public JsonSchemaCache(final JsonSchemaGenerator jsonSchemaGenerator) {
this.jsonSchemaGenerator = Objects.requireNonNull(jsonSchemaGenerator, "JsonSchemaGenerator cannot be null");
registerClassForType(SchemaType.FLOW, Flow.class);
registerClassForType(SchemaType.TEMPLATE, Template.class);
registerClassForType(SchemaType.TASK, Task.class);
registerClassForType(SchemaType.TRIGGER, AbstractTrigger.class);
registerClassForType(SchemaType.PLUGINDEFAULT, PluginDefault.class);
@@ -43,7 +44,7 @@ public class JsonSchemaCache {
public Map<String, Object> getSchemaForType(final SchemaType type,
final boolean arrayOf) {
return schemaCache.computeIfAbsent(new CacheKey(type, arrayOf), key -> {
return schemaCache.computeIfAbsent(new CacheKey(type, arrayOf), (key) -> {
Class<?> cls = Optional.ofNullable(classesBySchemaType.get(type))
.orElseThrow(() -> new IllegalArgumentException("Cannot found schema for type '" + type + "'"));
@@ -51,16 +52,6 @@ public class JsonSchemaCache {
});
}
public Map<String, Object> getPropertiesForType(final SchemaType type) {
return propertiesCache.computeIfAbsent(type, key -> {
Class<?> cls = Optional.ofNullable(classesBySchemaType.get(type))
.orElseThrow(() -> new IllegalArgumentException("Cannot found properties for type '" + type + "'"));
return jsonSchemaGenerator.properties(null, cls);
});
}
// must be public as it's used in EE
public void registerClassForType(final SchemaType type, final Class<?> clazz) {
classesBySchemaType.put(type, clazz);
}
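
The cache above resolves the class to document from a per-type registry before generating anything, and fails fast when a type was never registered. A condensed sketch of that registry-plus-lookup pattern, using placeholder types rather than the real Kestra model classes:

    import java.util.HashMap;
    import java.util.Map;
    import java.util.Optional;

    class SchemaTypeRegistry {
        enum SchemaType { FLOW, TASK, TRIGGER }

        private final Map<SchemaType, Class<?>> classesBySchemaType = new HashMap<>();

        // Kept public in the real class because the EE distribution registers extra types.
        public void registerClassForType(SchemaType type, Class<?> clazz) {
            classesBySchemaType.put(type, clazz);
        }

        public Class<?> resolve(SchemaType type) {
            // Unknown types are a programming error, hence an exception rather than a null return.
            return Optional.ofNullable(classesBySchemaType.get(type))
                .orElseThrow(() -> new IllegalArgumentException("No class registered for type '" + type + "'"));
        }
    }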

View File

@@ -15,7 +15,6 @@ import com.github.victools.jsonschema.generator.impl.DefinitionKey;
import com.github.victools.jsonschema.generator.naming.DefaultSchemaDefinitionNamingStrategy;
import com.github.victools.jsonschema.module.jackson.JacksonModule;
import com.github.victools.jsonschema.module.jackson.JacksonOption;
import com.github.victools.jsonschema.module.jackson.JsonUnwrappedDefinitionProvider;
import com.github.victools.jsonschema.module.jakarta.validation.JakartaValidationModule;
import com.github.victools.jsonschema.module.jakarta.validation.JakartaValidationOption;
import com.github.victools.jsonschema.module.swagger2.Swagger2Module;
@@ -23,6 +22,7 @@ import com.google.common.collect.ImmutableMap;
import io.kestra.core.models.annotations.Plugin;
import io.kestra.core.models.annotations.PluginProperty;
import io.kestra.core.models.conditions.Condition;
import io.kestra.core.models.conditions.ScheduleCondition;
import io.kestra.core.models.dashboards.DataFilter;
import io.kestra.core.models.dashboards.DataFilterKPI;
import io.kestra.core.models.dashboards.charts.Chart;
@@ -45,9 +45,6 @@ import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import lombok.extern.slf4j.Slf4j;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.lang.reflect.*;
import java.time.*;
@@ -56,14 +53,10 @@ import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
import static io.kestra.core.docs.AbstractClassDocumentation.flattenWithoutType;
import static io.kestra.core.docs.AbstractClassDocumentation.required;
import static io.kestra.core.serializers.JacksonMapper.MAP_TYPE_REFERENCE;
@Singleton
@Slf4j
public class JsonSchemaGenerator {
private static final List<Class<?>> TYPES_RESOLVED_AS_STRING = List.of(Duration.class, LocalTime.class, LocalDate.class, LocalDateTime.class, ZonedDateTime.class, OffsetDateTime.class, OffsetTime.class);
private static final List<Class<?>> SUBTYPE_RESOLUTION_EXCLUSION_FOR_PLUGIN_SCHEMA = List.of(Task.class, AbstractTrigger.class);
@@ -95,20 +88,12 @@ public class JsonSchemaGenerator {
}
public <T> Map<String, Object> schemas(Class<? extends T> cls, boolean arrayOf) {
return this.schemas(cls, arrayOf, Collections.emptyList());
}
public <T> Map<String, Object> schemas(Class<? extends T> cls, boolean arrayOf, List<String> allowedPluginTypes) {
return this.schemas(cls, arrayOf, allowedPluginTypes, false);
}
public <T> Map<String, Object> schemas(Class<? extends T> cls, boolean arrayOf, List<String> allowedPluginTypes, boolean withOutputs) {
SchemaGeneratorConfigBuilder builder = new SchemaGeneratorConfigBuilder(
SchemaVersion.DRAFT_7,
OptionPreset.PLAIN_JSON
);
this.build(builder, true, allowedPluginTypes, withOutputs);
this.build(builder, true);
SchemaGeneratorConfig schemaGeneratorConfig = builder.build();
@@ -123,7 +108,7 @@ public class JsonSchemaGenerator {
removeRequiredOnPropsWithDefaults(objectNode);
return MAPPER.convertValue(objectNode, MAP_TYPE_REFERENCE);
} catch (Exception e) {
} catch (IllegalArgumentException e) {
throw new IllegalArgumentException("Unable to generate jsonschema for '" + cls.getName() + "'", e);
}
}
@@ -256,14 +241,6 @@ public class JsonSchemaGenerator {
}
protected void build(SchemaGeneratorConfigBuilder builder, boolean draft7) {
this.build(builder, draft7, Collections.emptyList());
}
protected void build(SchemaGeneratorConfigBuilder builder, boolean draft7, List<String> allowedPluginTypes) {
this.build(builder, draft7, allowedPluginTypes, false);
}
protected void build(SchemaGeneratorConfigBuilder builder, boolean draft7, List<String> allowedPluginTypes, boolean withOutputs) {
// builder.withObjectMapper(builder.getObjectMapper().configure(SerializationFeature.WRITE_DURATIONS_AS_TIMESTAMPS, false));
builder
.with(new JakartaValidationModule(
@@ -275,22 +252,8 @@ public class JsonSchemaGenerator {
.with(Option.DEFINITIONS_FOR_ALL_OBJECTS)
.with(Option.DEFINITION_FOR_MAIN_SCHEMA)
.with(Option.PLAIN_DEFINITION_KEYS)
.with(Option.ALLOF_CLEANUP_AT_THE_END);
.with(Option.ALLOF_CLEANUP_AT_THE_END);;
// HACK: Registered a custom JsonUnwrappedDefinitionProvider prior to the JacksonModule
// to be able to return a CustomDefinition with an empty node when the ResolvedType can't be found.
builder.forTypesInGeneral().withCustomDefinitionProvider(new JsonUnwrappedDefinitionProvider(){
@Override
public CustomDefinition provideCustomSchemaDefinition(ResolvedType javaType, SchemaGenerationContext context) {
try {
return super.provideCustomSchemaDefinition(javaType, context);
} catch (NoClassDefFoundError e) {
// This error happens when a non-supported plugin type exists in the classpath.
log.debug("Cannot create schema definition for type '{}'. Cause: NoClassDefFoundError", javaType.getTypeName());
return new CustomDefinition(context.getGeneratorConfig().createObjectNode(), true);
}
}
});
if (!draft7) {
builder.with(new JacksonModule(JacksonOption.IGNORE_TYPE_INFO_TRANSFORM));
} else {
@@ -319,7 +282,6 @@ public class JsonSchemaGenerator {
// inline some type
builder.forTypesInGeneral()
.withCustomDefinitionProvider(new CustomDefinitionProviderV2() {
@Override
public CustomDefinition provideCustomSchemaDefinition(ResolvedType javaType, SchemaGenerationContext context) {
if (javaType.isInstanceOf(Map.class) || javaType.isInstanceOf(Enum.class)) {
@@ -460,13 +422,6 @@ public class JsonSchemaGenerator {
if (pluginAnnotation.beta()) {
collectedTypeAttributes.put("$beta", true);
}
if (withOutputs) {
Map<String, Object> outputsSchema = this.outputs(null, scope.getType().getErasedType());
collectedTypeAttributes.set("outputs", context.getGeneratorConfig().createObjectNode().pojoNode(
flattenWithoutType(AbstractClassDocumentation.properties(outputsSchema), required(outputsSchema))
));
}
}
// handle deprecated tasks
@@ -502,7 +457,7 @@ public class JsonSchemaGenerator {
.withSubtypeResolver((declaredType, context) -> {
TypeContext typeContext = context.getTypeContext();
return this.subtypeResolver(declaredType, typeContext, allowedPluginTypes);
return this.subtypeResolver(declaredType, typeContext);
});
// description as Markdown
@@ -579,7 +534,7 @@ public class JsonSchemaGenerator {
return null;
}
return this.subtypeResolver(declaredType, typeContext, allowedPluginTypes);
return this.subtypeResolver(declaredType, typeContext);
});
}
@@ -662,12 +617,11 @@ public class JsonSchemaGenerator {
return false;
}
protected List<ResolvedType> subtypeResolver(ResolvedType declaredType, TypeContext typeContext, List<String> allowedPluginTypes) {
protected List<ResolvedType> subtypeResolver(ResolvedType declaredType, TypeContext typeContext) {
if (declaredType.getErasedType() == Task.class) {
return getRegisteredPlugins()
.stream()
.flatMap(registeredPlugin -> registeredPlugin.getTasks().stream())
.filter(p -> allowedPluginTypes.isEmpty() || allowedPluginTypes.contains(p.getName()))
.filter(Predicate.not(io.kestra.core.models.Plugin::isInternal))
.flatMap(clz -> safelyResolveSubtype(declaredType, clz, typeContext).stream())
.toList();
@@ -675,7 +629,6 @@ public class JsonSchemaGenerator {
return getRegisteredPlugins()
.stream()
.flatMap(registeredPlugin -> registeredPlugin.getTriggers().stream())
.filter(p -> allowedPluginTypes.isEmpty() || allowedPluginTypes.contains(p.getName()))
.filter(Predicate.not(io.kestra.core.models.Plugin::isInternal))
.flatMap(clz -> safelyResolveSubtype(declaredType, clz, typeContext).stream())
.toList();
@@ -683,7 +636,14 @@ public class JsonSchemaGenerator {
return getRegisteredPlugins()
.stream()
.flatMap(registeredPlugin -> registeredPlugin.getConditions().stream())
.filter(p -> allowedPluginTypes.isEmpty() || allowedPluginTypes.contains(p.getName()))
.filter(Predicate.not(io.kestra.core.models.Plugin::isInternal))
.flatMap(clz -> safelyResolveSubtype(declaredType, clz, typeContext).stream())
.toList();
} else if (declaredType.getErasedType() == ScheduleCondition.class) {
return getRegisteredPlugins()
.stream()
.flatMap(registeredPlugin -> registeredPlugin.getConditions().stream())
.filter(ScheduleCondition.class::isAssignableFrom)
.filter(Predicate.not(io.kestra.core.models.Plugin::isInternal))
.flatMap(clz -> safelyResolveSubtype(declaredType, clz, typeContext).stream())
.toList();
@@ -691,7 +651,6 @@ public class JsonSchemaGenerator {
return getRegisteredPlugins()
.stream()
.flatMap(registeredPlugin -> registeredPlugin.getTaskRunners().stream())
.filter(p -> allowedPluginTypes.isEmpty() || allowedPluginTypes.contains(p.getName()))
.filter(Predicate.not(io.kestra.core.models.Plugin::isInternal))
.map(typeContext::resolve)
.toList();
@@ -699,7 +658,6 @@ public class JsonSchemaGenerator {
return getRegisteredPlugins()
.stream()
.flatMap(registeredPlugin -> registeredPlugin.getLogExporters().stream())
.filter(p -> allowedPluginTypes.isEmpty() || allowedPluginTypes.contains(p.getName()))
.filter(Predicate.not(io.kestra.core.models.Plugin::isInternal))
.map(typeContext::resolve)
.toList();
@@ -709,7 +667,6 @@ public class JsonSchemaGenerator {
.flatMap(registeredPlugin -> registeredPlugin.getAdditionalPlugins().stream())
// for additional plugins, we have one subtype per type of additional plugin (for example: embedding store for Langchain4J), so we need to filter on the correct subtype
.filter(cls -> declaredType.getErasedType().isAssignableFrom(cls))
.filter(p -> allowedPluginTypes.isEmpty() || allowedPluginTypes.contains(p.getName()))
.filter(cls -> cls != declaredType.getErasedType())
.filter(Predicate.not(io.kestra.core.models.Plugin::isInternal))
.map(typeContext::resolve)
@@ -718,7 +675,6 @@ public class JsonSchemaGenerator {
return getRegisteredPlugins()
.stream()
.flatMap(registeredPlugin -> registeredPlugin.getCharts().stream())
.filter(p -> allowedPluginTypes.isEmpty() || allowedPluginTypes.contains(p.getName()))
.filter(Predicate.not(io.kestra.core.models.Plugin::isInternal))
.<ResolvedType>mapMulti((clz, consumer) -> {
if (DataChart.class.isAssignableFrom(clz)) {
@@ -785,16 +741,12 @@ public class JsonSchemaGenerator {
}
protected <T> Map<String, Object> generate(Class<? extends T> cls, @Nullable Class<T> base) {
return this.generate(cls, base, Collections.emptyList());
}
protected <T> Map<String, Object> generate(Class<? extends T> cls, @Nullable Class<T> base, List<String> allowedPluginTypes) {
SchemaGeneratorConfigBuilder builder = new SchemaGeneratorConfigBuilder(
SchemaVersion.DRAFT_2019_09,
OptionPreset.PLAIN_JSON
);
this.build(builder, false, allowedPluginTypes);
this.build(builder, false);
// we don't return base properties unless specified with @PluginProperty and hidden is false
builder
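
For readers unfamiliar with the library being configured here: this class drives the victools json-schema generator. A stripped-down, standalone use of that generator (not Kestra's actual build method, which layers many more modules and custom definition providers on top) looks roughly like this:

    import com.fasterxml.jackson.databind.JsonNode;
    import com.github.victools.jsonschema.generator.Option;
    import com.github.victools.jsonschema.generator.OptionPreset;
    import com.github.victools.jsonschema.generator.SchemaGenerator;
    import com.github.victools.jsonschema.generator.SchemaGeneratorConfig;
    import com.github.victools.jsonschema.generator.SchemaGeneratorConfigBuilder;
    import com.github.victools.jsonschema.generator.SchemaVersion;

    class SchemaExample {
        static JsonNode schemaFor(Class<?> cls) {
            SchemaGeneratorConfigBuilder builder = new SchemaGeneratorConfigBuilder(
                SchemaVersion.DRAFT_2019_09,
                OptionPreset.PLAIN_JSON
            );
            // Same kind of options the hunk above enables before handing the builder to SchemaGenerator.
            builder.with(Option.DEFINITIONS_FOR_ALL_OBJECTS)
                .with(Option.PLAIN_DEFINITION_KEYS);
            SchemaGeneratorConfig config = builder.build();
            return new SchemaGenerator(config).generateSchema(cls);
        }
    }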

View File

@@ -23,26 +23,29 @@ public class Plugin {
private String group;
private String version;
private Map<String, String> manifest;
private List<String> tasks;
private List<String> triggers;
private List<String> conditions;
private List<String> controllers;
private List<String> storages;
private List<String> secrets;
private List<String> taskRunners;
private List<String> guides;
private List<String> aliases;
private List<PluginElementMetadata> tasks;
private List<PluginElementMetadata> triggers;
private List<PluginElementMetadata> conditions;
private List<PluginElementMetadata> controllers;
private List<PluginElementMetadata> storages;
private List<PluginElementMetadata> secrets;
private List<PluginElementMetadata> taskRunners;
private List<PluginElementMetadata> apps;
private List<PluginElementMetadata> appBlocks;
private List<PluginElementMetadata> charts;
private List<PluginElementMetadata> dataFilters;
private List<PluginElementMetadata> dataFiltersKPI;
private List<PluginElementMetadata> logExporters;
private List<PluginElementMetadata> additionalPlugins;
private List<String> apps;
private List<String> appBlocks;
private List<String> charts;
private List<String> dataFilters;
private List<String> logExporters;
private List<String> additionalPlugins;
private List<PluginSubGroup.PluginCategory> categories;
private String subGroup;
public static Plugin of(RegisteredPlugin registeredPlugin, @Nullable String subgroup) {
return Plugin.of(registeredPlugin, subgroup, true);
}
public static Plugin of(RegisteredPlugin registeredPlugin, @Nullable String subgroup, boolean includeDeprecated) {
Plugin plugin = new Plugin();
plugin.name = registeredPlugin.name();
PluginSubGroup subGroupInfos = null;
@@ -87,19 +90,18 @@ public class Plugin {
plugin.subGroup = subgroup;
Predicate<Class<?>> packagePredicate = c -> subgroup == null || c.getPackageName().equals(subgroup);
plugin.tasks = filterAndGetTypeWithMetadata(registeredPlugin.getTasks(), packagePredicate);
plugin.triggers = filterAndGetTypeWithMetadata(registeredPlugin.getTriggers(), packagePredicate);
plugin.conditions = filterAndGetTypeWithMetadata(registeredPlugin.getConditions(), packagePredicate);
plugin.storages = filterAndGetTypeWithMetadata(registeredPlugin.getStorages(), packagePredicate);
plugin.secrets = filterAndGetTypeWithMetadata(registeredPlugin.getSecrets(), packagePredicate);
plugin.taskRunners = filterAndGetTypeWithMetadata(registeredPlugin.getTaskRunners(), packagePredicate);
plugin.apps = filterAndGetTypeWithMetadata(registeredPlugin.getApps(), packagePredicate);
plugin.appBlocks = filterAndGetTypeWithMetadata(registeredPlugin.getAppBlocks(), packagePredicate);
plugin.charts = filterAndGetTypeWithMetadata(registeredPlugin.getCharts(), packagePredicate);
plugin.dataFilters = filterAndGetTypeWithMetadata(registeredPlugin.getDataFilters(), packagePredicate);
plugin.dataFiltersKPI = filterAndGetTypeWithMetadata(registeredPlugin.getDataFiltersKPI(), packagePredicate);
plugin.logExporters = filterAndGetTypeWithMetadata(registeredPlugin.getLogExporters(), packagePredicate);
plugin.additionalPlugins = filterAndGetTypeWithMetadata(registeredPlugin.getAdditionalPlugins(), packagePredicate);
plugin.tasks = filterAndGetClassName(registeredPlugin.getTasks(), includeDeprecated, packagePredicate);
plugin.triggers = filterAndGetClassName(registeredPlugin.getTriggers(), includeDeprecated, packagePredicate);
plugin.conditions = filterAndGetClassName(registeredPlugin.getConditions(), includeDeprecated, packagePredicate);
plugin.storages = filterAndGetClassName(registeredPlugin.getStorages(), includeDeprecated, packagePredicate);
plugin.secrets = filterAndGetClassName(registeredPlugin.getSecrets(), includeDeprecated, packagePredicate);
plugin.taskRunners = filterAndGetClassName(registeredPlugin.getTaskRunners(), includeDeprecated, packagePredicate);
plugin.apps = filterAndGetClassName(registeredPlugin.getApps(), includeDeprecated, packagePredicate);
plugin.appBlocks = filterAndGetClassName(registeredPlugin.getAppBlocks(), includeDeprecated, packagePredicate);
plugin.charts = filterAndGetClassName(registeredPlugin.getCharts(), includeDeprecated, packagePredicate);
plugin.dataFilters = filterAndGetClassName(registeredPlugin.getDataFilters(), includeDeprecated, packagePredicate);
plugin.logExporters = filterAndGetClassName(registeredPlugin.getLogExporters(), includeDeprecated, packagePredicate);
plugin.additionalPlugins = filterAndGetClassName(registeredPlugin.getAdditionalPlugins(), includeDeprecated, packagePredicate);
return plugin;
}
@@ -109,18 +111,17 @@ public class Plugin {
* Those classes are only filtered from the documentation to ensure backward compatibility.
*
* @param list the list of classes.
* @param includeDeprecated whether to include deprecated plugins or not.
* @return a filtered stream.
*/
private static List<PluginElementMetadata> filterAndGetTypeWithMetadata(final List<? extends Class<?>> list, Predicate<Class<?>> clazzFilter) {
private static List<String> filterAndGetClassName(final List<? extends Class<?>> list, boolean includeDeprecated, Predicate<Class<?>> clazzFilter) {
return list
.stream()
.filter(not(io.kestra.core.models.Plugin::isInternal))
.filter(p -> includeDeprecated || !io.kestra.core.models.Plugin.isDeprecated(p))
.filter(clazzFilter)
.filter(c -> !c.getName().startsWith("org.kestra."))
.map(c -> new PluginElementMetadata(c.getName(), io.kestra.core.models.Plugin.isDeprecated(c) ? true : null))
.map(Class::getName)
.filter(c -> !c.startsWith("org.kestra."))
.toList();
}
public record PluginElementMetadata(String cls, Boolean deprecated) {
}
}
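
Both sides of this hunk reduce to the same stream shape: filter out internal, deprecated, or relocated plugin classes, then map each remaining class to either its name or a small metadata record. A generic sketch of that pipeline (the predicate, record, and deprecation check are placeholders, not the Kestra types):

    import java.util.List;
    import java.util.function.Predicate;

    class PluginListing {
        // Placeholder standing in for Kestra's PluginElementMetadata record.
        record ElementMetadata(String cls, Boolean deprecated) {}

        static List<ElementMetadata> describe(List<Class<?>> classes, Predicate<Class<?>> packageFilter) {
            return classes.stream()
                .filter(packageFilter)
                .filter(c -> !c.getName().startsWith("org.kestra."))
                // null instead of false keeps a noisy "deprecated": false out of the serialized JSON.
                .map(c -> new ElementMetadata(c.getName(), c.isAnnotationPresent(Deprecated.class) ? true : null))
                .toList();
        }
    }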

View File

@@ -6,6 +6,7 @@ import io.kestra.core.utils.Enums;
public enum SchemaType {
FLOW,
TEMPLATE,
TASK,
TRIGGER,
PLUGINDEFAULT,

View File

@@ -1,9 +1,9 @@
package io.kestra.scheduler.endpoint;
package io.kestra.core.endpoints;
import io.kestra.core.models.flows.FlowWithException;
import io.kestra.core.models.triggers.AbstractTrigger;
import io.kestra.core.models.triggers.Trigger;
import io.kestra.scheduler.AbstractScheduler;
import io.kestra.core.schedulers.AbstractScheduler;
import io.micronaut.context.annotation.Requires;
import io.micronaut.management.endpoint.annotation.Endpoint;
import io.micronaut.management.endpoint.annotation.Read;

View File

@@ -1,4 +1,4 @@
package io.kestra.worker.endpoint;
package io.kestra.core.endpoints;
import io.kestra.core.models.triggers.AbstractTrigger;
import io.kestra.core.runners.WorkerTask;
@@ -11,18 +11,18 @@ import lombok.Builder;
import lombok.Getter;
import io.kestra.core.models.executions.TaskRun;
import io.kestra.core.models.tasks.Task;
import io.kestra.worker.DefaultWorker;
import io.kestra.core.runners.Worker;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
import jakarta.inject.Inject;
@Endpoint(id = "worker", defaultSensitive = false)
@Requires(property = "kestra.server-type", pattern = "(WORKER|STANDALONE)")
public class WorkerEndpoint {
@Inject
DefaultWorker worker;
Worker worker;
@Read
public WorkerEndpointResult running() throws Exception {
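
The class above is a Micronaut management endpoint: @Endpoint registers it under its id and @Read exposes a GET handler. A bare-bones sketch of the same mechanism with a made-up id and payload:

    import io.micronaut.management.endpoint.annotation.Endpoint;
    import io.micronaut.management.endpoint.annotation.Read;

    // Hypothetical endpoint, reachable at GET /status once management endpoints are enabled.
    @Endpoint(id = "status", defaultSensitive = false)
    public class StatusEndpoint {

        @Read
        public String running() {
            return "OK";
        }
    }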

View File

@@ -3,88 +3,30 @@ package io.kestra.core.events;
import io.micronaut.core.annotation.Nullable;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.context.ServerRequestContext;
import lombok.AllArgsConstructor;
import lombok.Getter;
import java.util.Objects;
@AllArgsConstructor
@Getter
public class CrudEvent<T> {
private final T model;
T model;
@Nullable
private final T previousModel;
private final CrudEventType type;
private final HttpRequest<?> request;
/**
* Static helper method for creating a new {@link CrudEventType#CREATE} CrudEvent.
*
* @param model the newly created model.
* @param <T> type of the model.
* @return the new {@link CrudEvent}.
*/
public static <T> CrudEvent<T> create(T model) {
Objects.requireNonNull(model, "Can't create CREATE event with a null model");
return new CrudEvent<>(model, null, CrudEventType.CREATE);
}
/**
* Static helper method for creating a new {@link CrudEventType#DELETE} CrudEvent.
*
* @param model the deleted model.
* @param <T> type of the model.
* @return the new {@link CrudEvent}.
*/
public static <T> CrudEvent<T> delete(T model) {
Objects.requireNonNull(model, "Can't create DELETE event with a null model");
return new CrudEvent<>(null, model, CrudEventType.DELETE);
}
/**
* Static helper method for creating a new CrudEvent.
*
* @param before the model before the update.
* @param after the model after the update.
* @param <T> type of the model.
* @return the new {@link CrudEvent}.
*/
public static <T> CrudEvent<T> of(T before, T after) {
if (before == null && after == null) {
throw new IllegalArgumentException("Both before and after cannot be null");
}
if (before == null) {
return create(after);
}
if (after == null) {
return delete(before);
}
return new CrudEvent<>(after, before, CrudEventType.UPDATE);
}
/**
* @deprecated use the static factory methods.
*/
@Deprecated
T previousModel;
CrudEventType type;
HttpRequest<?> request;
public CrudEvent(T model, CrudEventType type) {
this(
CrudEventType.DELETE.equals(type) ? null : model,
CrudEventType.DELETE.equals(type) ? model : null,
type,
ServerRequestContext.currentRequest().orElse(null)
);
this.model = model;
this.type = type;
this.previousModel = null;
this.request = ServerRequestContext.currentRequest().orElse(null);
}
public CrudEvent(T model, T previousModel, CrudEventType type) {
this(model, previousModel, type, ServerRequestContext.currentRequest().orElse(null));
}
public CrudEvent(T model, T previousModel, CrudEventType type, HttpRequest<?> request) {
this.model = model;
this.previousModel = previousModel;
this.type = type;
this.request = request;
this.request = ServerRequestContext.currentRequest().orElse(null);
}
}
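
The factory methods introduced on one side of this hunk encode the event semantics in which argument may be null: of(before, after) falls back to create when before is null and to delete when after is null. A short usage sketch with throwaway string models:

    CrudEvent<String> created = CrudEvent.of(null, "new");    // CREATE: model = "new", previousModel = null
    CrudEvent<String> updated = CrudEvent.of("old", "new");   // UPDATE: model = "new", previousModel = "old"
    CrudEvent<String> deleted = CrudEvent.of("old", null);    // DELETE: model = null,  previousModel = "old"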

View File

@@ -1,25 +0,0 @@
package io.kestra.core.exceptions;
/**
* General exception that can be thrown when an AI service replies with an error.
* When propagated in the context of a REST API call, this exception should
* result in an HTTP 422 UNPROCESSABLE_ENTITY response.
*/
public class AiException extends KestraRuntimeException {
/**
* Creates a new {@link AiException} instance.
*/
public AiException() {
super();
}
/**
* Creates a new {@link AiException} instance.
*
* @param aiErrorMessage the AI error message.
*/
public AiException(final String aiErrorMessage) {
super(aiErrorMessage);
}
}

View File

@@ -1,30 +0,0 @@
package io.kestra.core.exceptions;
/**
* General exception that can be thrown when a Kestra resource or entity conflicts with an existing one.
* <p>
* Typically used in REST API contexts to signal situations such as:
* attempting to create a resource that already exists, or updating a resource
* in a way that causes a conflict.
* <p>
* When propagated in the context of a REST API call, this exception should
* result in an HTTP 409 Conflict response.
*/
public class ConflictException extends KestraRuntimeException {
/**
* Creates a new {@link ConflictException} instance.
*/
public ConflictException() {
super();
}
/**
* Creates a new {@link ConflictException} instance.
*
* @param message the error message.
*/
public ConflictException(final String message) {
super(message);
}
}

View File

@@ -1,43 +0,0 @@
package io.kestra.core.exceptions;
import java.io.Serial;
import java.util.List;
/**
* General exception that can be thrown when a queried Kestra entity field is not valid or does not exist.
*/
public class InvalidQueryFiltersException extends KestraRuntimeException {
@Serial
private static final long serialVersionUID = 1L;
private static final String INVALID_QUERY_FILTER_MESSAGE = "Provided query filters are invalid";
private transient final List<String> invalids;
/**
* Creates a new {@link InvalidQueryFiltersException} instance.
*
* @param invalids the invalid filters.
*/
public InvalidQueryFiltersException(final List<String> invalids) {
super(INVALID_QUERY_FILTER_MESSAGE);
this.invalids = invalids;
}
/**
* Creates a new {@link InvalidQueryFiltersException} instance.
*
* @param invalid the invalid filter.
*/
public InvalidQueryFiltersException(final String invalid) {
super(INVALID_QUERY_FILTER_MESSAGE);
this.invalids = List.of(invalid);
}
public String formatedInvalidObjects(){
if (invalids == null || invalids.isEmpty()){
return INVALID_QUERY_FILTER_MESSAGE;
}
return String.join(", ", invalids);
}
}
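
Before its removal, the exception kept the offending filter names so callers could render them in a single message. A tiny usage sketch (the filter names are invented):

    InvalidQueryFiltersException e = new InvalidQueryFiltersException(List.of("labels", "scope"));
    String details = e.formatedInvalidObjects(); // "labels, scope"; falls back to the generic message when empty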

View File

@@ -1,15 +0,0 @@
package io.kestra.core.exceptions;
public class InvalidTriggerConfigurationException extends KestraRuntimeException {
public InvalidTriggerConfigurationException() {
super();
}
public InvalidTriggerConfigurationException(String message) {
super(message);
}
public InvalidTriggerConfigurationException(String message, Throwable cause) {
super(message, cause);
}
}

Some files were not shown because too many files have changed in this diff.