Mirror of https://github.com/kestra-io/kestra.git (synced 2025-12-26 05:00:31 -05:00)

Compare commits: `feat/not-n...` → `fix/logs-p...` (634 commits)
.devcontainer/Dockerfile (new file, 82 lines)
@@ -0,0 +1,82 @@
FROM ubuntu:24.04

ARG BUILDPLATFORM
ARG DEBIAN_FRONTEND=noninteractive

USER root
WORKDIR /root

RUN apt update && apt install -y \
    apt-transport-https ca-certificates gnupg curl wget git zip unzip less zsh net-tools iputils-ping jq lsof

ENV HOME="/root"

# --------------------------------------
# Git
# --------------------------------------
# The devcontainer workspace folder must be added as a safe directory so that git
# version control works inside the container's file system.
RUN git config --global --add safe.directory "/workspaces/kestra"
# --------------------------------------

# --------------------------------------
# Oh my zsh
# --------------------------------------
RUN sh -c "$(curl -fsSL https://raw.githubusercontent.com/ohmyzsh/ohmyzsh/master/tools/install.sh)" -- \
    -t robbyrussell \
    -p git -p node -p npm

ENV SHELL=/bin/zsh
# --------------------------------------

# --------------------------------------
# Java
# --------------------------------------
ARG OS_ARCHITECTURE

RUN mkdir -p /usr/java
RUN echo "Building on platform: $BUILDPLATFORM"
RUN case "$BUILDPLATFORM" in \
      "linux/amd64") OS_ARCHITECTURE="linux-x64" ;; \
      "linux/arm64") OS_ARCHITECTURE="linux-aarch64" ;; \
      "darwin/amd64") OS_ARCHITECTURE="macos-x64" ;; \
      "darwin/arm64") OS_ARCHITECTURE="macos-aarch64" ;; \
      *) echo "Unsupported BUILDPLATFORM: $BUILDPLATFORM" && exit 1 ;; \
    esac && \
    wget "https://aka.ms/download-jdk/microsoft-jdk-21.0.6-$OS_ARCHITECTURE.tar.gz" && \
    mv "microsoft-jdk-21.0.6-$OS_ARCHITECTURE.tar.gz" microsoft-jdk-21.0.6.tar.gz
RUN tar -xzvf microsoft-jdk-21.0.6.tar.gz && \
    mv jdk-21.0.6+7 jdk-21 && \
    mv jdk-21 /usr/java/
ENV JAVA_HOME=/usr/java/jdk-21
ENV PATH="$PATH:$JAVA_HOME/bin"
# Loads a custom configuration file for Micronaut
ENV MICRONAUT_ENVIRONMENTS=local,override
# Path where plugin JARs are saved; they are loaded during the startup process
ENV KESTRA_PLUGINS_PATH="/workspaces/kestra/local/plugins"
# --------------------------------------

# --------------------------------------
# Node.js
# --------------------------------------
RUN curl -fsSL https://deb.nodesource.com/setup_22.x -o nodesource_setup.sh \
    && bash nodesource_setup.sh && apt install -y nodejs
# Increases the JavaScript heap to 4GB to prevent out-of-memory errors during startup
ENV NODE_OPTIONS=--max-old-space-size=4096
# --------------------------------------

# --------------------------------------
# Python
# --------------------------------------
RUN apt install -y python3 pip python3-venv
# --------------------------------------

# --------------------------------------
# SSH
# --------------------------------------
RUN mkdir -p ~/.ssh
RUN touch ~/.ssh/config
RUN echo "Host github.com" >> ~/.ssh/config \
    && echo " IdentityFile ~/.ssh/id_ed25519" >> ~/.ssh/config
RUN touch ~/.ssh/id_ed25519
# --------------------------------------
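The `BUILDPLATFORM` argument above is normally injected by Docker BuildKit/buildx; if you want to build this image by hand outside VS Code, you can pass it explicitly. A minimal sketch (the image tag is arbitrary):

```bash
# Build the devcontainer image manually; BUILDPLATFORM drives the JDK architecture selection above.
docker build \
  -f .devcontainer/Dockerfile \
  --build-arg BUILDPLATFORM=linux/amd64 \
  -t kestra-devcontainer:local \
  .devcontainer
```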
.devcontainer/README.md (new file, 149 lines)
@@ -0,0 +1,149 @@
# Kestra Devcontainer

This devcontainer gives anyone using VSCode a quick and easy way to get up and running with this project and start development on either the frontend or backend. It bootstraps a Docker container for you to develop inside, without the need to manually set up the environment.

---

## INSTRUCTIONS

### Setup:

This devcontainer follows the approach described in the contributing guide, so take a look at it first to get an idea of what the setup is like: https://kestra.io/docs/getting-started/contributing

Once you have this repo cloned to your local system, you will need to install the VSCode extension [Remote Development](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.vscode-remote-extensionpack).

Then run the following command from the command palette:
`Dev Containers: Open Folder in Container...` and select your Kestra root folder.

This will put you inside a Docker container ready for development.

NOTE: you'll need to wait for the Gradle build to finish and compile the Java files, but this process should happen automatically within VSCode.

In the meantime, you can move on to the next step...

---

### Development:

- Create a `.env.development.local` file in the `ui` folder and paste the following:

```bash
# This lets the frontend know what the backend URL is, but you are free to change this to your actual server URL, e.g. a hosted version of Kestra.
VITE_APP_API_URL=http://localhost:8080
```

- Navigate into the `ui` folder and run `npm install` to install the dependencies for the frontend project.

- Now go to the `cli/src/main/resources` folder and create an `application-override.yml` file.

Now you have two choices:

`Local mode`:

Runs the Kestra server in local mode, which uses an H2 database, so this is the only config you'd need:

```yaml
micronaut:
  server:
    cors:
      enabled: true
      configurations:
        all:
          allowedOrigins:
            - http://localhost:5173
```

You can then open a new terminal and run the following command to start the backend server: `./gradlew runLocal`

`Standalone mode`:

Runs the Kestra server in standalone mode, which uses Postgres. Make sure you already have a local Postgres instance running on localhost (a sketch for starting one with Docker follows the config below):

```yaml
kestra:
  repository:
    type: postgres
  storage:
    type: local
    local:
      base-path: "/app/storage"
  queue:
    type: postgres
  tasks:
    tmp-dir:
      path: /tmp/kestra-wd/tmp
  anonymous-usage-report:
    enabled: false
  server:
    basic-auth:
      enabled: false

datasources:
  postgres:
    # It is important to note that you must use the "host.docker.internal" host when connecting to a docker container outside of your devcontainer, as attempting to use localhost will only point back to this devcontainer.
    url: jdbc:postgresql://host.docker.internal:5432/kestra
    driverClassName: org.postgresql.Driver
    username: kestra
    password: k3str4

flyway:
  datasources:
    postgres:
      enabled: true
      locations:
        - classpath:migrations/postgres
      # We must ignore missing migrations as we may delete the wrong ones or delete those that are not used anymore.
      ignore-migration-patterns: "*:missing,*:future"
      out-of-order: true

micronaut:
  server:
    cors:
      enabled: true
      configurations:
        all:
          allowedOrigins:
            - http://localhost:5173
```
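If you don't already have Postgres running on your host, a throwaway container is the quickest way to satisfy the config above. A minimal sketch (container name and image tag are arbitrary; credentials match the snippet above):

```bash
# Start a local Postgres matching the datasource settings above (user/password/db from the config)
docker run -d --name kestra-postgres \
  -p 5432:5432 \
  -e POSTGRES_USER=kestra \
  -e POSTGRES_PASSWORD=k3str4 \
  -e POSTGRES_DB=kestra \
  postgres:16
```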
Then add the following settings to the `.vscode/launch.json` file:

```json
{
    "version": "0.2.0",
    "configurations": [
        {
            "type": "java",
            "name": "Kestra Standalone",
            "request": "launch",
            "mainClass": "io.kestra.cli.App",
            "projectName": "cli",
            "args": "server standalone"
        }
    ]
}
```

You can then use the VSCode `Run and Debug` extension to start the Kestra server.

Additionally, if you're doing frontend development, you can run `npm run dev` from the `ui` folder after having the above running (which will provide a backend) to access your application from `localhost:5173`. This has the benefit of watching your changes and hot-reloading whenever you make frontend changes.

#### Plugins

If you want your plugins to be loaded inside your devcontainer, point the `source` field to a folder containing JARs of the plugins you want to embed in the following snippet in `devcontainer.json`:

```
"mounts": [
    {
        "source": "/absolute/path/to/your/local/jar/plugins/folder",
        "target": "/workspaces/kestra/local/plugins",
        "type": "bind"
    }
],
```

---

### GIT

If you want to commit to GitHub, navigate to the `~/.ssh` folder and either create a new SSH key or override the existing `id_ed25519` file by pasting an existing SSH key from your local machine into it. You will then need to change the permissions of the file by running `chmod 600 id_ed25519`. This will allow you to push to GitHub.
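For example, to generate a fresh key inside the container instead of pasting an existing one (the email is a placeholder):

```bash
# Generate a new key directly in the devcontainer and fix its permissions
ssh-keygen -t ed25519 -C "you@example.com" -f ~/.ssh/id_ed25519
chmod 600 ~/.ssh/id_ed25519
# Then add the public key (~/.ssh/id_ed25519.pub) to your GitHub account.
```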
---
.devcontainer/devcontainer.json (new file, 46 lines)
@@ -0,0 +1,46 @@
{
    "name": "kestra",
    "build": {
        "context": ".",
        "dockerfile": "Dockerfile"
    },
    "workspaceFolder": "/workspaces/kestra",
    "forwardPorts": [5173, 8080],
    "customizations": {
        "vscode": {
            "settings": {
                "terminal.integrated.profiles.linux": {
                    "zsh": {
                        "path": "/bin/zsh"
                    }
                },
                "workbench.iconTheme": "vscode-icons",
                "editor.tabSize": 4,
                "editor.formatOnSave": true,
                "files.insertFinalNewline": true,
                "editor.defaultFormatter": "esbenp.prettier-vscode",
                "telemetry.telemetryLevel": "off",
                "editor.bracketPairColorization.enabled": true,
                "editor.guides.bracketPairs": "active"
            },
            "extensions": [
                "redhat.vscode-yaml",
                "dbaeumer.vscode-eslint",
                "vscode-icons-team.vscode-icons",
                "eamodio.gitlens",
                "esbenp.prettier-vscode",
                "aaron-bond.better-comments",
                "codeandstuff.package-json-upgrade",
                "andys8.jest-snippets",
                "oderwat.indent-rainbow",
                "evondev.indent-rainbow-palettes",
                "formulahendry.auto-rename-tag",
                "IronGeek.vscode-env",
                "yoavbls.pretty-ts-errors",
                "github.vscode-github-actions",
                "vscjava.vscode-java-pack",
                "ms-azuretools.vscode-docker"
            ]
        }
    }
}
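If you prefer not to use the VS Code UI, the same configuration can be built and started with the Dev Containers CLI; a sketch assuming the CLI is installed globally via npm:

```bash
# Install the Dev Containers CLI and start the container defined in .devcontainer/devcontainer.json
npm install -g @devcontainers/cli
devcontainer up --workspace-folder .
```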
.github/CONTRIBUTING.md (vendored, 8 changes)
@@ -37,6 +37,10 @@ The following dependencies are required to build Kestra locally:
- Docker & Docker Compose
- an IDE (Intellij IDEA, Eclipse or VS Code)

Thanks to the Kestra community, if using VSCode, you can also start development on either the frontend or backend with a bootstrapped docker container without the need to manually set up the environment.

Check out the [README](../.devcontainer/README.md) for set-up instructions and the associated [Dockerfile](../.devcontainer/Dockerfile) in the repository to get started.

To start contributing:
- [Fork](https://docs.github.com/en/github/getting-started-with-github/fork-a-repo) the repository
- Clone the fork on your workstation:
@@ -46,7 +50,7 @@ git clone git@github.com:{YOUR_USERNAME}/kestra.git
cd kestra
```

#### Develop backend
#### Develop on the backend
The backend is made with [Micronaut](https://micronaut.io).

Open the cloned repository in your favorite IDE. In most decent IDEs, the Gradle build will be detected and all dependencies will be downloaded.
@@ -72,7 +76,7 @@ python3 -m pip install virtualenv
```

#### Develop frontend
#### Develop on the frontend
The frontend is made with [Vue.js](https://vuejs.org/) and located in the `/ui` folder.

- `npm install`
.github/actions/generate-translations/action.yml (vendored, file removed, 57 lines)
@@ -1,57 +0,0 @@
name: Generate Translations
description: "Required the environment variable OPENAI_API_KEY to be set. This action will generate translations for the UI."

inputs:
  github-token:
    description: 'GitHub Token'
    required: true

runs:
  using: composite

  steps:
    - name: Checkout code
      uses: actions/checkout@v4
      with:
        ref: ${{ github.head_ref }}

    - name: Set up Python
      uses: actions/setup-python@v5
      with:
        python-version: "3.x"

    - name: Install Python dependencies
      shell: bash
      run: pip install gitpython openai

    - name: Generate translations
      shell: bash
      run: python ui/src/translations/generate_translations.py

    - name: Set up Node
      uses: actions/setup-node@v4
      with:
        node-version: "20.x"

    - name: Check keys matching
      shell: bash
      run: node ui/src/translations/check.js

    - name: Set up Git
      shell: bash
      run: |
        git config --global user.name "GitHub Action"
        git config --global user.email "actions@github.com"

    - name: Check for changes and commit
      env:
        GH_TOKEN: ${{ inputs.github-token }}
      shell: bash
      run: |
        git add ui/src/translations/*.json
        if git diff --cached --quiet; then
          echo "No changes to commit. Exiting with success."
          exit 0
        fi
        git commit -m "chore(translations): auto generate values for languages other than english"
        git push origin ${{ github.head_ref }}
.github/workflows/codeql-analysis.yml (vendored, 2 changes)
@@ -62,7 +62,7 @@ jobs:

      - name: Build with Gradle
        if: ${{ matrix.language == 'java' }}
        run: ./gradlew testClasses -x :ui:installFrontend -x :ui:assembleFrontend
        run: ./gradlew testClasses -x :ui:assembleFrontend

      # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
      # If this step fails, then you should remove it and run the build manually (see below)
.github/workflows/docker.yml (vendored, 17 changes)
@@ -47,11 +47,11 @@ jobs:
        image:
          - name: "-no-plugins"
            plugins: ""
            packages: ""
            packages: jattach
            python-libs: ""
          - name: ""
            plugins: ${{needs.plugins.outputs.plugins}}
            packages: python3 python3-venv python-is-python3 python3-pip nodejs npm curl zip unzip
            packages: python3 python3-venv python-is-python3 python3-pip nodejs npm curl zip unzip jattach
            python-libs: kestra
    steps:
      - uses: actions/checkout@v4
@@ -63,11 +63,11 @@
          if [[ "${{ inputs.release-tag }}" == "" ]]; then
            TAG=${GITHUB_REF#refs/*/}
            echo "tag=${TAG}" >> $GITHUB_OUTPUT
          else
          else
            TAG="${{ inputs.release-tag }}"
            echo "tag=${TAG}" >> $GITHUB_OUTPUT
          fi

          fi

          if [[ "${{ env.PLUGIN_VERSION }}" == *"-SNAPSHOT" ]]; then
            echo "plugins=--repositories=https://s01.oss.sonatype.org/content/repositories/snapshots ${{ matrix.image.plugins }}" >> $GITHUB_OUTPUT;
          else
@@ -75,7 +75,7 @@
          fi
      # Download release
      - name: Download release
        uses: robinraju/release-downloader@v1.11
        uses: robinraju/release-downloader@v1.12
        with:
          tag: ${{steps.vars.outputs.tag}}
          fileName: 'kestra-*'
@@ -89,6 +89,11 @@
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3

      - name: Docker - Fix Qemu
        shell: bash
        run: |
          docker run --rm --privileged multiarch/qemu-user-static --reset -p yes -c yes

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
.github/workflows/generate-translations.yml (vendored, new file, 67 lines)
@@ -0,0 +1,67 @@
name: Auto-Translate UI keys and create PR

on:
  schedule:
    - cron: "0 9-21 * * *" # Every hour from 9 AM to 9 PM
  workflow_dispatch:
    inputs:
      retranslate_modified_keys:
        description: "Whether to re-translate modified keys even if they already have translations."
        type: choice
        options:
          - "false"
          - "true"
        default: "false"
        required: false

jobs:
  translations:
    name: Translations
    runs-on: ubuntu-latest
    timeout-minutes: 10
    steps:
      - uses: actions/checkout@v4
        name: Checkout
        with:
          fetch-depth: 0

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.x"

      - name: Install Python dependencies
        run: pip install gitpython openai

      - name: Generate translations
        run: python ui/src/translations/generate_translations.py ${{ github.event.inputs.retranslate_modified_keys }}
        env:
          OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}

      - name: Set up Node
        uses: actions/setup-node@v4
        with:
          node-version: "20.x"

      - name: Check keys matching
        run: node ui/src/translations/check.js

      - name: Set up Git
        run: |
          git config --global user.name "GitHub Action"
          git config --global user.email "actions@github.com"

      - name: Commit and create PR
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          BRANCH_NAME="chore/update-translations-$(date +%s)"
          git checkout -b $BRANCH_NAME
          git add ui/src/translations/*.json
          if git diff --cached --quiet; then
            echo "No changes to commit. Exiting with success."
            exit 0
          fi
          git commit -m "chore(core): localize to languages other than english" -m "Extended localization support by adding translations for multiple languages using English as the base. This enhances accessibility and usability for non-English-speaking users while keeping English as the source reference."
          git push -u origin $BRANCH_NAME || (git push origin --delete $BRANCH_NAME && git push -u origin $BRANCH_NAME)
          gh pr create --title "Translations from en.json" --body "This PR was created automatically by a GitHub Action." --base develop --head $BRANCH_NAME --assignee anna-geller --reviewer anna-geller
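Besides the hourly schedule, the `workflow_dispatch` trigger means this job can also be started by hand, for example with the GitHub CLI (workflow file name as added above; the input is optional):

```bash
# Trigger the translation workflow manually and force re-translation of modified keys
gh workflow run generate-translations.yml -f retranslate_modified_keys=true
```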
.github/workflows/main.yml (vendored, 3 changes)
@@ -18,7 +18,7 @@ on:
      - v*

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  group: ${{ github.workflow }}-${{ github.ref }}-main
  cancel-in-progress: true

jobs:
@@ -31,6 +31,7 @@ jobs:
  release:
    name: Release
    needs: [tests]
    if: "!startsWith(github.ref, 'refs/heads/releases')"
    uses: ./.github/workflows/workflow-release.yml
    with:
      plugin-version: ${{ github.event.inputs.plugin-version != null && github.event.inputs.plugin-version || 'LATEST' }}
.github/workflows/pull-request.yml (vendored, 32 changes)
@@ -6,11 +6,15 @@ on:
      - develop

concurrency:
  group: ${{ github.workflow }}-${{ github.ref_name }}
  group: ${{ github.workflow }}-${{ github.ref_name }}-pr
  cancel-in-progress: true

jobs:
  # ********************************************************************************************************************
  # File changes detection
  # ********************************************************************************************************************
  file-changes:
    if: ${{ github.event.pull_request.draft == false }}
    name: File changes detection
    runs-on: ubuntu-latest
    timeout-minutes: 60
@@ -25,34 +29,16 @@ jobs:
          filters: |
            ui:
              - 'ui/**'
            translations:
              - 'ui/src/translations/**'
            backend:
              - '!{ui,.github}/**'
          token: ${{ secrets.GITHUB_TOKEN }}

  translations:
    name: 'Translations - Generate translations'
    runs-on: ubuntu-latest
    timeout-minutes: 60
    needs: file-changes
    steps:
      - uses: actions/checkout@v4
        name: Checkout - Current ref
        if: "needs.file-changes.outputs.translations == 'true'"

      - id: generate-translations
        name: Translations - Generate translations
        if: "needs.file-changes.outputs.translations == 'true'"
        uses: ./.github/actions/generate-translations
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
        env:
          OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}

  # ********************************************************************************************************************
  # Tests
  # ********************************************************************************************************************
  frontend:
    name: Frontend - Tests
    needs: [file-changes, translations]
    needs: [file-changes]
    if: "needs.file-changes.outputs.ui == 'true'"
    uses: ./.github/workflows/workflow-frontend-test.yml
    secrets:
.github/workflows/setversion-tag.yml (vendored, 3 changes)
@@ -23,12 +23,11 @@ jobs:
            exit 1
          fi

          CURRENT_BRANCH="{{ github.ref }}"

          # Extract the major and minor versions
          BASE_VERSION=$(echo "$RELEASE_VERSION" | sed -E 's/^([0-9]+\.[0-9]+)\..*/\1/')
          RELEASE_BRANCH="refs/heads/releases/v${BASE_VERSION}.x"

          CURRENT_BRANCH="$GITHUB_REF"
          if ! [[ "$CURRENT_BRANCH" == "$RELEASE_BRANCH" ]]; then
            echo "Invalid release branch. Expected $RELEASE_BRANCH, was $CURRENT_BRANCH"
            exit 1
.github/workflows/vulnerabilities-check.yml (vendored, 38 changes)
@@ -8,6 +8,9 @@ on:
env:
  JAVA_VERSION: '21'

permissions:
  contents: read

jobs:
  dependency-check:
    name: Dependency Check
@@ -37,7 +40,7 @@ jobs:
      - name: Npm - Install
        shell: bash
        working-directory: ui
        run: npm install
        run: npm ci

      # Run OWASP dependency check plugin
      - name: Gradle Dependency Check
@@ -57,6 +60,10 @@ jobs:
  develop-image-check:
    name: Image Check (develop)
    runs-on: ubuntu-latest
    permissions:
      contents: read
      security-events: write
      actions: read
    steps:
      # Checkout
      - uses: actions/checkout@v4
@@ -80,16 +87,28 @@ jobs:

      # Run Trivy image scan for Docker vulnerabilities, see https://github.com/aquasecurity/trivy-action
      - name: Docker Vulnerabilities Check
        uses: aquasecurity/trivy-action@0.29.0
        uses: aquasecurity/trivy-action@0.30.0
        with:
          image-ref: kestra/kestra:develop
          format: table
          format: 'template'
          template: '@/contrib/sarif.tpl'
          severity: 'CRITICAL,HIGH'
          output: 'trivy-results.sarif'
          skip-dirs: /app/plugins
          scanners: vuln

      - name: Upload Trivy scan results to GitHub Security tab
        uses: github/codeql-action/upload-sarif@v3
        with:
          sarif_file: 'trivy-results.sarif'
          category: docker-

  latest-image-check:
    name: Image Check (latest)
    runs-on: ubuntu-latest
    permissions:
      contents: read
      security-events: write
      actions: read
    steps:
      # Checkout
      - uses: actions/checkout@v4
@@ -113,9 +132,16 @@ jobs:

      # Run Trivy image scan for Docker vulnerabilities, see https://github.com/aquasecurity/trivy-action
      - name: Docker Vulnerabilities Check
        uses: aquasecurity/trivy-action@0.29.0
        uses: aquasecurity/trivy-action@0.30.0
        with:
          image-ref: kestra/kestra:latest
          format: table
          skip-dirs: /app/plugins
          scanners: vuln
          scanners: vuln
          severity: 'CRITICAL,HIGH'
          output: 'trivy-results.sarif'

      - name: Upload Trivy scan results to GitHub Security tab
        uses: github/codeql-action/upload-sarif@v3
        with:
          sarif_file: 'trivy-results.sarif'
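The same scan the workflow performs can be reproduced locally with the Trivy CLI, which is handy when triaging a CI failure; a sketch assuming Trivy is installed:

```bash
# Scan the published image for CRITICAL/HIGH vulnerabilities, mirroring the CI settings above
trivy image --scanners vuln --severity CRITICAL,HIGH kestra/kestra:develop
```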
.github/workflows/workflow-backend-test.yml (vendored, 10 changes)
@@ -16,6 +16,11 @@ on:
      description: 'Google Service Account'
      required: true

permissions:
  contents: write
  checks: write
  actions: read

jobs:
  test:
    name: Backend - Tests
@@ -26,6 +31,8 @@ jobs:
    steps:
      - uses: actions/checkout@v4
        name: Checkout - Current ref
        with:
          fetch-depth: 0

      # Setup build
      - uses: kestra-io/actions/.github/actions/setup-build@main
@@ -54,7 +61,7 @@ jobs:

      # report test
      - name: Test - Publish Test Results
        uses: dorny/test-reporter@v1
        uses: dorny/test-reporter@v2
        if: always()
        with:
          name: Java Tests Report
@@ -63,6 +70,7 @@
          list-suites: 'failed'
          list-tests: 'failed'
          fail-on-error: 'false'
          token: ${{ secrets.GITHUB_AUTH_TOKEN }}

      # Sonar
      - name: Test - Analyze with Sonar

@@ -107,6 +107,11 @@ jobs:
      - name: Docker - Setup QEMU
        uses: docker/setup-qemu-action@v3

      - name: Docker - Fix Qemu
        shell: bash
        run: |
          docker run --rm --privileged multiarch/qemu-user-static --reset -p yes -c yes

      - name: Docker - Setup Buildx
        uses: docker/setup-buildx-action@v3
.github/workflows/workflow-frontend-test.yml (vendored, 22 changes)
@@ -19,11 +19,8 @@ jobs:
    name: Frontend - Tests
    runs-on: ubuntu-latest
    steps:
      - id: checkout
        name: Checkout - Current ref
      - name: Checkout
        uses: actions/checkout@v4
        with:
          ref: ${{ github.head_ref }}

      - name: Npm - install
        shell: bash
@@ -44,28 +41,15 @@
          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
        run: npm run build

      - name: Run front-end unit tests
        shell: bash
        working-directory: ui
        run: npm run test:cicd

      - name: Storybook - Install Playwright
        shell: bash
        working-directory: ui
        run: npx playwright install --with-deps

      - name: Storybook - Build
      - name: Run front-end unit tests
        shell: bash
        working-directory: ui
        run: npm run build-storybook --quiet

      - name: Storybook - Run tests
        shell: bash
        working-directory: ui
        run: |
          npx concurrently -k -s first -n "SB,TEST" -c "magenta,blue" \
            "npx http-server storybook-static --port 6006 --silent" \
            "npx wait-on tcp:127.0.0.1:6006 && npm run test:storybook"
        run: npm run test:cicd

      - name: Codecov - Upload coverage reports
        uses: codecov/codecov-action@v5
.github/workflows/workflow-github-release.yml (vendored, 26 changes)
@@ -20,17 +20,23 @@ jobs:
          name: exe
          path: build/executable

      # GitHub Release
      - name: GitHub - Create release
        id: create_github_release
        uses: "marvinpinto/action-automatic-releases@latest"
        if: startsWith(github.ref, 'refs/tags/v')
        continue-on-error: true
      # Checkout GitHub Actions
      - name: Checkout - Actions
        uses: actions/checkout@v4
        with:
          repo_token: "${{ secrets.GITHUB_TOKEN }}"
          prerelease: false
          files: |
            build/executable/*
          repository: kestra-io/actions
          sparse-checkout-cone-mode: true
          path: actions
          sparse-checkout: |
            .github/actions

      # GitHub Release
      - name: Create GitHub release
        uses: ./actions/.github/actions/github-release
        if: ${{ startsWith(github.ref, 'refs/tags/v') }}
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          SLACK_RELEASES_WEBHOOK_URL: ${{ secrets.SLACK_RELEASES_WEBHOOK_URL }}

      # Trigger gha workflow to bump helm chart version
      - name: GitHub - Trigger the Helm chart version bump
.github/workflows/workflow-publish-docker.yml (vendored, 82 changes)
@@ -1,7 +1,30 @@
name: Publish - Docker

on:
  workflow_dispatch:
    inputs:
      plugin-version:
        description: "Kestra version"
        default: 'LATEST'
        required: false
        type: string
      force-download-artifact:
        description: 'Force download artifact'
        required: false
        type: string
        default: "true"
  workflow_call:
    inputs:
      plugin-version:
        description: "Kestra version"
        default: 'LATEST'
        required: false
        type: string
      force-download-artifact:
        description: 'Force download artifact'
        required: false
        type: string
        default: "true"
    secrets:
      DOCKERHUB_USERNAME:
        description: "The Dockerhub username."
@@ -11,26 +34,39 @@ on:
        required: true

jobs:
  # ********************************************************************************************************************
  # Build
  # ********************************************************************************************************************
  build-artifacts:
    name: Build - Artifacts
    name: Build Artifacts
    if: ${{ github.event.inputs.force-download-artifact == 'true' }}
    uses: ./.github/workflows/workflow-build-artifacts.yml
    with:
      plugin-version: ${{ github.event.inputs.plugin-version != null && github.event.inputs.plugin-version || 'LATEST' }}

  # ********************************************************************************************************************
  # Docker
  # ********************************************************************************************************************
  publish:
    name: Publish - Docker
    needs: build-artifacts
    runs-on: ubuntu-latest
    needs: build-artifacts
    if: |
      always() &&
      (needs.build-artifacts.result == 'success' ||
      github.event.inputs.force-download-artifact != 'true')
    env:
      PLUGIN_VERSION: ${{ github.event.inputs.plugin-version != null && github.event.inputs.plugin-version || 'LATEST' }}
    strategy:
      matrix:
        image:
          - tag: ${{ needs.build-artifacts.outputs.docker-tag }}-no-plugins
            packages: ""
          - tag: -no-plugins
            packages: jattach
            plugins: false
            python-libraries: ""

          - tag: ${{ needs.build-artifacts.outputs.docker-tag }}
            plugins: ${{ needs.build-artifacts.outputs.plugins }}
            packages: python3 python3-venv python-is-python3 python3-pip nodejs npm curl zip unzip
          - tag: ""
            plugins: true
            packages: python3 python3-venv python-is-python3 python3-pip nodejs npm curl zip unzip jattach
            python-libraries: kestra
    steps:
      - name: Checkout - Current ref
@@ -40,6 +76,11 @@ jobs:
      - name: Docker - Setup QEMU
        uses: docker/setup-qemu-action@v3

      - name: Docker - Fix Qemu
        shell: bash
        run: |
          docker run --rm --privileged multiarch/qemu-user-static --reset -p yes -c yes

      - name: Docker - Setup Docker Buildx
        uses: docker/setup-buildx-action@v3

@@ -50,17 +91,34 @@ jobs:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_PASSWORD }}

      # # Get Plugins List
      - name: Plugins - Get List
        uses: ./.github/actions/plugins-list
        id: plugins-list
        if: ${{ matrix.image.plugins}}
        with:
          plugin-version: ${{ env.PLUGIN_VERSION }}

      # Vars
      - name: Docker - Set image name
      - name: Docker - Set variables
        shell: bash
        id: vars
        run: |
          TAG=${GITHUB_REF#refs/*/}
          if [[ $TAG = "master" || $TAG == v* ]]; then
          PLUGINS="${{ matrix.image.plugins == true && steps.plugins-list.outputs.plugins || '' }}"
          if [[ $TAG == v* ]]; then
            TAG="${TAG}";
            echo "plugins=${{ matrix.image.plugins }}" >> $GITHUB_OUTPUT
          elif [[ $TAG = "develop" ]]; then
            TAG="develop";
            echo "plugins=--repositories=https://s01.oss.sonatype.org/content/repositories/snapshots $PLUGINS" >> $GITHUB_OUTPUT
          else
            echo "plugins=--repositories=https://s01.oss.sonatype.org/content/repositories/snapshots ${{ matrix.image.plugins }}" >> $GITHUB_OUTPUT
            TAG="build-${{ github.run_id }}";
            echo "plugins=--repositories=https://s01.oss.sonatype.org/content/repositories/snapshots $PLUGINS" >> $GITHUB_OUTPUT
          fi

          echo "tag=${TAG}${{ matrix.image.tag }}" >> $GITHUB_OUTPUT

      # Build Docker Image
      - name: Artifacts - Download executable
@@ -80,7 +138,7 @@ jobs:
        with:
          context: .
          push: true
          tags: kestra/kestra:${{ matrix.image.tag }}
          tags: kestra/kestra:${{ steps.vars.outputs.tag }}
          platforms: linux/amd64,linux/arm64
          build-args: |
            KESTRA_PLUGINS=${{ steps.vars.outputs.plugins }}
.github/workflows/workflow-release.yml (vendored, 28 changes)
@@ -1,6 +1,18 @@
name: Release

on:
  workflow_dispatch:
    inputs:
      plugin-version:
        description: "Kestra version"
        default: 'LATEST'
        required: false
        type: string
      publish-docker:
        description: "Publish Docker image"
        default: 'false'
        required: false
        type: string
  workflow_call:
    inputs:
      plugin-version:
@@ -31,12 +43,23 @@ on:
        description: "The Sonatype GPG file."
        required: true
jobs:
  build-artifacts:
    name: Build - Artifacts
    uses: ./.github/workflows/workflow-build-artifacts.yml
    with:
      plugin-version: ${{ github.event.inputs.plugin-version != null && github.event.inputs.plugin-version || 'LATEST' }}

  Docker:
    name: Publish Docker
    needs: build-artifacts
    uses: ./.github/workflows/workflow-publish-docker.yml
    if: startsWith(github.ref, 'refs/heads/develop') || github.event.inputs.publish-docker == 'true'
    with:
      force-download-artifact: 'false'
      plugin-version: ${{ github.event.inputs.plugin-version != null && github.event.inputs.plugin-version || 'LATEST' }}
    secrets:
      DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
      DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }}
      DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
      DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }}

  Maven:
    name: Publish Maven
@@ -50,6 +73,7 @@ jobs:

  Github:
    name: Github Release
    needs: build-artifacts
    if: startsWith(github.ref, 'refs/tags/v')
    uses: ./.github/workflows/workflow-github-release.yml
    secrets:
.github/workflows/workflow-test.yml (vendored, 25 changes)
@@ -19,8 +19,31 @@ on:
      value: ${{ jobs.set-backend-status.outputs.backend_status }}

jobs:
  file-changes:
    name: File changes detection
    runs-on: ubuntu-latest
    timeout-minutes: 60
    outputs:
      ui: ${{ steps.changes.outputs.ui }}
      backend: ${{ steps.changes.outputs.backend }}
    steps:
      - uses: actions/checkout@v4
        if: "!startsWith(github.ref, 'refs/tags/v')"
      - uses: dorny/paths-filter@v3
        if: "!startsWith(github.ref, 'refs/tags/v')"
        id: changes
        with:
          filters: |
            ui:
              - 'ui/**'
            backend:
              - '!{ui,.github}/**'
          token: ${{ secrets.GITHUB_TOKEN }}

  frontend:
    name: Frontend - Tests
    needs: file-changes
    if: "needs.file-changes.outputs.ui == 'true' || startsWith(github.ref, 'refs/tags/v')"
    uses: ./.github/workflows/workflow-frontend-test.yml
    secrets:
      GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -29,6 +52,8 @@ jobs:

  backend:
    name: Backend - Tests
    needs: file-changes
    if: "needs.file-changes.outputs.backend == 'true' || startsWith(github.ref, 'refs/tags/v')"
    uses: ./.github/workflows/workflow-backend-test.yml
    secrets:
      GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
.gitignore (vendored, 2 changes)
@@ -37,6 +37,7 @@ ui/coverage
ui/stats.html
ui/.frontend-gradle-plugin
ui/utils/CHANGELOG.md
ui/test-report.junit.xml

### Docker
/.env
@@ -57,3 +58,4 @@ core/src/main/resources/gradle.properties
**/allure-results/*

*storybook.log
storybook-static
.plugins (1 change)
@@ -32,6 +32,7 @@
#plugin-git:io.kestra.plugin:plugin-git:LATEST
#plugin-github:io.kestra.plugin:plugin-github:LATEST
#plugin-googleworkspace:io.kestra.plugin:plugin-googleworkspace:LATEST
#plugin-graalvm:io.kestra.plugin:plugin-graalvm:LATEST
#plugin-hightouch:io.kestra.plugin:plugin-hightouch:LATEST
#plugin-hubspot:io.kestra.plugin:plugin-hubspot:LATEST
#plugin-huggingface:io.kestra.plugin:plugin-huggingface:LATEST
Makefile (22 changes)
@@ -89,7 +89,7 @@ build-docker: build-exec
		--compress \
		--rm \
		-f ./Dockerfile \
		--build-arg="APT_PACKAGES=python3 python3-venv python-is-python3 python3-pip nodejs npm curl zip unzip" \
		--build-arg="APT_PACKAGES=python3 python3-venv python-is-python3 python3-pip nodejs npm curl zip unzip jattach" \
		--build-arg="PYTHON_LIBRARIES=kestra" \
		-t ${DOCKER_IMAGE}:${VERSION} ${DOCKER_PATH} || exit 1 ;

@@ -181,8 +181,8 @@ clone-plugins:
	@echo "Using PLUGIN_GIT_DIR: $(PLUGIN_GIT_DIR)"
	@mkdir -p "$(PLUGIN_GIT_DIR)"
	@echo "Fetching repository list from GitHub..."
	@REPOS=$(gh repo list kestra-io -L 1000 --json name | jq -r .[].name | sort | grep "^plugin-") \
	for repo in $$REPOS; do \
	@REPOS=$$(gh repo list kestra-io -L 1000 --json name | jq -r .[].name | sort | grep "^plugin-"); \
	for repo in $$REPOS; do \
		if [[ $$repo == plugin-* ]]; then \
			if [ -d "$(PLUGIN_GIT_DIR)/$$repo" ]; then \
				echo "Skipping: $$repo (Already cloned)"; \
@@ -194,6 +194,22 @@ clone-plugins:
	done
	@echo "Done!"

# Pull every plugin on the main or master branch
pull-plugins:
	@echo "🔍 Pulling repositories in '$(PLUGIN_GIT_DIR)'..."
	@for repo in "$(PLUGIN_GIT_DIR)"/*; do \
		if [ -d "$$repo/.git" ]; then \
			branch=$$(git -C "$$repo" rev-parse --abbrev-ref HEAD); \
			if [[ "$$branch" == "master" || "$$branch" == "main" ]]; then \
				echo "🔄 Pulling: $$(basename "$$repo") (branch: $$branch)"; \
				git -C "$$repo" pull; \
			else \
				echo "❌ Skipping: $$(basename "$$repo") (Not on master or main branch, currently on $$branch)"; \
			fi; \
		fi; \
	done
	@echo "✅ Done pulling!"

# Update all plugins jar
build-plugins:
	@echo "🔍 Scanning repositories in '$(PLUGIN_GIT_DIR)'..."
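Typical usage of the plugin helper targets shown above, including the new `pull-plugins` one (assuming `gh` is authenticated and `PLUGIN_GIT_DIR` keeps its default from the Makefile):

```bash
# Clone every kestra-io plugin-* repository, then fast-forward the ones on main/master
make clone-plugins
make pull-plugins
# Rebuild the plugin jars afterwards
make build-plugins
```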
@@ -24,6 +24,13 @@
  <a href="https://www.youtube.com/@kestra-io"><img height="25" src="https://kestra.io/youtube.svg" alt="youtube" /></a>
</p>

<p align="center">
  <a href="https://trendshift.io/repositories/2714" target="_blank">
    <img src="https://trendshift.io/api/badge/repositories/2714" alt="kestra-io%2Fkestra | Trendshift" width="250" height="55"/>
  </a>
  <a href="https://www.producthunt.com/posts/kestra?embed=true&utm_source=badge-top-post-badge&utm_medium=badge&utm_souce=badge-kestra" target="_blank"><img src="https://api.producthunt.com/widgets/embed-image/v1/top-post-badge.svg?post_id=612077&theme=light&period=daily&t=1740737506162" alt="Kestra - All-in-one automation & orchestration platform | Product Hunt" style="width: 250px; height: 54px;" width="250" height="54" /></a>
</p>

<p align="center">
  <a href="https://go.kestra.io/video/product-overview" target="_blank">
    <img src="https://kestra.io/startvideo.png" alt="Get started in 4 minutes with Kestra" width="640px" />
@@ -47,7 +54,7 @@ Kestra is an open-source, event-driven orchestration platform that makes both **
- **Structure & Resilience**: tame chaos and bring resilience to your workflows with **namespaces**, **labels**, **subflows**, **retries**, **timeout**, **error handling**, **inputs**, **outputs** that generate artifacts in the UI, **variables**, **conditional branching**, **advanced scheduling**, **event triggers**, **backfills**, **dynamic tasks**, **sequential and parallel tasks**, and skip tasks or triggers when needed by setting the flag `disabled` to `true`.

🧑‍💻 The YAML definition gets automatically adjusted any time you make changes to a workflow from the UI or via an API call. Therefore, the orchestration logic is **always managed declaratively in code**, even if you modify your workflows in other ways (UI, CI/CD, Terraform, API calls).
🧑‍💻 The YAML definition gets automatically adjusted any time you make changes to a workflow from the UI or via an API call. Therefore, the orchestration logic is **always managed declaratively in code**, even if you modify your workflows in other ways (UI, CI/CD, Terraform, API calls).

<p align="center">
build.gradle (29 changes)
@@ -21,7 +21,7 @@ plugins {

    // test
    id "com.adarshr.test-logger" version "4.0.0"
    id "org.sonarqube" version "6.0.1.5171"
    id "org.sonarqube" version "6.1.0.5360"
    id 'jacoco-report-aggregation'

    // helper
@@ -33,13 +33,13 @@ plugins {
    // release
    id "io.github.gradle-nexus.publish-plugin" version "2.0.0"
    id 'net.researchgate.release' version '3.1.0'
    id "com.gorylenko.gradle-git-properties" version "2.4.2"
    id "com.gorylenko.gradle-git-properties" version "2.5.0"
    id 'signing'
    id 'ru.vyarus.pom' version '3.0.0' apply false
    id 'ru.vyarus.github-info' version '2.0.0' apply false

    // OWASP dependency check
    id "org.owasp.dependencycheck" version "12.0.2" apply false
    id "org.owasp.dependencycheck" version "12.1.1" apply false
}

idea {
@@ -74,7 +74,7 @@ dependencies {
**********************************************************************************************************************/
allprojects {
    if (it.name != 'platform') {
        group "io.kestra"
        group = "io.kestra"

        java {
            sourceCompatibility = targetJavaVersion
@@ -121,7 +121,6 @@ allprojects {
        micronaut "io.micronaut:micronaut-management"
        micronaut "io.micrometer:micrometer-core"
        micronaut "io.micronaut.micrometer:micronaut-micrometer-registry-prometheus"
        micronaut "io.micronaut.micrometer:micronaut-micrometer-registry-otlp"
        micronaut "io.micronaut:micronaut-http-client"
        micronaut "io.micronaut.reactor:micronaut-reactor-http-client"
        micronaut "io.micronaut.tracing:micronaut-tracing-opentelemetry-http"
@@ -197,6 +196,9 @@ subprojects {
        testImplementation 'org.hamcrest:hamcrest'
        testImplementation 'org.hamcrest:hamcrest-library'
        testImplementation 'org.exparity:hamcrest-date'

        //assertj
        testImplementation 'org.assertj:assertj-core'
    }

    test {
@@ -214,8 +216,8 @@ subprojects {
        environment 'SECRET_WEBHOOK_KEY', "secretKey".bytes.encodeBase64().toString()
        environment 'SECRET_NON_B64_SECRET', "some secret value"
        environment 'SECRET_PASSWORD', "cGFzc3dvcmQ="
        environment 'KESTRA_TEST1', "true"
        environment 'KESTRA_TEST2', "Pass by env"
        environment 'ENV_TEST1', "true"
        environment 'ENV_TEST2', "Pass by env"
    }

    testlogger {
@@ -280,7 +282,7 @@ subprojects {
    }

    dependencies {
        agent "org.aspectj:aspectjweaver:1.9.22.1"
        agent "org.aspectj:aspectjweaver:1.9.24"
    }

    test {
@@ -597,11 +599,10 @@ release {
    }

    // Dynamically set properties with default values
    failOnSnapshotDependencies = (project.hasProperty('release.failOnSnapshotDependencies')
        ? project.property('release.failOnSnapshotDependencies').toBoolean()
        : true)
    failOnSnapshotDependencies = providers.gradleProperty("release.failOnSnapshotDependencies")
        .map(val -> Boolean.parseBoolean(val))
        .getOrElse(true)

    pushReleaseVersionBranch = (project.hasProperty('release.pushReleaseVersionBranch')
        ? project.property('release.pushReleaseVersionBranch').toString()
        : null)
    pushReleaseVersionBranch = providers.gradleProperty("release.pushReleaseVersionBranch")
        .getOrElse(null)
}
@@ -12,18 +12,9 @@ dependencies {
implementation 'ch.qos.logback.contrib:logback-json-classic'
implementation 'ch.qos.logback.contrib:logback-jackson'

// plugins
implementation 'org.eclipse.aether:aether-api'
implementation 'org.eclipse.aether:aether-spi'
implementation 'org.eclipse.aether:aether-util'
implementation 'org.eclipse.aether:aether-impl'
implementation 'org.eclipse.aether:aether-connector-basic'
implementation 'org.eclipse.aether:aether-transport-file'
implementation 'org.eclipse.aether:aether-transport-http'
implementation('org.apache.maven:maven-aether-provider') {
// sisu dependency injector is not used
exclude group: 'org.eclipse.sisu'
}
// OTLP metrics
implementation "io.micronaut.micrometer:micronaut-micrometer-registry-otlp"

// aether still uses javax.inject
compileOnly 'javax.inject:javax.inject:1'

@@ -43,4 +34,7 @@ dependencies {
implementation project(":storage-local")

implementation project(":webserver")

//test
testImplementation "org.wiremock:wiremock-jetty12"
}
@@ -46,8 +46,18 @@ public abstract class AbstractApiCommand extends AbstractCommand {
@Nullable
private HttpClientConfiguration httpClientConfiguration;

/**
* {@inheritDoc}
*/
protected boolean loadExternalPlugins() {
return false;
}

protected DefaultHttpClient client() throws URISyntaxException {
DefaultHttpClient defaultHttpClient = new DefaultHttpClient(server.toURI(), httpClientConfiguration != null ? httpClientConfiguration : new DefaultHttpClientConfiguration());
DefaultHttpClient defaultHttpClient = DefaultHttpClient.builder()
.uri(server.toURI())
.configuration(httpClientConfiguration != null ? httpClientConfiguration : new DefaultHttpClientConfiguration())
.build();
MessageBodyHandlerRegistry defaultHandlerRegistry = defaultHttpClient.getHandlerRegistry();
if (defaultHandlerRegistry instanceof ContextlessMessageBodyHandlerRegistry modifiableRegistry) {
modifiableRegistry.add(MediaType.TEXT_JSON_TYPE, new NettyJsonHandler<>(JsonMapper.createDefault()));
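The hunk above swaps a two-argument constructor for a fluent builder. As a rough illustration of why that reads better once settings become optional, here is a minimal, self-contained sketch in plain Java; `ClientSettings` and its fields are invented for the example and are not Kestra or Micronaut types.

```java
import java.net.URI;
import java.time.Duration;

public class ClientSettingsExample {
    // Hypothetical settings holder; not a Micronaut or Kestra type.
    record ClientSettings(URI uri, Duration readTimeout) {
        static Builder builder() { return new Builder(); }

        static class Builder {
            private URI uri;
            private Duration readTimeout = Duration.ofSeconds(10); // default when not configured

            Builder uri(URI uri) { this.uri = uri; return this; }
            Builder readTimeout(Duration readTimeout) { this.readTimeout = readTimeout; return this; }
            ClientSettings build() { return new ClientSettings(uri, readTimeout); }
        }
    }

    public static void main(String[] args) {
        // Each optional setting is named at the call site; omitted ones fall back to defaults,
        // which is what the null-check on httpClientConfiguration achieves in the hunk above.
        ClientSettings settings = ClientSettings.builder()
            .uri(URI.create("http://localhost:8080"))
            .build();
        System.out.println(settings.uri() + " " + settings.readTimeout());
    }
}
```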
@@ -4,16 +4,17 @@ import ch.qos.logback.classic.LoggerContext;
import com.google.common.collect.ImmutableMap;
import io.kestra.cli.commands.servers.ServerCommandInterface;
import io.kestra.cli.services.StartupHookInterface;
import io.kestra.core.contexts.KestraContext;
import io.kestra.core.plugins.PluginManager;
import io.kestra.core.plugins.PluginRegistry;
import io.kestra.webserver.services.FlowAutoLoaderService;
import io.micronaut.context.ApplicationContext;
import io.micronaut.context.env.yaml.YamlPropertySourceLoader;
import io.micronaut.core.annotation.Introspected;
import io.micronaut.http.uri.UriBuilder;
import io.micronaut.management.endpoint.EndpointDefaultConfiguration;
import io.micronaut.runtime.server.EmbeddedServer;
import jakarta.inject.Provider;
import lombok.extern.slf4j.Slf4j;
import org.apache.http.client.utils.URIBuilder;
import io.kestra.core.utils.Rethrow;
import picocli.CommandLine;

@@ -26,10 +27,13 @@ import java.nio.file.Paths;
import java.text.MessageFormat;
import java.time.temporal.ChronoUnit;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.Callable;
import jakarta.inject.Inject;
import picocli.CommandLine.Command;
import picocli.CommandLine.Option;

@CommandLine.Command(
@Command(
versionProvider = VersionProvider.class,
mixinStandardHelpOptions = true,
showDefaultValues = true
@@ -49,22 +53,28 @@ abstract public class AbstractCommand implements Callable<Integer> {
@Inject
private io.kestra.core.utils.VersionProvider versionProvider;

@Inject
protected Provider<PluginRegistry> pluginRegistryProvider;

@Inject
protected Provider<PluginManager> pluginManagerProvider;

private PluginRegistry pluginRegistry;

@CommandLine.Option(names = {"-v", "--verbose"}, description = "Change log level. Multiple -v options increase the verbosity.", showDefaultValue = CommandLine.Help.Visibility.NEVER)
@Option(names = {"-v", "--verbose"}, description = "Change log level. Multiple -v options increase the verbosity.", showDefaultValue = CommandLine.Help.Visibility.NEVER)
private boolean[] verbose = new boolean[0];

@CommandLine.Option(names = {"-l", "--log-level"}, description = "Change log level (values: ${COMPLETION-CANDIDATES})")
@Option(names = {"-l", "--log-level"}, description = "Change log level (values: ${COMPLETION-CANDIDATES})")
private LogLevel logLevel = LogLevel.INFO;

@CommandLine.Option(names = {"--internal-log"}, description = "Change also log level for internal log")
@Option(names = {"--internal-log"}, description = "Change also log level for internal log")
private boolean internalLog = false;

@CommandLine.Option(names = {"-c", "--config"}, description = "Path to a configuration file")
@Option(names = {"-c", "--config"}, description = "Path to a configuration file")
private Path config = Paths.get(System.getProperty("user.home"), ".kestra/config.yml");

@CommandLine.Option(names = {"-p", "--plugins"}, description = "Path to plugins directory")
protected Path pluginsPath = System.getenv("KESTRA_PLUGINS_PATH") != null ? Paths.get(System.getenv("KESTRA_PLUGINS_PATH")) : null;
@Option(names = {"-p", "--plugins"}, description = "Path to plugins directory")
protected Path pluginsPath = Optional.ofNullable(System.getenv("KESTRA_PLUGINS_PATH")).map(Paths::get).orElse(null);

public enum LogLevel {
TRACE,
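The change to `pluginsPath` above replaces a double call to `System.getenv` guarded by a null check with an `Optional` chain. A small, self-contained sketch of the same pattern, using only the JDK; the `"plugins"` fallback is invented for the demo, the real field keeps `null` and is validated later by the commands that need it.

```java
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Optional;

public class PluginsPathExample {
    public static void main(String[] args) {
        // Read an optional environment variable once and map it to a Path.
        Path pluginsPath = Optional.ofNullable(System.getenv("KESTRA_PLUGINS_PATH"))
            .map(Paths::get)
            .orElse(Paths.get("plugins")); // demo-only fallback

        System.out.println("Using plugins directory: " + pluginsPath.toAbsolutePath());
    }
}
```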
@@ -76,7 +86,7 @@ abstract public class AbstractCommand implements Callable<Integer> {

@Override
public Integer call() throws Exception {
Thread.currentThread().setName(this.getClass().getDeclaredAnnotation(CommandLine.Command.class).name());
Thread.currentThread().setName(this.getClass().getDeclaredAnnotation(Command.class).name());
startLogger();
sendServerLog();
if (this.startupHook != null) {
@@ -84,8 +94,14 @@ abstract public class AbstractCommand implements Callable<Integer> {
}

if (this.pluginsPath != null && loadExternalPlugins()) {
pluginRegistry = pluginRegistry();
pluginRegistry = pluginRegistryProvider.get();
pluginRegistry.registerIfAbsent(pluginsPath);

// PluginManager must only be initialized if a registry is also instantiated
if (isPluginManagerEnabled()) {
PluginManager manager = pluginManagerProvider.get();
manager.start();
}
}

startWebserver();
@@ -102,8 +118,15 @@ abstract public class AbstractCommand implements Callable<Integer> {
return true;
}

protected PluginRegistry pluginRegistry() {
return KestraContext.getContext().getPluginRegistry(); // Lazy init
/**
* Specifies whether the {@link PluginManager} service must be initialized.
* <p>
* This method can be overridden by concrete commands.
*
* @return {@code true} if the {@link PluginManager} service must be initialized.
*/
protected boolean isPluginManagerEnabled() {
return true;
}

private static String message(String message, Object... format) {
@@ -157,7 +180,6 @@ abstract public class AbstractCommand implements Callable<Integer> {
logger.getName().startsWith("io.kestra") &&
!logger.getName().startsWith("io.kestra.ee.runner.kafka.services"))
)
|| logger.getName().startsWith("flow")
)
.forEach(
logger -> logger.setLevel(ch.qos.logback.classic.Level.valueOf(this.logLevel.name()))
@@ -183,9 +205,9 @@ abstract public class AbstractCommand implements Callable<Integer> {
if (this.endpointConfiguration.getPort().isPresent()) {
URI endpoint = null;
try {
endpoint = new URIBuilder(server.getURL().toURI())
.setPort(this.endpointConfiguration.getPort().get())
.setPath("/health")
endpoint = UriBuilder.of(server.getURL().toURI())
.port(this.endpointConfiguration.getPort().get())
.path("/health")
.build();
} catch (URISyntaxException e) {
e.printStackTrace();
@@ -207,10 +229,12 @@ abstract public class AbstractCommand implements Callable<Integer> {
return false;
}

protected void shutdownHook(Rethrow.RunnableChecked<Exception> run) {
protected void shutdownHook(boolean logShutdown, Rethrow.RunnableChecked<Exception> run) {
Runtime.getRuntime().addShutdownHook(new Thread(
() -> {
log.warn("Receiving shutdown ! Try to graceful exit");
if (logShutdown) {
log.warn("Receiving shutdown ! Try to graceful exit");
}
try {
run.run();
} catch (Exception e) {
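The hunk above adds a `logShutdown` flag to the shutdown-hook helper so callers can register a quiet hook. A simplified, self-contained sketch of the same pattern with plain JDK types; `Runnable` stands in for Kestra's `Rethrow.RunnableChecked`, and the log text here is illustrative.

```java
public class ShutdownHookExample {
    // The boolean decides whether the hook logs before running the cleanup action.
    static void shutdownHook(boolean logShutdown, Runnable run) {
        Runtime.getRuntime().addShutdownHook(new Thread(() -> {
            if (logShutdown) {
                System.err.println("Received shutdown signal, trying to exit gracefully");
            }
            try {
                run.run();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }));
    }

    public static void main(String[] args) {
        // The quiet variant mirrors how the webserver later registers its indexer pool cleanup
        // with shutdownHook(false, ...), while server commands keep the logged variant.
        shutdownHook(false, () -> System.out.println("closing resources"));
        shutdownHook(true, () -> System.out.println("stopping server"));
    }
}
```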
@@ -31,6 +31,12 @@ public abstract class AbstractValidateCommand extends AbstractApiCommand {
@CommandLine.Parameters(index = "0", description = "the directory containing files to check")
protected Path directory;

/** {@inheritDoc} **/
@Override
protected boolean loadExternalPlugins() {
return local;
}

public static void handleException(ConstraintViolationException e, String resource) {
stdErr("\t@|fg(red) Unable to parse {0} due to the following error(s):|@", resource);
e.getConstraintViolations()
@@ -68,10 +74,9 @@ public abstract class AbstractValidateCommand extends AbstractApiCommand {
}
}

// bug in micronaut, we can't inject YamlFlowParser & ModelValidator, so we inject from implementation
// bug in micronaut, we can't inject ModelValidator, so we inject from implementation
public Integer call(
Class<?> cls,
YamlParser yamlParser,
ModelValidator modelValidator,
Function<Object, String> identity,
Function<Object, List<String>> warningsFunction,
@@ -88,7 +93,7 @@ public abstract class AbstractValidateCommand extends AbstractApiCommand {
.filter(YamlParser::isValidExtension)
.forEach(path -> {
try {
Object parse = yamlParser.parse(path.toFile(), cls);
Object parse = YamlParser.parse(path.toFile(), cls);
modelValidator.validate(parse);
stdOut("@|green \u2713|@ - " + identity.apply(parse));
List<String> warnings = warningsFunction.apply(parse);

@@ -18,6 +18,8 @@ import picocli.CommandLine;
FlowNamespaceCommand.class,
FlowDotCommand.class,
FlowExportCommand.class,
FlowUpdateCommand.class,
FlowUpdatesCommand.class
}
)
@Slf4j

@@ -29,8 +29,7 @@ public class FlowDotCommand extends AbstractCommand {
public Integer call() throws Exception {
super.call();

YamlParser parser = applicationContext.getBean(YamlParser.class);
Flow flow = parser.parse(file.toFile(), Flow.class);
Flow flow = YamlParser.parse(file.toFile(), Flow.class);

GraphCluster graph = GraphUtils.of(flow, null);
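Several hunks in this group replace an injected `YamlParser` bean with a static `YamlParser.parse(...)` call, which removes the need to pull the bean from the application context. The diff does not show Kestra's parser internals, so the following is only an assumed sketch of what such a static YAML-to-object helper typically wraps, using Jackson's YAML data format (requires the jackson-dataformat-yaml dependency); the file name and `FlowStub` type are invented for the demo.

```java
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;

import java.io.File;
import java.io.IOException;

// Illustrative only: a static parse helper in the spirit of YamlParser.parse(file, cls).
public final class YamlParseSketch {
    private static final ObjectMapper YAML_MAPPER = new ObjectMapper(new YAMLFactory());

    private YamlParseSketch() {
    }

    public static <T> T parse(File file, Class<T> cls) throws IOException {
        return YAML_MAPPER.readValue(file, cls);
    }

    // Minimal target type for the demo.
    public record FlowStub(String id, String namespace) {
    }

    public static void main(String[] args) throws IOException {
        // Assumes a flow.yaml file with "id" and "namespace" keys exists in the working directory.
        FlowStub flow = parse(new File("flow.yaml"), FlowStub.class);
        System.out.println(flow.namespace() + " / " + flow.id());
    }
}
```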
@@ -20,9 +20,6 @@ public class FlowExpandCommand extends AbstractCommand {
@CommandLine.Parameters(index = "0", description = "The flow file to expand")
private Path file;

@Inject
private YamlParser yamlParser;

@Inject
private ModelValidator modelValidator;

@@ -31,7 +28,7 @@ public class FlowExpandCommand extends AbstractCommand {
super.call();
stdErr("Warning, this functionality is deprecated and will be removed at some point.");
String content = IncludeHelperExpander.expand(Files.readString(file), file.getParent());
Flow flow = yamlParser.parse(content, Flow.class);
Flow flow = YamlParser.parse(content, Flow.class);
modelValidator.validate(flow);
stdOut(content);
return 0;

@@ -33,6 +33,9 @@ public class FlowUpdatesCommand extends AbstractApiCommand {
@CommandLine.Option(names = {"--delete"}, negatable = true, description = "Whether missing should be deleted")
public boolean delete = false;

@CommandLine.Option(names = {"--namespace"}, description = "The parent namespace of the flows, if not set, every namespace are allowed.")
public String namespace;

@SuppressWarnings("deprecation")
@Override
public Integer call() throws Exception {
@@ -58,8 +61,12 @@ public class FlowUpdatesCommand extends AbstractApiCommand {
body = String.join("\n---\n", flows);
}
try(DefaultHttpClient client = client()) {
String namespaceQuery = "";
if (namespace != null) {
namespaceQuery = "&namespace=" + namespace;
}
MutableHttpRequest<String> request = HttpRequest
.POST(apiUri("/flows/bulk") + "?delete=" + delete, body).contentType(MediaType.APPLICATION_YAML);
.POST(apiUri("/flows/bulk") + "?allowNamespaceChild=true&delete=" + delete + namespaceQuery, body).contentType(MediaType.APPLICATION_YAML);

List<UpdateResult> updated = client.toBlocking().retrieve(
this.requestOptions(request),
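The hunk above builds the bulk-update URL by concatenating the query parameters, appending `&namespace=<value>` only when the option is set. A small, self-contained sketch of the same query construction in plain Java; the namespace value is example data, and URL-encoding it is an optional defensive extra rather than something the patch itself does (dotted namespaces pass through unchanged either way).

```java
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;

public class BulkQueryExample {
    public static void main(String[] args) {
        boolean delete = false;
        String namespace = "company.team"; // example namespace

        // Same shape as the request built above:
        // /flows/bulk?allowNamespaceChild=true&delete=...&namespace=...
        StringBuilder query = new StringBuilder("?allowNamespaceChild=true&delete=").append(delete);
        if (namespace != null) {
            query.append("&namespace=").append(URLEncoder.encode(namespace, StandardCharsets.UTF_8));
        }

        System.out.println("/flows/bulk" + query);
        // prints: /flows/bulk?allowNamespaceChild=true&delete=false&namespace=company.team
    }
}
```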
|
||||
@@ -80,4 +87,9 @@ public class FlowUpdatesCommand extends AbstractApiCommand {
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean loadExternalPlugins() {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,9 +1,8 @@
|
||||
package io.kestra.cli.commands.flows;
|
||||
|
||||
import io.kestra.cli.AbstractValidateCommand;
|
||||
import io.kestra.core.models.flows.Flow;
|
||||
import io.kestra.core.models.flows.FlowWithSource;
|
||||
import io.kestra.core.models.validations.ModelValidator;
|
||||
import io.kestra.core.serializers.YamlParser;
|
||||
import io.kestra.core.services.FlowService;
|
||||
import jakarta.inject.Inject;
|
||||
import picocli.CommandLine;
|
||||
@@ -16,8 +15,6 @@ import java.util.List;
|
||||
description = "Validate a flow"
|
||||
)
|
||||
public class FlowValidateCommand extends AbstractValidateCommand {
|
||||
@Inject
|
||||
private YamlParser yamlParser;
|
||||
|
||||
@Inject
|
||||
private ModelValidator modelValidator;
|
||||
@@ -28,23 +25,22 @@ public class FlowValidateCommand extends AbstractValidateCommand {
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
return this.call(
|
||||
Flow.class,
|
||||
yamlParser,
|
||||
FlowWithSource.class,
|
||||
modelValidator,
|
||||
(Object object) -> {
|
||||
Flow flow = (Flow) object;
|
||||
FlowWithSource flow = (FlowWithSource) object;
|
||||
return flow.getNamespace() + " / " + flow.getId();
|
||||
},
|
||||
(Object object) -> {
|
||||
Flow flow = (Flow) object;
|
||||
FlowWithSource flow = (FlowWithSource) object;
|
||||
List<String> warnings = new ArrayList<>();
|
||||
warnings.addAll(flowService.deprecationPaths(flow).stream().map(deprecation -> deprecation + " is deprecated").toList());
|
||||
warnings.addAll(flowService.warnings(flow));
|
||||
warnings.addAll(flowService.warnings(flow, this.tenantId));
|
||||
return warnings;
|
||||
},
|
||||
(Object object) -> {
|
||||
Flow flow = (Flow) object;
|
||||
return flowService.relocations(flow.generateSource()).stream().map(relocation -> relocation.from() + " is replaced by " + relocation.to()).toList();
|
||||
FlowWithSource flow = (FlowWithSource) object;
|
||||
return flowService.relocations(flow.sourceOrGenerateIfNull()).stream().map(relocation -> relocation.from() + " is replaced by " + relocation.to()).toList();
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
@@ -10,7 +10,6 @@ import io.micronaut.http.MediaType;
|
||||
import io.micronaut.http.MutableHttpRequest;
|
||||
import io.micronaut.http.client.exceptions.HttpClientResponseException;
|
||||
import io.micronaut.http.client.netty.DefaultHttpClient;
|
||||
import jakarta.inject.Inject;
|
||||
import jakarta.validation.ConstraintViolationException;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import picocli.CommandLine;
|
||||
@@ -27,8 +26,6 @@ import java.util.List;
|
||||
)
|
||||
@Slf4j
|
||||
public class FlowNamespaceUpdateCommand extends AbstractServiceNamespaceUpdateCommand {
|
||||
@Inject
|
||||
public YamlParser yamlParser;
|
||||
|
||||
@CommandLine.Option(names = {"--override-namespaces"}, negatable = true, description = "Replace namespace of all flows by the one provided")
|
||||
public boolean override = false;
|
||||
|
||||
@@ -1,31 +1,37 @@
|
||||
package io.kestra.cli.commands.plugins;
|
||||
|
||||
import io.micronaut.configuration.picocli.PicocliRunner;
|
||||
import lombok.SneakyThrows;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import io.kestra.cli.AbstractCommand;
|
||||
import io.kestra.cli.App;
|
||||
import picocli.CommandLine;
|
||||
import io.micronaut.configuration.picocli.PicocliRunner;
|
||||
import lombok.SneakyThrows;
|
||||
import picocli.CommandLine.Command;
|
||||
|
||||
@CommandLine.Command(
|
||||
@Command(
|
||||
name = "plugins",
|
||||
description = "Manage plugins",
|
||||
mixinStandardHelpOptions = true,
|
||||
subcommands = {
|
||||
PluginInstallCommand.class,
|
||||
PluginUninstallCommand.class,
|
||||
PluginListCommand.class,
|
||||
PluginDocCommand.class
|
||||
PluginDocCommand.class,
|
||||
PluginSearchCommand.class
|
||||
}
|
||||
)
|
||||
@Slf4j
|
||||
public class PluginCommand extends AbstractCommand {
|
||||
|
||||
@SneakyThrows
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
|
||||
PicocliRunner.call(App.class, "plugins", "--help");
|
||||
PicocliRunner.call(App.class, "plugins", "--help");
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean loadExternalPlugins() {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,6 +3,7 @@ package io.kestra.cli.commands.plugins;
|
||||
import com.google.common.io.Files;
|
||||
import io.kestra.cli.AbstractCommand;
|
||||
import io.kestra.core.docs.DocumentationGenerator;
|
||||
import io.kestra.core.plugins.PluginRegistry;
|
||||
import io.kestra.core.plugins.RegisteredPlugin;
|
||||
import io.kestra.core.serializers.JacksonMapper;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
@@ -42,8 +43,10 @@ public class PluginDocCommand extends AbstractCommand {
|
||||
super.call();
|
||||
DocumentationGenerator documentationGenerator = applicationContext.getBean(DocumentationGenerator.class);
|
||||
|
||||
List<RegisteredPlugin> plugins = core ? pluginRegistry().plugins() : pluginRegistry().externalPlugins();
|
||||
PluginRegistry registry = pluginRegistryProvider.get();
|
||||
List<RegisteredPlugin> plugins = core ? registry.plugins() : registry.externalPlugins();
|
||||
boolean hasFailures = false;
|
||||
|
||||
for (RegisteredPlugin registeredPlugin : plugins) {
|
||||
try {
|
||||
documentationGenerator
|
||||
@@ -100,4 +103,10 @@ public class PluginDocCommand extends AbstractCommand {
|
||||
|
||||
return hasFailures ? 1 : 0;
|
||||
}
|
||||
|
||||
/** {@inheritDoc} **/
|
||||
@Override
|
||||
protected boolean isPluginManagerEnabled() {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,98 +1,123 @@
|
||||
package io.kestra.cli.commands.plugins;
|
||||
|
||||
import org.apache.commons.io.FilenameUtils;
|
||||
import io.kestra.core.contexts.MavenPluginRepositoryConfig;
|
||||
import io.kestra.core.plugins.LocalPluginManager;
|
||||
import io.kestra.core.plugins.MavenPluginDownloader;
|
||||
import io.kestra.core.plugins.PluginArtifact;
|
||||
import io.kestra.core.plugins.PluginCatalogService;
|
||||
import io.kestra.core.plugins.PluginManager;
|
||||
import io.micronaut.http.client.HttpClient;
|
||||
import io.micronaut.http.client.annotation.Client;
|
||||
import io.micronaut.http.uri.UriBuilder;
|
||||
import io.kestra.cli.AbstractCommand;
|
||||
import io.kestra.cli.plugins.PluginDownloader;
|
||||
import io.kestra.cli.plugins.RepositoryConfig;
|
||||
import io.kestra.core.utils.IdUtils;
|
||||
import org.apache.http.client.utils.URIBuilder;
|
||||
import jakarta.inject.Provider;
|
||||
import picocli.CommandLine;
|
||||
|
||||
import java.net.URI;
|
||||
import java.net.URL;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Paths;
|
||||
import java.nio.file.StandardCopyOption;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
||||
import jakarta.inject.Inject;
|
||||
import picocli.CommandLine.Command;
|
||||
import picocli.CommandLine.Parameters;
|
||||
import picocli.CommandLine.Option;
|
||||
import picocli.CommandLine.Spec;
|
||||
|
||||
import static io.kestra.core.utils.Rethrow.throwConsumer;
|
||||
|
||||
@CommandLine.Command(
|
||||
@Command(
|
||||
name = "install",
|
||||
description = "Install plugins"
|
||||
)
|
||||
public class PluginInstallCommand extends AbstractCommand {
|
||||
@CommandLine.Parameters(index = "0..*", description = "Plugins to install. Represented as Maven artifact coordinates.")
|
||||
|
||||
@Option(names = {"--locally"}, description = "Specifies if plugins must be installed locally. If set to false the installation depends on your Kestra configuration.")
|
||||
boolean locally = true;
|
||||
|
||||
@Option(names = {"--all"}, description = "Install all available plugins")
|
||||
boolean all = false;
|
||||
|
||||
@Parameters(index = "0..*", description = "Plugins to install. Represented as Maven artifact coordinates (i.e., <groupId>:<artifactId>:(<version>|LATEST)")
|
||||
List<String> dependencies = new ArrayList<>();
|
||||
|
||||
@CommandLine.Option(names = {"--repositories"}, description = "URL to additional Maven repositories")
|
||||
@Option(names = {"--repositories"}, description = "URL to additional Maven repositories")
|
||||
private URI[] repositories;
|
||||
|
||||
@CommandLine.Spec
|
||||
@Spec
|
||||
CommandLine.Model.CommandSpec spec;
|
||||
|
||||
@Inject
|
||||
private PluginDownloader pluginDownloader;
|
||||
Provider<MavenPluginDownloader> mavenPluginRepositoryProvider;
|
||||
|
||||
@Inject
|
||||
@Client("api") HttpClient httpClient;
|
||||
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
|
||||
if (this.pluginsPath == null) {
|
||||
if (this.locally && this.pluginsPath == null) {
|
||||
throw new CommandLine.ParameterException(this.spec.commandLine(), "Missing required options '--plugins' " +
|
||||
"or environment variable 'KESTRA_PLUGINS_PATH"
|
||||
);
|
||||
}
|
||||
|
||||
if (!pluginsPath.toFile().exists()) {
|
||||
if (!pluginsPath.toFile().mkdir()) {
|
||||
throw new RuntimeException("Cannot create directory: " + pluginsPath.toFile().getAbsolutePath());
|
||||
}
|
||||
}
|
||||
|
||||
List<MavenPluginRepositoryConfig> repositoryConfigs = List.of();
|
||||
if (repositories != null) {
|
||||
Arrays.stream(repositories)
|
||||
.forEach(throwConsumer(s -> {
|
||||
URIBuilder uriBuilder = new URIBuilder(s);
|
||||
|
||||
RepositoryConfig.RepositoryConfigBuilder builder = RepositoryConfig.builder()
|
||||
repositoryConfigs = Arrays.stream(repositories)
|
||||
.map(uri -> {
|
||||
MavenPluginRepositoryConfig.MavenPluginRepositoryConfigBuilder builder = MavenPluginRepositoryConfig
|
||||
.builder()
|
||||
.id(IdUtils.create());
|
||||
|
||||
if (uriBuilder.getUserInfo() != null) {
|
||||
int index = uriBuilder.getUserInfo().indexOf(":");
|
||||
|
||||
builder.basicAuth(new RepositoryConfig.BasicAuth(
|
||||
uriBuilder.getUserInfo().substring(0, index),
|
||||
uriBuilder.getUserInfo().substring(index + 1)
|
||||
String userInfo = uri.getUserInfo();
|
||||
if (userInfo != null) {
|
||||
String[] userInfoParts = userInfo.split(":");
|
||||
builder = builder.basicAuth(new MavenPluginRepositoryConfig.BasicAuth(
|
||||
userInfoParts[0],
|
||||
userInfoParts[1]
|
||||
));
|
||||
|
||||
uriBuilder.setUserInfo(null);
|
||||
}
|
||||
|
||||
builder.url(uriBuilder.build().toString());
|
||||
|
||||
pluginDownloader.addRepository(builder.build());
|
||||
}));
|
||||
builder.url(UriBuilder.of(uri).userInfo(null).build().toString());
|
||||
return builder.build();
|
||||
}).toList();
|
||||
}
|
||||
|
||||
List<URL> resolveUrl = pluginDownloader.resolve(dependencies);
|
||||
stdOut("Resolved Plugin(s) with {0}", resolveUrl);
|
||||
if (all) {
|
||||
PluginCatalogService service = new PluginCatalogService(httpClient, false, true);
|
||||
dependencies = service.get().stream().map(Objects::toString).toList();
|
||||
}
|
||||
|
||||
for (URL url: resolveUrl) {
|
||||
Files.copy(
|
||||
Paths.get(url.toURI()),
|
||||
Paths.get(pluginsPath.toString(), FilenameUtils.getName(url.toString())),
|
||||
StandardCopyOption.REPLACE_EXISTING
|
||||
if (dependencies.isEmpty()) {
|
||||
stdErr("Error: No plugin to install.");
|
||||
return CommandLine.ExitCode.OK;
|
||||
}
|
||||
|
||||
final List<PluginArtifact> pluginArtifacts;
|
||||
try {
|
||||
pluginArtifacts = dependencies.stream().map(PluginArtifact::fromCoordinates).toList();
|
||||
} catch (IllegalArgumentException e) {
|
||||
stdErr(e.getMessage());
|
||||
return CommandLine.ExitCode.USAGE;
|
||||
}
|
||||
|
||||
try (final PluginManager pluginManager = getPluginManager()) {
|
||||
List<PluginArtifact> installed = pluginManager.install(
|
||||
pluginArtifacts,
|
||||
repositoryConfigs,
|
||||
false,
|
||||
pluginsPath
|
||||
);
|
||||
|
||||
List<URI> uris = installed.stream().map(PluginArtifact::uri).toList();
|
||||
stdOut("Successfully installed plugins {0} into {1}", dependencies, uris);
|
||||
return CommandLine.ExitCode.OK;
|
||||
}
|
||||
}
|
||||
|
||||
stdOut("Successfully installed plugins {0} into {1}", dependencies, pluginsPath);
|
||||
|
||||
return 0;
|
||||
private PluginManager getPluginManager() {
|
||||
return locally ? new LocalPluginManager(mavenPluginRepositoryProvider.get()) : this.pluginManagerProvider.get();
|
||||
}
|
||||
|
||||
@Override
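The rewritten install command above parses each argument with `PluginArtifact.fromCoordinates(...)` and returns a usage error before any download starts. The parser itself is not shown in this diff, so the sketch below only assumes the general `<groupId>:<artifactId>:<version>` format named in the option description; the record, its rules, and the example coordinates are all hypothetical.

```java
import java.util.List;

public class CoordinatesExample {
    // Illustrative stand-in for PluginArtifact.fromCoordinates.
    record Coordinates(String groupId, String artifactId, String version) {
        static Coordinates fromCoordinates(String value) {
            String[] parts = value.split(":");
            if (parts.length != 3) {
                throw new IllegalArgumentException(
                    "Invalid coordinates '" + value + "', expected <groupId>:<artifactId>:(<version>|LATEST)");
            }
            return new Coordinates(parts[0], parts[1], parts[2]);
        }
    }

    public static void main(String[] args) {
        List<String> dependencies = List.of("io.kestra.plugin:plugin-example:LATEST"); // example coordinates
        // Mirrors the command's flow: parse everything up front, fail fast on bad input.
        try {
            List<Coordinates> artifacts = dependencies.stream().map(Coordinates::fromCoordinates).toList();
            System.out.println("Installing " + artifacts);
        } catch (IllegalArgumentException e) {
            System.err.println(e.getMessage());
        }
    }
}
```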
|
||||
|
||||
@@ -1,22 +1,31 @@
|
||||
package io.kestra.cli.commands.plugins;
|
||||
|
||||
import io.kestra.cli.AbstractCommand;
|
||||
import io.kestra.core.plugins.PluginRegistry;
|
||||
import io.kestra.core.plugins.RegisteredPlugin;
|
||||
import jakarta.inject.Inject;
|
||||
import jakarta.inject.Provider;
|
||||
import picocli.CommandLine;
|
||||
import picocli.CommandLine.Command;
|
||||
import picocli.CommandLine.Option;
|
||||
import picocli.CommandLine.Spec;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
@CommandLine.Command(
|
||||
@Command(
|
||||
name = "list",
|
||||
description = "List all plugins already installed"
|
||||
)
|
||||
public class PluginListCommand extends AbstractCommand {
|
||||
@CommandLine.Spec
|
||||
@Spec
|
||||
CommandLine.Model.CommandSpec spec;
|
||||
|
||||
@CommandLine.Option(names = {"--core"}, description = "Also write core tasks plugins")
|
||||
@Option(names = {"--core"}, description = "Also write core tasks plugins")
|
||||
private boolean core = false;
|
||||
|
||||
@Inject
|
||||
private PluginRegistry registry;
|
||||
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
@@ -27,7 +36,8 @@ public class PluginListCommand extends AbstractCommand {
|
||||
);
|
||||
}
|
||||
|
||||
List<RegisteredPlugin> plugins = core ? pluginRegistry().plugins() : pluginRegistry().externalPlugins();
|
||||
List<RegisteredPlugin> plugins = core ? registry.plugins() : registry.externalPlugins();
|
||||
|
||||
plugins.forEach(registeredPlugin -> stdOut(registeredPlugin.toString()));
|
||||
|
||||
return 0;
|
||||
|
||||
@@ -0,0 +1,149 @@
|
||||
package io.kestra.cli.commands.plugins;
|
||||
|
||||
import io.kestra.cli.AbstractCommand;
|
||||
import io.micronaut.core.type.Argument;
|
||||
import io.micronaut.http.HttpRequest;
|
||||
import io.micronaut.http.client.HttpClient;
|
||||
import io.micronaut.http.client.annotation.Client;
|
||||
import jakarta.inject.Inject;
|
||||
import picocli.CommandLine.Command;
|
||||
import picocli.CommandLine.Parameters;
|
||||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
@Command(
|
||||
name = "search",
|
||||
description = "Search for available Kestra plugins"
|
||||
)
|
||||
public class PluginSearchCommand extends AbstractCommand {
|
||||
@Inject
|
||||
@Client("api")
|
||||
private HttpClient httpClient;
|
||||
|
||||
private static final ObjectMapper MAPPER = new ObjectMapper();
|
||||
private static final char SPACE = ' ';
|
||||
|
||||
@Parameters(index = "0", description = "Search term (optional)", defaultValue = "")
|
||||
private String searchTerm;
|
||||
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
|
||||
try {
|
||||
JsonNode root = fetchPlugins();
|
||||
List<PluginInfo> plugins = findPlugins(root);
|
||||
printResults(plugins);
|
||||
return 0;
|
||||
} catch (Exception e) {
|
||||
stdOut("Error processing plugins: {0}", e.getMessage());
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
|
||||
private JsonNode fetchPlugins() throws Exception {
|
||||
String response = httpClient.toBlocking()
|
||||
.retrieve(
|
||||
HttpRequest.GET("/v1/plugins")
|
||||
.header("Accept", "application/json")
|
||||
);
|
||||
return MAPPER.readTree(response);
|
||||
}
|
||||
|
||||
private List<PluginInfo> findPlugins(JsonNode root) {
|
||||
String searchTermLower = searchTerm.toLowerCase();
|
||||
List<PluginInfo> plugins = new ArrayList<>();
|
||||
|
||||
for (JsonNode plugin : root) {
|
||||
if (matchesSearch(plugin, searchTermLower)) {
|
||||
plugins.add(new PluginInfo(
|
||||
plugin.path("name").asText(),
|
||||
plugin.path("title").asText(),
|
||||
plugin.path("group").asText(),
|
||||
plugin.path("version").asText("")
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
plugins.sort((p1, p2) -> p1.name.compareToIgnoreCase(p2.name));
|
||||
return plugins;
|
||||
}
|
||||
|
||||
private boolean matchesSearch(JsonNode plugin, String term) {
|
||||
if (term.isEmpty()) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return plugin.path("name").asText().toLowerCase().contains(term) ||
|
||||
plugin.path("title").asText().toLowerCase().contains(term) ||
|
||||
plugin.path("group").asText().toLowerCase().contains(term);
|
||||
}
|
||||
|
||||
private void printResults(List<PluginInfo> plugins) {
|
||||
if (plugins.isEmpty()) {
|
||||
stdOut("No plugins found{0}",
|
||||
searchTerm.isEmpty() ? "" : " matching '" + searchTerm + "'");
|
||||
return;
|
||||
}
|
||||
|
||||
stdOut("\nFound {0} plugins{1}",
|
||||
plugins.size(),
|
||||
searchTerm.isEmpty() ? "" : " matching '" + searchTerm + "'"
|
||||
);
|
||||
|
||||
printPluginsTable(plugins);
|
||||
}
|
||||
|
||||
private void printPluginsTable(List<PluginInfo> plugins) {
|
||||
int maxName = 4, maxTitle = 5, maxGroup = 5;
|
||||
for (PluginInfo plugin : plugins) {
|
||||
maxName = Math.max(maxName, plugin.name.length());
|
||||
maxTitle = Math.max(maxTitle, plugin.title.length());
|
||||
maxGroup = Math.max(maxGroup, plugin.group.length());
|
||||
}
|
||||
|
||||
StringBuilder namePad = new StringBuilder(maxName);
|
||||
StringBuilder titlePad = new StringBuilder(maxTitle);
|
||||
StringBuilder groupPad = new StringBuilder(maxGroup);
|
||||
|
||||
stdOut("");
|
||||
printRow(namePad, titlePad, groupPad, "NAME", "TITLE", "GROUP", "VERSION",
|
||||
maxName, maxTitle, maxGroup);
|
||||
|
||||
for (PluginInfo plugin : plugins) {
|
||||
printRow(namePad, titlePad, groupPad, plugin.name, plugin.title, plugin.group, plugin.version,
|
||||
maxName, maxTitle, maxGroup);
|
||||
}
|
||||
stdOut("");
|
||||
}
|
||||
|
||||
private void printRow(StringBuilder namePad, StringBuilder titlePad, StringBuilder groupPad,
|
||||
String name, String title, String group, String version,
|
||||
int maxName, int maxTitle, int maxGroup) {
|
||||
stdOut("{0} {1} {2} {3}",
|
||||
pad(namePad, name, maxName),
|
||||
pad(titlePad, title, maxTitle),
|
||||
pad(groupPad, group, maxGroup),
|
||||
version
|
||||
);
|
||||
}
|
||||
|
||||
private String pad(StringBuilder sb, String str, int length) {
|
||||
sb.setLength(0);
|
||||
sb.append(str);
|
||||
while (sb.length() < length) {
|
||||
sb.append(SPACE);
|
||||
}
|
||||
return sb.toString();
|
||||
}
|
||||
|
||||
private record PluginInfo(String name, String title, String group, String version) {}
|
||||
|
||||
@Override
|
||||
protected boolean loadExternalPlugins() {
|
||||
return false;
|
||||
}
|
||||
}
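The new `PluginSearchCommand` above matches the search term case-insensitively against a plugin's name, title, and group, then sorts by name. A condensed, self-contained sketch of that filter-and-sort logic over plain records; the plugin entries are example data only. As a side note on the manual `pad(...)` helper, `String.format` with a width specifier such as `%-20s` is a common alternative for column alignment.

```java
import java.util.Comparator;
import java.util.List;

public class PluginSearchSketch {
    record PluginInfo(String name, String title, String group) {}

    // Same matching rule as matchesSearch above: an empty term matches everything, otherwise the
    // term must appear (case-insensitively) in the name, title, or group.
    static List<PluginInfo> search(List<PluginInfo> all, String term) {
        String lower = term.toLowerCase();
        return all.stream()
            .filter(p -> lower.isEmpty()
                || p.name().toLowerCase().contains(lower)
                || p.title().toLowerCase().contains(lower)
                || p.group().toLowerCase().contains(lower))
            .sorted(Comparator.comparing(PluginInfo::name, String.CASE_INSENSITIVE_ORDER))
            .toList();
    }

    public static void main(String[] args) {
        List<PluginInfo> all = List.of(
            new PluginInfo("plugin-jdbc", "JDBC", "io.kestra.plugin"),
            new PluginInfo("plugin-script", "Scripts", "io.kestra.plugin"));
        System.out.println(search(all, "jdbc")); // keeps only the JDBC entry
    }
}
```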
|
||||
@@ -0,0 +1,69 @@
|
||||
package io.kestra.cli.commands.plugins;
|
||||
|
||||
import io.kestra.cli.AbstractCommand;
|
||||
import io.kestra.core.plugins.LocalPluginManager;
|
||||
import io.kestra.core.plugins.MavenPluginDownloader;
|
||||
import io.kestra.core.plugins.PluginArtifact;
|
||||
import io.kestra.core.plugins.PluginManager;
|
||||
import jakarta.inject.Inject;
|
||||
import jakarta.inject.Provider;
|
||||
import picocli.CommandLine;
|
||||
import picocli.CommandLine.Parameters;
|
||||
import picocli.CommandLine.Spec;
|
||||
|
||||
import java.net.URI;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "uninstall",
|
||||
description = "Uninstall plugins"
|
||||
)
|
||||
public class PluginUninstallCommand extends AbstractCommand {
|
||||
@Parameters(index = "0..*", description = "The plugins to uninstall. Represented as Maven artifact coordinates (i.e., <groupId>:<artifactId>:(<version>|LATEST)")
|
||||
List<String> dependencies = new ArrayList<>();
|
||||
|
||||
@Spec
|
||||
CommandLine.Model.CommandSpec spec;
|
||||
|
||||
@Inject
|
||||
Provider<MavenPluginDownloader> mavenPluginRepositoryProvider;
|
||||
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
|
||||
List<PluginArtifact> pluginArtifacts;
|
||||
try {
|
||||
pluginArtifacts = dependencies.stream().map(PluginArtifact::fromCoordinates).toList();
|
||||
} catch (IllegalArgumentException e) {
|
||||
stdErr(e.getMessage());
|
||||
return CommandLine.ExitCode.USAGE;
|
||||
}
|
||||
|
||||
final PluginManager pluginManager;
|
||||
|
||||
// If a PLUGIN_PATH is provided, then use the LocalPluginManager
|
||||
if (pluginsPath != null) {
|
||||
pluginManager = new LocalPluginManager(mavenPluginRepositoryProvider.get());
|
||||
} else {
|
||||
// Otherwise, we delegate to the configured plugin-manager.
|
||||
pluginManager = this.pluginManagerProvider.get();
|
||||
}
|
||||
|
||||
List<PluginArtifact> uninstalled = pluginManager.uninstall(
|
||||
pluginArtifacts,
|
||||
false,
|
||||
pluginsPath
|
||||
);
|
||||
|
||||
List<URI> uris = uninstalled.stream().map(PluginArtifact::uri).toList();
|
||||
stdOut("Successfully uninstalled plugins {0} from {1}", dependencies, uris);
|
||||
return CommandLine.ExitCode.OK;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean loadExternalPlugins() {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
@@ -1,12 +1,20 @@
|
||||
package io.kestra.cli.commands.servers;
|
||||
|
||||
import io.kestra.cli.AbstractCommand;
|
||||
import io.kestra.core.contexts.KestraContext;
|
||||
import jakarta.annotation.PostConstruct;
|
||||
import picocli.CommandLine;
|
||||
|
||||
abstract public class AbstractServerCommand extends AbstractCommand implements ServerCommandInterface {
|
||||
@CommandLine.Option(names = {"--port"}, description = "The port to bind")
|
||||
Integer serverPort;
|
||||
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
this.shutdownHook(true, () -> KestraContext.getContext().shutdown());
|
||||
return super.call();
|
||||
}
|
||||
|
||||
protected static int defaultWorkerThread() {
|
||||
return Runtime.getRuntime().availableProcessors() * 4;
|
||||
}
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
package io.kestra.cli.commands.servers;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import io.kestra.core.contexts.KestraContext;
|
||||
import io.kestra.core.models.ServerType;
|
||||
import io.kestra.core.runners.ExecutorInterface;
|
||||
import io.kestra.core.services.SkipExecutionService;
|
||||
@@ -9,7 +8,6 @@ import io.kestra.core.services.StartExecutorService;
|
||||
import io.kestra.core.utils.Await;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import jakarta.inject.Inject;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import picocli.CommandLine;
|
||||
|
||||
import java.util.Collections;
|
||||
@@ -20,7 +18,6 @@ import java.util.Map;
|
||||
name = "executor",
|
||||
description = "Start the Kestra executor"
|
||||
)
|
||||
@Slf4j
|
||||
public class ExecutorCommand extends AbstractServerCommand {
|
||||
@Inject
|
||||
private ApplicationContext applicationContext;
|
||||
@@ -66,13 +63,10 @@ public class ExecutorCommand extends AbstractServerCommand {
|
||||
this.startExecutorService.applyOptions(startExecutors, notStartExecutors);
|
||||
|
||||
super.call();
|
||||
this.shutdownHook(() -> KestraContext.getContext().shutdown());
|
||||
|
||||
ExecutorInterface executorService = applicationContext.getBean(ExecutorInterface.class);
|
||||
executorService.run();
|
||||
|
||||
log.info("Executor started");
|
||||
|
||||
Await.until(() -> !this.applicationContext.isRunning());
|
||||
|
||||
return 0;
|
||||
|
||||
@@ -1,13 +1,11 @@
|
||||
package io.kestra.cli.commands.servers;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import io.kestra.core.contexts.KestraContext;
|
||||
import io.kestra.core.models.ServerType;
|
||||
import io.kestra.core.runners.IndexerInterface;
|
||||
import io.kestra.core.utils.Await;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import jakarta.inject.Inject;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import picocli.CommandLine;
|
||||
|
||||
import java.util.Map;
|
||||
@@ -16,7 +14,6 @@ import java.util.Map;
|
||||
name = "indexer",
|
||||
description = "Start the Kestra indexer"
|
||||
)
|
||||
@Slf4j
|
||||
public class IndexerCommand extends AbstractServerCommand {
|
||||
@Inject
|
||||
private ApplicationContext applicationContext;
|
||||
@@ -31,13 +28,10 @@ public class IndexerCommand extends AbstractServerCommand {
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
this.shutdownHook(() -> KestraContext.getContext().shutdown());
|
||||
|
||||
IndexerInterface indexer = applicationContext.getBean(IndexerInterface.class);
|
||||
indexer.run();
|
||||
|
||||
log.info("Indexer started");
|
||||
|
||||
Await.until(() -> !this.applicationContext.isRunning());
|
||||
|
||||
return 0;
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
package io.kestra.cli.commands.servers;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import io.kestra.core.contexts.KestraContext;
|
||||
import io.kestra.core.models.ServerType;
|
||||
import io.kestra.core.schedulers.AbstractScheduler;
|
||||
import io.kestra.core.utils.Await;
|
||||
@@ -31,12 +30,10 @@ public class SchedulerCommand extends AbstractServerCommand {
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
this.shutdownHook(() -> KestraContext.getContext().shutdown());
|
||||
|
||||
AbstractScheduler scheduler = applicationContext.getBean(AbstractScheduler.class);
|
||||
scheduler.run();
|
||||
|
||||
log.info("Scheduler started");
|
||||
Await.until(() -> !this.applicationContext.isRunning());
|
||||
|
||||
return 0;
|
||||
|
||||
@@ -12,7 +12,6 @@ import io.kestra.core.utils.Await;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import jakarta.annotation.Nullable;
|
||||
import jakarta.inject.Inject;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import picocli.CommandLine;
|
||||
|
||||
import java.io.File;
|
||||
@@ -25,7 +24,6 @@ import java.util.Map;
|
||||
name = "standalone",
|
||||
description = "Start the standalone all-in-one server"
|
||||
)
|
||||
@Slf4j
|
||||
public class StandAloneCommand extends AbstractServerCommand {
|
||||
@CommandLine.Spec
|
||||
CommandLine.Model.CommandSpec spec;
|
||||
@@ -91,11 +89,11 @@ public class StandAloneCommand extends AbstractServerCommand {
|
||||
this.skipExecutionService.setSkipFlows(skipFlows);
|
||||
this.skipExecutionService.setSkipNamespaces(skipNamespaces);
|
||||
this.skipExecutionService.setSkipTenants(skipTenants);
|
||||
|
||||
this.startExecutorService.applyOptions(startExecutors, notStartExecutors);
|
||||
|
||||
KestraContext.getContext().injectWorkerConfigs(workerThread, null);
|
||||
|
||||
super.call();
|
||||
this.shutdownHook(() -> KestraContext.getContext().shutdown());
|
||||
|
||||
if (flowPath != null) {
|
||||
try {
|
||||
@@ -124,8 +122,6 @@ public class StandAloneCommand extends AbstractServerCommand {
|
||||
fileWatcher.startListeningFromConfig();
|
||||
}
|
||||
|
||||
this.shutdownHook(standAloneRunner::close);
|
||||
|
||||
Await.until(() -> !this.applicationContext.isRunning());
|
||||
|
||||
return 0;
|
||||
|
||||
@@ -1,9 +1,7 @@
|
||||
package io.kestra.cli.commands.servers;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import io.kestra.core.contexts.KestraContext;
|
||||
import io.kestra.core.models.ServerType;
|
||||
import io.kestra.core.runners.ExecutorInterface;
|
||||
import io.kestra.core.runners.IndexerInterface;
|
||||
import io.kestra.core.utils.Await;
|
||||
import io.kestra.core.utils.ExecutorsUtils;
|
||||
@@ -57,20 +55,11 @@ public class WebServerCommand extends AbstractServerCommand {
|
||||
log.info("Starting an embedded indexer, this can be disabled by using `--no-indexer`.");
|
||||
poolExecutor = executorsUtils.cachedThreadPool("webserver-indexer");
|
||||
poolExecutor.execute(applicationContext.getBean(IndexerInterface.class));
|
||||
shutdownHook(false, () -> poolExecutor.shutdown());
|
||||
}
|
||||
|
||||
log.info("Webserver started");
|
||||
this.shutdownHook(() -> {
|
||||
this.close();
|
||||
KestraContext.getContext().shutdown();
|
||||
});
|
||||
Await.until(() -> !this.applicationContext.isRunning());
|
||||
return 0;
|
||||
}
|
||||
|
||||
private void close() {
|
||||
if (this.poolExecutor != null) {
|
||||
this.poolExecutor.shutdown();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,7 +7,6 @@ import io.kestra.core.runners.Worker;
|
||||
import io.kestra.core.utils.Await;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import jakarta.inject.Inject;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import picocli.CommandLine;
|
||||
import picocli.CommandLine.Option;
|
||||
|
||||
@@ -18,7 +17,6 @@ import java.util.UUID;
|
||||
name = "worker",
|
||||
description = "Start the Kestra worker"
|
||||
)
|
||||
@Slf4j
|
||||
public class WorkerCommand extends AbstractServerCommand {
|
||||
|
||||
@Inject
|
||||
@@ -39,8 +37,11 @@ public class WorkerCommand extends AbstractServerCommand {
|
||||
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
|
||||
KestraContext.getContext().injectWorkerConfigs(thread, workerGroupKey);
|
||||
|
||||
super.call();
|
||||
this.shutdownHook(() -> KestraContext.getContext().shutdown());
|
||||
|
||||
if (this.workerGroupKey != null && !this.workerGroupKey.matches("[a-zA-Z0-9_-]+")) {
|
||||
throw new IllegalArgumentException("The --worker-group option must match the [a-zA-Z0-9_-]+ pattern");
|
||||
}
|
||||
@@ -52,13 +53,6 @@ public class WorkerCommand extends AbstractServerCommand {
|
||||
|
||||
worker.run();
|
||||
|
||||
if (this.workerGroupKey != null) {
|
||||
log.info("Worker started with {} thread(s) in group '{}'", this.thread, this.workerGroupKey);
|
||||
}
|
||||
else {
|
||||
log.info("Worker started with {} thread(s)", this.thread);
|
||||
}
|
||||
|
||||
Await.until(() -> !this.applicationContext.isRunning());
|
||||
|
||||
return 0;
|
||||
|
||||
@@ -2,6 +2,7 @@ package io.kestra.cli.commands.sys;
|
||||
|
||||
import io.kestra.cli.AbstractCommand;
|
||||
import io.kestra.core.models.flows.Flow;
|
||||
import io.kestra.core.models.flows.GenericFlow;
|
||||
import io.kestra.core.repositories.FlowRepositoryInterface;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import jakarta.inject.Inject;
|
||||
@@ -9,6 +10,7 @@ import lombok.extern.slf4j.Slf4j;
|
||||
import picocli.CommandLine;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "reindex",
|
||||
@@ -33,8 +35,8 @@ public class ReindexCommand extends AbstractCommand {
|
||||
List<Flow> allFlow = flowRepository.findAllForAllTenants();
|
||||
allFlow.stream()
|
||||
.map(flow -> flowRepository.findByIdWithSource(flow.getTenantId(), flow.getNamespace(), flow.getId()).orElse(null))
|
||||
.filter(flow -> flow != null)
|
||||
.forEach(flow -> flowRepository.update(flow.toFlow(), flow.toFlow(), flow.getSource(), flow.toFlow()));
|
||||
.filter(Objects::nonNull)
|
||||
.forEach(flow -> flowRepository.update(GenericFlow.of(flow), flow));
|
||||
|
||||
stdOut("Successfully reindex " + allFlow.size() + " flow(s).");
|
||||
}
|
||||
|
||||
@@ -4,7 +4,6 @@ import io.kestra.cli.AbstractValidateCommand;
|
||||
import io.kestra.core.models.templates.Template;
|
||||
import io.kestra.core.models.templates.TemplateEnabled;
|
||||
import io.kestra.core.models.validations.ModelValidator;
|
||||
import io.kestra.core.serializers.YamlParser;
|
||||
import jakarta.inject.Inject;
|
||||
import picocli.CommandLine;
|
||||
|
||||
@@ -16,8 +15,6 @@ import java.util.Collections;
|
||||
)
|
||||
@TemplateEnabled
|
||||
public class TemplateValidateCommand extends AbstractValidateCommand {
|
||||
@Inject
|
||||
private YamlParser yamlParser;
|
||||
|
||||
@Inject
|
||||
private ModelValidator modelValidator;
|
||||
@@ -26,7 +23,6 @@ public class TemplateValidateCommand extends AbstractValidateCommand {
|
||||
public Integer call() throws Exception {
|
||||
return this.call(
|
||||
Template.class,
|
||||
yamlParser,
|
||||
modelValidator,
|
||||
(Object object) -> {
|
||||
Template template = (Template) object;
|
||||
|
||||
@@ -10,7 +10,6 @@ import io.micronaut.http.HttpRequest;
|
||||
import io.micronaut.http.MutableHttpRequest;
|
||||
import io.micronaut.http.client.exceptions.HttpClientResponseException;
|
||||
import io.micronaut.http.client.netty.DefaultHttpClient;
|
||||
import jakarta.inject.Inject;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import picocli.CommandLine;
|
||||
|
||||
@@ -27,8 +26,6 @@ import jakarta.validation.ConstraintViolationException;
|
||||
@Slf4j
|
||||
@TemplateEnabled
|
||||
public class TemplateNamespaceUpdateCommand extends AbstractServiceNamespaceUpdateCommand {
|
||||
@Inject
|
||||
public YamlParser yamlParser;
|
||||
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
@@ -38,7 +35,7 @@ public class TemplateNamespaceUpdateCommand extends AbstractServiceNamespaceUpda
|
||||
List<Template> templates = files
|
||||
.filter(Files::isRegularFile)
|
||||
.filter(YamlParser::isValidExtension)
|
||||
.map(path -> yamlParser.parse(path.toFile(), Template.class))
|
||||
.map(path -> YamlParser.parse(path.toFile(), Template.class))
|
||||
.toList();
|
||||
|
||||
if (templates.isEmpty()) {
|
||||
|
||||
@@ -1,153 +0,0 @@
|
||||
package io.kestra.cli.plugins;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import io.micronaut.context.annotation.Value;
|
||||
import io.micronaut.core.annotation.Nullable;
|
||||
import jakarta.inject.Inject;
|
||||
import jakarta.inject.Singleton;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.maven.repository.internal.MavenRepositorySystemUtils;
|
||||
import org.eclipse.aether.DefaultRepositorySystemSession;
|
||||
import org.eclipse.aether.RepositorySystem;
|
||||
import org.eclipse.aether.RepositorySystemSession;
|
||||
import org.eclipse.aether.artifact.Artifact;
|
||||
import org.eclipse.aether.artifact.DefaultArtifact;
|
||||
import org.eclipse.aether.connector.basic.BasicRepositoryConnectorFactory;
|
||||
import org.eclipse.aether.impl.DefaultServiceLocator;
|
||||
import org.eclipse.aether.repository.LocalRepository;
|
||||
import org.eclipse.aether.repository.RemoteRepository;
|
||||
import org.eclipse.aether.resolution.*;
|
||||
import org.eclipse.aether.spi.connector.RepositoryConnectorFactory;
|
||||
import org.eclipse.aether.spi.connector.transport.TransporterFactory;
|
||||
import org.eclipse.aether.transport.file.FileTransporterFactory;
|
||||
import org.eclipse.aether.transport.http.HttpTransporterFactory;
|
||||
import org.eclipse.aether.util.repository.AuthenticationBuilder;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import java.nio.file.Files;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
@Singleton
|
||||
@Slf4j
|
||||
public class PluginDownloader {
|
||||
private final List<RepositoryConfig> repositoryConfigs;
|
||||
private final RepositorySystem system;
|
||||
private final RepositorySystemSession session;
|
||||
|
||||
@Inject
|
||||
public PluginDownloader(
|
||||
List<RepositoryConfig> repositoryConfigs,
|
||||
@Nullable @Value("${kestra.plugins.local-repository-path}") String localRepositoryPath
|
||||
) {
|
||||
this.repositoryConfigs = repositoryConfigs;
|
||||
this.system = repositorySystem();
|
||||
this.session = repositorySystemSession(system, localRepositoryPath);
|
||||
}
|
||||
|
||||
public void addRepository(RepositoryConfig repositoryConfig) {
|
||||
this.repositoryConfigs.add(repositoryConfig);
|
||||
}
|
||||
|
||||
public List<URL> resolve(List<String> dependencies) throws MalformedURLException, ArtifactResolutionException, VersionRangeResolutionException {
|
||||
List<RemoteRepository> repositories = remoteRepositories();
|
||||
|
||||
List<ArtifactResult> artifactResults = resolveArtifacts(repositories, dependencies);
|
||||
List<URL> localUrls = resolveUrls(artifactResults);
|
||||
log.debug("Resolved Plugin {} with {}", dependencies, localUrls);
|
||||
|
||||
return localUrls;
|
||||
}
|
||||
|
||||
private List<RemoteRepository> remoteRepositories() {
|
||||
return repositoryConfigs
|
||||
.stream()
|
||||
.map(repositoryConfig -> {
|
||||
var build = new RemoteRepository.Builder(
|
||||
repositoryConfig.getId(),
|
||||
"default",
|
||||
repositoryConfig.getUrl()
|
||||
);
|
||||
|
||||
if (repositoryConfig.getBasicAuth() != null) {
|
||||
var authenticationBuilder = new AuthenticationBuilder();
|
||||
authenticationBuilder.addUsername(repositoryConfig.getBasicAuth().getUsername());
|
||||
authenticationBuilder.addPassword(repositoryConfig.getBasicAuth().getPassword());
|
||||
|
||||
build.setAuthentication(authenticationBuilder.build());
|
||||
}
|
||||
|
||||
return build.build();
|
||||
})
|
||||
.toList();
|
||||
}
|
||||
|
||||
private static RepositorySystem repositorySystem() {
|
||||
DefaultServiceLocator locator = MavenRepositorySystemUtils.newServiceLocator();
|
||||
locator.addService(RepositoryConnectorFactory.class, BasicRepositoryConnectorFactory.class);
|
||||
locator.addService(TransporterFactory.class, FileTransporterFactory.class);
|
||||
locator.addService(TransporterFactory.class, HttpTransporterFactory.class);
|
||||
|
||||
return locator.getService(RepositorySystem.class);
|
||||
}
|
||||
|
||||
private RepositorySystemSession repositorySystemSession(RepositorySystem system, String localRepositoryPath) {
|
||||
DefaultRepositorySystemSession session = MavenRepositorySystemUtils.newSession();
|
||||
|
||||
if (localRepositoryPath == null) {
|
||||
try {
|
||||
final String tempDirectory = Files.createTempDirectory(this.getClass().getSimpleName().toLowerCase())
|
||||
.toAbsolutePath()
|
||||
.toString();
|
||||
|
||||
localRepositoryPath = tempDirectory;
|
||||
|
||||
Runtime.getRuntime().addShutdownHook(new Thread(() -> {
|
||||
try {
|
||||
FileUtils.deleteDirectory(new File(tempDirectory));
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}));
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
LocalRepository localRepo = new LocalRepository(localRepositoryPath);
|
||||
session.setLocalRepositoryManager(system.newLocalRepositoryManager(session, localRepo));
|
||||
|
||||
return session;
|
||||
}
|
||||
|
||||
private List<ArtifactResult> resolveArtifacts(List<RemoteRepository> repositories, List<String> dependencies) throws ArtifactResolutionException, VersionRangeResolutionException {
|
||||
List<ArtifactResult> results = new ArrayList<>(dependencies.size());
|
||||
for (String dependency: dependencies) {
|
||||
var artifact = new DefaultArtifact(dependency);
|
||||
var version = system.resolveVersionRange(session, new VersionRangeRequest(artifact, repositories, null));
|
||||
var artifactRequest = new ArtifactRequest(
|
||||
new DefaultArtifact(artifact.getGroupId(), artifact.getArtifactId(), "jar", version.getHighestVersion().toString()),
|
||||
repositories,
|
||||
null
|
||||
);
|
||||
var artifactResult = system.resolveArtifact(session, artifactRequest);
|
||||
results.add(artifactResult);
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
private List<URL> resolveUrls(List<ArtifactResult> artifactResults) throws MalformedURLException {
|
||||
ImmutableList.Builder<URL> urls = ImmutableList.builder();
|
||||
for (ArtifactResult artifactResult : artifactResults) {
|
||||
URL url;
|
||||
url = artifactResult.getArtifact().getFile().toPath().toUri().toURL();
|
||||
urls.add(url);
|
||||
}
|
||||
return urls.build();
|
||||
}
|
||||
}
|
||||
@@ -1,30 +0,0 @@
|
||||
package io.kestra.cli.plugins;
|
||||
|
||||
import io.micronaut.context.annotation.EachProperty;
|
||||
import io.micronaut.context.annotation.Parameter;
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Getter;
|
||||
|
||||
@EachProperty("kestra.plugins.repositories")
|
||||
@Getter
|
||||
@AllArgsConstructor
|
||||
@Builder
|
||||
public class RepositoryConfig {
|
||||
String id;
|
||||
|
||||
String url;
|
||||
|
||||
BasicAuth basicAuth;
|
||||
|
||||
@Getter
|
||||
@AllArgsConstructor
|
||||
public static class BasicAuth {
|
||||
private String username;
|
||||
private String password;
|
||||
}
|
||||
|
||||
public RepositoryConfig(@Parameter String id) {
|
||||
this.id = id;
|
||||
}
|
||||
}
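The @@ -1,30 +0,0 @@ header above shows this RepositoryConfig class being deleted. For context, @EachProperty("kestra.plugins.repositories") bound one bean per entry under that configuration key; the following is a hypothetical sketch of supplying such an entry, assuming Micronaut's relaxed binding of the fields above (the repository id and values are illustrative, not taken from this change).

// Hypothetical sketch: one RepositoryConfig bean per configured repository.
import io.micronaut.context.ApplicationContext;
import java.util.Map;

public class RepositoryConfigSketch {
    public static void main(String[] args) {
        try (ApplicationContext ctx = ApplicationContext.run(Map.of(
                "kestra.plugins.repositories.central.url", "https://repo.maven.apache.org/maven2/",
                "kestra.plugins.repositories.central.basic-auth.username", "user",
                "kestra.plugins.repositories.central.basic-auth.password", "secret"))) {
            // The @Parameter constructor receives the key segment, so id == "central" here.
            ctx.getBeansOfType(RepositoryConfig.class)
                .forEach(r -> System.out.println(r.getId() + " -> " + r.getUrl()));
        }
    }
}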
@@ -1,22 +1,23 @@
|
||||
package io.kestra.cli.services;
|
||||
|
||||
import io.kestra.core.models.flows.Flow;
|
||||
import io.kestra.core.exceptions.FlowProcessingException;
|
||||
import io.kestra.core.models.flows.FlowInterface;
|
||||
import io.kestra.core.models.flows.FlowWithPath;
|
||||
import io.kestra.core.models.flows.FlowWithSource;
|
||||
import io.kestra.core.models.flows.GenericFlow;
|
||||
import io.kestra.core.models.validations.ModelValidator;
|
||||
import io.kestra.core.repositories.FlowRepositoryInterface;
|
||||
import io.kestra.core.serializers.YamlParser;
|
||||
import io.kestra.core.services.FlowListenersInterface;
|
||||
import io.kestra.core.services.PluginDefaultService;
|
||||
import io.micronaut.context.annotation.Requires;
|
||||
import io.micronaut.context.annotation.Value;
|
||||
import io.micronaut.scheduling.io.watch.FileWatchConfiguration;
|
||||
import jakarta.inject.Inject;
|
||||
import jakarta.annotation.Nullable;
|
||||
import jakarta.inject.Singleton;
|
||||
import jakarta.validation.ConstraintViolationException;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
import javax.annotation.Nullable;
|
||||
import java.io.IOException;
|
||||
import java.nio.charset.Charset;
|
||||
import java.nio.file.*;
|
||||
@@ -40,9 +41,6 @@ public class FileChangedEventListener {
|
||||
@Inject
|
||||
private PluginDefaultService pluginDefaultService;
|
||||
|
||||
@Inject
|
||||
private YamlParser yamlParser;
|
||||
|
||||
@Inject
|
||||
private ModelValidator modelValidator;
|
||||
|
||||
@@ -59,7 +57,6 @@ public class FileChangedEventListener {
|
||||
|
||||
private boolean isStarted = false;
|
||||
|
||||
|
||||
@Inject
|
||||
public FileChangedEventListener(@Nullable FileWatchConfiguration fileWatchConfiguration, @Nullable WatchService watchService) {
|
||||
this.fileWatchConfiguration = fileWatchConfiguration;
|
||||
@@ -68,7 +65,7 @@ public class FileChangedEventListener {
|
||||
|
||||
public void startListeningFromConfig() throws IOException, InterruptedException {
|
||||
if (fileWatchConfiguration != null && fileWatchConfiguration.isEnabled()) {
|
||||
this.flowFilesManager = new LocalFlowFileWatcher(flowRepositoryInterface, pluginDefaultService);
|
||||
this.flowFilesManager = new LocalFlowFileWatcher(flowRepositoryInterface);
|
||||
List<Path> paths = fileWatchConfiguration.getPaths();
|
||||
this.setup(paths);
|
||||
|
||||
@@ -76,7 +73,7 @@ public class FileChangedEventListener {
|
||||
// Init existing flows not already in files
|
||||
flowListeners.listen(flows -> {
|
||||
if (!isStarted) {
|
||||
for (FlowWithSource flow : flows) {
|
||||
for (FlowInterface flow : flows) {
|
||||
if (this.flows.stream().noneMatch(flowWithPath -> flowWithPath.uidWithoutRevision().equals(flow.uidWithoutRevision()))) {
|
||||
flowToFile(flow, this.buildPath(flow));
|
||||
this.flows.add(FlowWithPath.of(flow, this.buildPath(flow).toString()));
|
||||
@@ -137,7 +134,7 @@ public class FileChangedEventListener {
|
||||
try {
|
||||
String content = Files.readString(filePath, Charset.defaultCharset());
|
||||
|
||||
Optional<Flow> flow = parseFlow(content, entry);
|
||||
Optional<FlowWithSource> flow = parseFlow(content, entry);
|
||||
if (flow.isPresent()) {
|
||||
if (kind == StandardWatchEventKinds.ENTRY_MODIFY) {
|
||||
// Check if we already have a file with the given path
|
||||
@@ -156,7 +153,7 @@ public class FileChangedEventListener {
|
||||
flows.add(FlowWithPath.of(flow.get(), filePath.toString()));
|
||||
}
|
||||
|
||||
flowFilesManager.createOrUpdateFlow(flow.get(), content);
|
||||
flowFilesManager.createOrUpdateFlow(GenericFlow.fromYaml(tenantId, content));
|
||||
log.info("Flow {} from file {} has been created or modified", flow.get().getId(), entry);
|
||||
}
|
||||
|
||||
@@ -207,11 +204,11 @@ public class FileChangedEventListener {
|
||||
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
|
||||
if (file.toString().endsWith(".yml") || file.toString().endsWith(".yaml")) {
|
||||
String content = Files.readString(file, Charset.defaultCharset());
|
||||
Optional<Flow> flow = parseFlow(content, file);
|
||||
Optional<FlowWithSource> flow = parseFlow(content, file);
|
||||
|
||||
if (flow.isPresent() && flows.stream().noneMatch(flowWithPath -> flowWithPath.uidWithoutRevision().equals(flow.get().uidWithoutRevision()))) {
|
||||
flows.add(FlowWithPath.of(flow.get(), file.toString()));
|
||||
flowFilesManager.createOrUpdateFlow(flow.get(), content);
|
||||
flowFilesManager.createOrUpdateFlow(GenericFlow.fromYaml(tenantId, content));
|
||||
}
|
||||
}
|
||||
return FileVisitResult.CONTINUE;
|
||||
@@ -223,27 +220,25 @@ public class FileChangedEventListener {
|
||||
}
|
||||
}
|
||||
|
||||
private void flowToFile(FlowWithSource flow, Path path) {
|
||||
private void flowToFile(FlowInterface flow, Path path) {
|
||||
Path defaultPath = path != null ? path : this.buildPath(flow);
|
||||
|
||||
try {
|
||||
Files.writeString(defaultPath, flow.getSource());
|
||||
Files.writeString(defaultPath, flow.source());
|
||||
log.info("Flow {} has been written to file {}", flow.getId(), defaultPath);
|
||||
} catch (IOException e) {
|
||||
log.error("Error writing file: {}", defaultPath, e);
|
||||
}
|
||||
}
|
||||
|
||||
private Optional<Flow> parseFlow(String content, Path entry) {
|
||||
private Optional<FlowWithSource> parseFlow(String content, Path entry) {
|
||||
try {
|
||||
Flow flow = yamlParser.parse(content, Flow.class);
|
||||
FlowWithSource withPluginDefault = pluginDefaultService.injectDefaults(FlowWithSource.of(flow, content));
|
||||
modelValidator.validate(withPluginDefault);
|
||||
FlowWithSource flow = pluginDefaultService.parseFlowWithAllDefaults(tenantId, content, false);
|
||||
modelValidator.validate(flow);
|
||||
return Optional.of(flow);
|
||||
} catch (ConstraintViolationException e) {
|
||||
} catch (ConstraintViolationException | FlowProcessingException e) {
|
||||
log.warn("Error while parsing flow: {}", entry, e);
|
||||
}
|
||||
|
||||
return Optional.empty();
|
||||
}
|
||||
|
||||
@@ -259,7 +254,7 @@ public class FileChangedEventListener {
|
||||
}
|
||||
}
|
||||
|
||||
private Path buildPath(Flow flow) {
|
||||
private Path buildPath(FlowInterface flow) {
|
||||
return fileWatchConfiguration.getPaths().getFirst().resolve(flow.uidWithoutRevision() + ".yml");
|
||||
}
|
||||
}
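FileChangedEventListener builds on the JDK's java.nio.file WatchService (see the WatchService and StandardWatchEventKinds references in the diff above). A minimal, self-contained sketch of that underlying mechanism, with an illustrative directory path:

// Minimal sketch of the watch loop the listener relies on; the path is an assumption.
import java.nio.file.*;

public class WatchSketch {
    public static void main(String[] args) throws Exception {
        Path dir = Path.of("/tmp/flows");
        try (WatchService watchService = FileSystems.getDefault().newWatchService()) {
            dir.register(watchService,
                StandardWatchEventKinds.ENTRY_CREATE,
                StandardWatchEventKinds.ENTRY_MODIFY,
                StandardWatchEventKinds.ENTRY_DELETE);

            while (true) {
                WatchKey key = watchService.take(); // blocks until at least one event arrives
                for (WatchEvent<?> event : key.pollEvents()) {
                    Path changed = dir.resolve((Path) event.context());
                    // Only YAML files are of interest, mirroring the listener's filter.
                    if (changed.toString().endsWith(".yml") || changed.toString().endsWith(".yaml")) {
                        System.out.println(event.kind() + " -> " + changed);
                    }
                }
                if (!key.reset()) {
                    break; // the watched directory is no longer accessible
                }
            }
        }
    }
}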
@@ -1,11 +1,11 @@
|
||||
package io.kestra.cli.services;
|
||||
|
||||
import io.kestra.core.models.flows.Flow;
|
||||
import io.kestra.core.models.flows.FlowWithSource;
|
||||
import io.kestra.core.models.flows.GenericFlow;
|
||||
|
||||
public interface FlowFilesManager {
|
||||
|
||||
FlowWithSource createOrUpdateFlow(Flow flow, String content);
|
||||
FlowWithSource createOrUpdateFlow(GenericFlow flow);
|
||||
|
||||
void deleteFlow(FlowWithSource toDelete);
|
||||
|
||||
|
||||
@@ -1,27 +1,23 @@
|
||||
package io.kestra.cli.services;
|
||||
|
||||
import io.kestra.core.models.flows.Flow;
|
||||
import io.kestra.core.models.flows.FlowWithSource;
|
||||
import io.kestra.core.models.flows.GenericFlow;
|
||||
import io.kestra.core.repositories.FlowRepositoryInterface;
|
||||
import io.kestra.core.services.PluginDefaultService;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
@Slf4j
|
||||
public class LocalFlowFileWatcher implements FlowFilesManager {
|
||||
private final FlowRepositoryInterface flowRepository;
|
||||
private final PluginDefaultService pluginDefaultService;
|
||||
|
||||
public LocalFlowFileWatcher(FlowRepositoryInterface flowRepository, PluginDefaultService pluginDefaultService) {
|
||||
public LocalFlowFileWatcher(FlowRepositoryInterface flowRepository) {
|
||||
this.flowRepository = flowRepository;
|
||||
this.pluginDefaultService = pluginDefaultService;
|
||||
}
|
||||
|
||||
@Override
|
||||
public FlowWithSource createOrUpdateFlow(Flow flow, String content) {
|
||||
FlowWithSource withDefault = pluginDefaultService.injectDefaults(FlowWithSource.of(flow, content));
|
||||
public FlowWithSource createOrUpdateFlow(final GenericFlow flow) {
|
||||
return flowRepository.findById(null, flow.getNamespace(), flow.getId())
|
||||
.map(previous -> flowRepository.update(flow, previous, content, withDefault))
|
||||
.orElseGet(() -> flowRepository.create(flow, content, withDefault));
|
||||
.map(previous -> flowRepository.update(flow, previous))
|
||||
.orElseGet(() -> flowRepository.create(flow));
|
||||
}
|
||||
|
||||
@Override
|
||||
|
||||
@@ -1,6 +1,9 @@
|
||||
micronaut:
|
||||
application:
|
||||
name: kestra
|
||||
# Disable Micronaut OpenTelemetry
|
||||
otel:
|
||||
enabled: false
|
||||
router:
|
||||
static-resources:
|
||||
swagger:
|
||||
@@ -165,7 +168,7 @@ kestra:
|
||||
values:
|
||||
recoverMissedSchedules: ALL
|
||||
variables:
|
||||
env-vars-prefix: KESTRA_
|
||||
env-vars-prefix: ENV_
|
||||
cache-enabled: true
|
||||
cache-size: 1000
|
||||
|
||||
|
||||
@@ -13,8 +13,7 @@ import picocli.CommandLine;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.*;
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
|
||||
class AppTest {
|
||||
@@ -26,7 +25,7 @@ class AppTest {
|
||||
try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, Environment.TEST)) {
|
||||
PicocliRunner.call(App.class, ctx, "--help");
|
||||
|
||||
assertThat(out.toString(), containsString("kestra"));
|
||||
assertThat(out.toString()).contains("kestra");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -42,7 +41,7 @@ class AppTest {
|
||||
new CommandLine(App.class, new MicronautFactory(ctx)).execute(args);
|
||||
|
||||
assertTrue(ctx.getProperty("kestra.server-type", ServerType.class).isEmpty());
|
||||
assertThat(out.toString(), startsWith("Usage: kestra server " + serverType));
|
||||
assertThat(out.toString()).startsWith("Usage: kestra server " + serverType);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -56,9 +55,9 @@ class AppTest {
|
||||
try (ApplicationContext ctx = App.applicationContext(App.class, argsWithMissingParams)) {
|
||||
new CommandLine(App.class, new MicronautFactory(ctx)).execute(argsWithMissingParams);
|
||||
|
||||
assertThat(out.toString(), startsWith("Missing required parameters: "));
|
||||
assertThat(out.toString(), containsString("Usage: kestra flow namespace update "));
|
||||
assertThat(out.toString(), not(containsString("MissingParameterException: ")));
|
||||
assertThat(out.toString()).startsWith("Missing required parameters: ");
|
||||
assertThat(out.toString()).contains("Usage: kestra flow namespace update ");
|
||||
assertThat(out.toString()).doesNotContain("MissingParameterException: ");
|
||||
}
|
||||
}
|
||||
}
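The test diffs that follow all apply the same Hamcrest-to-AssertJ conversion; a small illustrative sketch of the pattern (the values are made up):

// Illustrative only: the assertion-style migration used throughout these tests.
import static org.assertj.core.api.Assertions.assertThat;

class AssertStyleSketch {
    void sketch() {
        int call = 0;
        String out = "Usage: kestra";

        // Hamcrest: assertThat(call, is(0));
        assertThat(call).isZero();

        // Hamcrest: assertThat(out, containsString("kestra"));
        assertThat(out).contains("kestra");

        // Hamcrest: assertThat(out, startsWith("Usage"));
        assertThat(out).startsWith("Usage");

        // Hamcrest: assertThat(out, not(containsString("Exception")));
        assertThat(out).doesNotContain("Exception");
    }
}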
@@ -8,8 +8,7 @@ import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
class ServerCommandValidatorTest {
|
||||
|
||||
@@ -40,8 +39,8 @@ class ServerCommandValidatorTest {
|
||||
.start()
|
||||
);
|
||||
final Throwable rootException = getRootException(exception);
|
||||
assertThat(rootException.getClass(), is(ServerCommandValidator.ServerCommandException.class));
|
||||
assertThat(rootException.getMessage(), is("Incomplete server configuration - missing required properties"));
|
||||
assertThat(rootException.getClass()).isEqualTo(ServerCommandValidator.ServerCommandException.class);
|
||||
assertThat(rootException.getMessage()).isEqualTo("Incomplete server configuration - missing required properties");
|
||||
}
|
||||
|
||||
private Throwable getRootException(Throwable exception) {
|
||||
|
||||
@@ -8,8 +8,7 @@ import org.junit.jupiter.api.Test;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.core.StringContains.containsString;
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
class ConfigPropertiesCommandTest {
|
||||
@Test
|
||||
@@ -20,8 +19,8 @@ class ConfigPropertiesCommandTest {
|
||||
try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, Environment.TEST)) {
|
||||
PicocliRunner.call(ConfigPropertiesCommand.class, ctx);
|
||||
|
||||
assertThat(out.toString(), containsString("activeEnvironments:"));
|
||||
assertThat(out.toString(), containsString("- test"));
|
||||
assertThat(out.toString()).contains("activeEnvironments:");
|
||||
assertThat(out.toString()).contains("- test");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -11,9 +11,7 @@ import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
import java.net.URL;
|
||||
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.core.Is.is;
|
||||
import static org.hamcrest.core.StringContains.containsString;
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
class FlowCreateOrUpdateCommandTest {
|
||||
@RetryingTest(5) // flaky on CI but cannot be reproduced even with 100 repetitions
|
||||
@@ -38,7 +36,7 @@ class FlowCreateOrUpdateCommandTest {
|
||||
};
|
||||
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
|
||||
|
||||
assertThat(out.toString(), containsString("4 flow(s)"));
|
||||
assertThat(out.toString()).contains("4 flow(s)");
|
||||
out.reset();
|
||||
|
||||
args = new String[]{
|
||||
@@ -53,7 +51,7 @@ class FlowCreateOrUpdateCommandTest {
|
||||
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
|
||||
|
||||
// 2 delete + 1 update
|
||||
assertThat(out.toString(), containsString("4 flow(s)"));
|
||||
assertThat(out.toString()).contains("4 flow(s)");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -80,7 +78,7 @@ class FlowCreateOrUpdateCommandTest {
|
||||
};
|
||||
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
|
||||
|
||||
assertThat(out.toString(), containsString("4 flow(s)"));
|
||||
assertThat(out.toString()).contains("4 flow(s)");
|
||||
out.reset();
|
||||
|
||||
// no "delete" arg should behave as no-delete
|
||||
@@ -93,7 +91,7 @@ class FlowCreateOrUpdateCommandTest {
|
||||
};
|
||||
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
|
||||
|
||||
assertThat(out.toString(), containsString("1 flow(s)"));
|
||||
assertThat(out.toString()).contains("1 flow(s)");
|
||||
out.reset();
|
||||
|
||||
args = new String[]{
|
||||
@@ -106,7 +104,7 @@ class FlowCreateOrUpdateCommandTest {
|
||||
};
|
||||
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
|
||||
|
||||
assertThat(out.toString(), containsString("1 flow(s)"));
|
||||
assertThat(out.toString()).contains("1 flow(s)");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -131,8 +129,8 @@ class FlowCreateOrUpdateCommandTest {
|
||||
};
|
||||
Integer call = PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
|
||||
|
||||
assertThat(call, is(0));
|
||||
assertThat(out.toString(), containsString("1 flow(s)"));
|
||||
assertThat(call).isZero();
|
||||
assertThat(out.toString()).contains("1 flow(s)");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -9,9 +9,7 @@ import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
import java.net.URL;
|
||||
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.core.Is.is;
|
||||
import static org.hamcrest.core.StringContains.containsString;
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
class FlowDotCommandTest {
|
||||
@Test
|
||||
@@ -26,8 +24,8 @@ class FlowDotCommandTest {
|
||||
};
|
||||
Integer call = PicocliRunner.call(FlowDotCommand.class, ctx, args);
|
||||
|
||||
assertThat(call, is(0));
|
||||
assertThat(out.toString(), containsString("\"root.date\"[shape=box];"));
|
||||
assertThat(call).isZero();
|
||||
assertThat(out.toString()).contains("\"root.date\"[shape=box];");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -7,8 +7,7 @@ import org.junit.jupiter.api.Test;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.core.Is.is;
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
class FlowExpandCommandTest {
|
||||
@SuppressWarnings("deprecation")
|
||||
@@ -23,22 +22,20 @@ class FlowExpandCommandTest {
|
||||
};
|
||||
Integer call = PicocliRunner.call(FlowExpandCommand.class, ctx, args);
|
||||
|
||||
assertThat(call, is(0));
|
||||
assertThat(out.toString(), is(
|
||||
"id: include\n" +
|
||||
"namespace: io.kestra.cli\n" +
|
||||
"\n" +
|
||||
"# The list of tasks\n" +
|
||||
"tasks:\n" +
|
||||
"- id: t1\n" +
|
||||
" type: io.kestra.plugin.core.debug.Return\n" +
|
||||
" format: \"Lorem ipsum dolor sit amet\"\n" +
|
||||
"- id: t2\n" +
|
||||
" type: io.kestra.plugin.core.debug.Return\n" +
|
||||
" format: |\n" +
|
||||
" Lorem ipsum dolor sit amet\n" +
|
||||
" Lorem ipsum dolor sit amet\n"
|
||||
));
|
||||
assertThat(call).isZero();
|
||||
assertThat(out.toString()).isEqualTo("id: include\n" +
|
||||
"namespace: io.kestra.cli\n" +
|
||||
"\n" +
|
||||
"# The list of tasks\n" +
|
||||
"tasks:\n" +
|
||||
"- id: t1\n" +
|
||||
" type: io.kestra.plugin.core.debug.Return\n" +
|
||||
" format: \"Lorem ipsum dolor sit amet\"\n" +
|
||||
"- id: t2\n" +
|
||||
" type: io.kestra.plugin.core.debug.Return\n" +
|
||||
" format: |\n" +
|
||||
" Lorem ipsum dolor sit amet\n" +
|
||||
" Lorem ipsum dolor sit amet\n");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -14,10 +14,7 @@ import java.io.PrintStream;
|
||||
import java.net.URL;
|
||||
import java.util.zip.ZipFile;
|
||||
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
|
||||
import static org.hamcrest.core.Is.is;
|
||||
import static org.hamcrest.core.StringContains.containsString;
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
class FlowExportCommandTest {
|
||||
@Test
|
||||
@@ -32,6 +29,8 @@ class FlowExportCommandTest {
|
||||
|
||||
// we use the update command to add flows to extract
|
||||
String[] updateArgs = {
|
||||
"--plugins",
|
||||
"/tmp", // pass this arg because it can cause failure
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
@@ -40,10 +39,12 @@ class FlowExportCommandTest {
|
||||
directory.getPath(),
|
||||
};
|
||||
PicocliRunner.call(FlowNamespaceUpdateCommand.class, ctx, updateArgs);
|
||||
assertThat(out.toString(), containsString("3 flow(s)"));
|
||||
assertThat(out.toString()).contains("3 flow(s)");
|
||||
|
||||
// then we export them
|
||||
String[] exportArgs = {
|
||||
"--plugins",
|
||||
"/tmp", // pass this arg because it can cause failure
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
@@ -54,11 +55,11 @@ class FlowExportCommandTest {
|
||||
};
|
||||
PicocliRunner.call(FlowExportCommand.class, ctx, exportArgs);
|
||||
File file = new File("/tmp/flows.zip");
|
||||
assertThat(file.exists(), is(true));
|
||||
assertThat(file.exists()).isTrue();
|
||||
ZipFile zipFile = new ZipFile(file);
|
||||
|
||||
// When launching the test in a suite, there are 4 flows, but when launching it individually there are only 3
|
||||
assertThat(zipFile.stream().count(), greaterThanOrEqualTo(3L));
|
||||
assertThat(zipFile.stream().count()).isGreaterThanOrEqualTo(3L);
|
||||
|
||||
file.delete();
|
||||
}
|
||||
|
||||
@@ -10,9 +10,7 @@ import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
import java.net.URL;
|
||||
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.core.Is.is;
|
||||
import static org.hamcrest.core.StringContains.containsString;
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
class FlowUpdatesCommandTest {
|
||||
@Test
|
||||
@@ -28,6 +26,8 @@ class FlowUpdatesCommandTest {
|
||||
embeddedServer.start();
|
||||
|
||||
String[] args = {
|
||||
"--plugins",
|
||||
"/tmp", // pass this arg because it can cause failure
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
@@ -37,10 +37,12 @@ class FlowUpdatesCommandTest {
|
||||
};
|
||||
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
|
||||
|
||||
assertThat(out.toString(), containsString("successfully updated !"));
|
||||
assertThat(out.toString()).contains("successfully updated !");
|
||||
out.reset();
|
||||
|
||||
args = new String[]{
|
||||
"--plugins",
|
||||
"/tmp", // pass this arg because it can cause failure
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
@@ -52,7 +54,7 @@ class FlowUpdatesCommandTest {
|
||||
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
|
||||
|
||||
// 2 delete + 1 update
|
||||
assertThat(out.toString(), containsString("successfully updated !"));
|
||||
assertThat(out.toString()).contains("successfully updated !");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -70,6 +72,8 @@ class FlowUpdatesCommandTest {
|
||||
embeddedServer.start();
|
||||
|
||||
String[] args = {
|
||||
"--plugins",
|
||||
"/tmp", // pass this arg because it can cause failure
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
@@ -79,11 +83,13 @@ class FlowUpdatesCommandTest {
|
||||
};
|
||||
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
|
||||
|
||||
assertThat(out.toString(), containsString("4 flow(s)"));
|
||||
assertThat(out.toString()).contains("4 flow(s)");
|
||||
out.reset();
|
||||
|
||||
// no "delete" arg should behave as no-delete
|
||||
args = new String[]{
|
||||
"--plugins",
|
||||
"/tmp", // pass this arg because it can cause failure
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
@@ -92,10 +98,12 @@ class FlowUpdatesCommandTest {
|
||||
};
|
||||
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
|
||||
|
||||
assertThat(out.toString(), containsString("1 flow(s)"));
|
||||
assertThat(out.toString()).contains("1 flow(s)");
|
||||
out.reset();
|
||||
|
||||
args = new String[]{
|
||||
"--plugins",
|
||||
"/tmp", // pass this arg because it can cause failure
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
@@ -105,7 +113,36 @@ class FlowUpdatesCommandTest {
|
||||
};
|
||||
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
|
||||
|
||||
assertThat(out.toString(), containsString("1 flow(s)"));
|
||||
assertThat(out.toString()).contains("1 flow(s)");
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void invalidWithNamespace() {
|
||||
URL directory = FlowUpdatesCommandTest.class.getClassLoader().getResource("flows");
|
||||
ByteArrayOutputStream out = new ByteArrayOutputStream();
|
||||
System.setErr(new PrintStream(out));
|
||||
|
||||
try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, Environment.TEST)) {
|
||||
|
||||
EmbeddedServer embeddedServer = ctx.getBean(EmbeddedServer.class);
|
||||
embeddedServer.start();
|
||||
|
||||
String[] args = {
|
||||
"--plugins",
|
||||
"/tmp", // pass this arg because it can cause failure
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
"myuser:pass:word",
|
||||
"--namespace",
|
||||
"io.kestra.cli",
|
||||
"--delete",
|
||||
directory.getPath(),
|
||||
};
|
||||
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
|
||||
|
||||
assertThat(out.toString()).contains("Invalid entity: flow.namespace: io.kestra.outsider_quattro_-1 - flow namespace is invalid");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -121,6 +158,8 @@ class FlowUpdatesCommandTest {
|
||||
embeddedServer.start();
|
||||
|
||||
String[] args = {
|
||||
"--plugins",
|
||||
"/tmp", // pass this arg because it can cause failure
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
@@ -130,8 +169,8 @@ class FlowUpdatesCommandTest {
|
||||
};
|
||||
Integer call = PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
|
||||
|
||||
assertThat(call, is(0));
|
||||
assertThat(out.toString(), containsString("1 flow(s)"));
|
||||
assertThat(call).isZero();
|
||||
assertThat(out.toString()).contains("1 flow(s)");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,9 +7,7 @@ import org.junit.jupiter.api.Test;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.core.Is.is;
|
||||
import static org.hamcrest.core.StringContains.containsString;
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
class FlowValidateCommandTest {
|
||||
@Test
|
||||
@@ -24,8 +22,8 @@ class FlowValidateCommandTest {
|
||||
};
|
||||
Integer call = PicocliRunner.call(FlowValidateCommand.class, ctx, args);
|
||||
|
||||
assertThat(call, is(0));
|
||||
assertThat(out.toString(), containsString("✓ - io.kestra.cli / include"));
|
||||
assertThat(call).isZero();
|
||||
assertThat(out.toString()).contains("✓ - io.kestra.cli / include");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -41,10 +39,10 @@ class FlowValidateCommandTest {
|
||||
};
|
||||
Integer call = PicocliRunner.call(FlowValidateCommand.class, ctx, args);
|
||||
|
||||
assertThat(call, is(0));
|
||||
assertThat(out.toString(), containsString("✓ - system / warning"));
|
||||
assertThat(out.toString(), containsString("⚠ - tasks[0] is deprecated"));
|
||||
assertThat(out.toString(), containsString("ℹ - io.kestra.core.tasks.log.Log is replaced by io.kestra.plugin.core.log.Log"));
|
||||
assertThat(call).isZero();
|
||||
assertThat(out.toString()).contains("✓ - system / warning");
|
||||
assertThat(out.toString()).contains("⚠ - tasks[0] is deprecated");
|
||||
assertThat(out.toString()).contains("ℹ - io.kestra.core.tasks.log.Log is replaced by io.kestra.plugin.core.log.Log");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -10,8 +10,7 @@ import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
import java.net.URL;
|
||||
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.core.StringContains.containsString;
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
public class SingleFlowCommandsTest {
|
||||
|
||||
@@ -37,7 +36,7 @@ public class SingleFlowCommandsTest {
|
||||
};
|
||||
PicocliRunner.call(FlowDeleteCommand.class, ctx, deleteArgs);
|
||||
|
||||
assertThat(out.toString(), containsString("Flow successfully deleted !"));
|
||||
assertThat(out.toString()).contains("Flow successfully deleted !");
|
||||
out.reset();
|
||||
|
||||
String[] createArgs = {
|
||||
@@ -49,7 +48,7 @@ public class SingleFlowCommandsTest {
|
||||
};
|
||||
PicocliRunner.call(FlowCreateCommand.class, ctx, createArgs);
|
||||
|
||||
assertThat(out.toString(), containsString("Flow successfully created !"));
|
||||
assertThat(out.toString()).contains("Flow successfully created !");
|
||||
|
||||
|
||||
out.reset();
String[] updateArgs = {
|
||||
@@ -63,7 +62,7 @@ public class SingleFlowCommandsTest {
|
||||
};
|
||||
PicocliRunner.call(FlowUpdateCommand.class, ctx, updateArgs);
|
||||
|
||||
assertThat(out.toString(), containsString("Flow successfully updated !"));
|
||||
assertThat(out.toString()).contains("Flow successfully updated !");
|
||||
out.reset();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -10,9 +10,7 @@ import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
import java.net.URL;
|
||||
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.core.Is.is;
|
||||
import static org.hamcrest.core.StringContains.containsString;
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
class TemplateValidateCommandTest {
|
||||
@Test
|
||||
@@ -28,9 +26,9 @@ class TemplateValidateCommandTest {
|
||||
};
|
||||
Integer call = PicocliRunner.call(FlowValidateCommand.class, ctx, args);
|
||||
|
||||
assertThat(call, is(1));
|
||||
assertThat(out.toString(), containsString("Unable to parse flow"));
|
||||
assertThat(out.toString(), containsString("must not be empty"));
|
||||
assertThat(call).isEqualTo(1);
|
||||
assertThat(out.toString()).contains("Unable to parse flow");
|
||||
assertThat(out.toString()).contains("must not be empty");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -46,6 +44,8 @@ class TemplateValidateCommandTest {
|
||||
embeddedServer.start();
|
||||
|
||||
String[] args = {
|
||||
"--plugins",
|
||||
"/tmp", // pass this arg because it can cause failure
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
@@ -54,9 +54,9 @@ class TemplateValidateCommandTest {
|
||||
};
|
||||
Integer call = PicocliRunner.call(FlowValidateCommand.class, ctx, args);
|
||||
|
||||
assertThat(call, is(1));
|
||||
assertThat(out.toString(), containsString("Unable to parse flow"));
|
||||
assertThat(out.toString(), containsString("must not be empty"));
|
||||
assertThat(call).isEqualTo(1);
|
||||
assertThat(out.toString()).contains("Unable to parse flow");
|
||||
assertThat(out.toString()).contains("must not be empty");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -7,9 +7,7 @@ import org.junit.jupiter.api.Test;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.core.Is.is;
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
class FlowNamespaceCommandTest {
|
||||
@Test
|
||||
@@ -21,8 +19,8 @@ class FlowNamespaceCommandTest {
|
||||
String[] args = {};
|
||||
Integer call = PicocliRunner.call(FlowNamespaceCommand.class, ctx, args);
|
||||
|
||||
assertThat(call, is(0));
|
||||
assertThat(out.toString(), containsString("Usage: kestra flow namespace"));
|
||||
assertThat(call).isZero();
|
||||
assertThat(out.toString()).contains("Usage: kestra flow namespace");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -10,10 +10,7 @@ import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
import java.net.URL;
|
||||
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.not;
|
||||
import static org.hamcrest.core.StringContains.containsString;
|
||||
import static org.hamcrest.core.Is.is;
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
class FlowNamespaceUpdateCommandTest {
|
||||
@Test
|
||||
@@ -39,7 +36,7 @@ class FlowNamespaceUpdateCommandTest {
|
||||
};
|
||||
PicocliRunner.call(FlowNamespaceUpdateCommand.class, ctx, args);
|
||||
|
||||
assertThat(out.toString(), containsString("namespace 'io.kestra.cli' successfully updated"));
|
||||
assertThat(out.toString()).contains("namespace 'io.kestra.cli' successfully updated");
|
||||
out.reset();
|
||||
|
||||
args = new String[]{
|
||||
@@ -55,7 +52,7 @@ class FlowNamespaceUpdateCommandTest {
|
||||
PicocliRunner.call(FlowNamespaceUpdateCommand.class, ctx, args);
|
||||
|
||||
// 2 delete + 1 update
|
||||
assertThat(out.toString(), containsString("namespace 'io.kestra.cli' successfully updated"));
|
||||
assertThat(out.toString()).contains("namespace 'io.kestra.cli' successfully updated");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -81,9 +78,9 @@ class FlowNamespaceUpdateCommandTest {
|
||||
};
|
||||
Integer call = PicocliRunner.call(FlowNamespaceUpdateCommand.class, ctx, args);
|
||||
|
||||
assertThat(call, is(1));
|
||||
assertThat(out.toString(), containsString("Unable to parse flows"));
|
||||
assertThat(out.toString(), containsString("must not be empty"));
|
||||
assertThat(call).isEqualTo(1);
|
||||
assertThat(out.toString()).contains("Unable to parse flows");
|
||||
assertThat(out.toString()).contains("must not be empty");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -111,7 +108,7 @@ class FlowNamespaceUpdateCommandTest {
|
||||
};
|
||||
PicocliRunner.call(FlowNamespaceUpdateCommand.class, ctx, args);
|
||||
|
||||
assertThat(out.toString(), containsString("3 flow(s)"));
|
||||
assertThat(out.toString()).contains("3 flow(s)");
|
||||
out.reset();
|
||||
|
||||
// no "delete" arg should behave as no-delete
|
||||
@@ -125,7 +122,7 @@ class FlowNamespaceUpdateCommandTest {
|
||||
};
|
||||
PicocliRunner.call(FlowNamespaceUpdateCommand.class, ctx, args);
|
||||
|
||||
assertThat(out.toString(), containsString("1 flow(s)"));
|
||||
assertThat(out.toString()).contains("1 flow(s)");
|
||||
out.reset();
|
||||
|
||||
args = new String[]{
|
||||
@@ -139,7 +136,7 @@ class FlowNamespaceUpdateCommandTest {
|
||||
};
|
||||
PicocliRunner.call(FlowNamespaceUpdateCommand.class, ctx, args);
|
||||
|
||||
assertThat(out.toString(), containsString("1 flow(s)"));
|
||||
assertThat(out.toString()).contains("1 flow(s)");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -165,8 +162,8 @@ class FlowNamespaceUpdateCommandTest {
|
||||
};
|
||||
Integer call = PicocliRunner.call(FlowNamespaceUpdateCommand.class, ctx, args);
|
||||
|
||||
assertThat(call, is(0));
|
||||
assertThat(out.toString(), containsString("1 flow(s)"));
|
||||
assertThat(call).isZero();
|
||||
assertThat(out.toString()).contains("1 flow(s)");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -195,8 +192,8 @@ class FlowNamespaceUpdateCommandTest {
|
||||
};
|
||||
PicocliRunner.call(FlowNamespaceUpdateCommand.class, ctx, args);
|
||||
|
||||
assertThat(out.toString(), containsString("io.kestra.override"));
|
||||
assertThat(out.toString(), not(containsString("io.kestra.cli")));
|
||||
assertThat(out.toString()).contains("io.kestra.override");
|
||||
assertThat(out.toString()).doesNotContain("io.kestra.cli");
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,9 +7,7 @@ import org.junit.jupiter.api.Test;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.core.Is.is;
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
class NamespaceCommandTest {
|
||||
@Test
|
||||
@@ -21,8 +19,8 @@ class NamespaceCommandTest {
|
||||
String[] args = {};
|
||||
Integer call = PicocliRunner.call(NamespaceCommand.class, ctx, args);
|
||||
|
||||
assertThat(call, is(0));
|
||||
assertThat(out.toString(), containsString("Usage: kestra namespace"));
|
||||
assertThat(call).isZero();
|
||||
assertThat(out.toString()).contains("Usage: kestra namespace");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,9 +7,7 @@ import org.junit.jupiter.api.Test;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.core.Is.is;
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
class NamespaceFilesCommandTest {
|
||||
@Test
|
||||
@@ -21,8 +19,8 @@ class NamespaceFilesCommandTest {
|
||||
String[] args = {};
|
||||
Integer call = PicocliRunner.call(NamespaceFilesCommand.class, ctx, args);
|
||||
|
||||
assertThat(call, is(0));
|
||||
assertThat(out.toString(), containsString("Usage: kestra namespace files"));
|
||||
assertThat(call).isZero();
|
||||
assertThat(out.toString()).contains("Usage: kestra namespace files");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -14,8 +14,8 @@ import java.net.URISyntaxException;
|
||||
import java.net.URL;
|
||||
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.not;
|
||||
import static org.hamcrest.core.StringContains.containsString;
|
||||
|
||||
class NamespaceFilesUpdateCommandTest {
|
||||
@Test
|
||||
@@ -31,6 +31,8 @@ class NamespaceFilesUpdateCommandTest {
|
||||
|
||||
String to = "/some/directory";
|
||||
String[] args = {
|
||||
"--plugins",
|
||||
"/tmp", // pass this arg because it can cause failure
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
@@ -61,6 +63,8 @@ class NamespaceFilesUpdateCommandTest {
|
||||
embeddedServer.start();
|
||||
|
||||
String[] args = {
|
||||
"--plugins",
|
||||
"/tmp", // pass this arg because it can cause failure
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
@@ -90,6 +94,8 @@ class NamespaceFilesUpdateCommandTest {
|
||||
embeddedServer.start();
|
||||
|
||||
String[] args = {
|
||||
"--plugins",
|
||||
"/tmp", // pass this arg because it can cause failure
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
|
||||
@@ -7,9 +7,7 @@ import org.junit.jupiter.api.Test;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.core.Is.is;
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
class KvCommandTest {
|
||||
@Test
|
||||
@@ -21,8 +19,8 @@ class KvCommandTest {
|
||||
String[] args = {};
|
||||
Integer call = PicocliRunner.call(KvCommand.class, ctx, args);
|
||||
|
||||
assertThat(call, is(0));
|
||||
assertThat(out.toString(), containsString("Usage: kestra namespace kv"));
|
||||
assertThat(call).isZero();
|
||||
assertThat(out.toString()).contains("Usage: kestra namespace kv");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -16,8 +16,7 @@ import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
class KvUpdateCommandTest {
|
||||
@Test
|
||||
@@ -28,6 +27,8 @@ class KvUpdateCommandTest {
|
||||
embeddedServer.start();
|
||||
|
||||
String[] args = {
|
||||
"--plugins",
|
||||
"/tmp", // pass this arg because it can cause failure
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
@@ -41,8 +42,8 @@ class KvUpdateCommandTest {
|
||||
KVStoreService kvStoreService = ctx.getBean(KVStoreService.class);
|
||||
KVStore kvStore = kvStoreService.get(null, "io.kestra.cli", null);
|
||||
|
||||
assertThat(kvStore.getValue("string").get(), is(new KVValue("stringValue")));
|
||||
assertThat(((InternalKVStore)kvStore).getRawValue("string").get(), is("\"stringValue\""));
|
||||
assertThat(kvStore.getValue("string").get()).isEqualTo(new KVValue("stringValue"));
|
||||
assertThat(((InternalKVStore) kvStore).getRawValue("string").get()).isEqualTo("\"stringValue\"");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -54,6 +55,8 @@ class KvUpdateCommandTest {
|
||||
embeddedServer.start();
|
||||
|
||||
String[] args = {
|
||||
"--plugins",
|
||||
"/tmp", // pass this arg because it can cause failure
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
@@ -67,8 +70,8 @@ class KvUpdateCommandTest {
|
||||
KVStoreService kvStoreService = ctx.getBean(KVStoreService.class);
|
||||
KVStore kvStore = kvStoreService.get(null, "io.kestra.cli", null);
|
||||
|
||||
assertThat(kvStore.getValue("int").get(), is(new KVValue(1)));
|
||||
assertThat(((InternalKVStore)kvStore).getRawValue("int").get(), is("1"));
|
||||
assertThat(kvStore.getValue("int").get()).isEqualTo(new KVValue(1));
|
||||
assertThat(((InternalKVStore) kvStore).getRawValue("int").get()).isEqualTo("1");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -80,6 +83,8 @@ class KvUpdateCommandTest {
|
||||
embeddedServer.start();
|
||||
|
||||
String[] args = {
|
||||
"--plugins",
|
||||
"/tmp", // pass this arg because it can cause failure
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
@@ -95,8 +100,8 @@ class KvUpdateCommandTest {
|
||||
KVStoreService kvStoreService = ctx.getBean(KVStoreService.class);
|
||||
KVStore kvStore = kvStoreService.get(null, "io.kestra.cli", null);
|
||||
|
||||
assertThat(kvStore.getValue("intStr").get(), is(new KVValue("1")));
|
||||
assertThat(((InternalKVStore)kvStore).getRawValue("intStr").get(), is("\"1\""));
|
||||
assertThat(kvStore.getValue("intStr").get()).isEqualTo(new KVValue("1"));
|
||||
assertThat(((InternalKVStore) kvStore).getRawValue("intStr").get()).isEqualTo("\"1\"");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -108,6 +113,8 @@ class KvUpdateCommandTest {
|
||||
embeddedServer.start();
|
||||
|
||||
String[] args = {
|
||||
"--plugins",
|
||||
"/tmp", // pass this arg because it can cause failure
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
@@ -121,8 +128,8 @@ class KvUpdateCommandTest {
|
||||
KVStoreService kvStoreService = ctx.getBean(KVStoreService.class);
|
||||
KVStore kvStore = kvStoreService.get(null, "io.kestra.cli", null);
|
||||
|
||||
assertThat(kvStore.getValue("object").get(), is(new KVValue(Map.of("some", "json"))));
|
||||
assertThat(((InternalKVStore)kvStore).getRawValue("object").get(), is("{some:\"json\"}"));
|
||||
assertThat(kvStore.getValue("object").get()).isEqualTo(new KVValue(Map.of("some", "json")));
|
||||
assertThat(((InternalKVStore) kvStore).getRawValue("object").get()).isEqualTo("{some:\"json\"}");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -134,6 +141,8 @@ class KvUpdateCommandTest {
|
||||
embeddedServer.start();
|
||||
|
||||
String[] args = {
|
||||
"--plugins",
|
||||
"/tmp", // pass this arg because it can cause failure
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
@@ -149,8 +158,8 @@ class KvUpdateCommandTest {
|
||||
KVStoreService kvStoreService = ctx.getBean(KVStoreService.class);
|
||||
KVStore kvStore = kvStoreService.get(null, "io.kestra.cli", null);
|
||||
|
||||
assertThat(kvStore.getValue("objectStr").get(), is(new KVValue("{\"some\":\"json\"}")));
|
||||
assertThat(((InternalKVStore)kvStore).getRawValue("objectStr").get(), is("\"{\\\"some\\\":\\\"json\\\"}\""));
|
||||
assertThat(kvStore.getValue("objectStr").get()).isEqualTo(new KVValue("{\"some\":\"json\"}"));
|
||||
assertThat(((InternalKVStore) kvStore).getRawValue("objectStr").get()).isEqualTo("\"{\\\"some\\\":\\\"json\\\"}\"");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -167,6 +176,8 @@ class KvUpdateCommandTest {
|
||||
Files.write(file.toPath(), "{\"some\":\"json\",\"from\":\"file\"}".getBytes());
|
||||
|
||||
String[] args = {
|
||||
"--plugins",
|
||||
"/tmp", // pass this arg because it can cause failure
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
@@ -181,8 +192,8 @@ class KvUpdateCommandTest {
|
||||
KVStoreService kvStoreService = ctx.getBean(KVStoreService.class);
|
||||
KVStore kvStore = kvStoreService.get(null, "io.kestra.cli", null);
|
||||
|
||||
assertThat(kvStore.getValue("objectFromFile").get(), is(new KVValue(Map.of("some", "json", "from", "file"))));
|
||||
assertThat(((InternalKVStore)kvStore).getRawValue("objectFromFile").get(), is("{some:\"json\",from:\"file\"}"));
|
||||
assertThat(kvStore.getValue("objectFromFile").get()).isEqualTo(new KVValue(Map.of("some", "json", "from", "file")));
|
||||
assertThat(((InternalKVStore) kvStore).getRawValue("objectFromFile").get()).isEqualTo("{some:\"json\",from:\"file\"}");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,26 @@
|
||||
package io.kestra.cli.commands.plugins;
|
||||
|
||||
import io.micronaut.configuration.picocli.PicocliRunner;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import io.micronaut.context.env.Environment;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
class PluginCommandTest {
|
||||
|
||||
@Test
|
||||
void shouldGetHelps() {
|
||||
ByteArrayOutputStream out = new ByteArrayOutputStream();
|
||||
System.setOut(new PrintStream(out));
|
||||
|
||||
try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, Environment.TEST)) {
|
||||
PicocliRunner.call(PluginCommand.class, ctx);
|
||||
|
||||
assertThat(out.toString()).contains("Usage: kestra plugins");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -17,8 +17,7 @@ import java.util.List;
|
||||
import java.util.Objects;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.*;
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
class PluginDocCommandTest {
|
||||
|
||||
@@ -44,16 +43,16 @@ class PluginDocCommandTest {
|
||||
|
||||
List<Path> files = Files.list(docPath).toList();
|
||||
|
||||
assertThat(files.size(), is(1));
|
||||
assertThat(files.getFirst().getFileName().toString(), is("plugin-template-test"));
|
||||
assertThat(files.size()).isEqualTo(1);
|
||||
assertThat(files.getFirst().getFileName().toString()).isEqualTo("plugin-template-test");
|
||||
var directory = files.getFirst().toFile();
|
||||
assertThat(directory.isDirectory(), is(true));
|
||||
assertThat(directory.listFiles().length, is(3));
|
||||
assertThat(directory.isDirectory()).isTrue();
|
||||
assertThat(directory.listFiles().length).isEqualTo(3);
|
||||
|
||||
var readme = directory.toPath().resolve("index.md");
|
||||
var readmeContent = new String(Files.readAllBytes(readme));
|
||||
|
||||
assertThat(readmeContent, containsString("""
|
||||
assertThat(readmeContent).contains("""
|
||||
---
|
||||
title: Template test
|
||||
description: "Plugin template for Kestra"
|
||||
@@ -61,18 +60,17 @@ class PluginDocCommandTest {
|
||||
|
||||
---
|
||||
# Template test
|
||||
"""));
|
||||
""");
|
||||
|
||||
assertThat(readmeContent, containsString("""
|
||||
assertThat(readmeContent).contains("""
|
||||
Plugin template for Kestra
|
||||
|
||||
This is a more complex description of the plugin.
|
||||
|
||||
This is in markdown and will be inline inside the plugin page.
|
||||
"""));
|
||||
""");
|
||||
|
||||
assertThat(readmeContent, containsString(
|
||||
"""
|
||||
assertThat(readmeContent).contains("""
|
||||
/> Subgroup title
|
||||
|
||||
Subgroup description
|
||||
@@ -89,20 +87,20 @@ class PluginDocCommandTest {
|
||||
\s
|
||||
* [Reporting](./guides/reporting.md)
|
||||
\s
|
||||
"""));
|
||||
""");
|
||||
|
||||
// check @PluginProperty from an interface
|
||||
var task = directory.toPath().resolve("tasks/io.kestra.plugin.templates.ExampleTask.md");
|
||||
String taskDoc = new String(Files.readAllBytes(task));
|
||||
assertThat(taskDoc, containsString("""
|
||||
assertThat(taskDoc).contains("""
|
||||
### `example`
|
||||
* **Type:** ==string==
|
||||
* **Dynamic:** ✔️
|
||||
* **Required:** ❌
|
||||
|
||||
**Example interface**
|
||||
"""));
|
||||
assertThat(taskDoc, containsString("""
|
||||
""");
|
||||
assertThat(taskDoc).contains("""
|
||||
### `from`
|
||||
* **Type:**
|
||||
* ==string==
|
||||
@@ -110,12 +108,12 @@ class PluginDocCommandTest {
|
||||
* [==Example==](#io.kestra.core.models.annotations.example)
|
||||
* **Dynamic:** ✔️
|
||||
* **Required:** ✔️
|
||||
"""));
|
||||
""");
|
||||
|
||||
var authenticationGuide = directory.toPath().resolve("guides/authentication.md");
|
||||
assertThat(new String(Files.readAllBytes(authenticationGuide)), containsString("This is how to authenticate for this plugin:"));
|
||||
assertThat(new String(Files.readAllBytes(authenticationGuide))).contains("This is how to authenticate for this plugin:");
|
||||
var reportingGuide = directory.toPath().resolve("guides/reporting.md");
|
||||
assertThat(new String(Files.readAllBytes(reportingGuide)), containsString("This is the reporting of the plugin:"));
|
||||
assertThat(new String(Files.readAllBytes(reportingGuide))).contains("This is the reporting of the plugin:");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -9,15 +9,13 @@ import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.*;
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
class PluginInstallCommandTest {
|
||||
|
||||
@Test
|
||||
void fixedVersion() throws IOException {
|
||||
void shouldInstallPluginLocallyGivenFixedVersion() throws IOException {
|
||||
Path pluginsPath = Files.createTempDirectory(PluginInstallCommandTest.class.getSimpleName());
|
||||
pluginsPath.toFile().deleteOnExit();
|
||||
|
||||
@@ -27,13 +25,13 @@ class PluginInstallCommandTest {
|
||||
|
||||
List<Path> files = Files.list(pluginsPath).toList();
|
||||
|
||||
assertThat(files.size(), is(1));
|
||||
assertThat(files.getFirst().getFileName().toString(), is("plugin-notifications-0.6.0.jar"));
|
||||
assertThat(files.size()).isEqualTo(1);
|
||||
assertThat(files.getFirst().getFileName().toString()).isEqualTo("io_kestra_plugin__plugin-notifications__0_6_0.jar");
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void latestVersion() throws IOException {
|
||||
void shouldInstallPluginLocallyGivenLatestVersion() throws IOException {
|
||||
Path pluginsPath = Files.createTempDirectory(PluginInstallCommandTest.class.getSimpleName());
|
||||
pluginsPath.toFile().deleteOnExit();
|
||||
|
||||
@@ -43,14 +41,14 @@ class PluginInstallCommandTest {
|
||||
|
||||
List<Path> files = Files.list(pluginsPath).toList();
|
||||
|
||||
assertThat(files.size(), is(1));
|
||||
assertThat(files.getFirst().getFileName().toString(), startsWith("plugin-notifications"));
|
||||
assertThat(files.getFirst().getFileName().toString(), not(containsString("LATEST")));
|
||||
assertThat(files.size()).isEqualTo(1);
|
||||
assertThat(files.getFirst().getFileName().toString()).startsWith("io_kestra_plugin__plugin-notifications__");
|
||||
assertThat(files.getFirst().getFileName().toString()).doesNotContain("LATEST");
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void rangeVersion() throws IOException {
|
||||
void shouldInstallPluginLocallyGivenRangeVersion() throws IOException {
|
||||
Path pluginsPath = Files.createTempDirectory(PluginInstallCommandTest.class.getSimpleName());
|
||||
pluginsPath.toFile().deleteOnExit();
|
||||
|
||||
@@ -61,8 +59,8 @@ class PluginInstallCommandTest {
|
||||
|
||||
List<Path> files = Files.list(pluginsPath).toList();
|
||||
|
||||
assertThat(files.size(), is(1));
|
||||
assertThat(files.getFirst().getFileName().toString(), is("storage-s3-0.12.1.jar"));
|
||||
assertThat(files.size()).isEqualTo(1);
|
||||
assertThat(files.getFirst().getFileName().toString()).isEqualTo("io_kestra_storage__storage-s3__0_12_1.jar");
|
||||
}
|
||||
}
|
||||
}
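The renamed expectations above suggest installed plugin jars are now named from their Maven coordinates, with dots turned into underscores and group, artifact, and version joined by a double underscore. A hypothetical helper that reproduces the filenames asserted in this test (the actual naming logic lives elsewhere in this change):

// Hypothetical: reproduces the expected filenames; not the real implementation.
static String pluginFileName(String groupId, String artifactId, String version) {
    return String.join("__",
        groupId.replace('.', '_'),
        artifactId,
        version.replace('.', '_')
    ) + ".jar";
}

// pluginFileName("io.kestra.plugin", "plugin-notifications", "0.6.0")
//   -> "io_kestra_plugin__plugin-notifications__0_6_0.jar"
// pluginFileName("io.kestra.storage", "storage-s3", "0.12.1")
//   -> "io_kestra_storage__storage-s3__0_12_1.jar"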
@@ -4,7 +4,6 @@ import io.micronaut.configuration.picocli.PicocliRunner;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import io.micronaut.context.env.Environment;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.junit.jupiter.api.BeforeAll;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
@@ -17,15 +16,14 @@ import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.Objects;
|
||||
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.core.StringContains.containsString;
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
class PluginListCommandTest {
|
||||
|
||||
private static final String PLUGIN_TEMPLATE_TEST = "plugin-template-test-0.18.0-SNAPSHOT.jar";
|
||||
|
||||
@Test
|
||||
void run() throws IOException, URISyntaxException {
|
||||
void shouldListPluginsInstalledLocally() throws IOException, URISyntaxException {
|
||||
Path pluginsPath = Files.createTempDirectory(PluginListCommandTest.class.getSimpleName());
|
||||
pluginsPath.toFile().deleteOnExit();
|
||||
|
||||
@@ -42,7 +40,7 @@ class PluginListCommandTest {
|
||||
String[] args = {"--plugins", pluginsPath.toAbsolutePath().toString()};
|
||||
PicocliRunner.call(PluginListCommand.class, ctx, args);
|
||||
|
||||
assertThat(out.toString(), containsString("io.kestra.plugin.templates.Example"));
|
||||
assertThat(out.toString()).contains("io.kestra.plugin.templates.Example");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,104 @@
|
||||
package io.kestra.cli.commands.plugins;
|
||||
|
||||
import com.github.tomakehurst.wiremock.junit5.WireMockTest;
|
||||
import io.micronaut.configuration.picocli.PicocliRunner;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import io.micronaut.context.env.Environment;
|
||||
import org.junit.jupiter.api.AfterEach;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
import java.util.Map;
|
||||
|
||||
import static com.github.tomakehurst.wiremock.client.WireMock.*;
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
@WireMockTest(httpPort = 28181)
|
||||
class PluginSearchCommandTest {
|
||||
private ByteArrayOutputStream outputStreamCaptor;
|
||||
private final PrintStream originalOut = System.out;
|
||||
|
||||
@BeforeEach
|
||||
void setUp() {
|
||||
outputStreamCaptor = new ByteArrayOutputStream();
|
||||
System.setOut(new PrintStream(outputStreamCaptor));
|
||||
}
|
||||
|
||||
@AfterEach
|
||||
void tearDown() {
|
||||
System.setOut(originalOut);
|
||||
}
|
||||
|
||||
@Test
|
||||
void searchWithExactMatch() {
|
||||
stubFor(get(urlEqualTo("/v1/plugins"))
|
||||
.willReturn(aResponse()
|
||||
.withHeader("Content-Type", "application/json")
|
||||
.withBody("""
|
||||
[
|
||||
{
|
||||
"name": "plugin-notifications",
|
||||
"title": "Notifications",
|
||||
"group": "io.kestra.plugin",
|
||||
"version": "0.6.0"
|
||||
},
|
||||
{
|
||||
"name": "plugin-scripts",
|
||||
"title": "Scripts",
|
||||
"group": "io.kestra.plugin",
|
||||
"version": "0.5.0"
|
||||
}
|
||||
]
|
||||
""")));
|
||||
|
||||
try (ApplicationContext ctx = ApplicationContext.builder(Environment.CLI, Environment.TEST)
|
||||
.properties(Map.of("micronaut.http.services.api.url", "http://localhost:28181"))
|
||||
.start()) {
|
||||
String[] args = {"notifications"};
|
||||
PicocliRunner.call(PluginSearchCommand.class, ctx, args);
|
||||
|
||||
String output = outputStreamCaptor.toString().trim();
|
||||
assertThat(output).contains("Found 1 plugins matching 'notifications'");
|
||||
assertThat(output).contains("plugin-notifications");
|
||||
assertThat(output).doesNotContain("plugin-scripts");
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void searchWithEmptyQuery() {
|
||||
stubFor(get(urlEqualTo("/v1/plugins"))
|
||||
.willReturn(aResponse()
|
||||
.withHeader("Content-Type", "application/json")
|
||||
.withBody("""
|
||||
[
|
||||
{
|
||||
"name": "plugin-notifications",
|
||||
"title": "Notifications",
|
||||
"group": "io.kestra.plugin",
|
||||
"version": "0.6.0"
|
||||
},
|
||||
{
|
||||
"name": "plugin-scripts",
|
||||
"title": "Scripts",
|
||||
"group": "io.kestra.plugin",
|
||||
"version": "0.5.0"
|
||||
}
|
||||
]
|
||||
""")));
|
||||
|
||||
try (ApplicationContext ctx = ApplicationContext.builder(Environment.CLI, Environment.TEST)
|
||||
.properties(Map.of("micronaut.http.services.api.url", "http://localhost:28181"))
|
||||
.start()) {
|
||||
|
||||
String[] args = {""};
|
||||
PicocliRunner.call(PluginSearchCommand.class, ctx, args);
|
||||
|
||||
String output = outputStreamCaptor.toString().trim();
|
||||
assertThat(output).contains("Found 2 plugins");
|
||||
assertThat(output).contains("plugin-notifications");
|
||||
assertThat(output).contains("plugin-scripts");
|
||||
}
|
||||
}
|
||||
}
|
@@ -11,9 +11,7 @@ import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.net.URL;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.Is.is;
import static org.hamcrest.core.StringContains.containsString;
import static org.assertj.core.api.Assertions.assertThat;

class ReindexCommandTest {
    @Test
@@ -36,7 +34,7 @@ class ReindexCommandTest {
                directory.getPath(),
            };
            PicocliRunner.call(FlowNamespaceUpdateCommand.class, ctx, updateArgs);
            assertThat(out.toString(), containsString("3 flow(s)"));
            assertThat(out.toString()).contains("3 flow(s)");

            // then we reindex them
            String[] reindexArgs = {
@@ -44,9 +42,9 @@ class ReindexCommandTest {
                "flow",
            };
            Integer call = PicocliRunner.call(ReindexCommand.class, ctx, reindexArgs);
            assertThat(call, is(0));
            assertThat(call).isZero();
            // in local it reindex 3 flows and in CI 4 for an unknown reason
            assertThat(out.toString(), containsString("Successfully reindex"));
            assertThat(out.toString()).contains("Successfully reindex");
        }
    }
}
@@ -7,9 +7,7 @@ import org.junit.jupiter.api.Test;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.core.Is.is;
import static org.assertj.core.api.Assertions.assertThat;

class DatabaseCommandTest {
    @Test
@@ -21,8 +19,8 @@ class DatabaseCommandTest {
            String[] args = {};
            Integer call = PicocliRunner.call(DatabaseCommand.class, ctx, args);

            assertThat(call, is(0));
            assertThat(out.toString(), containsString("Usage: kestra sys database"));
            assertThat(call).isZero();
            assertThat(out.toString()).contains("Usage: kestra sys database");
        }
    }
}

@@ -8,9 +8,7 @@ import org.junit.jupiter.api.Test;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.core.Is.is;
import static org.assertj.core.api.Assertions.assertThat;

class StateStoreCommandTest {
    @Test
@@ -22,8 +20,8 @@ class StateStoreCommandTest {
            String[] args = {};
            Integer call = PicocliRunner.call(StateStoreCommand.class, ctx, args);

            assertThat(call, is(0));
            assertThat(out.toString(), containsString("Usage: kestra sys state-store"));
            assertThat(call).isZero();
            assertThat(out.toString()).contains("Usage: kestra sys state-store");
        }
    }
}

@@ -1,16 +1,15 @@
package io.kestra.cli.commands.sys.statestore;

import com.devskiller.friendly_id.FriendlyId;
import io.kestra.core.exceptions.MigrationRequiredException;
import io.kestra.core.exceptions.ResourceExpiredException;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.runners.RunContext;
import io.kestra.core.runners.RunContextFactory;
import io.kestra.core.storages.StateStore;
import io.kestra.core.storages.StorageInterface;
import io.kestra.core.utils.Hashing;
import io.kestra.core.utils.IdUtils;
import io.kestra.core.utils.Slugify;
import io.kestra.plugin.core.log.Log;
import io.micronaut.configuration.picocli.PicocliRunner;
@@ -26,9 +25,7 @@ import java.net.URI;
import java.util.List;
import java.util.Map;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.core.Is.is;
import static org.assertj.core.api.Assertions.assertThat;

class StateStoreMigrateCommandTest {
    @Test
@@ -45,7 +42,7 @@ class StateStoreMigrateCommandTest {
                .namespace("some.valid.namespace." + ((int) (Math.random() * 1000000)))
                .tasks(List.of(Log.builder().id("log").type(Log.class.getName()).message("logging").build()))
                .build();
            flowRepository.create(flow, flow.generateSource(), flow);
            flowRepository.create(GenericFlow.of(flow));

            StorageInterface storage = ctx.getBean(StorageInterface.class);
            String tenantId = flow.getTenantId();
@@ -56,10 +53,7 @@ class StateStoreMigrateCommandTest {
                oldStateStoreUri,
                new ByteArrayInputStream("my-value".getBytes())
            );
            assertThat(
                storage.exists(tenantId, flow.getNamespace(), oldStateStoreUri),
                is(true)
            );
            assertThat(storage.exists(tenantId, flow.getNamespace(), oldStateStoreUri)).isTrue();

            RunContext runContext = ctx.getBean(RunContextFactory.class).of(flow, Map.of("flow", Map.of(
                "tenantId", tenantId,
@@ -72,13 +66,10 @@ class StateStoreMigrateCommandTest {
            String[] args = {};
            Integer call = PicocliRunner.call(StateStoreMigrateCommand.class, ctx, args);

            assertThat(new String(stateStore.getState(true, "my-state", "sub-name", "my-taskrun-value").readAllBytes()), is("my-value"));
            assertThat(
                storage.exists(tenantId, flow.getNamespace(), oldStateStoreUri),
                is(false)
            );
            assertThat(new String(stateStore.getState(true, "my-state", "sub-name", "my-taskrun-value").readAllBytes())).isEqualTo("my-value");
            assertThat(storage.exists(tenantId, flow.getNamespace(), oldStateStoreUri)).isFalse();

            assertThat(call, is(0));
            assertThat(call).isZero();
        }
    }
}

@@ -15,9 +15,7 @@ import java.net.URL;
import java.util.Map;
import java.util.zip.ZipFile;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.StringContains.containsString;
import static org.hamcrest.core.Is.is;
import static org.assertj.core.api.Assertions.assertThat;

class TemplateExportCommandTest {
    @Test
@@ -42,7 +40,7 @@ class TemplateExportCommandTest {

            };
            PicocliRunner.call(TemplateNamespaceUpdateCommand.class, ctx, args);
            assertThat(out.toString(), containsString("3 template(s)"));
            assertThat(out.toString()).contains("3 template(s)");

            // then we export them
            String[] exportArgs = {
@@ -56,9 +54,9 @@ class TemplateExportCommandTest {
            };
            PicocliRunner.call(TemplateExportCommand.class, ctx, exportArgs);
            File file = new File("/tmp/templates.zip");
            assertThat(file.exists(), is(true));
            assertThat(file.exists()).isTrue();
            ZipFile zipFile = new ZipFile(file);
            assertThat(zipFile.stream().count(), is(3L));
            assertThat(zipFile.stream().count()).isEqualTo(3L);

            file.delete();
        }

@@ -11,11 +11,9 @@ import java.io.PrintStream;
import java.net.URL;
import java.util.Map;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.Is.is;
import static org.hamcrest.core.StringContains.containsString;
import static org.assertj.core.api.Assertions.assertThat;

public class TemplateValidateCommandTest {
class TemplateValidateCommandTest {
    @Test
    void runLocal() {
        URL directory = TemplateValidateCommandTest.class.getClassLoader().getResource("invalidsTemplates/template.yml");
@@ -29,9 +27,9 @@ public class TemplateValidateCommandTest {
            };
            Integer call = PicocliRunner.call(TemplateValidateCommand.class, ctx, args);

            assertThat(call, is(1));
            assertThat(out.toString(), containsString("Unable to parse template"));
            assertThat(out.toString(), containsString("must not be empty"));
            assertThat(call).isEqualTo(1);
            assertThat(out.toString()).contains("Unable to parse template");
            assertThat(out.toString()).contains("must not be empty");
        }
    }

@@ -55,9 +53,9 @@ public class TemplateValidateCommandTest {
            };
            Integer call = PicocliRunner.call(TemplateValidateCommand.class, ctx, args);

            assertThat(call, is(1));
            assertThat(out.toString(), containsString("Unable to parse template"));
            assertThat(out.toString(), containsString("must not be empty"));
            assertThat(call).isEqualTo(1);
            assertThat(out.toString()).contains("Unable to parse template");
            assertThat(out.toString()).contains("must not be empty");
        }
    }
}

@@ -7,9 +7,7 @@ import org.junit.jupiter.api.Test;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.core.Is.is;
import static org.assertj.core.api.Assertions.assertThat;

class TemplateNamespaceCommandTest {
    @Test
@@ -21,8 +19,8 @@ class TemplateNamespaceCommandTest {
            String[] args = {};
            Integer call = PicocliRunner.call(TemplateNamespaceCommand.class, ctx, args);

            assertThat(call, is(0));
            assertThat(out.toString(), containsString("Usage: kestra template namespace"));
            assertThat(call).isZero();
            assertThat(out.toString()).contains("Usage: kestra template namespace");
        }
    }
}

@@ -11,8 +11,7 @@ import java.io.PrintStream;
import java.net.URL;
import java.util.Map;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.StringContains.containsString;
import static org.assertj.core.api.Assertions.assertThat;

class TemplateNamespaceUpdateCommandTest {
    @Test
@@ -37,7 +36,7 @@ class TemplateNamespaceUpdateCommandTest {
            };
            PicocliRunner.call(TemplateNamespaceUpdateCommand.class, ctx, args);

            assertThat(out.toString(), containsString("3 template(s)"));
            assertThat(out.toString()).contains("3 template(s)");
        }
    }

@@ -64,8 +63,8 @@ class TemplateNamespaceUpdateCommandTest {
            Integer call = PicocliRunner.call(TemplateNamespaceUpdateCommand.class, ctx, args);

            // assertThat(call, is(1));
            assertThat(out.toString(), containsString("Unable to parse templates"));
            assertThat(out.toString(), containsString("must not be empty"));
            assertThat(out.toString()).contains("Unable to parse templates");
            assertThat(out.toString()).contains("must not be empty");
        }
    }

@@ -93,7 +92,7 @@ class TemplateNamespaceUpdateCommandTest {
            };
            PicocliRunner.call(TemplateNamespaceUpdateCommand.class, ctx, args);

            assertThat(out.toString(), containsString("3 template(s)"));
            assertThat(out.toString()).contains("3 template(s)");

            String[] newArgs = {
                "--server",
@@ -107,7 +106,7 @@ class TemplateNamespaceUpdateCommandTest {
            };
            PicocliRunner.call(TemplateNamespaceUpdateCommand.class, ctx, newArgs);

            assertThat(out.toString(), containsString("1 template(s)"));
            assertThat(out.toString()).contains("1 template(s)");
        }
    }
}
@@ -10,8 +10,7 @@ import java.io.IOException;
import java.nio.file.Files;
import java.util.Map;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
import static org.assertj.core.api.Assertions.assertThat;

class DeleteConfigurationApplicationListenersTest {

@@ -28,7 +27,7 @@ class DeleteConfigurationApplicationListenersTest {
        );

        try (ApplicationContext ctx = ApplicationContext.run(mapPropertySource, Environment.CLI, Environment.TEST)) {
            assertThat(tempFile.exists(), is(false));
            assertThat(tempFile.exists()).isFalse();
        }
    }
}
@@ -19,8 +19,7 @@ import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;

import static io.kestra.core.utils.Rethrow.throwRunnable;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.*;
import static org.assertj.core.api.Assertions.assertThat;

@MicronautTest(environments = {"test", "file-watch"}, transactional = false)
class FileChangedEventListenerTest {
@@ -77,9 +76,9 @@ class FileChangedEventListenerTest {
            Duration.ofSeconds(10)
        );
        Flow myflow = flowRepository.findById(null, "io.kestra.tests.watch", "myflow").orElseThrow();
        assertThat(myflow.getTasks(), hasSize(1));
        assertThat(myflow.getTasks().getFirst().getId(), is("hello"));
        assertThat(myflow.getTasks().getFirst().getType(), is("io.kestra.plugin.core.log.Log"));
        assertThat(myflow.getTasks()).hasSize(1);
        assertThat(myflow.getTasks().getFirst().getId()).isEqualTo("hello");
        assertThat(myflow.getTasks().getFirst().getType()).isEqualTo("io.kestra.plugin.core.log.Log");

        // delete the flow
        Files.delete(Path.of(FILE_WATCH + "/myflow.yaml"));
@@ -116,9 +115,9 @@ class FileChangedEventListenerTest {
            Duration.ofSeconds(10)
        );
        Flow pluginDefaultFlow = flowRepository.findById(null, "io.kestra.tests.watch", "pluginDefault").orElseThrow();
        assertThat(pluginDefaultFlow.getTasks(), hasSize(1));
        assertThat(pluginDefaultFlow.getTasks().getFirst().getId(), is("helloWithDefault"));
        assertThat(pluginDefaultFlow.getTasks().getFirst().getType(), is("io.kestra.plugin.core.log.Log"));
        assertThat(pluginDefaultFlow.getTasks()).hasSize(1);
        assertThat(pluginDefaultFlow.getTasks().getFirst().getId()).isEqualTo("helloWithDefault");
        assertThat(pluginDefaultFlow.getTasks().getFirst().getType()).isEqualTo("io.kestra.plugin.core.log.Log");

        // delete both files
        Files.delete(Path.of(FILE_WATCH + "/plugin-default.yaml"));
@@ -38,6 +38,13 @@ dependencies {
    implementation group: 'dev.failsafe', name: 'failsafe'
    api 'org.apache.httpcomponents.client5:httpclient5'

    // plugins
    implementation 'org.apache.maven.resolver:maven-resolver-impl'
    implementation 'org.apache.maven.resolver:maven-resolver-supplier'
    implementation 'org.apache.maven.resolver:maven-resolver-connector-basic'
    implementation 'org.apache.maven.resolver:maven-resolver-transport-file'
    implementation 'org.apache.maven.resolver:maven-resolver-transport-http'

    // scheduler
    implementation group: 'com.cronutils', name: 'cron-utils'

@@ -66,7 +73,7 @@ dependencies {
    testImplementation "io.micronaut:micronaut-http-server-netty"
    testImplementation "io.micronaut:micronaut-management"

    testImplementation "org.testcontainers:testcontainers:1.20.4"
    testImplementation "org.testcontainers:junit-jupiter:1.20.4"
    testImplementation "org.testcontainers:testcontainers:1.20.6"
    testImplementation "org.testcontainers:junit-jupiter:1.20.6"
    testImplementation "org.bouncycastle:bcpkix-jdk18on:1.80"
}
@@ -3,7 +3,6 @@ package io.kestra.core.contexts;
import io.kestra.core.exceptions.KestraRuntimeException;
import io.kestra.core.plugins.DefaultPluginRegistry;
import io.kestra.core.plugins.PluginRegistry;
import io.kestra.core.plugins.serdes.PluginDeserializer;
import io.kestra.core.storages.StorageInterface;
import io.kestra.core.storages.StorageInterfaceFactory;
import io.micronaut.context.annotation.Bean;
@@ -34,7 +33,7 @@ public class KestraBeansFactory {
    StorageConfig storageConfig;

    @Value("${kestra.storage.type}")
    Optional<String> storageType;
    protected Optional<String> storageType;

    @Requires(missingBeans = PluginRegistry.class)
    @Singleton
@@ -42,16 +41,25 @@ public class KestraBeansFactory {
        return DefaultPluginRegistry.getOrCreate();
    }

    @Singleton
    public StorageInterfaceFactory storageInterfaceFactory(final PluginRegistry pluginRegistry){
        return new StorageInterfaceFactory(pluginRegistry, validator);
    }

    @Requires(missingBeans = StorageInterface.class)
    @Singleton
    @Bean(preDestroy = "close")
    public StorageInterface storageInterface(final PluginRegistry pluginRegistry) throws IOException {
        String pluginId = storageType.orElseThrow(() -> new KestraRuntimeException(String.format(
    public StorageInterface storageInterface(final StorageInterfaceFactory storageInterfaceFactory) throws IOException {
        String pluginId = getStoragePluginId(storageInterfaceFactory);
        return storageInterfaceFactory.make(null, pluginId, storageConfig.getStorageConfig(pluginId));
    }

    public String getStoragePluginId(StorageInterfaceFactory storageInterfaceFactory) {
        return storageType.orElseThrow(() -> new KestraRuntimeException(String.format(
            "No storage configured through the application property '%s'. Supported types are: %s"
            , KESTRA_STORAGE_TYPE_CONFIG,
            StorageInterfaceFactory.getLoggableStorageIds(pluginRegistry)
            storageInterfaceFactory.getLoggableStorageIds()
        )));
        return StorageInterfaceFactory.make(pluginRegistry, pluginId, storageConfig.getStorageConfig(pluginId), validator);
    }

    @ConfigurationProperties("kestra")
@@ -67,7 +75,7 @@ public class KestraBeansFactory {
     * @return the configuration.
     */
    @SuppressWarnings("unchecked")
    private Map<String, Object> getStorageConfig(String type) {
    public Map<String, Object> getStorageConfig(String type) {
        return (Map<String, Object>) storage.get(StringConvention.CAMEL_CASE.format(type));
    }
}

@@ -10,6 +10,8 @@ import io.micronaut.context.env.Environment;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
@@ -25,7 +27,11 @@ public abstract class KestraContext {
    private static final AtomicReference<KestraContext> INSTANCE = new AtomicReference<>();

    // Properties
    private static final String KESTRA_SERVER_TYPE = "kestra.server-type";
    public static final String KESTRA_SERVER_TYPE = "kestra.server-type";

    // Those properties are injected bases on the CLI args.
    private static final String KESTRA_WORKER_MAX_NUM_THREADS = "kestra.worker.max-num-threads";
    private static final String KESTRA_WORKER_GROUP_KEY = "kestra.worker.group-key";

    /**
     * Gets the current {@link KestraContext}.
@@ -54,6 +60,12 @@ public abstract class KestraContext {
     */
    public abstract ServerType getServerType();

    public abstract Optional<Integer> getWorkerMaxNumThreads();

    public abstract Optional<String> getWorkerGroupKey();

    public abstract void injectWorkerConfigs(Integer maxNumThreads, String workerGroupKey);

    /**
     * Returns the Kestra Version.
     *
@@ -110,6 +122,34 @@ public abstract class KestraContext {
                .orElse(ServerType.STANDALONE);
        }

        /** {@inheritDoc} **/
        @Override
        public Optional<Integer> getWorkerMaxNumThreads() {
            return Optional.ofNullable(environment)
                .flatMap(env -> env.getProperty(KESTRA_WORKER_MAX_NUM_THREADS, Integer.class));
        }

        /** {@inheritDoc} **/
        @Override
        public Optional<String> getWorkerGroupKey() {
            return Optional.ofNullable(environment)
                .flatMap(env -> env.getProperty(KESTRA_WORKER_GROUP_KEY, String.class));
        }
        /** {@inheritDoc} **/
        @Override
        public void injectWorkerConfigs(Integer maxNumThreads, String workerGroupKey) {
            final Map<String, Object> configs = new HashMap<>();
            Optional.ofNullable(maxNumThreads)
                .ifPresent(val -> configs.put(KESTRA_WORKER_MAX_NUM_THREADS, val));

            Optional.ofNullable(workerGroupKey)
                .ifPresent(val -> configs.put(KESTRA_WORKER_GROUP_KEY, val));

            if (!configs.isEmpty()) {
                environment.addPropertySource("kestra-runtime", configs);
            }
        }

        /** {@inheritDoc} **/
        @Override
        public void shutdown() {
@@ -0,0 +1,27 @@
package io.kestra.core.contexts;

import io.micronaut.context.annotation.ConfigurationProperties;
import io.micronaut.context.annotation.EachProperty;
import io.micronaut.context.annotation.Parameter;
import io.micronaut.core.annotation.Nullable;
import lombok.Builder;

@Builder
@EachProperty("kestra.plugins.repositories")
public record MavenPluginRepositoryConfig(
    @Parameter
    String id,
    String url,
    @Nullable
    BasicAuth basicAuth
) {

    @Builder
    @ConfigurationProperties("basic-auth")
    public record BasicAuth(
        String username,
        String password
    ) {

    }
}
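
A minimal sketch of how this @EachProperty-bound record could be built by hand (for instance in a test), assuming Lombok's @Builder generates the usual builder methods on both records; the id, URL, and credentials below are made-up placeholders, not values from this repository. At runtime Micronaut would normally bind one instance per entry under kestra.plugins.repositories instead of constructing it manually.

    // Hypothetical values, for illustration only.
    MavenPluginRepositoryConfig repository = MavenPluginRepositoryConfig.builder()
        .id("central")
        .url("https://repo.maven.apache.org/maven2/")
        .basicAuth(MavenPluginRepositoryConfig.BasicAuth.builder()
            .username("user")
            .password("secret")
            .build())
        .build();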
@@ -1,6 +1,7 @@
package io.kestra.core.docs;

import com.google.common.base.CaseFormat;
import io.kestra.core.models.Plugin;
import io.kestra.core.models.tasks.retrys.AbstractRetry;
import io.kestra.core.models.tasks.runners.TaskRunner;
import lombok.AllArgsConstructor;

@@ -1,10 +1,9 @@
package io.kestra.core.docs;

import io.kestra.core.plugins.RegisteredPlugin;
import io.kestra.core.plugins.PluginClassAndMetadata;
import lombok.*;

import java.util.*;
import java.util.stream.Collectors;

@Getter
@EqualsAndHashCode
@@ -21,16 +20,18 @@ public class ClassPluginDocumentation<T> extends AbstractClassDocumentation<T> {
    private Map<String, Object> outputsSchema;

    @SuppressWarnings("unchecked")
    private ClassPluginDocumentation(JsonSchemaGenerator jsonSchemaGenerator, RegisteredPlugin plugin, Class<? extends T> cls, Class<T> baseCls, String alias) {
        super(jsonSchemaGenerator, cls, baseCls);
    private ClassPluginDocumentation(JsonSchemaGenerator jsonSchemaGenerator, PluginClassAndMetadata<T> plugin, boolean allProperties) {
        super(jsonSchemaGenerator, plugin.type(), allProperties ? null : plugin.baseClass());

        // plugins metadata
        this.cls = alias == null ? cls.getName() : alias;
        Class<? extends T> cls = plugin.type();

        this.cls = plugin.alias() == null ? cls.getName() : plugin.alias();
        this.group = plugin.group();
        this.docLicense = plugin.license();
        this.pluginTitle = plugin.title();
        this.icon = plugin.icon(cls);
        if (alias != null) {
        this.icon = plugin.icon();
        if (plugin.alias() != null) {
            replacement = cls.getName();
        }

@@ -38,10 +39,10 @@ public class ClassPluginDocumentation<T> extends AbstractClassDocumentation<T> {
            this.subGroup = cls.getPackageName().substring(this.group.length() + 1);
        }

        this.shortName = alias == null ? cls.getSimpleName() : alias.substring(alias.lastIndexOf('.') + 1);
        this.shortName = plugin.alias() == null ? cls.getSimpleName() : plugin.alias().substring(plugin.alias().lastIndexOf('.') + 1);

        // outputs
        this.outputsSchema = jsonSchemaGenerator.outputs(baseCls, cls);
        this.outputsSchema = jsonSchemaGenerator.outputs(allProperties ? null : plugin.baseClass(), cls);

        if (this.outputsSchema.containsKey("$defs")) {
            this.defs.putAll((Map<String, Object>) this.outputsSchema.get("$defs"));
@@ -67,17 +68,13 @@ public class ClassPluginDocumentation<T> extends AbstractClassDocumentation<T> {
            .toList();
        }

        if (alias != null) {
        if (plugin.alias() != null) {
            this.deprecated = true;
        }
    }

    public static <T> ClassPluginDocumentation<T> of(JsonSchemaGenerator jsonSchemaGenerator, RegisteredPlugin plugin, Class<? extends T> cls, Class<T> baseCls) {
        return new ClassPluginDocumentation<>(jsonSchemaGenerator, plugin, cls, baseCls, null);
    }

    public static <T> ClassPluginDocumentation<T> of(JsonSchemaGenerator jsonSchemaGenerator, RegisteredPlugin plugin, Class<? extends T> cls, Class<T> baseCls, String alias) {
        return new ClassPluginDocumentation<>(jsonSchemaGenerator, plugin, cls, baseCls, alias);
    public static <T> ClassPluginDocumentation<T> of(JsonSchemaGenerator jsonSchemaGenerator, PluginClassAndMetadata<T> plugin, boolean allProperties) {
        return new ClassPluginDocumentation<>(jsonSchemaGenerator, plugin, allProperties);
    }

    @AllArgsConstructor

@@ -7,6 +7,7 @@ import io.kestra.core.models.tasks.logs.LogExporter;
import io.kestra.core.models.tasks.runners.TaskRunner;
import io.kestra.core.models.tasks.Task;
import io.kestra.core.models.triggers.AbstractTrigger;
import io.kestra.core.plugins.PluginClassAndMetadata;
import io.kestra.core.plugins.RegisteredPlugin;
import io.kestra.core.runners.pebble.Extension;
import io.kestra.core.runners.pebble.JsonWriter;
@@ -217,7 +218,15 @@ public class DocumentationGenerator {
    private <T> List<Document> generate(RegisteredPlugin registeredPlugin, List<Class<? extends T>> cls, Class<T> baseCls, String type) {
        return cls
            .stream()
            .map(r -> ClassPluginDocumentation.of(jsonSchemaGenerator, registeredPlugin, r, baseCls))
            .map(pluginClass -> {
                PluginClassAndMetadata<T> metadata = PluginClassAndMetadata.create(
                    registeredPlugin,
                    pluginClass,
                    baseCls,
                    null
                );
                return ClassPluginDocumentation.of(jsonSchemaGenerator, metadata, true);
            })
            .map(pluginDocumentation -> {
                try {
                    return new Document(
@@ -247,15 +256,15 @@ public class DocumentationGenerator {
            classPluginDocumentation.getCls() + ".md";
    }

    public static <T> String render(ClassPluginDocumentation<T> classPluginDocumentation) throws IOException {
    public static String render(ClassPluginDocumentation<?> classPluginDocumentation) throws IOException {
        return render("task", JacksonMapper.toMap(classPluginDocumentation));
    }

    public static <T> String render(AbstractClassDocumentation<T> classInputDocumentation) throws IOException {
    public static String render(AbstractClassDocumentation classInputDocumentation) throws IOException {
        return render("task", JacksonMapper.toMap(classInputDocumentation));
    }

    public static <T> String render(String templateName, Map<String, Object> vars) throws IOException {
    public static String render(String templateName, Map<String, Object> vars) throws IOException {
        String pebbleTemplate = IOUtils.toString(
            Objects.requireNonNull(DocumentationGenerator.class.getClassLoader().getResourceAsStream("docs/" + templateName + ".peb")),
            StandardCharsets.UTF_8

core/src/main/java/io/kestra/core/docs/JsonSchemaCache.java (new file, 65 lines)
@@ -0,0 +1,65 @@
package io.kestra.core.docs;

import io.kestra.core.models.dashboards.Dashboard;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.PluginDefault;
import io.kestra.core.models.tasks.Task;
import io.kestra.core.models.templates.Template;
import io.kestra.core.models.triggers.AbstractTrigger;
import jakarta.inject.Singleton;

import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

/**
 * Service for getting schemas.
 */
@Singleton
public class JsonSchemaCache {

    private final JsonSchemaGenerator jsonSchemaGenerator;

    private final ConcurrentMap<CacheKey, Map<String, Object>> schemaCache = new ConcurrentHashMap<>();

    private final Map<SchemaType, Class<?>> classesBySchemaType = new HashMap<>();

    /**
     * Creates a new {@link JsonSchemaCache} instance.
     *
     * @param jsonSchemaGenerator The {@link JsonSchemaGenerator}.
     */
    public JsonSchemaCache(final JsonSchemaGenerator jsonSchemaGenerator) {
        this.jsonSchemaGenerator = Objects.requireNonNull(jsonSchemaGenerator, "JsonSchemaGenerator cannot be null");
        registerClassForType(SchemaType.FLOW, Flow.class);
        registerClassForType(SchemaType.TEMPLATE, Template.class);
        registerClassForType(SchemaType.TASK, Task.class);
        registerClassForType(SchemaType.TRIGGER, AbstractTrigger.class);
        registerClassForType(SchemaType.PLUGINDEFAULT, PluginDefault.class);
        registerClassForType(SchemaType.DASHBOARD, Dashboard.class);
    }

    public Map<String, Object> getSchemaForType(final SchemaType type,
                                                final boolean arrayOf) {
        return schemaCache.computeIfAbsent(new CacheKey(type, arrayOf), (key) -> {

            Class<?> cls = Optional.ofNullable(classesBySchemaType.get(type))
                .orElseThrow(() -> new IllegalArgumentException("Cannot found schema for type '" + type + "'"));
            return jsonSchemaGenerator.schemas(cls, arrayOf);
        });
    }

    public void registerClassForType(final SchemaType type, final Class<?> clazz) {
        classesBySchemaType.put(type, clazz);
    }

    public void clear() {
        schemaCache.clear();
    }

    private record CacheKey(SchemaType type, boolean arrayOf) {
    }
}
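
A minimal usage sketch for the cache above, assuming a JsonSchemaGenerator instance (here named jsonSchemaGenerator) is already available, for example injected by Micronaut; the calls mirror the public methods defined in the new class.

    // The first call computes the schema; the second is served from the ConcurrentMap cache.
    JsonSchemaCache cache = new JsonSchemaCache(jsonSchemaGenerator);
    Map<String, Object> flowSchema = cache.getSchemaForType(SchemaType.FLOW, false);
    Map<String, Object> cachedAgain = cache.getSchemaForType(SchemaType.FLOW, false);
    cache.clear(); // drops every cached schema; the registered classes stay in place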
@@ -4,6 +4,8 @@ import com.fasterxml.classmate.ResolvedType;
import com.fasterxml.classmate.members.HierarchicType;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.fasterxml.jackson.databind.node.TextNode;
@@ -47,10 +49,18 @@ import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;

import static io.kestra.core.serializers.JacksonMapper.MAP_TYPE_REFERENCE;

@Singleton
public class JsonSchemaGenerator {
    private static final List<Class<?>> TYPES_RESOLVED_AS_STRING = List.of(Duration.class, LocalTime.class, LocalDate.class, LocalDateTime.class, ZonedDateTime.class, OffsetDateTime.class, OffsetTime.class);

    private static final ObjectMapper MAPPER = JacksonMapper.ofJson().copy()
        .configure(SerializationFeature.WRITE_DURATIONS_AS_TIMESTAMPS, false);

    private static final ObjectMapper YAML_MAPPER = JacksonMapper.ofYaml().copy()
        .configure(SerializationFeature.WRITE_DURATIONS_AS_TIMESTAMPS, false);

    private final PluginRegistry pluginRegistry;

    @Inject
@@ -64,13 +74,21 @@ public class JsonSchemaGenerator {
        return this.schemas(cls, false);
    }

    private void replaceOneOfWithAnyOf(ObjectNode objectNode) {
        objectNode.findParents("oneOf").forEach(jsonNode -> {
            if (jsonNode instanceof ObjectNode oNode) {
                oNode.set("anyOf", oNode.remove("oneOf"));
            }
        });
    }

    public <T> Map<String, Object> schemas(Class<? extends T> cls, boolean arrayOf) {
        SchemaGeneratorConfigBuilder builder = new SchemaGeneratorConfigBuilder(
            SchemaVersion.DRAFT_7,
            OptionPreset.PLAIN_JSON
        );

        this.build(builder,true);
        this.build(builder, true);

        SchemaGeneratorConfig schemaGeneratorConfig = builder.build();

@@ -80,42 +98,69 @@ public class JsonSchemaGenerator {
            if (arrayOf) {
                objectNode.put("type", "array");
            }
            replaceAnyOfWithOneOf(objectNode);
            pullOfDefaultFromOneOf(objectNode);
            replaceOneOfWithAnyOf(objectNode);
            pullDocumentationAndDefaultFromAnyOf(objectNode);
            removeRequiredOnPropsWithDefaults(objectNode);

            return JacksonMapper.toMap(objectNode);
            return MAPPER.convertValue(objectNode, MAP_TYPE_REFERENCE);
        } catch (IllegalArgumentException e) {
            throw new IllegalArgumentException("Unable to generate jsonschema for '" + cls.getName() + "'", e);
        }
    }

    private void replaceAnyOfWithOneOf(ObjectNode objectNode) {
        objectNode.findParents("anyOf").forEach(jsonNode -> {
            if (jsonNode instanceof ObjectNode oNode) {
                oNode.set("oneOf", oNode.remove("anyOf"));
    private void removeRequiredOnPropsWithDefaults(ObjectNode objectNode) {
        objectNode.findParents("required").forEach(jsonNode -> {
            if (jsonNode instanceof ObjectNode clazzSchema && clazzSchema.get("required") instanceof ArrayNode requiredPropsNode && clazzSchema.get("properties") instanceof ObjectNode properties) {
                List<String> requiredFieldValues = StreamSupport.stream(requiredPropsNode.spliterator(), false)
                    .map(JsonNode::asText)
                    .toList();

                properties.fields().forEachRemaining(e -> {
                    int indexInRequiredArray = requiredFieldValues.indexOf(e.getKey());
                    if (indexInRequiredArray != -1 && e.getValue() instanceof ObjectNode valueNode && valueNode.has("default")) {
                        requiredPropsNode.remove(indexInRequiredArray);
                    }
                });

                if (requiredPropsNode.isEmpty()) {
                    clazzSchema.remove("required");
                }
            }
        });
    }

    // This hack exists because for Property we generate a oneOf for properties that are not strings.
    // By default, the 'default' is in each oneOf which Monaco editor didn't take into account.
    // So, we pull off the 'default' from any of the oneOf to the parent.
    private void pullOfDefaultFromOneOf(ObjectNode objectNode) {
        objectNode.findParents("oneOf").forEach(jsonNode -> {
    // This hack exists because for Property we generate a anyOf for properties that are not strings.
    // By default, the 'default' is in each anyOf which Monaco editor didn't take into account.
    // So, we pull off the 'default' from any of the anyOf to the parent.
    // same thing for documentation fields: 'title', 'description', '$deprecated'
    private void pullDocumentationAndDefaultFromAnyOf(ObjectNode objectNode) {
        objectNode.findParents("anyOf").forEach(jsonNode -> {
            if (jsonNode instanceof ObjectNode oNode) {
                JsonNode oneOf = oNode.get("oneOf");
                if (oneOf instanceof ArrayNode arrayNode) {
                JsonNode anyOf = oNode.get("anyOf");
                if (anyOf instanceof ArrayNode arrayNode) {
                    Iterator<JsonNode> it = arrayNode.elements();
                    JsonNode defaultNode = null;
                    while (it.hasNext() && defaultNode == null) {
                    var nodesToPullUp = new HashMap<String, Optional<JsonNode>>(Map.ofEntries(
                        Map.entry("default", Optional.empty()),
                        Map.entry("title", Optional.empty()),
                        Map.entry("description", Optional.empty()),
                        Map.entry("$deprecated", Optional.empty())
                    ));
                    // find nodes to pull up
                    while (it.hasNext() && nodesToPullUp.containsValue(Optional.<JsonNode>empty())) {
                        JsonNode next = it.next();
                        if (next instanceof ObjectNode nextAsObj) {
                            defaultNode = nextAsObj.get("default");
                            nodesToPullUp.entrySet().stream()
                                .filter(node -> node.getValue().isEmpty())
                                .forEach(node -> node
                                    .setValue(Optional.ofNullable(
                                        nextAsObj.get(node.getKey())
                                    )));
                        }
                    }
                    if (defaultNode != null) {
                        oNode.set("default", defaultNode);
                    }
                    // create nodes on parent
                    nodesToPullUp.entrySet().stream()
                        .filter(node -> node.getValue().isPresent())
                        .forEach(node -> oNode.set(node.getKey(), node.getValue().get()));
                }
            }
        });
@@ -141,7 +186,7 @@ public class JsonSchemaGenerator {

        try {
            sb.append("Default value is : `")
                .append(JacksonMapper.ofYaml().writeValueAsString(collectedTypeAttributes.get("default")).trim())
                .append(YAML_MAPPER.writeValueAsString(collectedTypeAttributes.get("default")).trim())
                .append("`");
        } catch (JsonProcessingException ignored) {

@@ -181,6 +226,7 @@ public class JsonSchemaGenerator {
    }

    protected void build(SchemaGeneratorConfigBuilder builder, boolean draft7) {
        // builder.withObjectMapper(builder.getObjectMapper().configure(SerializationFeature.WRITE_DURATIONS_AS_TIMESTAMPS, false));
        builder
            .with(new JakartaValidationModule(
                JakartaValidationOption.NOT_NULLABLE_METHOD_IS_REQUIRED,
@@ -252,11 +298,11 @@ public class JsonSchemaGenerator {
                TypeContext context = target.getContext();
                Class<?> erasedType = javaType.getTypeParameters().getFirst().getErasedType();

                if(String.class.isAssignableFrom(erasedType)) {
                if (String.class.isAssignableFrom(erasedType)) {
                    return List.of(
                        context.resolve(String.class)
                    );
                } else if(Object.class.equals(erasedType)) {
                } else if (Object.class.equals(erasedType)) {
                    return List.of(
                        context.resolve(Object.class)
                    );
@@ -296,6 +342,9 @@ public class JsonSchemaGenerator {
                if (pluginPropertyAnnotation.beta()) {
                    memberAttributes.put("$beta", true);
                }
                if (pluginPropertyAnnotation.internalStorageURI()) {
                    memberAttributes.put("$internalStorageURI", true);
                }
            }

            Schema schema = member.getAnnotationConsideringFieldAndGetter(Schema.class);
@@ -363,7 +412,7 @@ public class JsonSchemaGenerator {
            // handle deprecated tasks
            Schema schema = scope.getType().getErasedType().getAnnotation(Schema.class);
            Deprecated deprecated = scope.getType().getErasedType().getAnnotation(Deprecated.class);
            if ((schema != null && schema.deprecated()) || deprecated != null ) {
            if ((schema != null && schema.deprecated()) || deprecated != null) {
                collectedTypeAttributes.put("$deprecated", "true");
            }
        });
@@ -388,7 +437,7 @@ public class JsonSchemaGenerator {
        });

        // Subtype resolver for all plugins
        if(builder.build().getSchemaVersion() != SchemaVersion.DRAFT_2019_09) {
        if (builder.build().getSchemaVersion() != SchemaVersion.DRAFT_2019_09) {
            builder.forTypesInGeneral()
                .withSubtypeResolver((declaredType, context) -> {
                    TypeContext typeContext = context.getTypeContext();
@@ -577,7 +626,7 @@ public class JsonSchemaGenerator {
        if (property.has("allOf")) {
            for (Iterator<JsonNode> it = property.get("allOf").elements(); it.hasNext(); ) {
                JsonNode child = it.next();
                if(child.has("default")) {
                if (child.has("default")) {
                    return true;
                }
            }
@@ -591,7 +640,7 @@ public class JsonSchemaGenerator {
            OptionPreset.PLAIN_JSON
        );

        this.build(builder,false);
        this.build(builder, false);

        // we don't return base properties unless specified with @PluginProperty
        builder
@@ -603,10 +652,11 @@ public class JsonSchemaGenerator {
        SchemaGenerator generator = new SchemaGenerator(schemaGeneratorConfig);
        try {
            ObjectNode objectNode = generator.generateSchema(cls);
            replaceAnyOfWithOneOf(objectNode);
            pullOfDefaultFromOneOf(objectNode);
            replaceOneOfWithAnyOf(objectNode);
            pullDocumentationAndDefaultFromAnyOf(objectNode);
            removeRequiredOnPropsWithDefaults(objectNode);

            return JacksonMapper.toMap(extractMainRef(objectNode));
            return MAPPER.convertValue(extractMainRef(objectNode), MAP_TYPE_REFERENCE);
        } catch (IllegalArgumentException e) {
            throw new IllegalArgumentException("Unable to generate jsonschema for '" + cls.getName() + "'", e);
        }
@@ -714,7 +764,8 @@ public class JsonSchemaGenerator {

            field.setAccessible(true);
            return field.invoke(instance);
        } catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException | IllegalArgumentException ignored) {
        } catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException |
                 IllegalArgumentException ignored) {

        }

@@ -723,7 +774,8 @@ public class JsonSchemaGenerator {

            field.setAccessible(true);
            return field.invoke(instance);
        } catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException | IllegalArgumentException ignored) {
        } catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException |
                 IllegalArgumentException ignored) {

        }

@@ -7,6 +7,7 @@ import lombok.Data;
import lombok.NoArgsConstructor;

import java.util.*;
import java.util.function.Predicate;
import java.util.stream.Collectors;

import static java.util.function.Predicate.not;
@@ -50,9 +51,12 @@ public class Plugin {
        if (subgroup == null) {
            plugin.title = registeredPlugin.title();
        } else {
            subGroupInfos = registeredPlugin.allClass().stream().filter(c -> c.getName().contains(subgroup)).map(clazz -> clazz.getPackage().getDeclaredAnnotation(PluginSubGroup.class)).toList().getFirst();
            plugin.title = !subGroupInfos.title().isEmpty() ? subGroupInfos.title() : subgroup.substring(subgroup.lastIndexOf('.') + 1);;

            subGroupInfos = registeredPlugin.allClass().stream()
                .filter(c -> c.getPackageName().contains(subgroup))
                .min(Comparator.comparingInt(a -> a.getPackageName().length()))
                .map(clazz -> clazz.getPackage().getDeclaredAnnotation(PluginSubGroup.class))
                .orElseThrow();
            plugin.title = !subGroupInfos.title().isEmpty() ? subGroupInfos.title() : subgroup.substring(subgroup.lastIndexOf('.') + 1);
        }
        plugin.group = registeredPlugin.group();
        plugin.description = subGroupInfos != null && !subGroupInfos.description().isEmpty() ? subGroupInfos.description() : registeredPlugin.description();
@@ -74,27 +78,28 @@ public class Plugin {
        plugin.categories = subGroupInfos != null ?
            Arrays.stream(subGroupInfos.categories()).toList() :
            registeredPlugin
                .allClass()
                .stream()
                .map(clazz -> clazz.getPackage().getDeclaredAnnotation(PluginSubGroup.class))
                .filter(Objects::nonNull)
                .flatMap(r -> Arrays.stream(r.categories()))
                .distinct()
                .toList();
                .allClass()
                .stream()
                .map(clazz -> clazz.getPackage().getDeclaredAnnotation(PluginSubGroup.class))
                .filter(Objects::nonNull)
                .flatMap(r -> Arrays.stream(r.categories()))
                .distinct()
                .toList();

        plugin.subGroup = subgroup;

        plugin.tasks = filterAndGetClassName(registeredPlugin.getTasks(), includeDeprecated).stream().filter(c -> subgroup == null || c.startsWith(subgroup)).toList();
        plugin.triggers = filterAndGetClassName(registeredPlugin.getTriggers(), includeDeprecated).stream().filter(c -> subgroup == null || c.startsWith(subgroup)).toList();
        plugin.conditions = filterAndGetClassName(registeredPlugin.getConditions(), includeDeprecated).stream().filter(c -> subgroup == null || c.startsWith(subgroup)).toList();
        plugin.storages = filterAndGetClassName(registeredPlugin.getStorages(), includeDeprecated).stream().filter(c -> subgroup == null || c.startsWith(subgroup)).toList();
        plugin.secrets = filterAndGetClassName(registeredPlugin.getSecrets(), includeDeprecated).stream().filter(c -> subgroup == null || c.startsWith(subgroup)).toList();
        plugin.taskRunners = filterAndGetClassName(registeredPlugin.getTaskRunners(), includeDeprecated).stream().filter(c -> subgroup == null || c.startsWith(subgroup)).toList();
        plugin.apps = filterAndGetClassName(registeredPlugin.getApps(), includeDeprecated).stream().filter(c -> subgroup == null || c.startsWith(subgroup)).toList();
        plugin.appBlocks = filterAndGetClassName(registeredPlugin.getAppBlocks(), includeDeprecated).stream().filter(c -> subgroup == null || c.startsWith(subgroup)).toList();
        plugin.charts = filterAndGetClassName(registeredPlugin.getCharts(), includeDeprecated).stream().filter(c -> subgroup == null || c.startsWith(subgroup)).toList();
        plugin.dataFilters = filterAndGetClassName(registeredPlugin.getDataFilters(), includeDeprecated).stream().filter(c -> subgroup == null || c.startsWith(subgroup)).toList();
        plugin.logExporters = filterAndGetClassName(registeredPlugin.getLogExporters(), includeDeprecated).stream().filter(c -> subgroup == null || c.startsWith(subgroup)).toList();
        Predicate<Class<?>> packagePredicate = c -> subgroup == null || c.getPackageName().equals(subgroup);
        plugin.tasks = filterAndGetClassName(registeredPlugin.getTasks(), includeDeprecated, packagePredicate).stream().toList();
        plugin.triggers = filterAndGetClassName(registeredPlugin.getTriggers(), includeDeprecated, packagePredicate).stream().toList();
        plugin.conditions = filterAndGetClassName(registeredPlugin.getConditions(), includeDeprecated, packagePredicate).stream().toList();
        plugin.storages = filterAndGetClassName(registeredPlugin.getStorages(), includeDeprecated, packagePredicate).stream().toList();
        plugin.secrets = filterAndGetClassName(registeredPlugin.getSecrets(), includeDeprecated, packagePredicate).stream().toList();
        plugin.taskRunners = filterAndGetClassName(registeredPlugin.getTaskRunners(), includeDeprecated, packagePredicate).stream().toList();
        plugin.apps = filterAndGetClassName(registeredPlugin.getApps(), includeDeprecated, packagePredicate).stream().toList();
        plugin.appBlocks = filterAndGetClassName(registeredPlugin.getAppBlocks(), includeDeprecated, packagePredicate).stream().toList();
        plugin.charts = filterAndGetClassName(registeredPlugin.getCharts(), includeDeprecated, packagePredicate).stream().toList();
        plugin.dataFilters = filterAndGetClassName(registeredPlugin.getDataFilters(), includeDeprecated, packagePredicate).stream().toList();
        plugin.logExporters = filterAndGetClassName(registeredPlugin.getLogExporters(), includeDeprecated, packagePredicate).stream().toList();

        return plugin;
    }
@@ -103,15 +108,16 @@ public class Plugin {
     * Filters the given list of class all internal Plugin, as well as, all legacy org.kestra classes.
     * Those classes are only filtered from the documentation to ensure backward compatibility.
     *
     * @param list The list of classes?
     * @param list The list of classes?
     * @param includeDeprecated whether to include deprecated plugins or not
     * @return a filtered streams.
     * @return a filtered streams.
     */
    private static List<String> filterAndGetClassName(final List<? extends Class<?>> list, boolean includeDeprecated) {
    private static List<String> filterAndGetClassName(final List<? extends Class<?>> list, boolean includeDeprecated, Predicate<Class<?>> clazzFilter) {
        return list
            .stream()
            .filter(not(io.kestra.core.models.Plugin::isInternal))
            .filter(p -> includeDeprecated || !io.kestra.core.models.Plugin.isDeprecated(p))
            .filter(clazzFilter)
            .map(Class::getName)
            .filter(c -> !c.startsWith("org.kestra."))
            .toList();

@@ -1,11 +1,20 @@
package io.kestra.core.docs;

import com.fasterxml.jackson.annotation.JsonCreator;
import io.kestra.core.utils.Enums;


public enum SchemaType {
    flow,
    template,
    task,
    trigger,
    plugindefault,
    apps,
    dashboard
    FLOW,
    TEMPLATE,
    TASK,
    TRIGGER,
    PLUGINDEFAULT,
    APPS,
    DASHBOARD;

    @JsonCreator
    public static SchemaType fromString(final String value) {
        return Enums.getForNameIgnoreCase(value, SchemaType.class);
    }
}
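
A short sketch of what the new @JsonCreator buys: assuming Enums.getForNameIgnoreCase matches constants case-insensitively, as its name suggests, payloads written with the old lowercase values keep deserializing after the rename to uppercase constants.

    SchemaType a = SchemaType.fromString("flow"); // resolves to SchemaType.FLOW
    SchemaType b = SchemaType.fromString("FLOW"); // also SchemaType.FLOW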
Some files were not shown because too many files have changed in this diff.