Mirror of https://github.com/kestra-io/kestra.git (synced 2025-12-29 09:00:26 -05:00)

Compare commits: v0.21.16...v0.22.0-rc — 681 commits
The compare view lists the 681 commits by SHA1 only (from 8617eb0c7b through 5c8c2a5c42); the author, date, and commit-message columns were not captured in this view.
.devcontainer/Dockerfile — new file (67 lines)
@@ -0,0 +1,67 @@
FROM ubuntu:24.04

ARG DEBIAN_FRONTEND=noninteractive

USER root
WORKDIR /root

RUN apt update && apt install -y \
    apt-transport-https ca-certificates gnupg curl wget git zip unzip less zsh net-tools iputils-ping jq lsof

ENV HOME="/root"

# --------------------------------------
# Git
# --------------------------------------
# Add the devcontainer workspace folder as a safe directory so that git
# version control works inside the container's file system.
RUN git config --global --add safe.directory "/workspaces/kestra"
# --------------------------------------

# --------------------------------------
# Oh my zsh
# --------------------------------------
RUN sh -c "$(curl -fsSL https://raw.githubusercontent.com/ohmyzsh/ohmyzsh/master/tools/install.sh)" -- \
    -t robbyrussell \
    -p git -p node -p npm

ENV SHELL=/bin/zsh
# --------------------------------------

# --------------------------------------
# Java
# --------------------------------------
RUN wget https://download.oracle.com/java/21/latest/jdk-21_linux-x64_bin.deb
RUN dpkg -i ./jdk-21_linux-x64_bin.deb
ENV JAVA_HOME=/usr/java/jdk-21-oracle-x64
ENV PATH="$PATH:$JAVA_HOME/bin"
# Will load a custom configuration file for Micronaut
ENV MICRONAUT_ENVIRONMENTS=local,override
# Sets the path where plugin JARs are saved; it is loaded during the startup process
ENV KESTRA_PLUGINS_PATH="/workspaces/kestra/local/plugins"
# --------------------------------------

# --------------------------------------
# Node.js
# --------------------------------------
RUN curl -fsSL https://deb.nodesource.com/setup_22.x -o nodesource_setup.sh \
    && bash nodesource_setup.sh && apt install -y nodejs
# Increases the JavaScript heap to 4GB to prevent out-of-memory errors during startup
ENV NODE_OPTIONS=--max-old-space-size=4096
# --------------------------------------

# --------------------------------------
# Python
# --------------------------------------
RUN apt install -y python3 pip python3-venv
# --------------------------------------

# --------------------------------------
# SSH
# --------------------------------------
RUN mkdir -p ~/.ssh
RUN touch ~/.ssh/config
RUN echo "Host github.com" >> ~/.ssh/config \
    && echo " IdentityFile ~/.ssh/id_ed25519" >> ~/.ssh/config
RUN touch ~/.ssh/id_ed25519
# --------------------------------------
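To sanity-check this image outside of VSCode, a minimal sketch is shown below; it assumes Docker is available on the host, and the `kestra-devcontainer` tag is just an illustrative name.

```bash
# Build the devcontainer image from the repository root
docker build -t kestra-devcontainer .devcontainer

# Verify the toolchain installed by the Dockerfile is present
docker run --rm kestra-devcontainer java -version
docker run --rm kestra-devcontainer node --version
docker run --rm kestra-devcontainer python3 --version
```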
.devcontainer/README.md — new file (149 lines)
@@ -0,0 +1,149 @@
# Kestra Devcontainer

This devcontainer provides a quick and easy setup for anyone using VSCode to start developing on either the frontend or backend of this project. It bootstraps a Docker container for you to develop inside of, without the need to set up the environment manually.

---

## INSTRUCTIONS

### Setup:

Take a look at this guide to get an idea of what the setup is like, as this devcontainer follows the same approach: https://kestra.io/docs/getting-started/contributing

Once you have this repo cloned to your local system, you will need to install the VSCode extension [Remote Development](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.vscode-remote-extensionpack).

Then run the following command from the command palette:
`Dev Containers: Open Folder in Container...` and select your Kestra root folder.

This will then put you inside a Docker container ready for development.

NOTE: you'll need to wait for the Gradle build to finish and compile the Java files, but this process should happen automatically within VSCode.

In the meantime, you can move on to the next step...

---

### Development:

- Create a `.env.development.local` file in the `ui` folder and paste the following:

```bash
# This lets the frontend know what the backend URL is, but you are free to change this to your actual server URL, e.g. a hosted version of Kestra.
VITE_APP_API_URL=http://localhost:8080
```

- Navigate into the `ui` folder and run `npm install` to install the dependencies for the frontend project.

- Now go to the `cli/src/main/resources` folder and create an `application-override.yml` file.

Now you have two choices:

`Local mode`:

Runs the Kestra server in local mode, which uses an H2 database, so this is the only config you'd need:

```yaml
micronaut:
  server:
    cors:
      enabled: true
      configurations:
        all:
          allowedOrigins:
            - http://localhost:5173
```

You can then open a new terminal and run the following command to start the backend server: `./gradlew runLocal`
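To confirm local mode is up before moving on, a minimal sketch (ports and commands come from the steps above; the exact response body of the backend is not guaranteed):

```bash
# Wait until the backend answers on port 8080, then start the UI dev server
until curl -s -o /dev/null http://localhost:8080; do sleep 2; done
cd ui && npm run dev   # the UI becomes available on http://localhost:5173
```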
`Standalone mode`:

Runs in standalone mode, which uses Postgres. Make sure you already have a local Postgres instance running on localhost:

```yaml
kestra:
  repository:
    type: postgres
  storage:
    type: local
    local:
      base-path: "/app/storage"
  queue:
    type: postgres
  tasks:
    tmp-dir:
      path: /tmp/kestra-wd/tmp
  anonymous-usage-report:
    enabled: false
  server:
    basic-auth:
      enabled: false

datasources:
  postgres:
    # Note: you must use the "host.docker.internal" host when connecting to a Docker container outside of your devcontainer, as localhost will only point back to this devcontainer.
    url: jdbc:postgresql://host.docker.internal:5432/kestra
    driverClassName: org.postgresql.Driver
    username: kestra
    password: k3str4

flyway:
  datasources:
    postgres:
      enabled: true
      locations:
        - classpath:migrations/postgres
      # We must ignore missing migrations, as we may delete the wrong ones or delete those that are no longer used.
      ignore-migration-patterns: "*:missing,*:future"
      out-of-order: true

micronaut:
  server:
    cors:
      enabled: true
      configurations:
        all:
          allowedOrigins:
            - http://localhost:5173
```
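If you don't already have a Postgres instance running, a minimal sketch that starts one on the host machine with credentials matching the config above (the container name and image tag are illustrative; the devcontainer reaches it via `host.docker.internal`):

```bash
docker run -d --name kestra-postgres \
  -e POSTGRES_DB=kestra \
  -e POSTGRES_USER=kestra \
  -e POSTGRES_PASSWORD=k3str4 \
  -p 5432:5432 \
  postgres:16
```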
Then add the following settings to the `.vscode/launch.json` file:

```json
{
    "version": "0.2.0",
    "configurations": [
        {
            "type": "java",
            "name": "Kestra Standalone",
            "request": "launch",
            "mainClass": "io.kestra.cli.App",
            "projectName": "cli",
            "args": "server standalone"
        }
    ]
}
```

You can then use the VSCode `Run and Debug` view to start the Kestra server.

Additionally, if you're doing frontend development, you can run `npm run dev` from the `ui` folder once the above is running (which provides a backend) and access your application at `localhost:5173`. This has the benefit of watching your files and hot-reloading whenever you make frontend changes.
#### Plugins

If you want your plugins to be loaded inside your devcontainer, point the `source` field in the following `devcontainer.json` snippet to a folder containing the JARs of the plugins you want to embed:

```
"mounts": [
    {
        "source": "/absolute/path/to/your/local/jar/plugins/folder",
        "target": "/workspaces/kestra/local/plugins",
        "type": "bind"
    }
],
```

---

### GIT

If you want to commit to GitHub, navigate to the `~/.ssh` folder and either create a new SSH key or overwrite the existing `id_ed25519` file by pasting an existing SSH key from your local machine into it. You will then need to change the permissions of the file by running `chmod 600 id_ed25519`. This will allow you to push to GitHub.

---
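As an alternative to pasting an existing key, a minimal sketch that generates a fresh key inside the container (the email is a placeholder; you would then add the public key to your GitHub account):

```bash
cd ~/.ssh
ssh-keygen -t ed25519 -f id_ed25519 -C "you@example.com"   # placeholder email
chmod 600 id_ed25519
cat id_ed25519.pub   # add this to GitHub -> Settings -> SSH and GPG keys
```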
.devcontainer/devcontainer.json — new file (46 lines)
@@ -0,0 +1,46 @@
{
    "name": "kestra",
    "build": {
        "context": ".",
        "dockerfile": "Dockerfile"
    },
    "workspaceFolder": "/workspaces/kestra",
    "forwardPorts": [5173, 8080],
    "customizations": {
        "vscode": {
            "settings": {
                "terminal.integrated.profiles.linux": {
                    "zsh": {
                        "path": "/bin/zsh"
                    }
                },
                "workbench.iconTheme": "vscode-icons",
                "editor.tabSize": 4,
                "editor.formatOnSave": true,
                "files.insertFinalNewline": true,
                "editor.defaultFormatter": "esbenp.prettier-vscode",
                "telemetry.telemetryLevel": "off",
                "editor.bracketPairColorization.enabled": true,
                "editor.guides.bracketPairs": "active"
            },
            "extensions": [
                "redhat.vscode-yaml",
                "dbaeumer.vscode-eslint",
                "vscode-icons-team.vscode-icons",
                "eamodio.gitlens",
                "esbenp.prettier-vscode",
                "aaron-bond.better-comments",
                "codeandstuff.package-json-upgrade",
                "andys8.jest-snippets",
                "oderwat.indent-rainbow",
                "evondev.indent-rainbow-palettes",
                "formulahendry.auto-rename-tag",
                "IronGeek.vscode-env",
                "yoavbls.pretty-ts-errors",
                "github.vscode-github-actions",
                "vscjava.vscode-java-pack",
                "ms-azuretools.vscode-docker"
            ]
        }
    }
}
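If you prefer the command line over the VSCode palette, a sketch using the Dev Containers CLI; this assumes the `@devcontainers/cli` npm package, which is not part of this repository's setup:

```bash
npm install -g @devcontainers/cli
devcontainer up --workspace-folder .        # build the image and start the container
devcontainer exec --workspace-folder . zsh  # open a shell inside it
```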
.github/workflows/check.yml (vendored) — file removed (117 lines)
@@ -1,117 +0,0 @@
name: Daily Core check

on:
  schedule:
    - cron: '0 5 * * *'

jobs:
  check:
    env:
      SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
      GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
    name: Check & Publish
    runs-on: ubuntu-latest
    timeout-minutes: 60
    steps:
      # Checkout
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      # Checkout GitHub Actions
      - uses: actions/checkout@v4
        with:
          repository: kestra-io/actions
          path: actions
          ref: main

      # Setup build
      - uses: ./actions/.github/actions/setup-build
        id: build
        with:
          java-enabled: true
          node-enabled: true
          python-enabled: true
          caches-enabled: true

      # Services
      - name: Build the docker-compose stack
        run: docker compose -f docker-compose-ci.yml up -d
        if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}

      # Gradle check
      - name: Build with Gradle
        if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}
        env:
          GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
        run: |
          echo $GOOGLE_SERVICE_ACCOUNT | base64 -d > ~/.gcp-service-account.json
          export GOOGLE_APPLICATION_CREDENTIALS=$HOME/.gcp-service-account.json
          ./gradlew check javadoc --parallel --refresh-dependencies

      # Sonar
      - name: Analyze with Sonar
        if: ${{ env.SONAR_TOKEN != 0 && (github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '') }}
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
        run: ./gradlew sonar --info

      # Allure check
      - name: Auth to Google Cloud
        id: auth
        if: ${{ always() && env.GOOGLE_SERVICE_ACCOUNT != 0 }}
        uses: 'google-github-actions/auth@v2'
        with:
          credentials_json: '${{ secrets.GOOGLE_SERVICE_ACCOUNT }}'

      - uses: rlespinasse/github-slug-action@v5

      - name: Publish allure report
        uses: andrcuns/allure-publish-action@v2.9.0
        if: ${{ always() && env.GOOGLE_SERVICE_ACCOUNT != 0 && (github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '') }}
        env:
          GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          JAVA_HOME: /usr/lib/jvm/default-jvm/
        with:
          storageType: gcs
          resultsGlob: "**/build/allure-results"
          bucket: internal-kestra-host
          baseUrl: "https://internal.dev.kestra.io"
          prefix: ${{ format('{0}/{1}', github.repository, 'allure/java') }}
          copyLatest: true
          ignoreMissingResults: true

      # Jacoco
      - name: 'Set up Cloud SDK'
        if: ${{ github.ref == 'refs/heads/develop' && env.GOOGLE_SERVICE_ACCOUNT != 0 && (github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '') }}
        uses: 'google-github-actions/setup-gcloud@v2'

      - name: 'Copy jacoco files'
        if: ${{ github.ref == 'refs/heads/develop' && env.GOOGLE_SERVICE_ACCOUNT != 0 && (github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '') }}
        run: |
          mv build/reports/jacoco/testCodeCoverageReport build/reports/jacoco/test/
          mv build/reports/jacoco/test/testCodeCoverageReport.xml build/reports/jacoco/test/jacocoTestReport.xml
          gsutil -m rsync -d -r build/reports/jacoco/test/ gs://internal-kestra-host/${{ format('{0}/{1}', github.repository, 'jacoco') }}

      # report test
      - name: Test Report
        uses: mikepenz/action-junit-report@v5
        if: success() || failure()
        with:
          report_paths: '**/build/test-results/**/TEST-*.xml'

      # Codecov
      - uses: codecov/codecov-action@v5
        if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}
        with:
          token: ${{ secrets.CODECOV_TOKEN }}

      - name: Notify failed CI
        id: send-ci-failed
        if: always() && job.status != 'success'
        uses: kestra-io/actions/.github/actions/send-ci-failed@main
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
CodeQL workflow (file name not shown in this capture)
@@ -6,11 +6,11 @@
name: "CodeQL"

on:
  push:
    branches: [develop]
  schedule:
    - cron: '0 5 * * 1'

  workflow_dispatch: {}

jobs:
  analyze:
    name: Analyze
@@ -51,13 +51,23 @@ jobs:
      # Set up JDK
      - name: Set up JDK
        uses: actions/setup-java@v4
        if: ${{ matrix.language == 'java' }}
        with:
          distribution: 'temurin'
          java-version: 21

      - name: Setup gradle
        if: ${{ matrix.language == 'java' }}
        uses: gradle/actions/setup-gradle@v4

      - name: Build with Gradle
        if: ${{ matrix.language == 'java' }}
        run: ./gradlew testClasses -x :ui:installFrontend -x :ui:assembleFrontend

      # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
      # If this step fails, then you should remove it and run the build manually (see below)
      - name: Autobuild
        if: ${{ matrix.language != 'java' }}
        uses: github/codeql-action/autobuild@v3

      # ℹ️ Command-line programs to run using the OS shell.
.github/workflows/docker.yml (vendored) — 33 changed lines
@@ -1,4 +1,4 @@
name: Create Docker images on tag
name: Create Docker images on Release

on:
  workflow_dispatch:
@@ -11,6 +11,10 @@ on:
        options:
          - "true"
          - "false"
      release-tag:
        description: 'Kestra Release Tag'
        required: false
        type: string
      plugin-version:
        description: 'Plugin version'
        required: false
@@ -38,17 +42,16 @@ jobs:
    name: Publish Docker
    needs: [ plugins ]
    runs-on: ubuntu-latest
    if: startsWith(github.ref, 'refs/tags/v')
    strategy:
      matrix:
        image:
          - name: "-no-plugins"
            plugins: ""
            packages: ""
            packages: jattach
            python-libs: ""
          - name: ""
            plugins: ${{needs.plugins.outputs.plugins}}
            packages: python3 python3-venv python-is-python3 python3-pip nodejs npm curl zip unzip
            packages: python3 python3-venv python-is-python3 python3-pip nodejs npm curl zip unzip jattach
            python-libs: kestra
    steps:
      - uses: actions/checkout@v4
@@ -57,13 +60,22 @@ jobs:
      - name: Set image name
        id: vars
        run: |
          TAG=${GITHUB_REF#refs/*/}
          echo "tag=${TAG}" >> $GITHUB_OUTPUT
          echo "plugins=${{ matrix.image.plugins }}" >> $GITHUB_OUTPUT
          if [[ "${{ inputs.release-tag }}" == "" ]]; then
            TAG=${GITHUB_REF#refs/*/}
            echo "tag=${TAG}" >> $GITHUB_OUTPUT
          else
            TAG="${{ inputs.release-tag }}"
            echo "tag=${TAG}" >> $GITHUB_OUTPUT
          fi

          if [[ "${{ env.PLUGIN_VERSION }}" == *"-SNAPSHOT" ]]; then
            echo "plugins=--repositories=https://s01.oss.sonatype.org/content/repositories/snapshots ${{ matrix.image.plugins }}" >> $GITHUB_OUTPUT;
          else
            echo "plugins=${{ matrix.image.plugins }}" >> $GITHUB_OUTPUT
          fi
      # Download release
      - name: Download release
        uses: robinraju/release-downloader@v1.11
        uses: robinraju/release-downloader@v1.12
        with:
          tag: ${{steps.vars.outputs.tag}}
          fileName: 'kestra-*'
@@ -77,6 +89,11 @@ jobs:
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3

      - name: Docker - Fix Qemu
        shell: bash
        run: |
          docker run --rm --privileged multiarch/qemu-user-static --reset -p yes -c yes

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
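The tag derivation in the `Set image name` step relies on bash parameter expansion; a standalone sketch of the same expression, with an illustrative ref value:

```bash
GITHUB_REF="refs/tags/v0.22.0-rc1"
TAG=${GITHUB_REF#refs/*/}   # strips the shortest "refs/<segment>/" prefix
echo "$TAG"                 # -> v0.22.0-rc1
```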
.github/workflows/e2e.yml (vendored) — 12 changed lines
@@ -42,19 +42,13 @@ jobs:
        with:
          path: kestra

      # Checkout GitHub Actions
      - uses: actions/checkout@v4
        with:
          repository: kestra-io/actions
          path: actions
          ref: main

      # Setup build
      - uses: ./actions/.github/actions/setup-build
      - uses: kestra-io/actions/.github/actions/setup-build@main
        id: build
        with:
          java-enabled: true
          caches-enabled: true
          node-enabled: true
          python-enabled: true

      # Get Docker Image
      - name: Download Kestra Image
.github/workflows/generate-translations.yml (vendored) — new file (67 lines)
@@ -0,0 +1,67 @@
name: Auto-Translate UI keys and create PR

on:
  schedule:
    - cron: "0 9-21 * * *" # Every hour from 9 AM to 9 PM
  workflow_dispatch:
    inputs:
      retranslate_modified_keys:
        description: "Whether to re-translate modified keys even if they already have translations."
        type: choice
        options:
          - "false"
          - "true"
        default: "false"
        required: false

jobs:
  translations:
    name: Translations
    runs-on: ubuntu-latest
    timeout-minutes: 10
    steps:
      - uses: actions/checkout@v4
        name: Checkout
        with:
          fetch-depth: 0

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.x"

      - name: Install Python dependencies
        run: pip install gitpython openai

      - name: Generate translations
        run: python ui/src/translations/generate_translations.py ${{ github.event.inputs.retranslate_modified_keys }}
        env:
          OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}

      - name: Set up Node
        uses: actions/setup-node@v4
        with:
          node-version: "20.x"

      - name: Check keys matching
        run: node ui/src/translations/check.js

      - name: Set up Git
        run: |
          git config --global user.name "GitHub Action"
          git config --global user.email "actions@github.com"

      - name: Commit and create PR
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          BRANCH_NAME="chore/update-translations-$(date +%s)"
          git checkout -b $BRANCH_NAME
          git add ui/src/translations/*.json
          if git diff --cached --quiet; then
            echo "No changes to commit. Exiting with success."
            exit 0
          fi
          git commit -m "chore(translations): localize to languages other than English"
          git push -u origin $BRANCH_NAME || (git push origin --delete $BRANCH_NAME && git push -u origin $BRANCH_NAME)
          gh pr create --title "Translations from en.json" --body "This PR was created automatically by a GitHub Action." --base develop --head $BRANCH_NAME --assignee anna-geller --reviewer anna-geller
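The same translation tooling can be exercised locally before relying on the schedule; a minimal sketch using the commands from the workflow (the API key value is a placeholder):

```bash
pip install gitpython openai
export OPENAI_API_KEY="sk-..."                              # placeholder
python ui/src/translations/generate_translations.py false   # "false" = do not re-translate modified keys
node ui/src/translations/check.js                            # verify locale files stay in sync with en.json
```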
.github/workflows/generate_translations.yml (vendored) — file removed (111 lines)
@@ -1,111 +0,0 @@
name: Generate Translations

on:
  pull_request:
    types: [opened, synchronize]
    paths:
      - "ui/src/translations/en.json"

  push:
    branches:
      - develop

env:
  OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}

jobs:
  commit:
    name: Commit directly to PR
    runs-on: ubuntu-latest
    if: ${{ github.event.pull_request.head.repo.fork == false }}
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 50
          ref: ${{ github.head_ref }}

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.x"

      - name: Install Python dependencies
        run: pip install gitpython openai

      - name: Generate translations
        run: python ui/src/translations/generate_translations.py

      - name: Set up Node
        uses: actions/setup-node@v4
        with:
          node-version: "20.x"

      - name: Check keys matching
        run: node ui/src/translations/check.js

      - name: Set up Git
        run: |
          git config --global user.name "GitHub Action"
          git config --global user.email "actions@github.com"

      - name: Check for changes and commit
        env:
          GH_TOKEN: ${{ github.token }}
        run: |
          git add ui/src/translations/*.json
          if git diff --cached --quiet; then
            echo "No changes to commit. Exiting with success."
            exit 0
          fi
          git commit -m "chore(translations): auto generate values for languages other than english"
          git push origin ${{ github.head_ref }}

  pull_request:
    name: Open PR for a forked repository
    runs-on: ubuntu-latest
    if: ${{ github.event.pull_request.head.repo.fork == true }}
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 10

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.x"

      - name: Install Python dependencies
        run: pip install gitpython openai

      - name: Generate translations
        run: python ui/src/translations/generate_translations.py

      - name: Set up Node
        uses: actions/setup-node@v4
        with:
          node-version: "20.x"

      - name: Check keys matching
        run: node ui/src/translations/check.js

      - name: Set up Git
        run: |
          git config --global user.name "GitHub Action"
          git config --global user.email "actions@github.com"

      - name: Create and push a new branch
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          BRANCH_NAME="generated-translations-${{ github.event.pull_request.head.repo.name }}"

          git checkout -b $BRANCH_NAME
          git add ui/src/translations/*.json
          if git diff --cached --quiet; then
            echo "No changes to commit. Exiting with success."
            exit 0
          fi
          git commit -m "chore(translations): auto generate values for languages other than english"
          git push origin $BRANCH_NAME
Release plugins workflow (file name not shown in this capture)
@@ -4,7 +4,7 @@ on:
  workflow_dispatch:
    inputs:
      releaseVersion:
        description: 'The release version (e.g., 0.21.0)'
        description: 'The release version (e.g., 0.21.0-rc1)'
        required: true
        type: string
      nextVersion:
@@ -18,29 +18,50 @@ on:
jobs:
  release:
    name: Release plugins
    runs-on: kestra-private-standard
    runs-on: ubuntu-latest
    steps:
      # Checkout
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      # Checkout GitHub Actions
      - uses: actions/checkout@v4
        with:
          repository: kestra-io/actions
          path: actions
          ref: main

      # Setup build
      - uses: ./actions/.github/actions/setup-build
        id: build
        with:
          java-enabled: true
          node-enabled: true
          python-enabled: true
          caches-enabled: true

      # Get Plugins List
      - name: Get Plugins List
        uses: ./.github/actions/plugins-list
        if: "!startsWith(github.ref, 'refs/tags/v')"
        id: plugins-list
        with:
          plugin-version: 'LATEST'

      - name: 'Configure Git'
        run: |
          git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
          git config --global user.name "github-actions[bot]"

      # Execute
      - name: Run Gradle Release
        if: ${{ github.event.inputs.dryRun == 'false' }}
        env:
          GITHUB_PAT: ${{ secrets.GH_PERSONAL_TOKEN }}
        run: |
          chmod +x ./release-plugins.sh;
          ./release-plugins.sh \
          chmod +x ./dev-tools/release-plugins.sh;

          ./dev-tools/release-plugins.sh \
            --release-version=${{github.event.inputs.releaseVersion}} \
            --next-version=${{github.event.inputs.nextVersion}} \
            --yes \
@@ -51,8 +72,9 @@ jobs:
        env:
          GITHUB_PAT: ${{ secrets.GH_PERSONAL_TOKEN }}
        run: |
          chmod +x ./release-plugins.sh;
          ./release-plugins.sh \
          chmod +x ./dev-tools/release-plugins.sh;

          ./dev-tools/release-plugins.sh \
            --release-version=${{github.event.inputs.releaseVersion}} \
            --next-version=${{github.event.inputs.nextVersion}} \
            --dry-run \
.github/workflows/gradle-release.yml (vendored) — new file (89 lines)
@@ -0,0 +1,89 @@
name: Run Gradle Release
run-name: "Releasing Kestra ${{ github.event.inputs.releaseVersion }} 🚀"
on:
  workflow_dispatch:
    inputs:
      releaseVersion:
        description: 'The release version (e.g., 0.21.0-rc1)'
        required: true
        type: string
      nextVersion:
        description: 'The next version (e.g., 0.22.0-SNAPSHOT)'
        required: true
        type: string
env:
  RELEASE_VERSION: "${{ github.event.inputs.releaseVersion }}"
  NEXT_VERSION: "${{ github.event.inputs.nextVersion }}"
jobs:
  release:
    name: Release Kestra
    runs-on: ubuntu-latest
    if: github.ref == 'refs/heads/develop'
    steps:
      # Checks
      - name: Check Inputs
        run: |
          if ! [[ "$RELEASE_VERSION" =~ ^[0-9]+(\.[0-9]+)\.0-rc[01](-SNAPSHOT)?$ ]]; then
            echo "Invalid release version. Must match regex: ^[0-9]+(\.[0-9]+)\.0-rc[01](-SNAPSHOT)?$"
            exit 1
          fi

          if ! [[ "$NEXT_VERSION" =~ ^[0-9]+(\.[0-9]+)\.0-SNAPSHOT$ ]]; then
            echo "Invalid next version. Must match regex: ^[0-9]+(\.[0-9]+)\.0-SNAPSHOT$"
            exit 1;
          fi
      # Checkout
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      # Checkout GitHub Actions
      - uses: actions/checkout@v4
        with:
          repository: kestra-io/actions
          path: actions
          ref: main

      # Setup build
      - uses: ./actions/.github/actions/setup-build
        id: build
        with:
          java-enabled: true
          node-enabled: true
          python-enabled: true
          caches-enabled: true

      - name: Configure Git
        run: |
          git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
          git config --global user.name "github-actions[bot]"

      # Execute
      - name: Run Gradle Release
        env:
          GITHUB_PAT: ${{ secrets.GH_PERSONAL_TOKEN }}
        run: |
          # Extract the major and minor versions
          BASE_VERSION=$(echo "$RELEASE_VERSION" | sed -E 's/^([0-9]+\.[0-9]+)\..*/\1/')
          PUSH_RELEASE_BRANCH="releases/v${BASE_VERSION}.x"

          # Create and push release branch
          git checkout -b "$PUSH_RELEASE_BRANCH";
          git push -u origin "$PUSH_RELEASE_BRANCH";

          # Run gradle release
          git checkout develop;

          if [[ "$RELEASE_VERSION" == *"-SNAPSHOT" ]]; then
            # The -SNAPSHOT qualifier may be used to test release candidates
            ./gradlew release -Prelease.useAutomaticVersion=true \
              -Prelease.releaseVersion="${RELEASE_VERSION}" \
              -Prelease.newVersion="${NEXT_VERSION}" \
              -Prelease.pushReleaseVersionBranch="${PUSH_RELEASE_BRANCH}" \
              -Prelease.failOnSnapshotDependencies=false
          else
            ./gradlew release -Prelease.useAutomaticVersion=true \
              -Prelease.releaseVersion="${RELEASE_VERSION}" \
              -Prelease.newVersion="${NEXT_VERSION}" \
              -Prelease.pushReleaseVersionBranch="${PUSH_RELEASE_BRANCH}"
          fi
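The `Check Inputs` step can be reproduced locally to validate version strings before dispatching the workflow; a minimal sketch using the same regexes (the version values are illustrative):

```bash
RELEASE_VERSION="0.22.0-rc1"
NEXT_VERSION="0.23.0-SNAPSHOT"

[[ "$RELEASE_VERSION" =~ ^[0-9]+(\.[0-9]+)\.0-rc[01](-SNAPSHOT)?$ ]] \
  && echo "release version OK" || echo "invalid release version"

[[ "$NEXT_VERSION" =~ ^[0-9]+(\.[0-9]+)\.0-SNAPSHOT$ ]] \
  && echo "next version OK" || echo "invalid next version"
```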
.github/workflows/main.yml (vendored) — 535 changed lines
@@ -1,529 +1,58 @@
name: Main
name: Main Workflow

on:
  workflow_dispatch:
    inputs:
      plugin-version:
        description: "Kestra version"
        default: 'LATEST'
        required: true
        type: string
  push:
    branches:
      - master
      - main
      - releases/*
      - develop
    tags:
      - v*
  pull_request:
    branches:
      - develop
  repository_dispatch:
    types: [rebuild]
  workflow_dispatch:
    inputs:
      skip-test:
        description: "Skip test"
        type: choice
        required: true
        default: "false"
        options:
          - "true"
          - "false"
      plugin-version:
        description: "Plugin version"
        required: false
        type: string
        default: "LATEST"

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  group: ${{ github.workflow }}-${{ github.ref }}-main
  cancel-in-progress: true
env:
  JAVA_VERSION: "21"
  DOCKER_APT_PACKAGES: python3 python3-venv python-is-python3 python3-pip nodejs npm curl zip unzip
  DOCKER_PYTHON_LIBRARIES: kestra
  PLUGIN_VERSION: ${{ github.event.inputs.plugin-version != null && github.event.inputs.plugin-version || 'LATEST' }}

jobs:
  build-artifacts:
    name: Build Artifacts
    runs-on: ubuntu-latest
    timeout-minutes: 60
    outputs:
      docker-tag: ${{ steps.vars.outputs.tag }}
      docker-artifact-name: ${{ steps.vars.outputs.artifact }}
      plugins: ${{ steps.plugins-list.outputs.plugins }}
    steps:
      # Checkout
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      # Checkout GitHub Actions
      - uses: actions/checkout@v4
        with:
          repository: kestra-io/actions
          path: actions
          ref: main

      # Npm
      - name: Npm install
        shell: bash
        working-directory: ui
        run: npm ci

      # Setup build
      - uses: ./actions/.github/actions/setup-build
        id: build
        with:
          java-enabled: true
          node-enabled: true
          caches-enabled: true

      # Get Plugins List
      - name: Get Plugins List
        uses: ./.github/actions/plugins-list
        if: "!startsWith(github.ref, 'refs/tags/v')"
        id: plugins-list
        with:
          plugin-version: ${{ env.PLUGIN_VERSION }}

      # Set Plugins List
      - name: Set Plugin List
        id: plugins
        if: "!startsWith(github.ref, 'refs/tags/v')"
        run: |
          PLUGINS="${{ steps.plugins-list.outputs.plugins }}"
          TAG=${GITHUB_REF#refs/*/}
          if [[ $TAG = "master" || $TAG == v* ]]; then
            echo "plugins=$PLUGINS" >> $GITHUB_OUTPUT
          else
            echo "plugins=--repositories=https://s01.oss.sonatype.org/content/repositories/snapshots $PLUGINS" >> $GITHUB_OUTPUT
          fi

      # Build
      - name: Build with Gradle
        run: |
          ./gradlew executableJar

      - name: Copy exe to image
        run: |
          cp build/executable/* docker/app/kestra && chmod +x docker/app/kestra

      # Docker Tag
      - name: Set up Vars
        id: vars
        run: |
          TAG=${GITHUB_REF#refs/*/}
          if [[ $TAG = "master" ]]
          then
            TAG="latest";
          elif [[ $TAG = "develop" ]]
          then
            TAG="develop";
          elif [[ $TAG = v* ]]
          then
            TAG="${TAG}";
          else
            TAG="build-${{ github.run_id }}";
          fi
          echo "tag=${TAG}" >> $GITHUB_OUTPUT
          echo "artifact=docker-kestra-${TAG}" >> $GITHUB_OUTPUT

      # Docker setup
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      # Docker Build
      - name: Build & Export Docker Image
        uses: docker/build-push-action@v6
        if: "!startsWith(github.ref, 'refs/tags/v')"
        with:
          context: .
          push: false
          file: Dockerfile
          tags: |
            kestra/kestra:${{ steps.vars.outputs.tag }}
          build-args: |
            KESTRA_PLUGINS=${{ steps.plugins.outputs.plugins }}
            APT_PACKAGES=${{ env.DOCKER_APT_PACKAGES }}
            PYTHON_LIBRARIES=${{ env.DOCKER_PYTHON_LIBRARIES }}
          outputs: type=docker,dest=/tmp/${{ steps.vars.outputs.artifact }}.tar

      # Upload artifacts
      - name: Upload JAR
        uses: actions/upload-artifact@v4
        with:
          name: jar
          path: build/libs/

      - name: Upload Executable
        uses: actions/upload-artifact@v4
        with:
          name: exe
          path: build/executable/

      - name: Upload Docker
        uses: actions/upload-artifact@v4
        if: "!startsWith(github.ref, 'refs/tags/v')"
        with:
          name: ${{ steps.vars.outputs.artifact }}
          path: /tmp/${{ steps.vars.outputs.artifact }}.tar

  check-frontend:
    name: Run frontend tests
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Npm install
        shell: bash
        working-directory: ui
        run: npm ci

      - name: Npm lint
        uses: reviewdog/action-eslint@v1
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          reporter: github-pr-review # Change reporter.
          workdir: "ui"

      - name: Run front-end unit tests
        shell: bash
        working-directory: ui
        run: npm run test:unit

      - name: Install Playwright
        shell: bash
        working-directory: ui
        run: npx playwright install --with-deps

      - name: Build Storybook
        shell: bash
        working-directory: ui
        run: npm run build-storybook --quiet

      - name: Serve Storybook and run tests
        shell: bash
        working-directory: ui
        run: |
          npx concurrently -k -s first -n "SB,TEST" -c "magenta,blue" \
            "npx http-server storybook-static --port 6006 --silent" \
            "npx wait-on tcp:127.0.0.1:6006 && npm run test-storybook"

  # Run Reusable Workflow from QA repository
  check-e2e:
    name: Check E2E Tests
    needs: build-artifacts
    if: ${{ (github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '') && !startsWith(github.ref, 'refs/tags/v') }}
    uses: ./.github/workflows/e2e.yml
    strategy:
      fail-fast: false
      matrix:
        backends: ["postgres"]
  tests:
    name: Execute tests
    uses: ./.github/workflows/workflow-test.yml
    with:
      tags: oss
      docker-artifact-name: ${{ needs.build-artifacts.outputs.docker-artifact-name }}
      docker-image-tag: kestra/kestra:${{ needs.build-artifacts.outputs.docker-tag }}
      backend: ${{ matrix.backends }}
    secrets:
      GITHUB_AUTH_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}
      GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
  check:
    env:
      SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
      GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
    name: Check & Publish
    runs-on: ubuntu-latest
    timeout-minutes: 60
    steps:
      # Checkout
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      # Checkout GitHub Actions
      - uses: actions/checkout@v4
        with:
          repository: kestra-io/actions
          path: actions
          ref: main

      # Setup build
      - uses: ./actions/.github/actions/setup-build
        id: build
        with:
          java-enabled: true
          node-enabled: true
          python-enabled: true
          caches-enabled: true

      # Services
      - name: Build the docker-compose stack
        run: docker compose -f docker-compose-ci.yml up -d
        if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}

      # Gradle check
      - name: Build with Gradle
        if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}
        env:
          GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
        run: |
          echo $GOOGLE_SERVICE_ACCOUNT | base64 -d > ~/.gcp-service-account.json
          export GOOGLE_APPLICATION_CREDENTIALS=$HOME/.gcp-service-account.json
          ./gradlew check javadoc --parallel

      # Sonar
      - name: Analyze with Sonar
        if: ${{ env.SONAR_TOKEN != 0 && (github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '') }}
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
        run: ./gradlew sonar --info

      # Allure check
      - name: Auth to Google Cloud
        id: auth
        if: ${{ always() && env.GOOGLE_SERVICE_ACCOUNT != 0 }}
        uses: "google-github-actions/auth@v2"
        with:
          credentials_json: "${{ secrets.GOOGLE_SERVICE_ACCOUNT }}"

      - uses: rlespinasse/github-slug-action@v5

      - name: Publish allure report
        uses: andrcuns/allure-publish-action@v2.9.0
        if: ${{ always() && env.GOOGLE_SERVICE_ACCOUNT != 0 && (github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '') }}
        env:
          GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          JAVA_HOME: /usr/lib/jvm/default-jvm/
        with:
          storageType: gcs
          resultsGlob: "**/build/allure-results"
          bucket: internal-kestra-host
          baseUrl: "https://internal.dev.kestra.io"
          prefix: ${{ format('{0}/{1}', github.repository, 'allure/java') }}
          copyLatest: true
          ignoreMissingResults: true

      # Jacoco
      - name: "Set up Cloud SDK"
        if: ${{ env.GOOGLE_SERVICE_ACCOUNT != 0 && (github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '') }}
        uses: "google-github-actions/setup-gcloud@v2"

      - name: "Copy jacoco files"
        if: ${{ env.GOOGLE_SERVICE_ACCOUNT != 0 && (github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '') }}
        run: |
          mv build/reports/jacoco/testCodeCoverageReport build/reports/jacoco/test/
          mv build/reports/jacoco/test/testCodeCoverageReport.xml build/reports/jacoco/test/jacocoTestReport.xml
          gsutil -m rsync -d -r build/reports/jacoco/test/ gs://internal-kestra-host/${{ format('{0}/{1}', github.repository, 'jacoco') }}

      # report test
      - name: Test Report
        uses: mikepenz/action-junit-report@v5
        if: success() || failure()
        with:
          report_paths: "**/build/test-results/**/TEST-*.xml"

      # Codecov
      - uses: codecov/codecov-action@v5
        if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          report-status: false

  release:
    name: Github Release
    runs-on: ubuntu-latest
    needs: [check, check-e2e]
    if: |
      always() &&
      startsWith(github.ref, 'refs/tags/v') &&
      needs.check.result == 'success' &&
      (
        github.event.inputs.skip-test == 'true' ||
        (needs.check-e2e.result == 'skipped' || needs.check-e2e.result == 'success')
      )
    steps:
      # Download Exec
      - name: Download executable
        uses: actions/download-artifact@v4
        if: startsWith(github.ref, 'refs/tags/v')
        with:
          name: exe
          path: build/executable
    name: Release
    needs: [tests]
    uses: ./.github/workflows/workflow-release.yml
    with:
      plugin-version: ${{ github.event.inputs.plugin-version != null && github.event.inputs.plugin-version || 'LATEST' }}
    secrets:
      DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
      DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }}
      SONATYPE_USER: ${{ secrets.SONATYPE_USER }}
      SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }}
      SONATYPE_GPG_KEYID: ${{ secrets.SONATYPE_GPG_KEYID }}
      SONATYPE_GPG_PASSWORD: ${{ secrets.SONATYPE_GPG_PASSWORD }}
      SONATYPE_GPG_FILE: ${{ secrets.SONATYPE_GPG_FILE }}

      # GitHub Release
      - name: Create GitHub release
        id: create_github_release
        uses: "marvinpinto/action-automatic-releases@latest"
        if: startsWith(github.ref, 'refs/tags/v')
        continue-on-error: true
        with:
          repo_token: "${{ secrets.GITHUB_TOKEN }}"
          prerelease: false
          files: |
            build/executable/*

      # Trigger gha workflow to bump helm chart version
      - name: trigger the Helm chart version bump
        uses: peter-evans/repository-dispatch@v3
        if: steps.create_github_release.conclusion == 'success'
        with:
          token: ${{ secrets.GH_PERSONAL_TOKEN }}
          repository: kestra-io/helm-charts
          event-type: update-helm-chart-version
          client-payload: |-
            {
              "new_version": "${{ github.ref_name }}",
              "github_repository": "${{ github.repository }}",
              "github_actor": "${{ github.actor }}"
            }

  docker:
    name: Publish Docker
    runs-on: ubuntu-latest
    needs: [build-artifacts, check, check-e2e]
    if: |
      always() &&
      github.ref == 'refs/heads/develop' &&
      needs.check.result == 'success' &&
      (
        github.event.inputs.skip-test == 'true' ||
        (needs.check-e2e.result == 'skipped' || needs.check-e2e.result == 'success')
      )
    strategy:
      matrix:
        image:
          - tag: ${{needs.build-artifacts.outputs.docker-tag}}-no-plugins
            packages: ""
            python-libraries: ""

          - tag: ${{needs.build-artifacts.outputs.docker-tag}}
            plugins: ${{needs.build-artifacts.outputs.plugins}}
            packages: python3 python3-venv python-is-python3 python3-pip nodejs npm curl zip unzip
            python-libraries: kestra
    steps:
      - uses: actions/checkout@v4
      # Docker setup
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      # Docker Login
      - name: Login to DockerHub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_PASSWORD }}

      # Vars
      - name: Set image name
        id: vars
        run: |
          TAG=${GITHUB_REF#refs/*/}
          if [[ $TAG = "master" || $TAG == v* ]]; then
            echo "plugins=${{ matrix.image.plugins }}" >> $GITHUB_OUTPUT
          else
            echo "plugins=--repositories=https://s01.oss.sonatype.org/content/repositories/snapshots ${{ matrix.image.plugins }}" >> $GITHUB_OUTPUT
          fi

      # Build Docker Image
      - name: Download executable
        uses: actions/download-artifact@v4
        with:
          name: exe
          path: build/executable

      - name: Copy exe to image
        run: |
          cp build/executable/* docker/app/kestra && chmod +x docker/app/kestra

      # Docker Build and push
      - name: Build Docker Image
        uses: docker/build-push-action@v6
        with:
          context: .
          push: true
          tags: kestra/kestra:${{ matrix.image.tag }}
          platforms: linux/amd64,linux/arm64
          build-args: |
            KESTRA_PLUGINS=${{ steps.vars.outputs.plugins }}
            APT_PACKAGES=${{matrix.image.packages}}
            PYTHON_LIBRARIES=${{matrix.image.python-libraries}}

  maven:
    name: Publish to Maven
    runs-on: ubuntu-latest
    needs: [check, check-e2e]
    if: |
      always() &&
      (github.ref == 'refs/heads/develop' || startsWith(github.ref, 'refs/tags/v')) &&
      needs.check.result == 'success' &&
      (
        github.event.inputs.skip-test == 'true' ||
        (needs.check-e2e.result == 'skipped' || needs.check-e2e.result == 'success')
      )
    steps:
      - uses: actions/checkout@v4

      # Checkout GitHub Actions
      - uses: actions/checkout@v4
        with:
          repository: kestra-io/actions
          path: actions
          ref: main

      # Setup build
      - uses: ./actions/.github/actions/setup-build
        id: build
        with:
          java-enabled: true
          node-enabled: true
          caches-enabled: true

      # Publish
      - name: Publish package to Sonatype
        if: github.ref == 'refs/heads/develop'
        env:
          ORG_GRADLE_PROJECT_sonatypeUsername: ${{ secrets.SONATYPE_USER }}
          ORG_GRADLE_PROJECT_sonatypePassword: ${{ secrets.SONATYPE_PASSWORD }}
|
||||
SONATYPE_GPG_KEYID: ${{ secrets.SONATYPE_GPG_KEYID }}
|
||||
SONATYPE_GPG_PASSWORD: ${{ secrets.SONATYPE_GPG_PASSWORD }}
|
||||
SONATYPE_GPG_FILE: ${{ secrets.SONATYPE_GPG_FILE }}
|
||||
run: |
|
||||
mkdir -p ~/.gradle/
|
||||
echo "signing.keyId=${SONATYPE_GPG_KEYID}" > ~/.gradle/gradle.properties
|
||||
echo "signing.password=${SONATYPE_GPG_PASSWORD}" >> ~/.gradle/gradle.properties
|
||||
echo "signing.secretKeyRingFile=${HOME}/.gradle/secring.gpg" >> ~/.gradle/gradle.properties
|
||||
echo ${SONATYPE_GPG_FILE} | base64 -d > ~/.gradle/secring.gpg
|
||||
./gradlew publishToSonatype
|
||||
|
||||
# Release
|
||||
- name: Release package to Maven Central
|
||||
if: startsWith(github.ref, 'refs/tags/v')
|
||||
env:
|
||||
ORG_GRADLE_PROJECT_sonatypeUsername: ${{ secrets.SONATYPE_USER }}
|
||||
ORG_GRADLE_PROJECT_sonatypePassword: ${{ secrets.SONATYPE_PASSWORD }}
|
||||
SONATYPE_GPG_KEYID: ${{ secrets.SONATYPE_GPG_KEYID }}
|
||||
SONATYPE_GPG_PASSWORD: ${{ secrets.SONATYPE_GPG_PASSWORD }}
|
||||
SONATYPE_GPG_FILE: ${{ secrets.SONATYPE_GPG_FILE }}
|
||||
run: |
|
||||
echo "signing.keyId=${SONATYPE_GPG_KEYID}" > ~/.gradle/gradle.properties
|
||||
echo "signing.password=${SONATYPE_GPG_PASSWORD}" >> ~/.gradle/gradle.properties
|
||||
echo "signing.secretKeyRingFile=${HOME}/.gradle/secring.gpg" >> ~/.gradle/gradle.properties
|
||||
echo ${SONATYPE_GPG_FILE} | base64 -d > ~/.gradle/secring.gpg
|
||||
./gradlew publishToSonatype closeAndReleaseSonatypeStagingRepository
|
||||
end:
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- check-e2e
|
||||
- check
|
||||
- maven
|
||||
- docker
|
||||
- release
|
||||
if: always()
|
||||
env:
|
||||
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
|
||||
steps:
|
||||
# Update
|
||||
- name: Update internal
|
||||
- name: Github - Update internal
|
||||
uses: benc-uk/workflow-dispatch@v1
|
||||
if: github.ref == 'refs/heads/develop' && needs.docker.result == 'success'
|
||||
with:
|
||||
@@ -533,7 +62,7 @@ jobs:
|
||||
token: ${{ secrets.GH_PERSONAL_TOKEN }}
|
||||
|
||||
# Slack
|
||||
- name: Slack notification
|
||||
- name: Slack - Notification
|
||||
uses: Gamesight/slack-workflow-status@master
|
||||
if: ${{ always() && env.SLACK_WEBHOOK_URL != 0 }}
|
||||
with:
|
||||
|
||||
67 .github/workflows/pull-request.yml vendored Normal file
@@ -0,0 +1,67 @@
name: Pull Request Workflow

on:
  pull_request:
    branches:
      - develop

concurrency:
  group: ${{ github.workflow }}-${{ github.ref_name }}-pr
  cancel-in-progress: true

jobs:
  file-changes:
    name: File changes detection
    runs-on: ubuntu-latest
    timeout-minutes: 60
    outputs:
      ui: ${{ steps.changes.outputs.ui }}
      translations: ${{ steps.changes.outputs.translations }}
      backend: ${{ steps.changes.outputs.backend }}
    steps:
      - uses: dorny/paths-filter@v3
        id: changes
        with:
          filters: |
            ui:
              - 'ui/**'
            backend:
              - '!{ui,.github}/**'
          token: ${{ secrets.GITHUB_TOKEN }}

  frontend:
    name: Frontend - Tests
    needs: [file-changes]
    if: "needs.file-changes.outputs.ui == 'true'"
    uses: ./.github/workflows/workflow-frontend-test.yml
    secrets:
      GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

  backend:
    name: Backend - Tests
    needs: file-changes
    if: "needs.file-changes.outputs.backend == 'true'"
    uses: ./.github/workflows/workflow-backend-test.yml
    secrets:
      GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
      SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
      GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}

  end:
    name: End
    runs-on: ubuntu-latest
    if: always()
    needs: [frontend, backend]
    steps:
      # Slack
      - name: Slack notification
        uses: Gamesight/slack-workflow-status@master
        if: ${{ always() && env.SLACK_WEBHOOK_URL != 0 }}
        with:
          repo_token: ${{ secrets.GITHUB_TOKEN }}
          slack_webhook_url: ${{ secrets.SLACK_WEBHOOK_URL }}
          name: GitHub Actions
          icon_emoji: ":github-actions:"
          channel: "C02DQ1A7JLR"
@@ -1,4 +1,4 @@
name: Update and Tag Kestra Plugins
name: Set Version and Tag Plugins

on:
  workflow_dispatch:
@@ -14,7 +14,7 @@ on:
jobs:
  tag:
    name: Release plugins
    runs-on: kestra-private-standard
    runs-on: ubuntu-latest
    steps:
      # Checkout
      - uses: actions/checkout@v4
@@ -24,30 +24,36 @@ jobs:
      # Get Plugins List
      - name: Get Plugins List
        uses: ./.github/actions/plugins-list
        if: "!startsWith(github.ref, 'refs/tags/v')"
        id: plugins-list
        with:
          plugin-version: 'LATEST'

      - name: 'Configure Git'
        run: |
          git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
          git config --global user.name "github-actions[bot]"

      # Execute
      - name: Tag Plugins
      - name: Set Version and Tag Plugins
        if: ${{ github.event.inputs.dryRun == 'false' }}
        env:
          GITHUB_PAT: ${{ secrets.GH_PERSONAL_TOKEN }}
        run: |
          chmod +x ./tag-release-plugins.sh;
          ./tag-release-plugins.sh \
          chmod +x ./dev-tools/setversion-tag-plugins.sh;

          ./dev-tools/setversion-tag-plugins.sh \
            --release-version=${{github.event.inputs.releaseVersion}} \
            --yes \
            ${{ steps.plugins-list.outputs.repositories }}

      - name: Run Gradle Release (DRY_RUN)
      - name: Set Version and Tag Plugins (DRY_RUN)
        if: ${{ github.event.inputs.dryRun == 'true' }}
        env:
          GITHUB_PAT: ${{ secrets.GH_PERSONAL_TOKEN }}
        run: |
          chmod +x ./tag-release-plugins.sh;
          ./tag-release-plugins.sh \
          chmod +x ./dev-tools/setversion-tag-plugins.sh;

          ./dev-tools/setversion-tag-plugins.sh \
            --release-version=${{github.event.inputs.releaseVersion}} \
            --dry-run \
            --yes \
58 .github/workflows/setversion-tag.yml vendored Normal file
@@ -0,0 +1,58 @@
name: Set Version and Tag
run-name: "Set version and Tag Kestra to ${{ github.event.inputs.releaseVersion }} 🚀"
on:
  workflow_dispatch:
    inputs:
      releaseVersion:
        description: 'The release version (e.g., 0.21.1)'
        required: true
        type: string
env:
  RELEASE_VERSION: "${{ github.event.inputs.releaseVersion }}"
jobs:
  release:
    name: Release Kestra
    runs-on: ubuntu-latest
    if: startsWith(github.ref, 'refs/heads/releases/v')
    steps:
      # Checks
      - name: Check Inputs
        run: |
          if ! [[ "$RELEASE_VERSION" =~ ^[0-9]+(\.[0-9]+)(\.[0-9]+)(-rc[0-9])?(-SNAPSHOT)?$ ]]; then
            echo "Invalid release version. Must match regex: ^[0-9]+(\.[0-9]+)(\.[0-9]+)-(rc[0-9])?(-SNAPSHOT)?$"
            exit 1
          fi

          CURRENT_BRANCH="{{ github.ref }}"

          # Extract the major and minor versions
          BASE_VERSION=$(echo "$RELEASE_VERSION" | sed -E 's/^([0-9]+\.[0-9]+)\..*/\1/')
          RELEASE_BRANCH="refs/heads/releases/v${BASE_VERSION}.x"

          if ! [[ "$CURRENT_BRANCH" == "$RELEASE_BRANCH" ]]; then
            echo "Invalid release branch. Expected $RELEASE_BRANCH, was $CURRENT_BRANCH"
            exit 1
          fi

      # Checkout
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Configure Git
        run: |
          git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
          git config --global user.name "github-actions[bot]"

      # Execute
      - name: Run Gradle Release
        env:
          GITHUB_PAT: ${{ secrets.GH_PERSONAL_TOKEN }}
        run: |
          # Update version
          sed -i "s/^version=.*/version=$RELEASE_VERSION/" ./gradle.properties
          git add ./gradle.properties
          git commit -m"chore(version): update to version '$RELEASE_VERSION'"
          git push
          git tag -a "v$RELEASE_VERSION" -m"v$RELEASE_VERSION"
          git push origin "v$RELEASE_VERSION"
10 .github/workflows/vulnerabilities-check.yml vendored
@@ -33,6 +33,12 @@ jobs:
          node-enabled: true
          caches-enabled: true

      # Npm
      - name: Npm - Install
        shell: bash
        working-directory: ui
        run: npm ci

      # Run OWASP dependency check plugin
      - name: Gradle Dependency Check
        env:
@@ -74,7 +80,7 @@ jobs:

      # Run Trivy image scan for Docker vulnerabilities, see https://github.com/aquasecurity/trivy-action
      - name: Docker Vulnerabilities Check
        uses: aquasecurity/trivy-action@0.29.0
        uses: aquasecurity/trivy-action@0.30.0
        with:
          image-ref: kestra/kestra:develop
          format: table
@@ -107,7 +113,7 @@ jobs:

      # Run Trivy image scan for Docker vulnerabilities, see https://github.com/aquasecurity/trivy-action
      - name: Docker Vulnerabilities Check
        uses: aquasecurity/trivy-action@0.29.0
        uses: aquasecurity/trivy-action@0.30.0
        with:
          image-ref: kestra/kestra:latest
          format: table
139 .github/workflows/workflow-backend-test.yml vendored Normal file
@@ -0,0 +1,139 @@
|
||||
name: Backend - Tests
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
secrets:
|
||||
GITHUB_AUTH_TOKEN:
|
||||
description: "The GitHub Token."
|
||||
required: true
|
||||
CODECOV_TOKEN:
|
||||
description: 'Codecov Token'
|
||||
required: true
|
||||
SONAR_TOKEN:
|
||||
description: 'Sonar Token'
|
||||
required: true
|
||||
GOOGLE_SERVICE_ACCOUNT:
|
||||
description: 'Google Service Account'
|
||||
required: true
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
checks: write
|
||||
actions: read
|
||||
|
||||
jobs:
|
||||
test:
|
||||
name: Backend - Tests
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
|
||||
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
name: Checkout - Current ref
|
||||
|
||||
# Setup build
|
||||
- uses: kestra-io/actions/.github/actions/setup-build@main
|
||||
name: Setup - Build
|
||||
id: build
|
||||
with:
|
||||
java-enabled: true
|
||||
node-enabled: true
|
||||
python-enabled: true
|
||||
|
||||
# Services
|
||||
- name: Setup - Start docker compose
|
||||
shell: bash
|
||||
run: docker compose -f docker-compose-ci.yml up -d
|
||||
|
||||
# Gradle check
|
||||
- name: Gradle - Build
|
||||
if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}
|
||||
env:
|
||||
GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
|
||||
shell: bash
|
||||
run: |
|
||||
echo $GOOGLE_SERVICE_ACCOUNT | base64 -d > ~/.gcp-service-account.json
|
||||
export GOOGLE_APPLICATION_CREDENTIALS=$HOME/.gcp-service-account.json
|
||||
./gradlew check javadoc --parallel
|
||||
|
||||
# report test
|
||||
- name: Test - Publish Test Results
|
||||
uses: dorny/test-reporter@v2
|
||||
if: always()
|
||||
with:
|
||||
name: Java Tests Report
|
||||
reporter: java-junit
|
||||
path: '**/build/test-results/test/TEST-*.xml'
|
||||
list-suites: 'failed'
|
||||
list-tests: 'failed'
|
||||
fail-on-error: 'false'
|
||||
|
||||
# Sonar
|
||||
- name: Test - Analyze with Sonar
|
||||
if: env.SONAR_TOKEN != ''
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_AUTH_TOKEN }}
|
||||
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
|
||||
shell: bash
|
||||
run: ./gradlew sonar --info
|
||||
|
||||
# GCP
|
||||
- name: GCP - Auth with unit test account
|
||||
id: auth
|
||||
if: always() && env.GOOGLE_SERVICE_ACCOUNT != ''
|
||||
continue-on-error: true
|
||||
uses: "google-github-actions/auth@v2"
|
||||
with:
|
||||
credentials_json: "${{ secrets.GOOGLE_SERVICE_ACCOUNT }}"
|
||||
|
||||
- name: GCP - Setup Cloud SDK
|
||||
if: env.GOOGLE_SERVICE_ACCOUNT != ''
|
||||
uses: "google-github-actions/setup-gcloud@v2"
|
||||
|
||||
# Allure check
|
||||
- uses: rlespinasse/github-slug-action@v5
|
||||
name: Allure - Generate slug variables
|
||||
|
||||
- name: Allure - Publish report
|
||||
uses: andrcuns/allure-publish-action@v2.9.0
|
||||
if: always() && env.GOOGLE_SERVICE_ACCOUNT != ''
|
||||
continue-on-error: true
|
||||
env:
|
||||
GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_AUTH_TOKEN }}
|
||||
JAVA_HOME: /usr/lib/jvm/default-jvm/
|
||||
with:
|
||||
storageType: gcs
|
||||
resultsGlob: "**/build/allure-results"
|
||||
bucket: internal-kestra-host
|
||||
baseUrl: "https://internal.dev.kestra.io"
|
||||
prefix: ${{ format('{0}/{1}', github.repository, 'allure/java') }}
|
||||
copyLatest: true
|
||||
ignoreMissingResults: true
|
||||
|
||||
# Jacoco
|
||||
- name: Jacoco - Copy reports
|
||||
if: env.GOOGLE_SERVICE_ACCOUNT != ''
|
||||
continue-on-error: true
|
||||
shell: bash
|
||||
run: |
|
||||
mv build/reports/jacoco/testCodeCoverageReport build/reports/jacoco/test/
|
||||
mv build/reports/jacoco/test/testCodeCoverageReport.xml build/reports/jacoco/test/jacocoTestReport.xml
|
||||
gsutil -m rsync -d -r build/reports/jacoco/test/ gs://internal-kestra-host/${{ format('{0}/{1}', github.repository, 'jacoco') }}
|
||||
|
||||
# Codecov
|
||||
- name: Codecov - Upload coverage reports
|
||||
uses: codecov/codecov-action@v5
|
||||
if: ${{ !cancelled() }}
|
||||
continue-on-error: true
|
||||
with:
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
flags: backend
|
||||
|
||||
- name: Codecov - Upload test results
|
||||
uses: codecov/test-results-action@v1
|
||||
if: ${{ !cancelled() }}
|
||||
continue-on-error: true
|
||||
with:
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
flags: backend
|
||||
152 .github/workflows/workflow-build-artifacts.yml vendored Normal file
@@ -0,0 +1,152 @@
|
||||
name: Build Artifacts
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
plugin-version:
|
||||
description: "Kestra version"
|
||||
default: 'LATEST'
|
||||
required: true
|
||||
type: string
|
||||
outputs:
|
||||
docker-tag:
|
||||
value: ${{ jobs.build.outputs.docker-tag }}
|
||||
description: "The Docker image Tag for Kestra"
|
||||
docker-artifact-name:
|
||||
value: ${{ jobs.build.outputs.docker-artifact-name }}
|
||||
description: "The GitHub artifact containing the Kestra docker image name."
|
||||
plugins:
|
||||
value: ${{ jobs.build.outputs.plugins }}
|
||||
description: "The Kestra plugins list used for the build."
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: Build - Artifacts
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
docker-tag: ${{ steps.vars.outputs.tag }}
|
||||
docker-artifact-name: ${{ steps.vars.outputs.artifact }}
|
||||
plugins: ${{ steps.plugins.outputs.plugins }}
|
||||
env:
|
||||
PLUGIN_VERSION: ${{ github.event.inputs.plugin-version != null && github.event.inputs.plugin-version || 'LATEST' }}
|
||||
steps:
|
||||
- name: Checkout - Current ref
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
# Npm
|
||||
- name: Setup - Npm install
|
||||
shell: bash
|
||||
working-directory: ui
|
||||
run: npm ci
|
||||
|
||||
# Setup build
|
||||
- uses: kestra-io/actions/.github/actions/setup-build@main
|
||||
name: Setup - Build
|
||||
id: build
|
||||
with:
|
||||
java-enabled: true
|
||||
node-enabled: true
|
||||
|
||||
# Get Plugins List
|
||||
- name: Plugins - Get List
|
||||
uses: ./.github/actions/plugins-list
|
||||
if: "!startsWith(github.ref, 'refs/tags/v')"
|
||||
id: plugins-list
|
||||
with:
|
||||
plugin-version: ${{ env.PLUGIN_VERSION }}
|
||||
|
||||
# Set Plugins List
|
||||
- name: Plugins - Set List
|
||||
id: plugins
|
||||
if: "!startsWith(github.ref, 'refs/tags/v')"
|
||||
shell: bash
|
||||
run: |
|
||||
PLUGINS="${{ steps.plugins-list.outputs.plugins }}"
|
||||
TAG=${GITHUB_REF#refs/*/}
|
||||
if [[ $TAG = "master" || $TAG == v* ]]; then
|
||||
echo "plugins=$PLUGINS" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "plugins=--repositories=https://s01.oss.sonatype.org/content/repositories/snapshots $PLUGINS" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
# Build
|
||||
- name: Gradle - Build
|
||||
shell: bash
|
||||
run: |
|
||||
./gradlew executableJar
|
||||
|
||||
- name: Artifacts - Copy exe to image
|
||||
shell: bash
|
||||
run: |
|
||||
cp build/executable/* docker/app/kestra && chmod +x docker/app/kestra
|
||||
|
||||
# Docker Tag
|
||||
- name: Setup - Docker vars
|
||||
id: vars
|
||||
shell: bash
|
||||
run: |
|
||||
TAG=${GITHUB_REF#refs/*/}
|
||||
if [[ $TAG = "master" ]]
|
||||
then
|
||||
TAG="latest";
|
||||
elif [[ $TAG = "develop" ]]
|
||||
then
|
||||
TAG="develop";
|
||||
elif [[ $TAG = v* ]]
|
||||
then
|
||||
TAG="${TAG}";
|
||||
else
|
||||
TAG="build-${{ github.run_id }}";
|
||||
fi
|
||||
echo "tag=${TAG}" >> $GITHUB_OUTPUT
|
||||
echo "artifact=docker-kestra-${TAG}" >> $GITHUB_OUTPUT
|
||||
|
||||
# Docker setup
|
||||
- name: Docker - Setup QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
- name: Docker - Fix Qemu
|
||||
shell: bash
|
||||
run: |
|
||||
docker run --rm --privileged multiarch/qemu-user-static --reset -p yes -c yes
|
||||
|
||||
- name: Docker - Setup Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
# Docker Build
|
||||
- name: Docker - Build & export image
|
||||
uses: docker/build-push-action@v6
|
||||
if: "!startsWith(github.ref, 'refs/tags/v')"
|
||||
with:
|
||||
context: .
|
||||
push: false
|
||||
file: Dockerfile
|
||||
tags: |
|
||||
kestra/kestra:${{ steps.vars.outputs.tag }}
|
||||
build-args: |
|
||||
KESTRA_PLUGINS=${{ steps.plugins.outputs.plugins }}
|
||||
APT_PACKAGES=${{ env.DOCKER_APT_PACKAGES }}
|
||||
PYTHON_LIBRARIES=${{ env.DOCKER_PYTHON_LIBRARIES }}
|
||||
outputs: type=docker,dest=/tmp/${{ steps.vars.outputs.artifact }}.tar
|
||||
|
||||
# Upload artifacts
|
||||
- name: Artifacts - Upload JAR
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: jar
|
||||
path: build/libs/
|
||||
|
||||
- name: Artifacts - Upload Executable
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: exe
|
||||
path: build/executable/
|
||||
|
||||
- name: Artifacts - Upload Docker
|
||||
uses: actions/upload-artifact@v4
|
||||
if: "!startsWith(github.ref, 'refs/tags/v')"
|
||||
with:
|
||||
name: ${{ steps.vars.outputs.artifact }}
|
||||
path: /tmp/${{ steps.vars.outputs.artifact }}.tar
|
||||
84 .github/workflows/workflow-frontend-test.yml vendored Normal file
@@ -0,0 +1,84 @@
|
||||
name: Frontend - Tests
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
secrets:
|
||||
GITHUB_AUTH_TOKEN:
|
||||
description: "The GitHub Token."
|
||||
required: true
|
||||
CODECOV_TOKEN:
|
||||
description: 'Codecov Token'
|
||||
required: true
|
||||
|
||||
env:
|
||||
# to save corepack from itself
|
||||
COREPACK_INTEGRITY_KEYS: 0
|
||||
|
||||
jobs:
|
||||
test:
|
||||
name: Frontend - Tests
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- id: checkout
|
||||
name: Checkout - Current ref
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ github.head_ref }}
|
||||
|
||||
- name: Npm - install
|
||||
shell: bash
|
||||
working-directory: ui
|
||||
run: npm ci
|
||||
|
||||
- name: Npm - lint
|
||||
uses: reviewdog/action-eslint@v1
|
||||
with:
|
||||
github_token: ${{ secrets.GITHUB_AUTH_TOKEN }}
|
||||
reporter: github-pr-review
|
||||
workdir: ui
|
||||
|
||||
- name: Npm - Run build
|
||||
shell: bash
|
||||
working-directory: ui
|
||||
env:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
run: npm run build
|
||||
|
||||
- name: Run front-end unit tests
|
||||
shell: bash
|
||||
working-directory: ui
|
||||
run: npm run test:cicd
|
||||
|
||||
- name: Storybook - Install Playwright
|
||||
shell: bash
|
||||
working-directory: ui
|
||||
run: npx playwright install --with-deps
|
||||
|
||||
- name: Storybook - Build
|
||||
shell: bash
|
||||
working-directory: ui
|
||||
run: npm run build-storybook --quiet
|
||||
|
||||
- name: Storybook - Run tests
|
||||
shell: bash
|
||||
working-directory: ui
|
||||
run: |
|
||||
npx concurrently -k -s first -n "SB,TEST" -c "magenta,blue" \
|
||||
"npx http-server storybook-static --port 6006 --silent" \
|
||||
"npx wait-on tcp:127.0.0.1:6006 && npm run test:storybook"
|
||||
|
||||
- name: Codecov - Upload coverage reports
|
||||
uses: codecov/codecov-action@v5
|
||||
if: ${{ !cancelled() && github.event.pull_request.head.repo.full_name == github.repository }}
|
||||
continue-on-error: true
|
||||
with:
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
flags: frontend
|
||||
|
||||
- name: Codecov - Upload test results
|
||||
uses: codecov/test-results-action@v1
|
||||
if: ${{ !cancelled() }}
|
||||
continue-on-error: true
|
||||
with:
|
||||
token: ${{ secrets.CODECOV_TOKEN && github.event.pull_request.head.repo.full_name == github.repository }}
|
||||
flags: frontend
|
||||
48 .github/workflows/workflow-github-release.yml vendored Normal file
@@ -0,0 +1,48 @@
name: Github - Release

on:
  workflow_call:
    secrets:
      GH_PERSONAL_TOKEN:
        description: "The Github personal token."
        required: true

jobs:
  publish:
    name: Github - Release
    runs-on: ubuntu-latest
    steps:
      # Download Exec
      - name: Artifacts - Download executable
        uses: actions/download-artifact@v4
        if: startsWith(github.ref, 'refs/tags/v')
        with:
          name: exe
          path: build/executable

      # GitHub Release
      - name: GitHub - Create release
        id: create_github_release
        uses: "marvinpinto/action-automatic-releases@latest"
        if: startsWith(github.ref, 'refs/tags/v')
        continue-on-error: true
        with:
          repo_token: "${{ secrets.GITHUB_TOKEN }}"
          prerelease: false
          files: |
            build/executable/*

      # Trigger gha workflow to bump helm chart version
      - name: GitHub - Trigger the Helm chart version bump
        uses: peter-evans/repository-dispatch@v3
        if: steps.create_github_release.conclusion == 'success'
        with:
          token: ${{ secrets.GH_PERSONAL_TOKEN }}
          repository: kestra-io/helm-charts
          event-type: update-helm-chart-version
          client-payload: |-
            {
              "new_version": "${{ github.ref_name }}",
              "github_repository": "${{ github.repository }}",
              "github_actor": "${{ github.actor }}"
            }
100 .github/workflows/workflow-publish-docker.yml vendored Normal file
@@ -0,0 +1,100 @@
|
||||
name: Publish - Docker
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
plugin-version:
|
||||
description: "Kestra version"
|
||||
default: 'LATEST'
|
||||
required: false
|
||||
type: string
|
||||
workflow_call:
|
||||
inputs:
|
||||
plugin-version:
|
||||
description: "Kestra version"
|
||||
default: 'LATEST'
|
||||
required: false
|
||||
type: string
|
||||
secrets:
|
||||
DOCKERHUB_USERNAME:
|
||||
description: "The Dockerhub username."
|
||||
required: true
|
||||
DOCKERHUB_PASSWORD:
|
||||
description: "The Dockerhub password."
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
|
||||
publish:
|
||||
name: Publish - Docker
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
image:
|
||||
- tag: ${{ needs.build-artifacts.outputs.docker-tag }}-no-plugins
|
||||
packages: jattach
|
||||
python-libraries: ""
|
||||
|
||||
- tag: ${{ needs.build-artifacts.outputs.docker-tag }}
|
||||
plugins: ${{ needs.build-artifacts.outputs.plugins }}
|
||||
packages: python3 python3-venv python-is-python3 python3-pip nodejs npm curl zip unzip jattach
|
||||
python-libraries: kestra
|
||||
steps:
|
||||
- name: Checkout - Current ref
|
||||
uses: actions/checkout@v4
|
||||
|
||||
# Docker setup
|
||||
- name: Docker - Setup QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
- name: Docker - Fix Qemu
|
||||
shell: bash
|
||||
run: |
|
||||
docker run --rm --privileged multiarch/qemu-user-static --reset -p yes -c yes
|
||||
|
||||
- name: Docker - Setup Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
# Docker Login
|
||||
- name: Docker - Login to DockerHub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_PASSWORD }}
|
||||
|
||||
# Vars
|
||||
- name: Docker - Set image name
|
||||
shell: bash
|
||||
id: vars
|
||||
run: |
|
||||
TAG=${GITHUB_REF#refs/*/}
|
||||
if [[ $TAG = "master" || $TAG == v* ]]; then
|
||||
echo "plugins=${{ matrix.image.plugins }}" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "plugins=--repositories=https://s01.oss.sonatype.org/content/repositories/snapshots ${{ matrix.image.plugins }}" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
# Build Docker Image
|
||||
- name: Artifacts - Download executable
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: exe
|
||||
path: build/executable
|
||||
|
||||
- name: Docker - Copy exe to image
|
||||
shell: bash
|
||||
run: |
|
||||
cp build/executable/* docker/app/kestra && chmod +x docker/app/kestra
|
||||
|
||||
# Docker Build and push
|
||||
- name: Docker - Build image
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
tags: kestra/kestra:${{ matrix.image.tag }}
|
||||
platforms: linux/amd64,linux/arm64
|
||||
build-args: |
|
||||
KESTRA_PLUGINS=${{ steps.vars.outputs.plugins }}
|
||||
APT_PACKAGES=${{ matrix.image.packages }}
|
||||
PYTHON_LIBRARIES=${{ matrix.image.python-libraries }}
|
||||
57 .github/workflows/workflow-publish-maven.yml vendored Normal file
@@ -0,0 +1,57 @@
|
||||
name: Publish - Maven
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
secrets:
|
||||
SONATYPE_USER:
|
||||
description: "The Sonatype username."
|
||||
required: true
|
||||
SONATYPE_PASSWORD:
|
||||
description: "The Sonatype password."
|
||||
required: true
|
||||
SONATYPE_GPG_KEYID:
|
||||
description: "The Sonatype GPG key id."
|
||||
required: true
|
||||
SONATYPE_GPG_PASSWORD:
|
||||
description: "The Sonatype GPG password."
|
||||
required: true
|
||||
SONATYPE_GPG_FILE:
|
||||
description: "The Sonatype GPG file."
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
publish:
|
||||
name: Publish - Maven
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout - Current ref
|
||||
uses: actions/checkout@v4
|
||||
|
||||
# Setup build
|
||||
- name: Setup - Build
|
||||
uses: kestra-io/actions/.github/actions/setup-build@main
|
||||
id: build
|
||||
with:
|
||||
java-enabled: true
|
||||
node-enabled: true
|
||||
|
||||
# Publish
|
||||
- name: Publish - Release package to Maven Central
|
||||
shell: bash
|
||||
env:
|
||||
ORG_GRADLE_PROJECT_sonatypeUsername: ${{ secrets.SONATYPE_USER }}
|
||||
ORG_GRADLE_PROJECT_sonatypePassword: ${{ secrets.SONATYPE_PASSWORD }}
|
||||
SONATYPE_GPG_KEYID: ${{ secrets.SONATYPE_GPG_KEYID }}
|
||||
SONATYPE_GPG_PASSWORD: ${{ secrets.SONATYPE_GPG_PASSWORD }}
|
||||
SONATYPE_GPG_FILE: ${{ secrets.SONATYPE_GPG_FILE}}
|
||||
run: |
|
||||
mkdir -p ~/.gradle/
|
||||
echo "signing.keyId=${SONATYPE_GPG_KEYID}" > ~/.gradle/gradle.properties
|
||||
echo "signing.password=${SONATYPE_GPG_PASSWORD}" >> ~/.gradle/gradle.properties
|
||||
echo "signing.secretKeyRingFile=${HOME}/.gradle/secring.gpg" >> ~/.gradle/gradle.properties
|
||||
echo ${SONATYPE_GPG_FILE} | base64 -d > ~/.gradle/secring.gpg
|
||||
./gradlew publishToSonatype ${{ startsWith(github.ref, 'refs/tags/v') && 'closeAndReleaseSonatypeStagingRepository' || '' }}
|
||||
|
||||
# Gradle dependency
|
||||
- name: Java - Gradle dependency graph
|
||||
uses: gradle/actions/dependency-submission@v4
|
||||
71 .github/workflows/workflow-release.yml vendored Normal file
@@ -0,0 +1,71 @@
|
||||
name: Release
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
plugin-version:
|
||||
description: "Kestra version"
|
||||
default: 'LATEST'
|
||||
required: false
|
||||
type: string
|
||||
workflow_call:
|
||||
inputs:
|
||||
plugin-version:
|
||||
description: "Kestra version"
|
||||
default: 'LATEST'
|
||||
required: false
|
||||
type: string
|
||||
secrets:
|
||||
DOCKERHUB_USERNAME:
|
||||
description: "The Dockerhub username."
|
||||
required: true
|
||||
DOCKERHUB_PASSWORD:
|
||||
description: "The Dockerhub password."
|
||||
required: true
|
||||
SONATYPE_USER:
|
||||
description: "The Sonatype username."
|
||||
required: true
|
||||
SONATYPE_PASSWORD:
|
||||
description: "The Sonatype password."
|
||||
required: true
|
||||
SONATYPE_GPG_KEYID:
|
||||
description: "The Sonatype GPG key id."
|
||||
required: true
|
||||
SONATYPE_GPG_PASSWORD:
|
||||
description: "The Sonatype GPG password."
|
||||
required: true
|
||||
SONATYPE_GPG_FILE:
|
||||
description: "The Sonatype GPG file."
|
||||
required: true
|
||||
jobs:
|
||||
build-artifacts:
|
||||
name: Build - Artifacts
|
||||
uses: ./.github/workflows/workflow-build-artifacts.yml
|
||||
with:
|
||||
plugin-version: ${{ github.event.inputs.plugin-version != null && github.event.inputs.plugin-version || 'LATEST' }}
|
||||
|
||||
Docker:
|
||||
name: Publish Docker
|
||||
needs: build-artifacts
|
||||
uses: ./.github/workflows/workflow-publish-docker.yml
|
||||
secrets:
|
||||
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }}
|
||||
|
||||
Maven:
|
||||
name: Publish Maven
|
||||
uses: ./.github/workflows/workflow-publish-maven.yml
|
||||
secrets:
|
||||
SONATYPE_USER: ${{ secrets.SONATYPE_USER }}
|
||||
SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }}
|
||||
SONATYPE_GPG_KEYID: ${{ secrets.SONATYPE_GPG_KEYID }}
|
||||
SONATYPE_GPG_PASSWORD: ${{ secrets.SONATYPE_GPG_PASSWORD }}
|
||||
SONATYPE_GPG_FILE: ${{ secrets.SONATYPE_GPG_FILE }}
|
||||
|
||||
Github:
|
||||
name: Github Release
|
||||
needs: build-artifacts
|
||||
if: startsWith(github.ref, 'refs/tags/v')
|
||||
uses: ./.github/workflows/workflow-github-release.yml
|
||||
secrets:
|
||||
GH_PERSONAL_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}
|
||||
97 .github/workflows/workflow-test.yml vendored Normal file
@@ -0,0 +1,97 @@
|
||||
name: Tests
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '0 4 * * 1,2,3,4,5'
|
||||
workflow_call:
|
||||
inputs:
|
||||
report-status:
|
||||
description: "Report status of the jobs in outputs"
|
||||
type: string
|
||||
required: false
|
||||
default: false
|
||||
outputs:
|
||||
frontend_status:
|
||||
description: "Status of the frontend job"
|
||||
value: ${{ jobs.set-frontend-status.outputs.frontend_status }}
|
||||
backend_status:
|
||||
description: "Status of the backend job"
|
||||
value: ${{ jobs.set-backend-status.outputs.backend_status }}
|
||||
|
||||
jobs:
|
||||
file-changes:
|
||||
name: File changes detection
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 60
|
||||
outputs:
|
||||
ui: ${{ steps.changes.outputs.ui }}
|
||||
backend: ${{ steps.changes.outputs.backend }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
if: "!startsWith(github.ref, 'refs/tags/v')"
|
||||
- uses: dorny/paths-filter@v3
|
||||
if: "!startsWith(github.ref, 'refs/tags/v')"
|
||||
id: changes
|
||||
with:
|
||||
filters: |
|
||||
ui:
|
||||
- 'ui/**'
|
||||
backend:
|
||||
- '!{ui,.github}/**'
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
frontend:
|
||||
name: Frontend - Tests
|
||||
needs: file-changes
|
||||
if: "needs.file-changes.outputs.ui == 'true' || startsWith(github.ref, 'refs/tags/v')"
|
||||
uses: ./.github/workflows/workflow-frontend-test.yml
|
||||
secrets:
|
||||
GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
|
||||
|
||||
backend:
|
||||
name: Backend - Tests
|
||||
needs: file-changes
|
||||
if: "needs.file-changes.outputs.backend == 'true' || startsWith(github.ref, 'refs/tags/v')"
|
||||
uses: ./.github/workflows/workflow-backend-test.yml
|
||||
secrets:
|
||||
GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
|
||||
GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
|
||||
|
||||
# Output every job status
|
||||
# To be used in other workflows
|
||||
report-status:
|
||||
name: Report Status
|
||||
runs-on: ubuntu-latest
|
||||
needs: [ frontend, backend ]
|
||||
if: always() && (inputs.report-status == 'true')
|
||||
outputs:
|
||||
frontend_status: ${{ steps.set-frontend-status.outputs.frontend_status }}
|
||||
backend_status: ${{ steps.set-backend-status.outputs.backend_status }}
|
||||
steps:
|
||||
- id: set-frontend-status
|
||||
name: Set frontend job status
|
||||
run: echo "::set-output name=frontend_status::${{ needs.frontend.result }}"
|
||||
|
||||
- id: set-backend-status
|
||||
name: Set backend job status
|
||||
run: echo "::set-output name=backend_status::${{ needs.backend.result }}"
|
||||
|
||||
notify:
|
||||
name: Notify - Slack
|
||||
runs-on: ubuntu-latest
|
||||
needs: [ frontend, backend ]
|
||||
if: github.event_name == 'schedule'
|
||||
steps:
|
||||
- name: Notify failed CI
|
||||
id: send-ci-failed
|
||||
if: |
|
||||
always() && (needs.frontend.result != 'success' ||
|
||||
needs.backend.result != 'success')
|
||||
uses: kestra-io/actions/.github/actions/send-ci-failed@main
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
|
||||
3 .gitignore vendored
@@ -36,6 +36,8 @@ yarn.lock
ui/coverage
ui/stats.html
ui/.frontend-gradle-plugin
ui/utils/CHANGELOG.md
ui/test-report.junit.xml

### Docker
/.env
@@ -56,3 +58,4 @@ core/src/main/resources/gradle.properties
**/allure-results/*

*storybook.log
storybook-static
2 .plugins
@@ -32,6 +32,7 @@
#plugin-git:io.kestra.plugin:plugin-git:LATEST
#plugin-github:io.kestra.plugin:plugin-github:LATEST
#plugin-googleworkspace:io.kestra.plugin:plugin-googleworkspace:LATEST
#plugin-graalvm:io.kestra.plugin:plugin-graalvm:LATEST
#plugin-hightouch:io.kestra.plugin:plugin-hightouch:LATEST
#plugin-hubspot:io.kestra.plugin:plugin-hubspot:LATEST
#plugin-huggingface:io.kestra.plugin:plugin-huggingface:LATEST
@@ -40,6 +41,7 @@
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-db2:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-duckdb:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-druid:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-mariadb:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-mysql:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-oracle:LATEST
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-pinot:LATEST
73 Makefile
@@ -17,6 +17,8 @@ VERSION := $(shell ./gradlew properties -q | awk '/^version:/ {print $$2}')
|
||||
GIT_COMMIT := $(shell git rev-parse --short HEAD)
|
||||
GIT_BRANCH := $(shell git rev-parse --abbrev-ref HEAD)
|
||||
DATE := $(shell date --rfc-3339=seconds)
|
||||
PLUGIN_GIT_DIR ?= $(pwd)/..
|
||||
PLUGIN_JARS_DIR ?= $(pwd)/locals/plugins
|
||||
|
||||
DOCKER_IMAGE = kestra/kestra
|
||||
DOCKER_PATH = ./
|
||||
@@ -87,7 +89,7 @@ build-docker: build-exec
|
||||
--compress \
|
||||
--rm \
|
||||
-f ./Dockerfile \
|
||||
--build-arg="APT_PACKAGES=python3 python3-venv python-is-python3 python3-pip nodejs npm curl zip unzip" \
|
||||
--build-arg="APT_PACKAGES=python3 python3-venv python-is-python3 python3-pip nodejs npm curl zip unzip jattach" \
|
||||
--build-arg="PYTHON_LIBRARIES=kestra" \
|
||||
-t ${DOCKER_IMAGE}:${VERSION} ${DOCKER_PATH} || exit 1 ;
|
||||
|
||||
@@ -174,3 +176,72 @@ start-standalone-postgres: kill --private-start-standalone-postgres health
|
||||
|
||||
start-standalone-local: kill --private-start-standalone-local health
|
||||
|
||||
#checkout all plugins
|
||||
clone-plugins:
|
||||
@echo "Using PLUGIN_GIT_DIR: $(PLUGIN_GIT_DIR)"
|
||||
@mkdir -p "$(PLUGIN_GIT_DIR)"
|
||||
@echo "Fetching repository list from GitHub..."
|
||||
@REPOS=$(gh repo list kestra-io -L 1000 --json name | jq -r .[].name | sort | grep "^plugin-") \
|
||||
for repo in $$REPOS; do \
|
||||
if [[ $$repo == plugin-* ]]; then \
|
||||
if [ -d "$(PLUGIN_GIT_DIR)/$$repo" ]; then \
|
||||
echo "Skipping: $$repo (Already cloned)"; \
|
||||
else \
|
||||
echo "Cloning: $$repo using SSH..."; \
|
||||
git clone "git@github.com:kestra-io/$$repo.git" "$(PLUGIN_GIT_DIR)/$$repo"; \
|
||||
fi; \
|
||||
fi; \
|
||||
done
|
||||
@echo "Done!"
|
||||
|
||||
# Update all plugins jar
|
||||
build-plugins:
|
||||
@echo "🔍 Scanning repositories in '$(PLUGIN_GIT_DIR)'..."
|
||||
@MASTER_REPOS=(); \
|
||||
for repo in "$(PLUGIN_GIT_DIR)"/*; do \
|
||||
if [ -d "$$repo/.git" ]; then \
|
||||
branch=$$(git -C "$$repo" rev-parse --abbrev-ref HEAD); \
|
||||
if [[ "$$branch" == "master" || "$$branch" == "main" ]]; then \
|
||||
MASTER_REPOS+=("$$repo"); \
|
||||
else \
|
||||
echo "❌ Skipping: $$(basename "$$repo") (Not on master or main branch)"; \
|
||||
fi; \
|
||||
fi; \
|
||||
done; \
|
||||
\
|
||||
# === STEP 2: Update Repos on Master or Main Branch === \
|
||||
echo "⬇️ Updating repositories on master or main branch..."; \
|
||||
for repo in "$${MASTER_REPOS[@]}"; do \
|
||||
echo "🔄 Updating: $$(basename "$$repo")"; \
|
||||
git -C "$$repo" pull --rebase; \
|
||||
done; \
|
||||
\
|
||||
# === STEP 3: Build with Gradle === \
|
||||
echo "⚙️ Building repositories with Gradle..."; \
|
||||
for repo in "$${MASTER_REPOS[@]}"; do \
|
||||
echo "🔨 Building: $$(basename "$$repo")"; \
|
||||
gradle clean build -x test shadowJar -p "$$repo"; \
|
||||
done; \
|
||||
\
|
||||
# === STEP 4: Copy Latest JARs (Ignoring javadoc & sources) === \
|
||||
echo "📦 Organizing built JARs..."; \
|
||||
mkdir -p "$(PLUGIN_JARS_DIR)"; \
|
||||
for repo in "$${MASTER_REPOS[@]}"; do \
|
||||
REPO_NAME=$$(basename "$$repo"); \
|
||||
\
|
||||
JARS=($$(find "$$repo" -type f -name "plugin-*.jar" ! -name "*-javadoc.jar" ! -name "*-sources.jar")); \
|
||||
if [ $${#JARS[@]} -eq 0 ]; then \
|
||||
echo "⚠️ Warning: No valid plugin JARs found for $$REPO_NAME"; \
|
||||
continue; \
|
||||
fi; \
|
||||
\
|
||||
for jar in "$${JARS[@]}"; do \
|
||||
JAR_NAME=$$(basename "$$jar"); \
|
||||
BASE_NAME=$$(echo "$$JAR_NAME" | sed -E 's/(-[0-9]+.*)?\.jar$$//'); \
|
||||
rm -f "$(PLUGIN_JARS_DIR)/$$BASE_NAME"-[0-9]*.jar; \
|
||||
cp "$$jar" "$(PLUGIN_JARS_DIR)/"; \
|
||||
echo "✅ Copied JAR: $$JAR_NAME"; \
|
||||
done; \
|
||||
done; \
|
||||
\
|
||||
echo "🎉 Done! All master and main branch repos updated, built, and organized."
|
||||
@@ -24,6 +24,13 @@
  <a href="https://www.youtube.com/@kestra-io"><img height="25" src="https://kestra.io/youtube.svg" alt="youtube" /></a>
</p>

<p align="center">
  <a href="https://trendshift.io/repositories/2714" target="_blank">
    <img src="https://trendshift.io/api/badge/repositories/2714" alt="kestra-io%2Fkestra | Trendshift" width="250" height="55"/>
  </a>
  <a href="https://www.producthunt.com/posts/kestra?embed=true&utm_source=badge-top-post-badge&utm_medium=badge&utm_souce=badge-kestra" target="_blank"><img src="https://api.producthunt.com/widgets/embed-image/v1/top-post-badge.svg?post_id=612077&theme=light&period=daily&t=1740737506162" alt="Kestra - All-in-one automation & orchestration platform | Product Hunt" style="width: 250px; height: 54px;" width="250" height="54" /></a>
</p>

<p align="center">
  <a href="https://go.kestra.io/video/product-overview" target="_blank">
    <img src="https://kestra.io/startvideo.png" alt="Get started in 4 minutes with Kestra" width="640px" />
@@ -47,7 +54,7 @@ Kestra is an open-source, event-driven orchestration platform that makes both **
- **Structure & Resilience**: tame chaos and bring resilience to your workflows with **namespaces**, **labels**, **subflows**, **retries**, **timeout**, **error handling**, **inputs**, **outputs** that generate artifacts in the UI, **variables**, **conditional branching**, **advanced scheduling**, **event triggers**, **backfills**, **dynamic tasks**, **sequential and parallel tasks**, and skip tasks or triggers when needed by setting the flag `disabled` to `true`.

🧑‍💻 The YAML definition gets automatically adjusted any time you make changes to a workflow from the UI or via an API call. Therefore, the orchestration logic is **always managed declaratively in code**, even if you modify your workflows in other ways (UI, CI/CD, Terraform, API calls).
🧑‍💻 The YAML definition gets automatically adjusted any time you make changes to a workflow from the UI or via an API call. Therefore, the orchestration logic is **always managed declaratively in code**, even if you modify your workflows in other ways (UI, CI/CD, Terraform, API calls).

<p align="center">
13 build.gradle
@@ -16,7 +16,7 @@ plugins {
|
||||
id "java"
|
||||
id 'java-library'
|
||||
id "idea"
|
||||
id "com.gradleup.shadow" version "8.3.5"
|
||||
id "com.gradleup.shadow" version "8.3.6"
|
||||
id "application"
|
||||
|
||||
// test
|
||||
@@ -28,18 +28,18 @@ plugins {
|
||||
id "com.github.ben-manes.versions" version "0.52.0"
|
||||
|
||||
// front
|
||||
id 'org.siouan.frontend-jdk21' version '10.0.0' apply false
|
||||
id 'com.github.node-gradle.node' version '7.1.0'
|
||||
|
||||
// release
|
||||
id "io.github.gradle-nexus.publish-plugin" version "2.0.0"
|
||||
id 'net.researchgate.release' version '3.1.0'
|
||||
id "com.gorylenko.gradle-git-properties" version "2.4.2"
|
||||
id "com.gorylenko.gradle-git-properties" version "2.5.0"
|
||||
id 'signing'
|
||||
id 'ru.vyarus.pom' version '3.0.0' apply false
|
||||
id 'ru.vyarus.github-info' version '2.0.0' apply false
|
||||
|
||||
// OWASP dependency check
|
||||
id "org.owasp.dependencycheck" version "12.0.1" apply false
|
||||
id "org.owasp.dependencycheck" version "12.1.0" apply false
|
||||
}
|
||||
|
||||
idea {
|
||||
@@ -74,7 +74,7 @@ dependencies {
|
||||
**********************************************************************************************************************/
|
||||
allprojects {
|
||||
if (it.name != 'platform') {
|
||||
group "io.kestra"
|
||||
group = "io.kestra"
|
||||
|
||||
java {
|
||||
sourceCompatibility = targetJavaVersion
|
||||
@@ -121,7 +121,6 @@ allprojects {
|
||||
micronaut "io.micronaut:micronaut-management"
|
||||
micronaut "io.micrometer:micrometer-core"
|
||||
micronaut "io.micronaut.micrometer:micronaut-micrometer-registry-prometheus"
|
||||
micronaut "io.micronaut.micrometer:micronaut-micrometer-registry-otlp"
|
||||
micronaut "io.micronaut:micronaut-http-client"
|
||||
micronaut "io.micronaut.reactor:micronaut-reactor-http-client"
|
||||
micronaut "io.micronaut.tracing:micronaut-tracing-opentelemetry-http"
|
||||
@@ -280,7 +279,7 @@ subprojects {
|
||||
}
|
||||
|
||||
dependencies {
|
||||
agent "org.aspectj:aspectjweaver:1.9.22.1"
|
||||
agent "org.aspectj:aspectjweaver:1.9.23"
|
||||
}
|
||||
|
||||
test {
|
||||
|
||||
@@ -12,18 +12,9 @@ dependencies {
    implementation 'ch.qos.logback.contrib:logback-json-classic'
    implementation 'ch.qos.logback.contrib:logback-jackson'

    // plugins
    implementation 'org.eclipse.aether:aether-api'
    implementation 'org.eclipse.aether:aether-spi'
    implementation 'org.eclipse.aether:aether-util'
    implementation 'org.eclipse.aether:aether-impl'
    implementation 'org.eclipse.aether:aether-connector-basic'
    implementation 'org.eclipse.aether:aether-transport-file'
    implementation 'org.eclipse.aether:aether-transport-http'
    implementation('org.apache.maven:maven-aether-provider') {
        // sisu dependency injector is not used
        exclude group: 'org.eclipse.sisu'
    }
    // OTLP metrics
    implementation "io.micronaut.micrometer:micronaut-micrometer-registry-otlp"

    // aether still use javax.inject
    compileOnly 'javax.inject:javax.inject:1'

@@ -43,4 +34,7 @@ dependencies {
    implementation project(":storage-local")

    implementation project(":webserver")

    //test
    testImplementation "org.wiremock:wiremock"
}
@@ -4,16 +4,17 @@ import ch.qos.logback.classic.LoggerContext;
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import io.kestra.cli.commands.servers.ServerCommandInterface;
|
||||
import io.kestra.cli.services.StartupHookInterface;
|
||||
import io.kestra.core.contexts.KestraContext;
|
||||
import io.kestra.core.plugins.PluginManager;
|
||||
import io.kestra.core.plugins.PluginRegistry;
|
||||
import io.kestra.webserver.services.FlowAutoLoaderService;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import io.micronaut.context.env.yaml.YamlPropertySourceLoader;
|
||||
import io.micronaut.core.annotation.Introspected;
|
||||
import io.micronaut.http.uri.UriBuilder;
|
||||
import io.micronaut.management.endpoint.EndpointDefaultConfiguration;
|
||||
import io.micronaut.runtime.server.EmbeddedServer;
|
||||
import jakarta.inject.Provider;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.apache.http.client.utils.URIBuilder;
|
||||
import io.kestra.core.utils.Rethrow;
|
||||
import picocli.CommandLine;
|
||||
|
||||
@@ -26,10 +27,13 @@ import java.nio.file.Paths;
|
||||
import java.text.MessageFormat;
|
||||
import java.time.temporal.ChronoUnit;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.concurrent.Callable;
|
||||
import jakarta.inject.Inject;
|
||||
import picocli.CommandLine.Command;
|
||||
import picocli.CommandLine.Option;
|
||||
|
||||
@CommandLine.Command(
|
||||
@Command(
|
||||
versionProvider = VersionProvider.class,
|
||||
mixinStandardHelpOptions = true,
|
||||
showDefaultValues = true
|
||||
@@ -49,22 +53,28 @@ abstract public class AbstractCommand implements Callable<Integer> {
|
||||
@Inject
|
||||
private io.kestra.core.utils.VersionProvider versionProvider;
|
||||
|
||||
@Inject
|
||||
protected Provider<PluginRegistry> pluginRegistryProvider;
|
||||
|
||||
@Inject
|
||||
protected Provider<PluginManager> pluginManagerProvider;
|
||||
|
||||
private PluginRegistry pluginRegistry;
|
||||
|
||||
@CommandLine.Option(names = {"-v", "--verbose"}, description = "Change log level. Multiple -v options increase the verbosity.", showDefaultValue = CommandLine.Help.Visibility.NEVER)
|
||||
@Option(names = {"-v", "--verbose"}, description = "Change log level. Multiple -v options increase the verbosity.", showDefaultValue = CommandLine.Help.Visibility.NEVER)
|
||||
private boolean[] verbose = new boolean[0];
|
||||
|
||||
@CommandLine.Option(names = {"-l", "--log-level"}, description = "Change log level (values: ${COMPLETION-CANDIDATES})")
|
||||
@Option(names = {"-l", "--log-level"}, description = "Change log level (values: ${COMPLETION-CANDIDATES})")
|
||||
private LogLevel logLevel = LogLevel.INFO;
|
||||
|
||||
@CommandLine.Option(names = {"--internal-log"}, description = "Change also log level for internal log")
|
||||
@Option(names = {"--internal-log"}, description = "Change also log level for internal log")
|
||||
private boolean internalLog = false;
|
||||
|
||||
@CommandLine.Option(names = {"-c", "--config"}, description = "Path to a configuration file")
|
||||
@Option(names = {"-c", "--config"}, description = "Path to a configuration file")
|
||||
private Path config = Paths.get(System.getProperty("user.home"), ".kestra/config.yml");
|
||||
|
||||
@CommandLine.Option(names = {"-p", "--plugins"}, description = "Path to plugins directory")
|
||||
protected Path pluginsPath = System.getenv("KESTRA_PLUGINS_PATH") != null ? Paths.get(System.getenv("KESTRA_PLUGINS_PATH")) : null;
|
||||
@Option(names = {"-p", "--plugins"}, description = "Path to plugins directory")
|
||||
protected Path pluginsPath = Optional.ofNullable(System.getenv("KESTRA_PLUGINS_PATH")).map(Paths::get).orElse(null);
|
||||
|
||||
public enum LogLevel {
|
||||
TRACE,
|
||||
@@ -76,7 +86,7 @@ abstract public class AbstractCommand implements Callable<Integer> {
|
||||
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
Thread.currentThread().setName(this.getClass().getDeclaredAnnotation(CommandLine.Command.class).name());
|
||||
Thread.currentThread().setName(this.getClass().getDeclaredAnnotation(Command.class).name());
|
||||
startLogger();
|
||||
sendServerLog();
|
||||
if (this.startupHook != null) {
|
||||
@@ -84,8 +94,14 @@ abstract public class AbstractCommand implements Callable<Integer> {
|
||||
}
|
||||
|
||||
if (this.pluginsPath != null && loadExternalPlugins()) {
|
||||
pluginRegistry = pluginRegistry();
|
||||
pluginRegistry = pluginRegistryProvider.get();
|
||||
pluginRegistry.registerIfAbsent(pluginsPath);
|
||||
|
||||
// PluginManager must only be initialized if a registry is also instantiated
|
||||
if (isPluginManagerEnabled()) {
|
||||
PluginManager manager = pluginManagerProvider.get();
|
||||
manager.start();
|
||||
}
|
||||
}
|
||||
|
||||
startWebserver();
|
||||
@@ -102,8 +118,15 @@ abstract public class AbstractCommand implements Callable<Integer> {
|
||||
return true;
|
||||
}
|
||||
|
||||
protected PluginRegistry pluginRegistry() {
|
||||
return KestraContext.getContext().getPluginRegistry(); // Lazy init
|
||||
/**
|
||||
* Specifies whether the {@link PluginManager} service must be initialized.
|
||||
* <p>
|
||||
* This method can be overridden by concrete commands.
|
||||
*
|
||||
* @return {@code true} if the {@link PluginManager} service must be initialized.
|
||||
*/
|
||||
protected boolean isPluginManagerEnabled() {
|
||||
return true;
|
||||
}
|
||||
|
||||
private static String message(String message, Object... format) {
|
||||
@@ -157,7 +180,6 @@ abstract public class AbstractCommand implements Callable<Integer> {
|
||||
logger.getName().startsWith("io.kestra") &&
|
||||
!logger.getName().startsWith("io.kestra.ee.runner.kafka.services"))
|
||||
)
|
||||
|| logger.getName().startsWith("flow")
|
||||
)
|
||||
.forEach(
|
||||
logger -> logger.setLevel(ch.qos.logback.classic.Level.valueOf(this.logLevel.name()))
|
||||
@@ -183,9 +205,9 @@ abstract public class AbstractCommand implements Callable<Integer> {
|
||||
if (this.endpointConfiguration.getPort().isPresent()) {
|
||||
URI endpoint = null;
|
||||
try {
|
||||
endpoint = new URIBuilder(server.getURL().toURI())
|
||||
.setPort(this.endpointConfiguration.getPort().get())
|
||||
.setPath("/health")
|
||||
endpoint = UriBuilder.of(server.getURL().toURI())
|
||||
.port(this.endpointConfiguration.getPort().get())
|
||||
.path("/health")
|
||||
.build();
|
||||
} catch (URISyntaxException e) {
|
||||
e.printStackTrace();
|
||||
@@ -207,10 +229,12 @@ abstract public class AbstractCommand implements Callable<Integer> {
|
||||
return false;
|
||||
}
|
||||
|
||||
protected void shutdownHook(Rethrow.RunnableChecked<Exception> run) {
|
||||
protected void shutdownHook(boolean logShutdown, Rethrow.RunnableChecked<Exception> run) {
|
||||
Runtime.getRuntime().addShutdownHook(new Thread(
|
||||
() -> {
|
||||
log.warn("Receiving shutdown ! Try to graceful exit");
|
||||
if (logShutdown) {
|
||||
log.warn("Receiving shutdown ! Try to graceful exit");
|
||||
}
|
||||
try {
|
||||
run.run();
|
||||
} catch (Exception e) {
|
||||
|
||||
@@ -88,11 +88,12 @@ public class App implements Callable<Integer> {
|
||||
.environments(Environment.CLI);
|
||||
|
||||
CommandLine cmd = new CommandLine(mainClass, CommandLine.defaultFactory());
|
||||
continueOnParsingErrors(cmd);
|
||||
|
||||
CommandLine.ParseResult parseResult = cmd.parseArgs(args);
|
||||
List<CommandLine> parsedCommands = parseResult.asCommandLineList();
|
||||
|
||||
CommandLine commandLine = parsedCommands.get(parsedCommands.size() - 1);
|
||||
CommandLine commandLine = parsedCommands.getLast();
|
||||
Class<?> cls = commandLine.getCommandSpec().userObject().getClass();
|
||||
|
||||
if (AbstractCommand.class.isAssignableFrom(cls)) {
|
||||
@@ -114,15 +115,17 @@ public class App implements Callable<Integer> {
|
||||
.stream()
|
||||
.filter(argSpec -> ((Field) argSpec.userObject()).getName().equals("serverPort"))
|
||||
.findFirst()
|
||||
.ifPresent(argSpec -> {
|
||||
properties.put("micronaut.server.port", argSpec.getValue());
|
||||
});
|
||||
.ifPresent(argSpec -> properties.put("micronaut.server.port", argSpec.getValue()));
|
||||
|
||||
builder.properties(properties);
|
||||
}
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
private static void continueOnParsingErrors(CommandLine cmd) {
|
||||
cmd.getCommandSpec().parser().collectErrors(true);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private static <T> T getPropertiesFromMethod(Class<?> cls, String methodName, Object instance) {
|
||||
try {
|
||||
|
||||
@@ -33,6 +33,9 @@ public class FlowUpdatesCommand extends AbstractApiCommand {
|
||||
@CommandLine.Option(names = {"--delete"}, negatable = true, description = "Whether missing should be deleted")
|
||||
public boolean delete = false;
|
||||
|
||||
@CommandLine.Option(names = {"--namespace"}, description = "The parent namespace of the flows, if not set, every namespace are allowed.")
|
||||
public String namespace;
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
@@ -58,8 +61,12 @@ public class FlowUpdatesCommand extends AbstractApiCommand {
|
||||
body = String.join("\n---\n", flows);
|
||||
}
|
||||
try(DefaultHttpClient client = client()) {
|
||||
String namespaceQuery = "";
|
||||
if (namespace != null) {
|
||||
namespaceQuery = "&namespace=" + namespace;
|
||||
}
|
||||
MutableHttpRequest<String> request = HttpRequest
|
||||
.POST(apiUri("/flows/bulk") + "?delete=" + delete, body).contentType(MediaType.APPLICATION_YAML);
|
||||
.POST(apiUri("/flows/bulk") + "?allowNamespaceChild=true&delete=" + delete + namespaceQuery, body).contentType(MediaType.APPLICATION_YAML);
|
||||
|
||||
List<UpdateResult> updated = client.toBlocking().retrieve(
|
||||
this.requestOptions(request),
|
||||
|
||||
@@ -39,7 +39,7 @@ public class FlowValidateCommand extends AbstractValidateCommand {
|
||||
Flow flow = (Flow) object;
|
||||
List<String> warnings = new ArrayList<>();
|
||||
warnings.addAll(flowService.deprecationPaths(flow).stream().map(deprecation -> deprecation + " is deprecated").toList());
|
||||
warnings.addAll(flowService.warnings(flow));
|
||||
warnings.addAll(flowService.warnings(flow, this.tenantId));
|
||||
return warnings;
|
||||
},
|
||||
(Object object) -> {
|
||||
|
||||
@@ -1,31 +1,36 @@
|
||||
package io.kestra.cli.commands.plugins;
|
||||
|
||||
import io.micronaut.configuration.picocli.PicocliRunner;
|
||||
import lombok.SneakyThrows;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import io.kestra.cli.AbstractCommand;
|
||||
import io.kestra.cli.App;
|
||||
import picocli.CommandLine;
|
||||
import io.micronaut.configuration.picocli.PicocliRunner;
|
||||
import lombok.SneakyThrows;
|
||||
import picocli.CommandLine.Command;
|
||||
|
||||
@CommandLine.Command(
|
||||
@Command(
|
||||
name = "plugins",
|
||||
description = "Manage plugins",
|
||||
mixinStandardHelpOptions = true,
|
||||
subcommands = {
|
||||
PluginInstallCommand.class,
|
||||
PluginListCommand.class,
|
||||
PluginDocCommand.class
|
||||
PluginDocCommand.class,
|
||||
PluginSearchCommand.class
|
||||
}
|
||||
)
|
||||
@Slf4j
|
||||
public class PluginCommand extends AbstractCommand {
|
||||
|
||||
@SneakyThrows
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
|
||||
PicocliRunner.call(App.class, "plugins", "--help");
|
||||
PicocliRunner.call(App.class, "plugins", "--help");
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean loadExternalPlugins() {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
package io.kestra.cli.commands.plugins;
|
||||
|
||||
import com.google.common.base.Charsets;
|
||||
import com.google.common.io.Files;
|
||||
import io.kestra.cli.AbstractCommand;
|
||||
import io.kestra.core.docs.DocumentationGenerator;
|
||||
import io.kestra.core.plugins.PluginRegistry;
|
||||
import io.kestra.core.plugins.RegisteredPlugin;
|
||||
import io.kestra.core.serializers.JacksonMapper;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
@@ -43,8 +43,10 @@ public class PluginDocCommand extends AbstractCommand {
|
||||
super.call();
|
||||
DocumentationGenerator documentationGenerator = applicationContext.getBean(DocumentationGenerator.class);
|
||||
|
||||
List<RegisteredPlugin> plugins = core ? pluginRegistry().plugins() : pluginRegistry().externalPlugins();
|
||||
PluginRegistry registry = pluginRegistryProvider.get();
|
||||
List<RegisteredPlugin> plugins = core ? registry.plugins() : registry.externalPlugins();
|
||||
boolean hasFailures = false;
|
||||
|
||||
for (RegisteredPlugin registeredPlugin : plugins) {
|
||||
try {
|
||||
documentationGenerator
|
||||
@@ -61,7 +63,7 @@ public class PluginDocCommand extends AbstractCommand {
|
||||
Files
|
||||
.asCharSink(
|
||||
file,
|
||||
Charsets.UTF_8
|
||||
StandardCharsets.UTF_8
|
||||
).write(s.getBody());
|
||||
stdOut("Generate doc in: {0}", file);
|
||||
|
||||
@@ -101,4 +103,10 @@ public class PluginDocCommand extends AbstractCommand {
|
||||
|
||||
return hasFailures ? 1 : 0;
|
||||
}
|
||||
|
||||
/** {@inheritDoc} **/
|
||||
@Override
|
||||
protected boolean isPluginManagerEnabled() {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,98 +1,123 @@
|
||||
package io.kestra.cli.commands.plugins;
|
||||
|
||||
import org.apache.commons.io.FilenameUtils;
|
||||
import io.kestra.core.contexts.MavenPluginRepositoryConfig;
|
||||
import io.kestra.core.plugins.LocalPluginManager;
|
||||
import io.kestra.core.plugins.MavenPluginDownloader;
|
||||
import io.kestra.core.plugins.PluginArtifact;
|
||||
import io.kestra.core.plugins.PluginCatalogService;
|
||||
import io.kestra.core.plugins.PluginManager;
|
||||
import io.micronaut.http.client.HttpClient;
|
||||
import io.micronaut.http.client.annotation.Client;
|
||||
import io.micronaut.http.uri.UriBuilder;
|
||||
import io.kestra.cli.AbstractCommand;
|
||||
import io.kestra.cli.plugins.PluginDownloader;
|
||||
import io.kestra.cli.plugins.RepositoryConfig;
|
||||
import io.kestra.core.utils.IdUtils;
|
||||
import org.apache.http.client.utils.URIBuilder;
|
||||
import jakarta.inject.Provider;
|
||||
import picocli.CommandLine;
|
||||
|
||||
import java.net.URI;
|
||||
import java.net.URL;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Paths;
|
||||
import java.nio.file.StandardCopyOption;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
||||
import jakarta.inject.Inject;
|
||||
import picocli.CommandLine.Command;
|
||||
import picocli.CommandLine.Parameters;
|
||||
import picocli.CommandLine.Option;
|
||||
import picocli.CommandLine.Spec;
|
||||
|
||||
import static io.kestra.core.utils.Rethrow.throwConsumer;
|
||||
|
||||
@CommandLine.Command(
|
||||
@Command(
|
||||
name = "install",
|
||||
description = "Install plugins"
|
||||
)
|
||||
public class PluginInstallCommand extends AbstractCommand {
|
||||
@CommandLine.Parameters(index = "0..*", description = "Plugins to install. Represented as Maven artifact coordinates.")
|
||||
|
||||
@Option(names = {"--locally"}, description = "Specifies if plugins must be installed locally. If set to false the installation depends on your Kestra configuration.")
|
||||
boolean locally = true;
|
||||
|
||||
@Option(names = {"--all"}, description = "Install all available plugins")
|
||||
boolean all = false;
|
||||
|
||||
@Parameters(index = "0..*", description = "Plugins to install. Represented as Maven artifact coordinates (i.e., <groupId>:<artifactId>:(<version>|LATEST))")
|
||||
List<String> dependencies = new ArrayList<>();
|
||||
|
||||
@CommandLine.Option(names = {"--repositories"}, description = "URL to additional Maven repositories")
|
||||
@Option(names = {"--repositories"}, description = "URL to additional Maven repositories")
|
||||
private URI[] repositories;
|
||||
|
||||
@CommandLine.Spec
|
||||
@Spec
|
||||
CommandLine.Model.CommandSpec spec;
|
||||
|
||||
@Inject
|
||||
private PluginDownloader pluginDownloader;
|
||||
Provider<MavenPluginDownloader> mavenPluginRepositoryProvider;
|
||||
|
||||
@Inject
|
||||
@Client("api") HttpClient httpClient;
|
||||
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
|
||||
if (this.pluginsPath == null) {
|
||||
if (this.locally && this.pluginsPath == null) {
|
||||
throw new CommandLine.ParameterException(this.spec.commandLine(), "Missing required options '--plugins' " +
|
||||
"or environment variable 'KESTRA_PLUGINS_PATH"
|
||||
);
|
||||
}
|
||||
|
||||
if (!pluginsPath.toFile().exists()) {
|
||||
if (!pluginsPath.toFile().mkdir()) {
|
||||
throw new RuntimeException("Cannot create directory: " + pluginsPath.toFile().getAbsolutePath());
|
||||
}
|
||||
}
|
||||
|
||||
List<MavenPluginRepositoryConfig> repositoryConfigs = List.of();
|
||||
if (repositories != null) {
|
||||
Arrays.stream(repositories)
|
||||
.forEach(throwConsumer(s -> {
|
||||
URIBuilder uriBuilder = new URIBuilder(s);
|
||||
|
||||
RepositoryConfig.RepositoryConfigBuilder builder = RepositoryConfig.builder()
|
||||
repositoryConfigs = Arrays.stream(repositories)
|
||||
.map(uri -> {
|
||||
MavenPluginRepositoryConfig.MavenPluginRepositoryConfigBuilder builder = MavenPluginRepositoryConfig
|
||||
.builder()
|
||||
.id(IdUtils.create());
|
||||
|
||||
if (uriBuilder.getUserInfo() != null) {
|
||||
int index = uriBuilder.getUserInfo().indexOf(":");
|
||||
|
||||
builder.basicAuth(new RepositoryConfig.BasicAuth(
|
||||
uriBuilder.getUserInfo().substring(0, index),
|
||||
uriBuilder.getUserInfo().substring(index + 1)
|
||||
String userInfo = uri.getUserInfo();
|
||||
if (userInfo != null) {
|
||||
String[] userInfoParts = userInfo.split(":");
|
||||
builder = builder.basicAuth(new MavenPluginRepositoryConfig.BasicAuth(
|
||||
userInfoParts[0],
|
||||
userInfoParts[1]
|
||||
));
|
||||
|
||||
uriBuilder.setUserInfo(null);
|
||||
}
|
||||
|
||||
builder.url(uriBuilder.build().toString());
|
||||
|
||||
pluginDownloader.addRepository(builder.build());
|
||||
}));
|
||||
builder.url(UriBuilder.of(uri).userInfo(null).build().toString());
|
||||
return builder.build();
|
||||
}).toList();
|
||||
}
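A quick illustration of the repository handling added above, using made-up credentials: a user-info segment embedded in a `--repositories` URL is split into basic auth and stripped from the stored URL (a sketch only; the URI and expected result are examples, and the `UriBuilder` call mirrors the one in the diff):

```java
// Illustrative values only; assumes java.net.URI and io.micronaut.http.uri.UriBuilder are imported.
URI uri = URI.create("https://deploy:s3cr3t@repo.example.com/maven");
String[] userInfoParts = uri.getUserInfo().split(":");                   // ["deploy", "s3cr3t"] -> BasicAuth
String cleanUrl = UriBuilder.of(uri).userInfo(null).build().toString();  // expected: "https://repo.example.com/maven"
```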
List<URL> resolveUrl = pluginDownloader.resolve(dependencies);
|
||||
stdOut("Resolved Plugin(s) with {0}", resolveUrl);
|
||||
if (all) {
|
||||
PluginCatalogService service = new PluginCatalogService(httpClient, false, true);
|
||||
dependencies = service.get().stream().map(Objects::toString).toList();
|
||||
}
|
||||
|
||||
for (URL url: resolveUrl) {
|
||||
Files.copy(
|
||||
Paths.get(url.toURI()),
|
||||
Paths.get(pluginsPath.toString(), FilenameUtils.getName(url.toString())),
|
||||
StandardCopyOption.REPLACE_EXISTING
|
||||
if (dependencies.isEmpty()) {
|
||||
stdErr("Error: No plugin to install.");
|
||||
return CommandLine.ExitCode.OK;
|
||||
}
|
||||
|
||||
final List<PluginArtifact> pluginArtifacts;
|
||||
try {
|
||||
pluginArtifacts = dependencies.stream().map(PluginArtifact::fromCoordinates).toList();
|
||||
} catch (IllegalArgumentException e) {
|
||||
stdErr(e.getMessage());
|
||||
return CommandLine.ExitCode.USAGE;
|
||||
}
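As a concrete, hypothetical example of the coordinates parsed above, following the `<groupId>:<artifactId>:(<version>|LATEST)` grammar from the `--help` text (artifact names are taken from the tests further down; any valid coordinates work):

```java
// Example coordinates; assumes io.kestra.core.plugins.PluginArtifact is imported as in this file.
PluginArtifact pinned = PluginArtifact.fromCoordinates("io.kestra.plugin:plugin-notifications:0.6.0");
PluginArtifact latest = PluginArtifact.fromCoordinates("io.kestra.plugin:plugin-scripts:LATEST");
// A string that does not match the grammar throws IllegalArgumentException,
// which the command above maps to CommandLine.ExitCode.USAGE.
```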
try (final PluginManager pluginManager = getPluginManager()) {
|
||||
List<PluginArtifact> installed = pluginManager.install(
|
||||
pluginArtifacts,
|
||||
repositoryConfigs,
|
||||
false,
|
||||
pluginsPath
|
||||
);
|
||||
|
||||
List<URI> uris = installed.stream().map(PluginArtifact::uri).toList();
|
||||
stdOut("Successfully installed plugins {0} into {1}", dependencies, uris);
|
||||
return CommandLine.ExitCode.OK;
|
||||
}
|
||||
}
|
||||
|
||||
stdOut("Successfully installed plugins {0} into {1}", dependencies, pluginsPath);
|
||||
|
||||
return 0;
|
||||
private PluginManager getPluginManager() {
|
||||
return locally ? new LocalPluginManager(mavenPluginRepositoryProvider.get()) : this.pluginManagerProvider.get();
|
||||
}
|
||||
|
||||
@Override
|
||||
|
||||
@@ -1,22 +1,31 @@
|
||||
package io.kestra.cli.commands.plugins;
|
||||
|
||||
import io.kestra.cli.AbstractCommand;
|
||||
import io.kestra.core.plugins.PluginRegistry;
|
||||
import io.kestra.core.plugins.RegisteredPlugin;
|
||||
import jakarta.inject.Inject;
|
||||
import jakarta.inject.Provider;
|
||||
import picocli.CommandLine;
|
||||
import picocli.CommandLine.Command;
|
||||
import picocli.CommandLine.Option;
|
||||
import picocli.CommandLine.Spec;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
@CommandLine.Command(
|
||||
@Command(
|
||||
name = "list",
|
||||
description = "List all plugins already installed"
|
||||
)
|
||||
public class PluginListCommand extends AbstractCommand {
|
||||
@CommandLine.Spec
|
||||
@Spec
|
||||
CommandLine.Model.CommandSpec spec;
|
||||
|
||||
@CommandLine.Option(names = {"--core"}, description = "Also write core tasks plugins")
|
||||
@Option(names = {"--core"}, description = "Also write core tasks plugins")
|
||||
private boolean core = false;
|
||||
|
||||
@Inject
|
||||
private PluginRegistry registry;
|
||||
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
@@ -27,7 +36,8 @@ public class PluginListCommand extends AbstractCommand {
|
||||
);
|
||||
}
|
||||
|
||||
List<RegisteredPlugin> plugins = core ? pluginRegistry().plugins() : pluginRegistry().externalPlugins();
|
||||
List<RegisteredPlugin> plugins = core ? registry.plugins() : registry.externalPlugins();
|
||||
|
||||
plugins.forEach(registeredPlugin -> stdOut(registeredPlugin.toString()));
|
||||
|
||||
return 0;
|
||||
|
||||
@@ -0,0 +1,149 @@
|
||||
package io.kestra.cli.commands.plugins;
|
||||
|
||||
import io.kestra.cli.AbstractCommand;
|
||||
import io.micronaut.core.type.Argument;
|
||||
import io.micronaut.http.HttpRequest;
|
||||
import io.micronaut.http.client.HttpClient;
|
||||
import io.micronaut.http.client.annotation.Client;
|
||||
import jakarta.inject.Inject;
|
||||
import picocli.CommandLine.Command;
|
||||
import picocli.CommandLine.Parameters;
|
||||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
@Command(
|
||||
name = "search",
|
||||
description = "Search for available Kestra plugins"
|
||||
)
|
||||
public class PluginSearchCommand extends AbstractCommand {
|
||||
@Inject
|
||||
@Client("api")
|
||||
private HttpClient httpClient;
|
||||
|
||||
private static final ObjectMapper MAPPER = new ObjectMapper();
|
||||
private static final char SPACE = ' ';
|
||||
|
||||
@Parameters(index = "0", description = "Search term (optional)", defaultValue = "")
|
||||
private String searchTerm;
|
||||
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
|
||||
try {
|
||||
JsonNode root = fetchPlugins();
|
||||
List<PluginInfo> plugins = findPlugins(root);
|
||||
printResults(plugins);
|
||||
return 0;
|
||||
} catch (Exception e) {
|
||||
stdOut("Error processing plugins: {0}", e.getMessage());
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
|
||||
private JsonNode fetchPlugins() throws Exception {
|
||||
String response = httpClient.toBlocking()
|
||||
.retrieve(
|
||||
HttpRequest.GET("/v1/plugins")
|
||||
.header("Accept", "application/json")
|
||||
);
|
||||
return MAPPER.readTree(response);
|
||||
}
|
||||
|
||||
private List<PluginInfo> findPlugins(JsonNode root) {
|
||||
String searchTermLower = searchTerm.toLowerCase();
|
||||
List<PluginInfo> plugins = new ArrayList<>();
|
||||
|
||||
for (JsonNode plugin : root) {
|
||||
if (matchesSearch(plugin, searchTermLower)) {
|
||||
plugins.add(new PluginInfo(
|
||||
plugin.path("name").asText(),
|
||||
plugin.path("title").asText(),
|
||||
plugin.path("group").asText(),
|
||||
plugin.path("version").asText("")
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
plugins.sort((p1, p2) -> p1.name.compareToIgnoreCase(p2.name));
|
||||
return plugins;
|
||||
}
|
||||
|
||||
private boolean matchesSearch(JsonNode plugin, String term) {
|
||||
if (term.isEmpty()) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return plugin.path("name").asText().toLowerCase().contains(term) ||
|
||||
plugin.path("title").asText().toLowerCase().contains(term) ||
|
||||
plugin.path("group").asText().toLowerCase().contains(term);
|
||||
}
|
||||
|
||||
private void printResults(List<PluginInfo> plugins) {
|
||||
if (plugins.isEmpty()) {
|
||||
stdOut("No plugins found{0}",
|
||||
searchTerm.isEmpty() ? "" : " matching '" + searchTerm + "'");
|
||||
return;
|
||||
}
|
||||
|
||||
stdOut("\nFound {0} plugins{1}",
|
||||
plugins.size(),
|
||||
searchTerm.isEmpty() ? "" : " matching '" + searchTerm + "'"
|
||||
);
|
||||
|
||||
printPluginsTable(plugins);
|
||||
}
|
||||
|
||||
private void printPluginsTable(List<PluginInfo> plugins) {
|
||||
int maxName = 4, maxTitle = 5, maxGroup = 5;
|
||||
for (PluginInfo plugin : plugins) {
|
||||
maxName = Math.max(maxName, plugin.name.length());
|
||||
maxTitle = Math.max(maxTitle, plugin.title.length());
|
||||
maxGroup = Math.max(maxGroup, plugin.group.length());
|
||||
}
|
||||
|
||||
StringBuilder namePad = new StringBuilder(maxName);
|
||||
StringBuilder titlePad = new StringBuilder(maxTitle);
|
||||
StringBuilder groupPad = new StringBuilder(maxGroup);
|
||||
|
||||
stdOut("");
|
||||
printRow(namePad, titlePad, groupPad, "NAME", "TITLE", "GROUP", "VERSION",
|
||||
maxName, maxTitle, maxGroup);
|
||||
|
||||
for (PluginInfo plugin : plugins) {
|
||||
printRow(namePad, titlePad, groupPad, plugin.name, plugin.title, plugin.group, plugin.version,
|
||||
maxName, maxTitle, maxGroup);
|
||||
}
|
||||
stdOut("");
|
||||
}
|
||||
|
||||
private void printRow(StringBuilder namePad, StringBuilder titlePad, StringBuilder groupPad,
|
||||
String name, String title, String group, String version,
|
||||
int maxName, int maxTitle, int maxGroup) {
|
||||
stdOut("{0} {1} {2} {3}",
|
||||
pad(namePad, name, maxName),
|
||||
pad(titlePad, title, maxTitle),
|
||||
pad(groupPad, group, maxGroup),
|
||||
version
|
||||
);
|
||||
}
|
||||
|
||||
private String pad(StringBuilder sb, String str, int length) {
|
||||
sb.setLength(0);
|
||||
sb.append(str);
|
||||
while (sb.length() < length) {
|
||||
sb.append(SPACE);
|
||||
}
|
||||
return sb.toString();
|
||||
}
|
||||
|
||||
private record PluginInfo(String name, String title, String group, String version) {}
|
||||
|
||||
@Override
|
||||
protected boolean loadExternalPlugins() {
|
||||
return false;
|
||||
}
|
||||
}
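The new `plugins search` subcommand above fetches `/v1/plugins` from the configured API client and filters the catalog client-side. A hypothetical programmatic invocation, mirroring how the other plugin commands are exercised in the tests later in this diff (environment names and the search term are examples):

```java
// Sketch only; assumes the Micronaut/Picocli imports used by the tests in this changeset.
try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, Environment.TEST)) {
    PicocliRunner.call(PluginSearchCommand.class, ctx, "notifications");
}
```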
@@ -0,0 +1,69 @@
|
||||
package io.kestra.cli.commands.plugins;
|
||||
|
||||
import io.kestra.cli.AbstractCommand;
|
||||
import io.kestra.core.plugins.LocalPluginManager;
|
||||
import io.kestra.core.plugins.MavenPluginDownloader;
|
||||
import io.kestra.core.plugins.PluginArtifact;
|
||||
import io.kestra.core.plugins.PluginManager;
|
||||
import jakarta.inject.Inject;
|
||||
import jakarta.inject.Provider;
|
||||
import picocli.CommandLine;
|
||||
import picocli.CommandLine.Parameters;
|
||||
import picocli.CommandLine.Spec;
|
||||
|
||||
import java.net.URI;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "uninstall",
|
||||
description = "uninstall a plugin"
|
||||
)
|
||||
public class PluginUninstallCommand extends AbstractCommand {
|
||||
@Parameters(index = "0..*", description = "the plugins to uninstall")
|
||||
List<String> dependencies = new ArrayList<>();
|
||||
|
||||
@Spec
|
||||
CommandLine.Model.CommandSpec spec;
|
||||
|
||||
@Inject
|
||||
Provider<MavenPluginDownloader> mavenPluginRepositoryProvider;
|
||||
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
|
||||
List<PluginArtifact> pluginArtifacts;
|
||||
try {
|
||||
pluginArtifacts = dependencies.stream().map(PluginArtifact::fromCoordinates).toList();
|
||||
} catch (IllegalArgumentException e) {
|
||||
stdErr(e.getMessage());
|
||||
return CommandLine.ExitCode.USAGE;
|
||||
}
|
||||
|
||||
final PluginManager pluginManager;
|
||||
|
||||
// If a PLUGIN_PATH is provided, then use the LocalPluginManager
|
||||
if (pluginsPath != null) {
|
||||
pluginManager = new LocalPluginManager(mavenPluginRepositoryProvider.get());
|
||||
} else {
|
||||
// Otherwise, we delegate to the configured plugin-manager.
|
||||
pluginManager = this.pluginManagerProvider.get();
|
||||
}
|
||||
|
||||
List<PluginArtifact> uninstalled = pluginManager.uninstall(
|
||||
pluginArtifacts,
|
||||
false,
|
||||
pluginsPath
|
||||
);
|
||||
|
||||
List<URI> uris = uninstalled.stream().map(PluginArtifact::uri).toList();
|
||||
stdOut("Successfully uninstalled plugins {0} from {1}", dependencies, uris);
|
||||
return CommandLine.ExitCode.OK;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean loadExternalPlugins() {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
@@ -1,12 +1,20 @@
|
||||
package io.kestra.cli.commands.servers;
|
||||
|
||||
import io.kestra.cli.AbstractCommand;
|
||||
import io.kestra.core.contexts.KestraContext;
|
||||
import jakarta.annotation.PostConstruct;
|
||||
import picocli.CommandLine;
|
||||
|
||||
abstract public class AbstractServerCommand extends AbstractCommand implements ServerCommandInterface {
|
||||
@CommandLine.Option(names = {"--port"}, description = "The port to bind")
|
||||
Integer serverPort;
|
||||
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
this.shutdownHook(true, () -> KestraContext.getContext().shutdown());
|
||||
return super.call();
|
||||
}
|
||||
|
||||
protected static int defaultWorkerThread() {
|
||||
return Runtime.getRuntime().availableProcessors() * 4;
|
||||
}
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
package io.kestra.cli.commands.servers;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import io.kestra.core.contexts.KestraContext;
|
||||
import io.kestra.core.models.ServerType;
|
||||
import io.kestra.core.runners.ExecutorInterface;
|
||||
import io.kestra.core.services.SkipExecutionService;
|
||||
@@ -9,7 +8,6 @@ import io.kestra.core.services.StartExecutorService;
|
||||
import io.kestra.core.utils.Await;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import jakarta.inject.Inject;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import picocli.CommandLine;
|
||||
|
||||
import java.util.Collections;
|
||||
@@ -20,7 +18,6 @@ import java.util.Map;
|
||||
name = "executor",
|
||||
description = "Start the Kestra executor"
|
||||
)
|
||||
@Slf4j
|
||||
public class ExecutorCommand extends AbstractServerCommand {
|
||||
@Inject
|
||||
private ApplicationContext applicationContext;
|
||||
@@ -66,13 +63,10 @@ public class ExecutorCommand extends AbstractServerCommand {
|
||||
this.startExecutorService.applyOptions(startExecutors, notStartExecutors);
|
||||
|
||||
super.call();
|
||||
this.shutdownHook(() -> KestraContext.getContext().shutdown());
|
||||
|
||||
ExecutorInterface executorService = applicationContext.getBean(ExecutorInterface.class);
|
||||
executorService.run();
|
||||
|
||||
log.info("Executor started");
|
||||
|
||||
Await.until(() -> !this.applicationContext.isRunning());
|
||||
|
||||
return 0;
|
||||
|
||||
@@ -1,13 +1,11 @@
|
||||
package io.kestra.cli.commands.servers;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import io.kestra.core.contexts.KestraContext;
|
||||
import io.kestra.core.models.ServerType;
|
||||
import io.kestra.core.runners.IndexerInterface;
|
||||
import io.kestra.core.utils.Await;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import jakarta.inject.Inject;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import picocli.CommandLine;
|
||||
|
||||
import java.util.Map;
|
||||
@@ -16,7 +14,6 @@ import java.util.Map;
|
||||
name = "indexer",
|
||||
description = "Start the Kestra indexer"
|
||||
)
|
||||
@Slf4j
|
||||
public class IndexerCommand extends AbstractServerCommand {
|
||||
@Inject
|
||||
private ApplicationContext applicationContext;
|
||||
@@ -31,13 +28,10 @@ public class IndexerCommand extends AbstractServerCommand {
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
this.shutdownHook(() -> KestraContext.getContext().shutdown());
|
||||
|
||||
IndexerInterface indexer = applicationContext.getBean(IndexerInterface.class);
|
||||
indexer.run();
|
||||
|
||||
log.info("Indexer started");
|
||||
|
||||
Await.until(() -> !this.applicationContext.isRunning());
|
||||
|
||||
return 0;
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
package io.kestra.cli.commands.servers;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import io.kestra.core.contexts.KestraContext;
|
||||
import io.kestra.core.models.ServerType;
|
||||
import io.kestra.core.schedulers.AbstractScheduler;
|
||||
import io.kestra.core.utils.Await;
|
||||
@@ -31,12 +30,10 @@ public class SchedulerCommand extends AbstractServerCommand {
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
this.shutdownHook(() -> KestraContext.getContext().shutdown());
|
||||
|
||||
AbstractScheduler scheduler = applicationContext.getBean(AbstractScheduler.class);
|
||||
scheduler.run();
|
||||
|
||||
log.info("Scheduler started");
|
||||
Await.until(() -> !this.applicationContext.isRunning());
|
||||
|
||||
return 0;
|
||||
|
||||
@@ -2,7 +2,6 @@ package io.kestra.cli.commands.servers;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import io.kestra.cli.services.FileChangedEventListener;
|
||||
import io.kestra.core.contexts.KestraContext;
|
||||
import io.kestra.core.models.ServerType;
|
||||
import io.kestra.core.repositories.LocalFlowRepositoryLoader;
|
||||
import io.kestra.core.runners.StandAloneRunner;
|
||||
@@ -12,7 +11,6 @@ import io.kestra.core.utils.Await;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import jakarta.annotation.Nullable;
|
||||
import jakarta.inject.Inject;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import picocli.CommandLine;
|
||||
|
||||
import java.io.File;
|
||||
@@ -25,7 +23,6 @@ import java.util.Map;
|
||||
name = "standalone",
|
||||
description = "Start the standalone all-in-one server"
|
||||
)
|
||||
@Slf4j
|
||||
public class StandAloneCommand extends AbstractServerCommand {
|
||||
@CommandLine.Spec
|
||||
CommandLine.Model.CommandSpec spec;
|
||||
@@ -95,7 +92,6 @@ public class StandAloneCommand extends AbstractServerCommand {
|
||||
this.startExecutorService.applyOptions(startExecutors, notStartExecutors);
|
||||
|
||||
super.call();
|
||||
this.shutdownHook(() -> KestraContext.getContext().shutdown());
|
||||
|
||||
if (flowPath != null) {
|
||||
try {
|
||||
@@ -124,8 +120,6 @@ public class StandAloneCommand extends AbstractServerCommand {
|
||||
fileWatcher.startListeningFromConfig();
|
||||
}
|
||||
|
||||
this.shutdownHook(standAloneRunner::close);
|
||||
|
||||
Await.until(() -> !this.applicationContext.isRunning());
|
||||
|
||||
return 0;
|
||||
|
||||
@@ -1,9 +1,7 @@
|
||||
package io.kestra.cli.commands.servers;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import io.kestra.core.contexts.KestraContext;
|
||||
import io.kestra.core.models.ServerType;
|
||||
import io.kestra.core.runners.ExecutorInterface;
|
||||
import io.kestra.core.runners.IndexerInterface;
|
||||
import io.kestra.core.utils.Await;
|
||||
import io.kestra.core.utils.ExecutorsUtils;
|
||||
@@ -57,20 +55,11 @@ public class WebServerCommand extends AbstractServerCommand {
|
||||
log.info("Starting an embedded indexer, this can be disabled by using `--no-indexer`.");
|
||||
poolExecutor = executorsUtils.cachedThreadPool("webserver-indexer");
|
||||
poolExecutor.execute(applicationContext.getBean(IndexerInterface.class));
|
||||
shutdownHook(false, () -> poolExecutor.shutdown());
|
||||
}
|
||||
|
||||
log.info("Webserver started");
|
||||
this.shutdownHook(() -> {
|
||||
this.close();
|
||||
KestraContext.getContext().shutdown();
|
||||
});
|
||||
Await.until(() -> !this.applicationContext.isRunning());
|
||||
return 0;
|
||||
}
|
||||
|
||||
private void close() {
|
||||
if (this.poolExecutor != null) {
|
||||
this.poolExecutor.shutdown();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,13 +1,11 @@
|
||||
package io.kestra.cli.commands.servers;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import io.kestra.core.contexts.KestraContext;
|
||||
import io.kestra.core.models.ServerType;
|
||||
import io.kestra.core.runners.Worker;
|
||||
import io.kestra.core.utils.Await;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import jakarta.inject.Inject;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import picocli.CommandLine;
|
||||
import picocli.CommandLine.Option;
|
||||
|
||||
@@ -18,7 +16,6 @@ import java.util.UUID;
|
||||
name = "worker",
|
||||
description = "Start the Kestra worker"
|
||||
)
|
||||
@Slf4j
|
||||
public class WorkerCommand extends AbstractServerCommand {
|
||||
|
||||
@Inject
|
||||
@@ -40,7 +37,6 @@ public class WorkerCommand extends AbstractServerCommand {
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
this.shutdownHook(() -> KestraContext.getContext().shutdown());
|
||||
if (this.workerGroupKey != null && !this.workerGroupKey.matches("[a-zA-Z0-9_-]+")) {
|
||||
throw new IllegalArgumentException("The --worker-group option must match the [a-zA-Z0-9_-]+ pattern");
|
||||
}
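The `--worker-group` validation above only accepts keys matching `[a-zA-Z0-9_-]+`; for instance (illustrative values, not from this changeset):

```java
// Checks against the same pattern enforced above.
boolean accepted = "gpu-workers_01".matches("[a-zA-Z0-9_-]+"); // true  -> key is allowed
boolean rejected = "gpu workers".matches("[a-zA-Z0-9_-]+");    // false -> the command throws IllegalArgumentException
```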
@@ -52,13 +48,6 @@ public class WorkerCommand extends AbstractServerCommand {
|
||||
|
||||
worker.run();
|
||||
|
||||
if (this.workerGroupKey != null) {
|
||||
log.info("Worker started with {} thread(s) in group '{}'", this.thread, this.workerGroupKey);
|
||||
}
|
||||
else {
|
||||
log.info("Worker started with {} thread(s)", this.thread);
|
||||
}
|
||||
|
||||
Await.until(() -> !this.applicationContext.isRunning());
|
||||
|
||||
return 0;
|
||||
|
||||
@@ -1,153 +0,0 @@
|
||||
package io.kestra.cli.plugins;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import io.micronaut.context.annotation.Value;
|
||||
import io.micronaut.core.annotation.Nullable;
|
||||
import jakarta.inject.Inject;
|
||||
import jakarta.inject.Singleton;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.maven.repository.internal.MavenRepositorySystemUtils;
|
||||
import org.eclipse.aether.DefaultRepositorySystemSession;
|
||||
import org.eclipse.aether.RepositorySystem;
|
||||
import org.eclipse.aether.RepositorySystemSession;
|
||||
import org.eclipse.aether.artifact.Artifact;
|
||||
import org.eclipse.aether.artifact.DefaultArtifact;
|
||||
import org.eclipse.aether.connector.basic.BasicRepositoryConnectorFactory;
|
||||
import org.eclipse.aether.impl.DefaultServiceLocator;
|
||||
import org.eclipse.aether.repository.LocalRepository;
|
||||
import org.eclipse.aether.repository.RemoteRepository;
|
||||
import org.eclipse.aether.resolution.*;
|
||||
import org.eclipse.aether.spi.connector.RepositoryConnectorFactory;
|
||||
import org.eclipse.aether.spi.connector.transport.TransporterFactory;
|
||||
import org.eclipse.aether.transport.file.FileTransporterFactory;
|
||||
import org.eclipse.aether.transport.http.HttpTransporterFactory;
|
||||
import org.eclipse.aether.util.repository.AuthenticationBuilder;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import java.nio.file.Files;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
@Singleton
|
||||
@Slf4j
|
||||
public class PluginDownloader {
|
||||
private final List<RepositoryConfig> repositoryConfigs;
|
||||
private final RepositorySystem system;
|
||||
private final RepositorySystemSession session;
|
||||
|
||||
@Inject
|
||||
public PluginDownloader(
|
||||
List<RepositoryConfig> repositoryConfigs,
|
||||
@Nullable @Value("${kestra.plugins.local-repository-path}") String localRepositoryPath
|
||||
) {
|
||||
this.repositoryConfigs = repositoryConfigs;
|
||||
this.system = repositorySystem();
|
||||
this.session = repositorySystemSession(system, localRepositoryPath);
|
||||
}
|
||||
|
||||
public void addRepository(RepositoryConfig repositoryConfig) {
|
||||
this.repositoryConfigs.add(repositoryConfig);
|
||||
}
|
||||
|
||||
public List<URL> resolve(List<String> dependencies) throws MalformedURLException, ArtifactResolutionException, VersionRangeResolutionException {
|
||||
List<RemoteRepository> repositories = remoteRepositories();
|
||||
|
||||
List<ArtifactResult> artifactResults = resolveArtifacts(repositories, dependencies);
|
||||
List<URL> localUrls = resolveUrls(artifactResults);
|
||||
log.debug("Resolved Plugin {} with {}", dependencies, localUrls);
|
||||
|
||||
return localUrls;
|
||||
}
|
||||
|
||||
private List<RemoteRepository> remoteRepositories() {
|
||||
return repositoryConfigs
|
||||
.stream()
|
||||
.map(repositoryConfig -> {
|
||||
var build = new RemoteRepository.Builder(
|
||||
repositoryConfig.getId(),
|
||||
"default",
|
||||
repositoryConfig.getUrl()
|
||||
);
|
||||
|
||||
if (repositoryConfig.getBasicAuth() != null) {
|
||||
var authenticationBuilder = new AuthenticationBuilder();
|
||||
authenticationBuilder.addUsername(repositoryConfig.getBasicAuth().getUsername());
|
||||
authenticationBuilder.addPassword(repositoryConfig.getBasicAuth().getPassword());
|
||||
|
||||
build.setAuthentication(authenticationBuilder.build());
|
||||
}
|
||||
|
||||
return build.build();
|
||||
})
|
||||
.toList();
|
||||
}
|
||||
|
||||
private static RepositorySystem repositorySystem() {
|
||||
DefaultServiceLocator locator = MavenRepositorySystemUtils.newServiceLocator();
|
||||
locator.addService(RepositoryConnectorFactory.class, BasicRepositoryConnectorFactory.class);
|
||||
locator.addService(TransporterFactory.class, FileTransporterFactory.class);
|
||||
locator.addService(TransporterFactory.class, HttpTransporterFactory.class);
|
||||
|
||||
return locator.getService(RepositorySystem.class);
|
||||
}
|
||||
|
||||
private RepositorySystemSession repositorySystemSession(RepositorySystem system, String localRepositoryPath) {
|
||||
DefaultRepositorySystemSession session = MavenRepositorySystemUtils.newSession();
|
||||
|
||||
if (localRepositoryPath == null) {
|
||||
try {
|
||||
final String tempDirectory = Files.createTempDirectory(this.getClass().getSimpleName().toLowerCase())
|
||||
.toAbsolutePath()
|
||||
.toString();
|
||||
|
||||
localRepositoryPath = tempDirectory;
|
||||
|
||||
Runtime.getRuntime().addShutdownHook(new Thread(() -> {
|
||||
try {
|
||||
FileUtils.deleteDirectory(new File(tempDirectory));
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}));
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
LocalRepository localRepo = new LocalRepository(localRepositoryPath);
|
||||
session.setLocalRepositoryManager(system.newLocalRepositoryManager(session, localRepo));
|
||||
|
||||
return session;
|
||||
}
|
||||
|
||||
private List<ArtifactResult> resolveArtifacts(List<RemoteRepository> repositories, List<String> dependencies) throws ArtifactResolutionException, VersionRangeResolutionException {
|
||||
List<ArtifactResult> results = new ArrayList<>(dependencies.size());
|
||||
for (String dependency: dependencies) {
|
||||
var artifact = new DefaultArtifact(dependency);
|
||||
var version = system.resolveVersionRange(session, new VersionRangeRequest(artifact, repositories, null));
|
||||
var artifactRequest = new ArtifactRequest(
|
||||
new DefaultArtifact(artifact.getGroupId(), artifact.getArtifactId(), "jar", version.getHighestVersion().toString()),
|
||||
repositories,
|
||||
null
|
||||
);
|
||||
var artifactResult = system.resolveArtifact(session, artifactRequest);
|
||||
results.add(artifactResult);
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
private List<URL> resolveUrls(List<ArtifactResult> artifactResults) throws MalformedURLException {
|
||||
ImmutableList.Builder<URL> urls = ImmutableList.builder();
|
||||
for (ArtifactResult artifactResult : artifactResults) {
|
||||
URL url;
|
||||
url = artifactResult.getArtifact().getFile().toPath().toUri().toURL();
|
||||
urls.add(url);
|
||||
}
|
||||
return urls.build();
|
||||
}
|
||||
}
|
||||
@@ -1,30 +0,0 @@
|
||||
package io.kestra.cli.plugins;
|
||||
|
||||
import io.micronaut.context.annotation.EachProperty;
|
||||
import io.micronaut.context.annotation.Parameter;
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Getter;
|
||||
|
||||
@EachProperty("kestra.plugins.repositories")
|
||||
@Getter
|
||||
@AllArgsConstructor
|
||||
@Builder
|
||||
public class RepositoryConfig {
|
||||
String id;
|
||||
|
||||
String url;
|
||||
|
||||
BasicAuth basicAuth;
|
||||
|
||||
@Getter
|
||||
@AllArgsConstructor
|
||||
public static class BasicAuth {
|
||||
private String username;
|
||||
private String password;
|
||||
}
|
||||
|
||||
public RepositoryConfig(@Parameter String id) {
|
||||
this.id = id;
|
||||
}
|
||||
}
|
||||
@@ -7,6 +7,7 @@ import io.kestra.core.models.validations.ModelValidator;
|
||||
import io.kestra.core.repositories.FlowRepositoryInterface;
|
||||
import io.kestra.core.serializers.YamlParser;
|
||||
import io.kestra.core.services.FlowListenersInterface;
|
||||
import io.kestra.core.services.PluginDefaultService;
|
||||
import io.micronaut.context.annotation.Requires;
|
||||
import io.micronaut.context.annotation.Value;
|
||||
import io.micronaut.scheduling.io.watch.FileWatchConfiguration;
|
||||
@@ -36,6 +37,9 @@ public class FileChangedEventListener {
|
||||
@Inject
|
||||
private FlowRepositoryInterface flowRepositoryInterface;
|
||||
|
||||
@Inject
|
||||
private PluginDefaultService pluginDefaultService;
|
||||
|
||||
@Inject
|
||||
private YamlParser yamlParser;
|
||||
|
||||
@@ -64,7 +68,7 @@ public class FileChangedEventListener {
|
||||
|
||||
public void startListeningFromConfig() throws IOException, InterruptedException {
|
||||
if (fileWatchConfiguration != null && fileWatchConfiguration.isEnabled()) {
|
||||
this.flowFilesManager = new LocalFlowFileWatcher(flowRepositoryInterface);
|
||||
this.flowFilesManager = new LocalFlowFileWatcher(flowRepositoryInterface, pluginDefaultService);
|
||||
List<Path> paths = fileWatchConfiguration.getPaths();
|
||||
this.setup(paths);
|
||||
|
||||
@@ -107,7 +111,6 @@ public class FileChangedEventListener {
|
||||
} else {
|
||||
log.info("File watching is disabled.");
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public void startListening(List<Path> paths) throws IOException, InterruptedException {
|
||||
@@ -118,60 +121,64 @@ public class FileChangedEventListener {
|
||||
WatchKey key;
|
||||
while ((key = watchService.take()) != null) {
|
||||
for (WatchEvent<?> watchEvent : key.pollEvents()) {
|
||||
WatchEvent.Kind<?> kind = watchEvent.kind();
|
||||
Path entry = (Path) watchEvent.context();
|
||||
try {
|
||||
WatchEvent.Kind<?> kind = watchEvent.kind();
|
||||
Path entry = (Path) watchEvent.context();
|
||||
|
||||
if (entry.toString().endsWith(".yml") || entry.toString().endsWith(".yaml")) {
|
||||
if (entry.toString().endsWith(".yml") || entry.toString().endsWith(".yaml")) {
|
||||
|
||||
if (kind == StandardWatchEventKinds.ENTRY_CREATE || kind == StandardWatchEventKinds.ENTRY_MODIFY) {
|
||||
if (kind == StandardWatchEventKinds.ENTRY_CREATE || kind == StandardWatchEventKinds.ENTRY_MODIFY) {
|
||||
|
||||
Path filePath = ((Path) key.watchable()).resolve(entry);
|
||||
if (Files.isDirectory(filePath)) {
|
||||
loadFlowsFromFolder(filePath);
|
||||
} else {
|
||||
Path filePath = ((Path) key.watchable()).resolve(entry);
|
||||
if (Files.isDirectory(filePath)) {
|
||||
loadFlowsFromFolder(filePath);
|
||||
} else {
|
||||
|
||||
try {
|
||||
String content = Files.readString(filePath, Charset.defaultCharset());
|
||||
try {
|
||||
String content = Files.readString(filePath, Charset.defaultCharset());
|
||||
|
||||
Optional<Flow> flow = parseFlow(content, entry);
|
||||
if (flow.isPresent()) {
|
||||
if (kind == StandardWatchEventKinds.ENTRY_MODIFY) {
|
||||
// Check if we already have a file with the given path
|
||||
if (flows.stream().anyMatch(flowWithPath -> flowWithPath.getPath().equals(filePath.toString()))) {
|
||||
Optional<FlowWithPath> previous = flows.stream().filter(flowWithPath -> flowWithPath.getPath().equals(filePath.toString())).findFirst();
|
||||
// Check if Flow from file has id/namespace updated
|
||||
if (previous.isPresent() && !previous.get().uidWithoutRevision().equals(flow.get().uidWithoutRevision())) {
|
||||
flows.removeIf(flowWithPath -> flowWithPath.getPath().equals(filePath.toString()));
|
||||
flowFilesManager.deleteFlow(previous.get().getTenantId(), previous.get().getNamespace(), previous.get().getId());
|
||||
Optional<Flow> flow = parseFlow(content, entry);
|
||||
if (flow.isPresent()) {
|
||||
if (kind == StandardWatchEventKinds.ENTRY_MODIFY) {
|
||||
// Check if we already have a file with the given path
|
||||
if (flows.stream().anyMatch(flowWithPath -> flowWithPath.getPath().equals(filePath.toString()))) {
|
||||
Optional<FlowWithPath> previous = flows.stream().filter(flowWithPath -> flowWithPath.getPath().equals(filePath.toString())).findFirst();
|
||||
// Check if Flow from file has id/namespace updated
|
||||
if (previous.isPresent() && !previous.get().uidWithoutRevision().equals(flow.get().uidWithoutRevision())) {
|
||||
flows.removeIf(flowWithPath -> flowWithPath.getPath().equals(filePath.toString()));
|
||||
flowFilesManager.deleteFlow(previous.get().getTenantId(), previous.get().getNamespace(), previous.get().getId());
|
||||
flows.add(FlowWithPath.of(flow.get(), filePath.toString()));
|
||||
}
|
||||
} else {
|
||||
flows.add(FlowWithPath.of(flow.get(), filePath.toString()));
|
||||
}
|
||||
} else {
|
||||
flows.add(FlowWithPath.of(flow.get(), filePath.toString()));
|
||||
}
|
||||
} else {
|
||||
flows.add(FlowWithPath.of(flow.get(), filePath.toString()));
|
||||
|
||||
flowFilesManager.createOrUpdateFlow(flow.get(), content);
|
||||
log.info("Flow {} from file {} has been created or modified", flow.get().getId(), entry);
|
||||
}
|
||||
|
||||
flowFilesManager.createOrUpdateFlow(flow.get(), content);
|
||||
log.info("Flow {} from file {} has been created or modified", flow.get().getId(), entry);
|
||||
} catch (NoSuchFileException e) {
|
||||
log.error("File not found: {}", entry, e);
|
||||
} catch (IOException e) {
|
||||
log.error("Error reading file: {}", entry, e);
|
||||
}
|
||||
|
||||
} catch (NoSuchFileException e) {
|
||||
log.error("File not found: {}", entry, e);
|
||||
} catch (IOException e) {
|
||||
log.error("Error reading file: {}", entry, e);
|
||||
}
|
||||
} else {
|
||||
Path filePath = ((Path) key.watchable()).resolve(entry);
|
||||
flows.stream()
|
||||
.filter(flow -> flow.getPath().equals(filePath.toString()))
|
||||
.findFirst()
|
||||
.ifPresent(flowWithPath -> {
|
||||
flowFilesManager.deleteFlow(flowWithPath.getTenantId(), flowWithPath.getNamespace(), flowWithPath.getId());
|
||||
this.flows.removeIf(fwp -> fwp.uidWithoutRevision().equals(flowWithPath.uidWithoutRevision()));
|
||||
});
|
||||
}
|
||||
} else {
|
||||
Path filePath = ((Path) key.watchable()).resolve(entry);
|
||||
flows.stream()
|
||||
.filter(flow -> flow.getPath().equals(filePath.toString()))
|
||||
.findFirst()
|
||||
.ifPresent(flowWithPath -> {
|
||||
flowFilesManager.deleteFlow(flowWithPath.getTenantId(), flowWithPath.getNamespace(), flowWithPath.getId());
|
||||
this.flows.removeIf(fwp -> fwp.uidWithoutRevision().equals(flowWithPath.uidWithoutRevision()));
|
||||
});
|
||||
}
|
||||
} catch (Exception e) {
|
||||
log.error("Unexpected error while watching flows", e);
|
||||
}
|
||||
}
|
||||
key.reset();
|
||||
@@ -230,7 +237,8 @@ public class FileChangedEventListener {
|
||||
private Optional<Flow> parseFlow(String content, Path entry) {
|
||||
try {
|
||||
Flow flow = yamlParser.parse(content, Flow.class);
|
||||
modelValidator.validate(flow);
|
||||
FlowWithSource withPluginDefault = pluginDefaultService.injectDefaults(FlowWithSource.of(flow, content));
|
||||
modelValidator.validate(withPluginDefault);
|
||||
return Optional.of(flow);
|
||||
} catch (ConstraintViolationException e) {
|
||||
log.warn("Error while parsing flow: {}", entry, e);
|
||||
|
||||
@@ -3,32 +3,36 @@ package io.kestra.cli.services;
|
||||
import io.kestra.core.models.flows.Flow;
|
||||
import io.kestra.core.models.flows.FlowWithSource;
|
||||
import io.kestra.core.repositories.FlowRepositoryInterface;
|
||||
import io.micronaut.context.annotation.Requires;
|
||||
import io.kestra.core.services.PluginDefaultService;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
@Requires(property = "micronaut.io.watch.enabled", value = "true")
|
||||
@Slf4j
|
||||
public class LocalFlowFileWatcher implements FlowFilesManager {
|
||||
private FlowRepositoryInterface flowRepositoryInterface;
|
||||
private final FlowRepositoryInterface flowRepository;
|
||||
private final PluginDefaultService pluginDefaultService;
|
||||
|
||||
public LocalFlowFileWatcher(FlowRepositoryInterface flowRepositoryInterface) {
|
||||
this.flowRepositoryInterface = flowRepositoryInterface;
|
||||
public LocalFlowFileWatcher(FlowRepositoryInterface flowRepository, PluginDefaultService pluginDefaultService) {
|
||||
this.flowRepository = flowRepository;
|
||||
this.pluginDefaultService = pluginDefaultService;
|
||||
}
|
||||
|
||||
@Override
|
||||
public FlowWithSource createOrUpdateFlow(Flow flow, String content) {
|
||||
return flowRepositoryInterface.findById(null, flow.getNamespace(), flow.getId())
|
||||
.map(previous -> flowRepositoryInterface.update(flow, previous, content, flow))
|
||||
.orElseGet(() -> flowRepositoryInterface.create(flow, content, flow));
|
||||
FlowWithSource withDefault = pluginDefaultService.injectDefaults(FlowWithSource.of(flow, content));
|
||||
return flowRepository.findById(null, flow.getNamespace(), flow.getId())
|
||||
.map(previous -> flowRepository.update(flow, previous, content, withDefault))
|
||||
.orElseGet(() -> flowRepository.create(flow, content, withDefault));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void deleteFlow(FlowWithSource toDelete) {
|
||||
flowRepositoryInterface.findByIdWithSource(toDelete.getTenantId(), toDelete.getNamespace(), toDelete.getId()).ifPresent(flowRepositoryInterface::delete);
|
||||
log.error("Flow {} has been deleted", toDelete.getId());
|
||||
flowRepository.findByIdWithSource(toDelete.getTenantId(), toDelete.getNamespace(), toDelete.getId()).ifPresent(flowRepository::delete);
|
||||
log.info("Flow {} has been deleted", toDelete.getId());
|
||||
}
|
||||
|
||||
@Override
|
||||
public void deleteFlow(String tenantId, String namespace, String id) {
|
||||
flowRepositoryInterface.findByIdWithSource(tenantId, namespace, id).ifPresent(flowRepositoryInterface::delete);
|
||||
log.error("Flow {} has been deleted", id);
|
||||
flowRepository.findByIdWithSource(tenantId, namespace, id).ifPresent(flowRepository::delete);
|
||||
log.info("Flow {} has been deleted", id);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,9 @@
|
||||
micronaut:
|
||||
application:
|
||||
name: kestra
|
||||
# Disable Micronaut Open Telemetry
|
||||
otel:
|
||||
enabled: false
|
||||
router:
|
||||
static-resources:
|
||||
swagger:
|
||||
@@ -71,6 +74,13 @@ micronaut:
|
||||
type: scheduled
|
||||
core-pool-size: 1
|
||||
|
||||
# Disable OpenTelemetry metrics by default, users that need it must enable it and configure the collector URL.
|
||||
metrics:
|
||||
export:
|
||||
otlp:
|
||||
enabled: false
|
||||
# url: http://localhost:4318/v1/metrics
|
||||
|
||||
jackson:
|
||||
serialization:
|
||||
writeDatesAsTimestamps: false
|
||||
@@ -135,6 +145,11 @@ kestra:
|
||||
initial-delay: 1h
|
||||
fixed-delay: 1h
|
||||
retention: 7d
|
||||
types:
|
||||
- type : io.kestra.core.models.executions.LogEntry
|
||||
retention: 1h
|
||||
- type: io.kestra.core.models.executions.MetricEntry
|
||||
retention: 1h
|
||||
|
||||
plugins:
|
||||
repositories:
|
||||
|
||||
@@ -45,4 +45,20 @@ class AppTest {
|
||||
assertThat(out.toString(), startsWith("Usage: kestra server " + serverType));
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void missingRequiredParamsPrintHelpInsteadOfException() {
|
||||
final ByteArrayOutputStream out = new ByteArrayOutputStream();
|
||||
System.setErr(new PrintStream(out));
|
||||
|
||||
final String[] argsWithMissingParams = new String[]{"flow", "namespace", "update"};
|
||||
|
||||
try (ApplicationContext ctx = App.applicationContext(App.class, argsWithMissingParams)) {
|
||||
new CommandLine(App.class, new MicronautFactory(ctx)).execute(argsWithMissingParams);
|
||||
|
||||
assertThat(out.toString(), startsWith("Missing required parameters: "));
|
||||
assertThat(out.toString(), containsString("Usage: kestra flow namespace update "));
|
||||
assertThat(out.toString(), not(containsString("MissingParameterException: ")));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,6 +5,7 @@ import io.micronaut.context.ApplicationContext;
|
||||
import io.micronaut.context.env.Environment;
|
||||
import io.micronaut.runtime.server.EmbeddedServer;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junitpioneer.jupiter.RetryingTest;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
@@ -15,7 +16,7 @@ import static org.hamcrest.core.Is.is;
|
||||
import static org.hamcrest.core.StringContains.containsString;
|
||||
|
||||
class FlowCreateOrUpdateCommandTest {
|
||||
@Test
|
||||
@RetryingTest(5) // flaky on CI but cannot be reproduced even with 100 repetitions
|
||||
void runWithDelete() {
|
||||
URL directory = FlowCreateOrUpdateCommandTest.class.getClassLoader().getResource("flows");
|
||||
ByteArrayOutputStream out = new ByteArrayOutputStream();
|
||||
|
||||
@@ -109,6 +109,33 @@ class FlowUpdatesCommandTest {
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void invalidWithNamespace() {
|
||||
URL directory = FlowUpdatesCommandTest.class.getClassLoader().getResource("flows");
|
||||
ByteArrayOutputStream out = new ByteArrayOutputStream();
|
||||
System.setErr(new PrintStream(out));
|
||||
|
||||
try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, Environment.TEST)) {
|
||||
|
||||
EmbeddedServer embeddedServer = ctx.getBean(EmbeddedServer.class);
|
||||
embeddedServer.start();
|
||||
|
||||
String[] args = {
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
"myuser:pass:word",
|
||||
"--namespace",
|
||||
"io.kestra.cli",
|
||||
"--delete",
|
||||
directory.getPath(),
|
||||
};
|
||||
PicocliRunner.call(FlowUpdatesCommand.class, ctx, args);
|
||||
|
||||
assertThat(out.toString(), containsString("Invalid entity: flow.namespace: io.kestra.outsider_quattro_-1 - flow namespace is invalid"));
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void helper() {
|
||||
URL directory = FlowUpdatesCommandTest.class.getClassLoader().getResource("helper");
|
||||
|
||||
@@ -0,0 +1,27 @@
|
||||
package io.kestra.cli.commands.plugins;
|
||||
|
||||
import io.micronaut.configuration.picocli.PicocliRunner;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import io.micronaut.context.env.Environment;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.core.StringContains.containsString;
|
||||
|
||||
class PluginCommandTest {
|
||||
|
||||
@Test
|
||||
void shouldGetHelps() {
|
||||
ByteArrayOutputStream out = new ByteArrayOutputStream();
|
||||
System.setOut(new PrintStream(out));
|
||||
|
||||
try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, Environment.TEST)) {
|
||||
PicocliRunner.call(PluginCommand.class, ctx);
|
||||
|
||||
assertThat(out.toString(), containsString("Usage: kestra plugins"));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -9,7 +9,6 @@ import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.*;
|
||||
@@ -17,7 +16,7 @@ import static org.hamcrest.Matchers.*;
|
||||
class PluginInstallCommandTest {
|
||||
|
||||
@Test
|
||||
void fixedVersion() throws IOException {
|
||||
void shouldInstallPluginLocallyGivenFixedVersion() throws IOException {
|
||||
Path pluginsPath = Files.createTempDirectory(PluginInstallCommandTest.class.getSimpleName());
|
||||
pluginsPath.toFile().deleteOnExit();
|
||||
|
||||
@@ -28,12 +27,12 @@ class PluginInstallCommandTest {
|
||||
List<Path> files = Files.list(pluginsPath).toList();
|
||||
|
||||
assertThat(files.size(), is(1));
|
||||
assertThat(files.getFirst().getFileName().toString(), is("plugin-notifications-0.6.0.jar"));
|
||||
assertThat(files.getFirst().getFileName().toString(), is("io_kestra_plugin__plugin-notifications__0_6_0.jar"));
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void latestVersion() throws IOException {
|
||||
void shouldInstallPluginLocallyGivenLatestVersion() throws IOException {
|
||||
Path pluginsPath = Files.createTempDirectory(PluginInstallCommandTest.class.getSimpleName());
|
||||
pluginsPath.toFile().deleteOnExit();
|
||||
|
||||
@@ -44,13 +43,13 @@ class PluginInstallCommandTest {
|
||||
List<Path> files = Files.list(pluginsPath).toList();
|
||||
|
||||
assertThat(files.size(), is(1));
|
||||
assertThat(files.getFirst().getFileName().toString(), startsWith("plugin-notifications"));
|
||||
assertThat(files.getFirst().getFileName().toString(), startsWith("io_kestra_plugin__plugin-notifications__"));
|
||||
assertThat(files.getFirst().getFileName().toString(), not(containsString("LATEST")));
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void rangeVersion() throws IOException {
|
||||
void shouldInstallPluginLocallyGivenRangeVersion() throws IOException {
|
||||
Path pluginsPath = Files.createTempDirectory(PluginInstallCommandTest.class.getSimpleName());
|
||||
pluginsPath.toFile().deleteOnExit();
|
||||
|
||||
@@ -62,7 +61,7 @@ class PluginInstallCommandTest {
|
||||
List<Path> files = Files.list(pluginsPath).toList();
|
||||
|
||||
assertThat(files.size(), is(1));
|
||||
assertThat(files.getFirst().getFileName().toString(), is("storage-s3-0.12.1.jar"));
|
||||
assertThat(files.getFirst().getFileName().toString(), is("io_kestra_storage__storage-s3__0_12_1.jar"));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,7 +4,6 @@ import io.micronaut.configuration.picocli.PicocliRunner;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import io.micronaut.context.env.Environment;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.junit.jupiter.api.BeforeAll;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
@@ -25,7 +24,7 @@ class PluginListCommandTest {
|
||||
private static final String PLUGIN_TEMPLATE_TEST = "plugin-template-test-0.18.0-SNAPSHOT.jar";
|
||||
|
||||
@Test
|
||||
void run() throws IOException, URISyntaxException {
|
||||
void shouldListPluginsInstalledLocally() throws IOException, URISyntaxException {
|
||||
Path pluginsPath = Files.createTempDirectory(PluginListCommandTest.class.getSimpleName());
|
||||
pluginsPath.toFile().deleteOnExit();
|
||||
|
||||
|
||||
@@ -0,0 +1,105 @@
package io.kestra.cli.commands.plugins;

import com.github.tomakehurst.wiremock.junit5.WireMockTest;
import io.micronaut.configuration.picocli.PicocliRunner;
import io.micronaut.context.ApplicationContext;
import io.micronaut.context.env.Environment;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.util.Map;

import static com.github.tomakehurst.wiremock.client.WireMock.*;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.*;

@WireMockTest(httpPort = 28181)
class PluginSearchCommandTest {
private ByteArrayOutputStream outputStreamCaptor;
private final PrintStream originalOut = System.out;

@BeforeEach
void setUp() {
outputStreamCaptor = new ByteArrayOutputStream();
System.setOut(new PrintStream(outputStreamCaptor));
}

@AfterEach
void tearDown() {
System.setOut(originalOut);
}

@Test
void searchWithExactMatch() {
stubFor(get(urlEqualTo("/v1/plugins"))
.willReturn(aResponse()
.withHeader("Content-Type", "application/json")
.withBody("""
[
{
"name": "plugin-notifications",
"title": "Notifications",
"group": "io.kestra.plugin",
"version": "0.6.0"
},
{
"name": "plugin-scripts",
"title": "Scripts",
"group": "io.kestra.plugin",
"version": "0.5.0"
}
]
""")));

try (ApplicationContext ctx = ApplicationContext.builder(Environment.CLI, Environment.TEST)
.properties(Map.of("micronaut.http.services.api.url", "http://localhost:28181"))
.start()) {
String[] args = {"notifications"};
PicocliRunner.call(PluginSearchCommand.class, ctx, args);

String output = outputStreamCaptor.toString().trim();
assertThat(output, containsString("Found 1 plugins matching 'notifications'"));
assertThat(output, containsString("plugin-notifications"));
assertThat(output, not(containsString("plugin-scripts")));
}
}

@Test
void searchWithEmptyQuery() {
stubFor(get(urlEqualTo("/v1/plugins"))
.willReturn(aResponse()
.withHeader("Content-Type", "application/json")
.withBody("""
[
{
"name": "plugin-notifications",
"title": "Notifications",
"group": "io.kestra.plugin",
"version": "0.6.0"
},
{
"name": "plugin-scripts",
"title": "Scripts",
"group": "io.kestra.plugin",
"version": "0.5.0"
}
]
""")));

try (ApplicationContext ctx = ApplicationContext.builder(Environment.CLI, Environment.TEST)
.properties(Map.of("micronaut.http.services.api.url", "http://localhost:28181"))
.start()) {

String[] args = {""};
PicocliRunner.call(PluginSearchCommand.class, ctx, args);

String output = outputStreamCaptor.toString().trim();
assertThat(output, containsString("Found 2 plugins"));
assertThat(output, containsString("plugin-notifications"));
assertThat(output, containsString("plugin-scripts"));
}
}
}
@@ -0,0 +1,131 @@
package io.kestra.cli.services;

import io.kestra.core.models.flows.Flow;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.utils.Await;
import io.micronaut.test.extensions.junit5.annotation.MicronautTest;
import jakarta.inject.Inject;
import org.apache.commons.io.FileUtils;
import org.junit.jupiter.api.*;
import org.junitpioneer.jupiter.RetryingTest;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.time.Duration;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;

import static io.kestra.core.utils.Rethrow.throwRunnable;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.*;

@MicronautTest(environments = {"test", "file-watch"}, transactional = false)
class FileChangedEventListenerTest {
public static final String FILE_WATCH = "build/file-watch";
@Inject
private FileChangedEventListener fileWatcher;

@Inject
private FlowRepositoryInterface flowRepository;

private final ExecutorService executorService = Executors.newSingleThreadExecutor();
private final AtomicBoolean started = new AtomicBoolean(false);

@BeforeAll
static void setup() throws IOException {
if (!Files.exists(Path.of(FILE_WATCH))) {
Files.createDirectories(Path.of(FILE_WATCH));
}
}

@AfterAll
static void tearDown() throws IOException {
if (Files.exists(Path.of(FILE_WATCH))) {
FileUtils.deleteDirectory(Path.of(FILE_WATCH).toFile());
}
}

@BeforeEach
void beforeEach() throws Exception {
if (started.compareAndSet(false, true)) {
executorService.execute(throwRunnable(() -> fileWatcher.startListeningFromConfig()));
}
}

@RetryingTest(5) // Flaky on CI but always pass locally
void test() throws IOException, TimeoutException {
// remove the flow if it already exists
flowRepository.findByIdWithSource(null, "io.kestra.tests.watch", "myflow").ifPresent(flow -> flowRepository.delete(flow));

// create a basic flow
String flow = """
id: myflow
namespace: io.kestra.tests.watch

tasks:
- id: hello
type: io.kestra.plugin.core.log.Log
message: Hello World! 🚀
""";
Files.write(Path.of(FILE_WATCH + "/myflow.yaml"), flow.getBytes());
Await.until(
() -> flowRepository.findById(null, "io.kestra.tests.watch", "myflow").isPresent(),
Duration.ofMillis(100),
Duration.ofSeconds(10)
);
Flow myflow = flowRepository.findById(null, "io.kestra.tests.watch", "myflow").orElseThrow();
assertThat(myflow.getTasks(), hasSize(1));
assertThat(myflow.getTasks().getFirst().getId(), is("hello"));
assertThat(myflow.getTasks().getFirst().getType(), is("io.kestra.plugin.core.log.Log"));

// delete the flow
Files.delete(Path.of(FILE_WATCH + "/myflow.yaml"));
Await.until(
() -> flowRepository.findById(null, "io.kestra.tests.watch", "myflow").isEmpty(),
Duration.ofMillis(100),
Duration.ofSeconds(10)
);
}

@RetryingTest(5) // Flaky on CI but always pass locally
void testWithPluginDefault() throws IOException, TimeoutException {
// remove the flow if it already exists
flowRepository.findByIdWithSource(null, "io.kestra.tests.watch", "pluginDefault").ifPresent(flow -> flowRepository.delete(flow));

// create a flow with plugin default
String pluginDefault = """
id: pluginDefault
namespace: io.kestra.tests.watch

tasks:
- id: helloWithDefault
type: io.kestra.plugin.core.log.Log

pluginDefaults:
- type: io.kestra.plugin.core.log.Log
values:
message: Hello World!
""";
Files.write(Path.of(FILE_WATCH + "/plugin-default.yaml"), pluginDefault.getBytes());
Await.until(
() -> flowRepository.findById(null, "io.kestra.tests.watch", "pluginDefault").isPresent(),
Duration.ofMillis(100),
Duration.ofSeconds(10)
);
Flow pluginDefaultFlow = flowRepository.findById(null, "io.kestra.tests.watch", "pluginDefault").orElseThrow();
assertThat(pluginDefaultFlow.getTasks(), hasSize(1));
assertThat(pluginDefaultFlow.getTasks().getFirst().getId(), is("helloWithDefault"));
assertThat(pluginDefaultFlow.getTasks().getFirst().getType(), is("io.kestra.plugin.core.log.Log"));

// delete both files
Files.delete(Path.of(FILE_WATCH + "/plugin-default.yaml"));
Await.until(
() -> flowRepository.findById(null, "io.kestra.tests.watch", "pluginDefault").isEmpty(),
Duration.ofMillis(100),
Duration.ofSeconds(10)
);
}
}
cli/src/test/resources/application-file-watch.yml (new file, 12 lines)
@@ -0,0 +1,12 @@
micronaut:
  io:
    watch:
      enabled: true
      paths:
        - build/file-watch

kestra:
  repository:
    type: memory
  queue:
    type: memory
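This test configuration turns on Micronaut's built-in file watcher for build/file-watch with in-memory repository and queue, which is what the file-watch environment in FileChangedEventListenerTest above activates. As a hypothetical sketch, not code from this diff and assuming Micronaut's standard FileChangedEvent from io.micronaut.scheduling.io.watch.event, a listener reacting to those events could look like this:

// Hypothetical sketch: reacts to Micronaut file-watch events for the configured paths.
import io.micronaut.runtime.event.annotation.EventListener;
import io.micronaut.scheduling.io.watch.event.FileChangedEvent;
import jakarta.inject.Singleton;

@Singleton
public class FileWatchLogger {
    @EventListener
    public void onFileChanged(FileChangedEvent event) {
        // Fired for created/modified/deleted files under build/file-watch
        System.out.println("File watch event: " + event);
    }
}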
codecov.yml (new file, 76 lines)
@@ -0,0 +1,76 @@
component_management:
  individual_components:
    - component_id: cli
      name: Cli
      paths:
        - cli/**
    - component_id: core
      name: Core
      paths:
        - core/**
    - component_id: e2e-tests
      name: End to End
      paths:
        - e2e-tests/**
    - component_id: jdbc
      name: Jdbc
      paths:
        - jdbc/**
    - component_id: jdbc-h2
      name: Jdbc H2
      paths:
        - jdbc-h2/**
    - component_id: jdbc-mysql
      name: Jdbc Mysql
      paths:
        - jdbc-mysql/**
    - component_id: jdbc-postgres
      name: Jdbc Postgres
      paths:
        - jdbc-postgres/**
    - component_id: model
      name: Model
      paths:
        - model/**
    - component_id: processor
      name: Processor
      paths:
        - processor/**
    - component_id: repository-memory
      name: Repository Memory
      paths:
        - repository-memory/**
    - component_id: runner-memory
      name: Runner Memory
      paths:
        - runner-memory/**
    - component_id: script
      name: Script
      paths:
        - script/**
    - component_id: storage-local
      name: Storage Local
      paths:
        - storage-local/**
    - component_id: tests
      name: Tests
      paths:
        - tests/**
    - component_id: ui
      name: Ui
      paths:
        - ui/**
    - component_id: webserver
      name: Webserver
      paths:
        - webserver/**

flag_management:
  default_rules:
    carryforward: true
    statuses:
      - type: project
        target: 80%
        threshold: 1%
      - type: patch
        target: 90%
@@ -38,6 +38,13 @@ dependencies {
implementation group: 'dev.failsafe', name: 'failsafe'
api 'org.apache.httpcomponents.client5:httpclient5'

// plugins
implementation 'org.apache.maven.resolver:maven-resolver-impl'
implementation 'org.apache.maven.resolver:maven-resolver-supplier'
implementation 'org.apache.maven.resolver:maven-resolver-connector-basic'
implementation 'org.apache.maven.resolver:maven-resolver-transport-file'
implementation 'org.apache.maven.resolver:maven-resolver-transport-http'

// scheduler
implementation group: 'com.cronutils', name: 'cron-utils'

@@ -66,7 +73,7 @@ dependencies {
testImplementation "io.micronaut:micronaut-http-server-netty"
testImplementation "io.micronaut:micronaut-management"

testImplementation "org.testcontainers:testcontainers:1.20.4"
testImplementation "org.testcontainers:junit-jupiter:1.20.4"
testImplementation "org.testcontainers:testcontainers:1.20.6"
testImplementation "org.testcontainers:junit-jupiter:1.20.6"
testImplementation "org.bouncycastle:bcpkix-jdk18on:1.80"
}
@@ -3,7 +3,6 @@ package io.kestra.core.contexts;
import io.kestra.core.exceptions.KestraRuntimeException;
import io.kestra.core.plugins.DefaultPluginRegistry;
import io.kestra.core.plugins.PluginRegistry;
import io.kestra.core.plugins.serdes.PluginDeserializer;
import io.kestra.core.storages.StorageInterface;
import io.kestra.core.storages.StorageInterfaceFactory;
import io.micronaut.context.annotation.Bean;
@@ -34,7 +33,7 @@ public class KestraBeansFactory {
StorageConfig storageConfig;

@Value("${kestra.storage.type}")
Optional<String> storageType;
protected Optional<String> storageType;

@Requires(missingBeans = PluginRegistry.class)
@Singleton
@@ -42,16 +41,25 @@
return DefaultPluginRegistry.getOrCreate();
}

@Singleton
public StorageInterfaceFactory storageInterfaceFactory(final PluginRegistry pluginRegistry){
return new StorageInterfaceFactory(pluginRegistry, validator);
}

@Requires(missingBeans = StorageInterface.class)
@Singleton
@Bean(preDestroy = "close")
public StorageInterface storageInterface(final PluginRegistry pluginRegistry) throws IOException {
String pluginId = storageType.orElseThrow(() -> new KestraRuntimeException(String.format(
public StorageInterface storageInterface(final StorageInterfaceFactory storageInterfaceFactory) throws IOException {
String pluginId = getStoragePluginId(storageInterfaceFactory);
return storageInterfaceFactory.make(null, pluginId, storageConfig.getStorageConfig(pluginId));
}

public String getStoragePluginId(StorageInterfaceFactory storageInterfaceFactory) {
return storageType.orElseThrow(() -> new KestraRuntimeException(String.format(
"No storage configured through the application property '%s'. Supported types are: %s"
, KESTRA_STORAGE_TYPE_CONFIG,
StorageInterfaceFactory.getLoggableStorageIds(pluginRegistry)
storageInterfaceFactory.getLoggableStorageIds()
)));
return StorageInterfaceFactory.make(pluginRegistry, pluginId, storageConfig.getStorageConfig(pluginId), validator);
}

@ConfigurationProperties("kestra")
@@ -67,7 +75,7 @@ public class KestraBeansFactory {
* @return the configuration.
*/
@SuppressWarnings("unchecked")
private Map<String, Object> getStorageConfig(String type) {
public Map<String, Object> getStorageConfig(String type) {
return (Map<String, Object>) storage.get(StringConvention.CAMEL_CASE.format(type));
}
}
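With this refactoring, storage creation goes through the new StorageInterfaceFactory bean rather than a static helper. A minimal sketch of how a caller could use it, based only on the signatures visible in the hunk above; the "local" plugin id and the empty config map are placeholders, and the null first argument simply mirrors the call shown in the diff:

// Sketch (not from the diff): resolve a storage backend through the injected factory.
import io.kestra.core.storages.StorageInterface;
import io.kestra.core.storages.StorageInterfaceFactory;
import jakarta.inject.Singleton;
import java.io.IOException;
import java.util.Map;

@Singleton
public class StorageExample {
    private final StorageInterface storage;

    public StorageExample(StorageInterfaceFactory factory) throws IOException {
        // "local" is a hypothetical plugin id; real values come from kestra.storage.type
        this.storage = factory.make(null, "local", Map.of());
    }
}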
@@ -25,7 +25,7 @@ public abstract class KestraContext {
private static final AtomicReference<KestraContext> INSTANCE = new AtomicReference<>();

// Properties
private static final String KESTRA_SERVER_TYPE = "kestra.server-type";
public static final String KESTRA_SERVER_TYPE = "kestra.server-type";

/**
* Gets the current {@link KestraContext}.
@@ -0,0 +1,27 @@
package io.kestra.core.contexts;

import io.micronaut.context.annotation.ConfigurationProperties;
import io.micronaut.context.annotation.EachProperty;
import io.micronaut.context.annotation.Parameter;
import io.micronaut.core.annotation.Nullable;
import lombok.Builder;

@Builder
@EachProperty("kestra.plugins.repositories")
public record MavenPluginRepositoryConfig(
@Parameter
String id,
String url,
@Nullable
BasicAuth basicAuth
) {

@Builder
@ConfigurationProperties("basic-auth")
public record BasicAuth(
String username,
String password
) {

}
}
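Because the record is bound with @EachProperty under kestra.plugins.repositories, each repository is declared as a keyed entry in configuration: the key becomes the id parameter and basic-auth nests under it. A hedged sketch of how such an entry could be exercised, reusing the ApplicationContext.builder().properties(...) pattern from the tests earlier in this diff; the "central" id, URL, and credentials are made-up values:

// Sketch (hypothetical values): one repository entry keyed "central" is bound
// to MavenPluginRepositoryConfig via @EachProperty.
@Test
void shouldBindRepositoryConfig() {
    try (ApplicationContext ctx = ApplicationContext.builder()
        .properties(Map.of(
            "kestra.plugins.repositories.central.url", "https://repo.maven.apache.org/maven2/",
            "kestra.plugins.repositories.central.basic-auth.username", "user",
            "kestra.plugins.repositories.central.basic-auth.password", "secret"))
        .start()) {
        Collection<MavenPluginRepositoryConfig> repositories = ctx.getBeansOfType(MavenPluginRepositoryConfig.class);
        assertThat(repositories, hasSize(1));
    }
}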
@@ -1,6 +1,7 @@
|
||||
package io.kestra.core.docs;
|
||||
|
||||
import com.google.common.base.CaseFormat;
|
||||
import io.kestra.core.models.Plugin;
|
||||
import io.kestra.core.models.tasks.retrys.AbstractRetry;
|
||||
import io.kestra.core.models.tasks.runners.TaskRunner;
|
||||
import lombok.AllArgsConstructor;
|
||||
|
||||
@@ -1,10 +1,9 @@
|
||||
package io.kestra.core.docs;
|
||||
|
||||
import io.kestra.core.plugins.RegisteredPlugin;
|
||||
import io.kestra.core.plugins.PluginClassAndMetadata;
|
||||
import lombok.*;
|
||||
|
||||
import java.util.*;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
@Getter
|
||||
@EqualsAndHashCode
|
||||
@@ -21,16 +20,18 @@ public class ClassPluginDocumentation<T> extends AbstractClassDocumentation<T> {
|
||||
private Map<String, Object> outputsSchema;
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private ClassPluginDocumentation(JsonSchemaGenerator jsonSchemaGenerator, RegisteredPlugin plugin, Class<? extends T> cls, Class<T> baseCls, String alias) {
|
||||
super(jsonSchemaGenerator, cls, baseCls);
|
||||
private ClassPluginDocumentation(JsonSchemaGenerator jsonSchemaGenerator, PluginClassAndMetadata<T> plugin, boolean allProperties) {
|
||||
super(jsonSchemaGenerator, plugin.type(), allProperties ? null : plugin.baseClass());
|
||||
|
||||
// plugins metadata
|
||||
this.cls = alias == null ? cls.getName() : alias;
|
||||
Class<? extends T> cls = plugin.type();
|
||||
|
||||
this.cls = plugin.alias() == null ? cls.getName() : plugin.alias();
|
||||
this.group = plugin.group();
|
||||
this.docLicense = plugin.license();
|
||||
this.pluginTitle = plugin.title();
|
||||
this.icon = plugin.icon(cls);
|
||||
if (alias != null) {
|
||||
this.icon = plugin.icon();
|
||||
if (plugin.alias() != null) {
|
||||
replacement = cls.getName();
|
||||
}
|
||||
|
||||
@@ -38,10 +39,10 @@ public class ClassPluginDocumentation<T> extends AbstractClassDocumentation<T> {
|
||||
this.subGroup = cls.getPackageName().substring(this.group.length() + 1);
|
||||
}
|
||||
|
||||
this.shortName = alias == null ? cls.getSimpleName() : alias.substring(alias.lastIndexOf('.') + 1);
|
||||
this.shortName = plugin.alias() == null ? cls.getSimpleName() : plugin.alias().substring(plugin.alias().lastIndexOf('.') + 1);
|
||||
|
||||
// outputs
|
||||
this.outputsSchema = jsonSchemaGenerator.outputs(baseCls, cls);
|
||||
this.outputsSchema = jsonSchemaGenerator.outputs(allProperties ? null : plugin.baseClass(), cls);
|
||||
|
||||
if (this.outputsSchema.containsKey("$defs")) {
|
||||
this.defs.putAll((Map<String, Object>) this.outputsSchema.get("$defs"));
|
||||
@@ -67,17 +68,13 @@ public class ClassPluginDocumentation<T> extends AbstractClassDocumentation<T> {
|
||||
.toList();
|
||||
}
|
||||
|
||||
if (alias != null) {
|
||||
if (plugin.alias() != null) {
|
||||
this.deprecated = true;
|
||||
}
|
||||
}
|
||||
|
||||
public static <T> ClassPluginDocumentation<T> of(JsonSchemaGenerator jsonSchemaGenerator, RegisteredPlugin plugin, Class<? extends T> cls, Class<T> baseCls) {
|
||||
return new ClassPluginDocumentation<>(jsonSchemaGenerator, plugin, cls, baseCls, null);
|
||||
}
|
||||
|
||||
public static <T> ClassPluginDocumentation<T> of(JsonSchemaGenerator jsonSchemaGenerator, RegisteredPlugin plugin, Class<? extends T> cls, Class<T> baseCls, String alias) {
|
||||
return new ClassPluginDocumentation<>(jsonSchemaGenerator, plugin, cls, baseCls, alias);
|
||||
public static <T> ClassPluginDocumentation<T> of(JsonSchemaGenerator jsonSchemaGenerator, PluginClassAndMetadata<T> plugin, boolean allProperties) {
|
||||
return new ClassPluginDocumentation<>(jsonSchemaGenerator, plugin, allProperties);
|
||||
}
|
||||
|
||||
@AllArgsConstructor
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
package io.kestra.core.docs;
|
||||
|
||||
import com.google.common.base.Charsets;
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import io.kestra.core.models.annotations.PluginSubGroup;
|
||||
import io.kestra.core.models.conditions.Condition;
|
||||
@@ -8,6 +7,7 @@ import io.kestra.core.models.tasks.logs.LogExporter;
|
||||
import io.kestra.core.models.tasks.runners.TaskRunner;
|
||||
import io.kestra.core.models.tasks.Task;
|
||||
import io.kestra.core.models.triggers.AbstractTrigger;
|
||||
import io.kestra.core.plugins.PluginClassAndMetadata;
|
||||
import io.kestra.core.plugins.RegisteredPlugin;
|
||||
import io.kestra.core.runners.pebble.Extension;
|
||||
import io.kestra.core.runners.pebble.JsonWriter;
|
||||
@@ -29,6 +29,7 @@ import org.apache.commons.io.IOUtils;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.Writer;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.*;
|
||||
import java.util.regex.Pattern;
|
||||
import java.util.stream.Collectors;
|
||||
@@ -37,7 +38,7 @@ import static io.kestra.core.utils.Rethrow.throwFunction;
|
||||
|
||||
@Singleton
|
||||
public class DocumentationGenerator {
|
||||
private static PebbleEngine pebbleEngine;
|
||||
private static final PebbleEngine PEBBLE_ENGINE;
|
||||
|
||||
@Inject
|
||||
JsonSchemaGenerator jsonSchemaGenerator;
|
||||
@@ -46,7 +47,7 @@ public class DocumentationGenerator {
|
||||
ClasspathLoader classpathLoader = new ClasspathLoader();
|
||||
classpathLoader.setPrefix("docs/");
|
||||
|
||||
pebbleEngine = new PebbleEngine.Builder()
|
||||
PEBBLE_ENGINE = new PebbleEngine.Builder()
|
||||
.newLineTrimming(false)
|
||||
.loader(classpathLoader)
|
||||
.extension(new AbstractExtension() {
|
||||
@@ -62,6 +63,7 @@ public class DocumentationGenerator {
|
||||
.build();
|
||||
}
|
||||
|
||||
@SuppressWarnings({"unchecked", "rawtypes"})
|
||||
public List<Document> generate(RegisteredPlugin registeredPlugin) throws Exception {
|
||||
ArrayList<Document> result = new ArrayList<>();
|
||||
|
||||
@@ -72,7 +74,7 @@ public class DocumentationGenerator {
|
||||
result.addAll(this.generate(registeredPlugin, registeredPlugin.getConditions(), Condition.class, "conditions"));
|
||||
//noinspection unchecked
|
||||
result.addAll(this.generate(registeredPlugin, registeredPlugin.getTaskRunners(), (Class) TaskRunner.class, "task-runners"));
|
||||
result.addAll(this.generate(registeredPlugin, registeredPlugin.getLogExporters(), LogExporter.class, "log-exporters"));
|
||||
result.addAll(this.generate(registeredPlugin, registeredPlugin.getLogExporters(), (Class) LogExporter.class, "log-exporters"));
|
||||
|
||||
result.addAll(guides(registeredPlugin));
|
||||
|
||||
@@ -216,7 +218,15 @@ public class DocumentationGenerator {
|
||||
private <T> List<Document> generate(RegisteredPlugin registeredPlugin, List<Class<? extends T>> cls, Class<T> baseCls, String type) {
|
||||
return cls
|
||||
.stream()
|
||||
.map(r -> ClassPluginDocumentation.of(jsonSchemaGenerator, registeredPlugin, r, baseCls))
|
||||
.map(pluginClass -> {
|
||||
PluginClassAndMetadata<T> metadata = PluginClassAndMetadata.create(
|
||||
registeredPlugin,
|
||||
pluginClass,
|
||||
baseCls,
|
||||
null
|
||||
);
|
||||
return ClassPluginDocumentation.of(jsonSchemaGenerator, metadata, true);
|
||||
})
|
||||
.map(pluginDocumentation -> {
|
||||
try {
|
||||
return new Document(
|
||||
@@ -246,21 +256,21 @@ public class DocumentationGenerator {
|
||||
classPluginDocumentation.getCls() + ".md";
|
||||
}
|
||||
|
||||
public static <T> String render(ClassPluginDocumentation<T> classPluginDocumentation) throws IOException {
|
||||
public static String render(ClassPluginDocumentation<?> classPluginDocumentation) throws IOException {
|
||||
return render("task", JacksonMapper.toMap(classPluginDocumentation));
|
||||
}
|
||||
|
||||
public static <T> String render(AbstractClassDocumentation<T> classInputDocumentation) throws IOException {
|
||||
public static String render(AbstractClassDocumentation classInputDocumentation) throws IOException {
|
||||
return render("task", JacksonMapper.toMap(classInputDocumentation));
|
||||
}
|
||||
|
||||
public static <T> String render(String templateName, Map<String, Object> vars) throws IOException {
|
||||
public static String render(String templateName, Map<String, Object> vars) throws IOException {
|
||||
String pebbleTemplate = IOUtils.toString(
|
||||
Objects.requireNonNull(DocumentationGenerator.class.getClassLoader().getResourceAsStream("docs/" + templateName + ".peb")),
|
||||
Charsets.UTF_8
|
||||
StandardCharsets.UTF_8
|
||||
);
|
||||
|
||||
PebbleTemplate compiledTemplate = pebbleEngine.getLiteralTemplate(pebbleTemplate);
|
||||
PebbleTemplate compiledTemplate = PEBBLE_ENGINE.getLiteralTemplate(pebbleTemplate);
|
||||
|
||||
Writer writer = new JsonWriter();
|
||||
compiledTemplate.evaluate(writer, vars);
|
||||
|
||||
core/src/main/java/io/kestra/core/docs/JsonSchemaCache.java (new file, 65 lines)
@@ -0,0 +1,65 @@
|
||||
package io.kestra.core.docs;
|
||||
|
||||
import io.kestra.core.models.dashboards.Dashboard;
|
||||
import io.kestra.core.models.flows.Flow;
|
||||
import io.kestra.core.models.flows.PluginDefault;
|
||||
import io.kestra.core.models.tasks.Task;
|
||||
import io.kestra.core.models.templates.Template;
|
||||
import io.kestra.core.models.triggers.AbstractTrigger;
|
||||
import jakarta.inject.Singleton;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.Optional;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
import java.util.concurrent.ConcurrentMap;
|
||||
|
||||
/**
|
||||
* Service for getting schemas.
|
||||
*/
|
||||
@Singleton
|
||||
public class JsonSchemaCache {
|
||||
|
||||
private final JsonSchemaGenerator jsonSchemaGenerator;
|
||||
|
||||
private final ConcurrentMap<CacheKey, Map<String, Object>> schemaCache = new ConcurrentHashMap<>();
|
||||
|
||||
private final Map<SchemaType, Class<?>> classesBySchemaType = new HashMap<>();
|
||||
|
||||
/**
|
||||
* Creates a new {@link JsonSchemaCache} instance.
|
||||
*
|
||||
* @param jsonSchemaGenerator The {@link JsonSchemaGenerator}.
|
||||
*/
|
||||
public JsonSchemaCache(final JsonSchemaGenerator jsonSchemaGenerator) {
|
||||
this.jsonSchemaGenerator = Objects.requireNonNull(jsonSchemaGenerator, "JsonSchemaGenerator cannot be null");
|
||||
registerClassForType(SchemaType.FLOW, Flow.class);
|
||||
registerClassForType(SchemaType.TEMPLATE, Template.class);
|
||||
registerClassForType(SchemaType.TASK, Task.class);
|
||||
registerClassForType(SchemaType.TRIGGER, AbstractTrigger.class);
|
||||
registerClassForType(SchemaType.PLUGINDEFAULT, PluginDefault.class);
|
||||
registerClassForType(SchemaType.DASHBOARD, Dashboard.class);
|
||||
}
|
||||
|
||||
public Map<String, Object> getSchemaForType(final SchemaType type,
|
||||
final boolean arrayOf) {
|
||||
return schemaCache.computeIfAbsent(new CacheKey(type, arrayOf), (key) -> {
|
||||
|
||||
Class<?> cls = Optional.ofNullable(classesBySchemaType.get(type))
|
||||
.orElseThrow(() -> new IllegalArgumentException("Cannot found schema for type '" + type + "'"));
|
||||
return jsonSchemaGenerator.schemas(cls, arrayOf);
|
||||
});
|
||||
}
|
||||
|
||||
public void registerClassForType(final SchemaType type, final Class<?> clazz) {
|
||||
classesBySchemaType.put(type, clazz);
|
||||
}
|
||||
|
||||
public void clear() {
|
||||
schemaCache.clear();
|
||||
}
|
||||
|
||||
private record CacheKey(SchemaType type, boolean arrayOf) {
|
||||
}
|
||||
}
|
||||
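The new JsonSchemaCache memoizes one generated schema per (type, arrayOf) pair and can be injected wherever schemas are served. A minimal usage sketch grounded in the getSchemaForType method and SchemaType.FLOW shown above; the surrounding provider class is made up for illustration:

// Sketch: inject the cache and serve the memoized flow schema.
import jakarta.inject.Singleton;
import java.util.Map;

@Singleton
public class FlowSchemaProvider {
    private final JsonSchemaCache schemaCache;

    public FlowSchemaProvider(JsonSchemaCache schemaCache) {
        this.schemaCache = schemaCache;
    }

    public Map<String, Object> flowSchema() {
        // Generated once per (type, arrayOf) pair, then served from the cache
        return schemaCache.getSchemaForType(SchemaType.FLOW, false);
    }
}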
@@ -82,6 +82,7 @@ public class JsonSchemaGenerator {
|
||||
}
|
||||
replaceAnyOfWithOneOf(objectNode);
|
||||
pullOfDefaultFromOneOf(objectNode);
|
||||
removeRequiredOnPropsWithDefaults(objectNode);
|
||||
|
||||
return JacksonMapper.toMap(objectNode);
|
||||
} catch (IllegalArgumentException e) {
|
||||
@@ -89,6 +90,27 @@ public class JsonSchemaGenerator {
|
||||
}
|
||||
}
|
||||
|
||||
private void removeRequiredOnPropsWithDefaults(ObjectNode objectNode) {
|
||||
objectNode.findParents("required").forEach(jsonNode -> {
|
||||
if (jsonNode instanceof ObjectNode clazzSchema && clazzSchema.get("required") instanceof ArrayNode requiredPropsNode && clazzSchema.get("properties") instanceof ObjectNode properties) {
|
||||
List<String> requiredFieldValues = StreamSupport.stream(requiredPropsNode.spliterator(), false)
|
||||
.map(JsonNode::asText)
|
||||
.toList();
|
||||
|
||||
properties.fields().forEachRemaining(e -> {
|
||||
int indexInRequiredArray = requiredFieldValues.indexOf(e.getKey());
|
||||
if (indexInRequiredArray != -1 && e.getValue() instanceof ObjectNode valueNode && valueNode.has("default")) {
|
||||
requiredPropsNode.remove(indexInRequiredArray);
|
||||
}
|
||||
});
|
||||
|
||||
if (requiredPropsNode.isEmpty()) {
|
||||
clazzSchema.remove("required");
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
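For illustration only (not part of the diff), the intended effect of removeRequiredOnPropsWithDefaults on a generated schema fragment: a property that declares a default value is dropped from the required array, and the array is removed entirely once it becomes empty.

// Before: "message" is listed as required even though it has a default.
String before = """
    {"required": ["message"], "properties": {"message": {"type": "string", "default": "Hello"}}}
    """;
// After removeRequiredOnPropsWithDefaults: the now-empty "required" array is gone.
String after = """
    {"properties": {"message": {"type": "string", "default": "Hello"}}}
    """;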
private void replaceAnyOfWithOneOf(ObjectNode objectNode) {
|
||||
objectNode.findParents("anyOf").forEach(jsonNode -> {
|
||||
if (jsonNode instanceof ObjectNode oNode) {
|
||||
@@ -296,6 +318,9 @@ public class JsonSchemaGenerator {
|
||||
if (pluginPropertyAnnotation.beta()) {
|
||||
memberAttributes.put("$beta", true);
|
||||
}
|
||||
if (pluginPropertyAnnotation.internalStorageURI()) {
|
||||
memberAttributes.put("$internalStorageURI", true);
|
||||
}
|
||||
}
|
||||
|
||||
Schema schema = member.getAnnotationConsideringFieldAndGetter(Schema.class);
|
||||
@@ -311,10 +336,12 @@ public class JsonSchemaGenerator {
|
||||
if (member.getDeclaredType().isInstanceOf(Property.class)) {
|
||||
memberAttributes.put("$dynamic", true);
|
||||
// if we are in the String definition of a Property but the target type is not String: we configure the pattern
|
||||
Class<?> targetType = member.getDeclaredType().getTypeParameters().getFirst().getErasedType();
|
||||
if (!String.class.isAssignableFrom(targetType) && String.class.isAssignableFrom(member.getType().getErasedType())) {
|
||||
memberAttributes.put("pattern", ".*{{.*}}.*");
|
||||
}
|
||||
// TODO this was a good idea but their is too much cases where it didn't work like in List or Map so if we want it we need to make it more clever
|
||||
// I keep it for now commented but at some point we may want to re-do and improve it or remove these commented lines
|
||||
// Class<?> targetType = member.getDeclaredType().getTypeParameters().getFirst().getErasedType();
|
||||
// if (!String.class.isAssignableFrom(targetType) && String.class.isAssignableFrom(member.getType().getErasedType())) {
|
||||
// memberAttributes.put("pattern", ".*{{.*}}.*");
|
||||
// }
|
||||
} else if (member.getDeclaredType().isInstanceOf(Data.class)) {
|
||||
memberAttributes.put("$dynamic", false);
|
||||
}
|
||||
@@ -603,6 +630,7 @@ public class JsonSchemaGenerator {
|
||||
ObjectNode objectNode = generator.generateSchema(cls);
|
||||
replaceAnyOfWithOneOf(objectNode);
|
||||
pullOfDefaultFromOneOf(objectNode);
|
||||
removeRequiredOnPropsWithDefaults(objectNode);
|
||||
|
||||
return JacksonMapper.toMap(extractMainRef(objectNode));
|
||||
} catch (IllegalArgumentException e) {
|
||||
|
||||
@@ -7,6 +7,7 @@ import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
import java.util.*;
|
||||
import java.util.function.Predicate;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static java.util.function.Predicate.not;
|
||||
@@ -40,15 +41,22 @@ public class Plugin {
|
||||
private String subGroup;
|
||||
|
||||
public static Plugin of(RegisteredPlugin registeredPlugin, @Nullable String subgroup) {
|
||||
return Plugin.of(registeredPlugin, subgroup, true);
|
||||
}
|
||||
|
||||
public static Plugin of(RegisteredPlugin registeredPlugin, @Nullable String subgroup, boolean includeDeprecated) {
|
||||
Plugin plugin = new Plugin();
|
||||
plugin.name = registeredPlugin.name();
|
||||
PluginSubGroup subGroupInfos = null;
|
||||
if (subgroup == null) {
|
||||
plugin.title = registeredPlugin.title();
|
||||
} else {
|
||||
subGroupInfos = registeredPlugin.allClass().stream().filter(c -> c.getName().contains(subgroup)).map(clazz -> clazz.getPackage().getDeclaredAnnotation(PluginSubGroup.class)).toList().getFirst();
|
||||
plugin.title = !subGroupInfos.title().isEmpty() ? subGroupInfos.title() : subgroup.substring(subgroup.lastIndexOf('.') + 1);;
|
||||
|
||||
subGroupInfos = registeredPlugin.allClass().stream()
|
||||
.filter(c -> c.getPackageName().contains(subgroup))
|
||||
.min(Comparator.comparingInt(a -> a.getPackageName().length()))
|
||||
.map(clazz -> clazz.getPackage().getDeclaredAnnotation(PluginSubGroup.class))
|
||||
.orElseThrow();
|
||||
plugin.title = !subGroupInfos.title().isEmpty() ? subGroupInfos.title() : subgroup.substring(subgroup.lastIndexOf('.') + 1);
|
||||
}
|
||||
plugin.group = registeredPlugin.group();
|
||||
plugin.description = subGroupInfos != null && !subGroupInfos.description().isEmpty() ? subGroupInfos.description() : registeredPlugin.description();
|
||||
@@ -70,27 +78,28 @@ public class Plugin {
|
||||
plugin.categories = subGroupInfos != null ?
|
||||
Arrays.stream(subGroupInfos.categories()).toList() :
|
||||
registeredPlugin
|
||||
.allClass()
|
||||
.stream()
|
||||
.map(clazz -> clazz.getPackage().getDeclaredAnnotation(PluginSubGroup.class))
|
||||
.filter(Objects::nonNull)
|
||||
.flatMap(r -> Arrays.stream(r.categories()))
|
||||
.distinct()
|
||||
.toList();
|
||||
.allClass()
|
||||
.stream()
|
||||
.map(clazz -> clazz.getPackage().getDeclaredAnnotation(PluginSubGroup.class))
|
||||
.filter(Objects::nonNull)
|
||||
.flatMap(r -> Arrays.stream(r.categories()))
|
||||
.distinct()
|
||||
.toList();
|
||||
|
||||
plugin.subGroup = subgroup;
|
||||
|
||||
plugin.tasks = filterAndGetClassName(registeredPlugin.getTasks()).stream().filter(c -> subgroup == null || c.startsWith(subgroup)).toList();
|
||||
plugin.triggers = filterAndGetClassName(registeredPlugin.getTriggers()).stream().filter(c -> subgroup == null || c.startsWith(subgroup)).toList();
|
||||
plugin.conditions = filterAndGetClassName(registeredPlugin.getConditions()).stream().filter(c -> subgroup == null || c.startsWith(subgroup)).toList();
|
||||
plugin.storages = filterAndGetClassName(registeredPlugin.getStorages()).stream().filter(c -> subgroup == null || c.startsWith(subgroup)).toList();
|
||||
plugin.secrets = filterAndGetClassName(registeredPlugin.getSecrets()).stream().filter(c -> subgroup == null || c.startsWith(subgroup)).toList();
|
||||
plugin.taskRunners = filterAndGetClassName(registeredPlugin.getTaskRunners()).stream().filter(c -> subgroup == null || c.startsWith(subgroup)).toList();
|
||||
plugin.apps = filterAndGetClassName(registeredPlugin.getApps()).stream().filter(c -> subgroup == null || c.startsWith(subgroup)).toList();
|
||||
plugin.appBlocks = filterAndGetClassName(registeredPlugin.getAppBlocks()).stream().filter(c -> subgroup == null || c.startsWith(subgroup)).toList();
|
||||
plugin.charts = filterAndGetClassName(registeredPlugin.getCharts()).stream().filter(c -> subgroup == null || c.startsWith(subgroup)).toList();
|
||||
plugin.dataFilters = filterAndGetClassName(registeredPlugin.getDataFilters()).stream().filter(c -> subgroup == null || c.startsWith(subgroup)).toList();
|
||||
plugin.logExporters = filterAndGetClassName(registeredPlugin.getLogExporters()).stream().filter(c -> subgroup == null || c.startsWith(subgroup)).toList();
|
||||
Predicate<Class<?>> packagePredicate = c -> subgroup == null || c.getPackageName().equals(subgroup);
|
||||
plugin.tasks = filterAndGetClassName(registeredPlugin.getTasks(), includeDeprecated, packagePredicate).stream().toList();
|
||||
plugin.triggers = filterAndGetClassName(registeredPlugin.getTriggers(), includeDeprecated, packagePredicate).stream().toList();
|
||||
plugin.conditions = filterAndGetClassName(registeredPlugin.getConditions(), includeDeprecated, packagePredicate).stream().toList();
|
||||
plugin.storages = filterAndGetClassName(registeredPlugin.getStorages(), includeDeprecated, packagePredicate).stream().toList();
|
||||
plugin.secrets = filterAndGetClassName(registeredPlugin.getSecrets(), includeDeprecated, packagePredicate).stream().toList();
|
||||
plugin.taskRunners = filterAndGetClassName(registeredPlugin.getTaskRunners(), includeDeprecated, packagePredicate).stream().toList();
|
||||
plugin.apps = filterAndGetClassName(registeredPlugin.getApps(), includeDeprecated, packagePredicate).stream().toList();
|
||||
plugin.appBlocks = filterAndGetClassName(registeredPlugin.getAppBlocks(), includeDeprecated, packagePredicate).stream().toList();
|
||||
plugin.charts = filterAndGetClassName(registeredPlugin.getCharts(), includeDeprecated, packagePredicate).stream().toList();
|
||||
plugin.dataFilters = filterAndGetClassName(registeredPlugin.getDataFilters(), includeDeprecated, packagePredicate).stream().toList();
|
||||
plugin.logExporters = filterAndGetClassName(registeredPlugin.getLogExporters(), includeDeprecated, packagePredicate).stream().toList();
|
||||
|
||||
return plugin;
|
||||
}
|
||||
@@ -99,13 +108,16 @@ public class Plugin {
|
||||
* Filters the given list of class all internal Plugin, as well as, all legacy org.kestra classes.
|
||||
* Those classes are only filtered from the documentation to ensure backward compatibility.
|
||||
*
|
||||
* @param list The list of classes?
|
||||
* @return a filtered streams.
|
||||
* @param list The list of classes?
|
||||
* @param includeDeprecated whether to include deprecated plugins or not
|
||||
* @return a filtered streams.
|
||||
*/
|
||||
private static List<String> filterAndGetClassName(final List<? extends Class<?>> list) {
|
||||
private static List<String> filterAndGetClassName(final List<? extends Class<?>> list, boolean includeDeprecated, Predicate<Class<?>> clazzFilter) {
|
||||
return list
|
||||
.stream()
|
||||
.filter(not(io.kestra.core.models.Plugin::isInternal))
|
||||
.filter(p -> includeDeprecated || !io.kestra.core.models.Plugin.isDeprecated(p))
|
||||
.filter(clazzFilter)
|
||||
.map(Class::getName)
|
||||
.filter(c -> !c.startsWith("org.kestra."))
|
||||
.toList();
|
||||
|
||||
@@ -1,11 +1,20 @@
|
||||
package io.kestra.core.docs;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonCreator;
|
||||
import io.kestra.core.utils.Enums;
|
||||
|
||||
|
||||
public enum SchemaType {
|
||||
flow,
|
||||
template,
|
||||
task,
|
||||
trigger,
|
||||
plugindefault,
|
||||
apps,
|
||||
dashboard
|
||||
FLOW,
|
||||
TEMPLATE,
|
||||
TASK,
|
||||
TRIGGER,
|
||||
PLUGINDEFAULT,
|
||||
APPS,
|
||||
DASHBOARD;
|
||||
|
||||
@JsonCreator
|
||||
public static SchemaType fromString(final String value) {
|
||||
return Enums.getForNameIgnoreCase(value, SchemaType.class);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,6 +7,7 @@ public enum CrudEventType {
|
||||
DELETE,
|
||||
LOGIN,
|
||||
LOGOUT,
|
||||
IMPERSONATE
|
||||
IMPERSONATE,
|
||||
LOGIN_FAILURE
|
||||
}
|
||||
|
||||
|
||||
@@ -142,12 +142,22 @@ public class HttpRequest {
|
||||
public abstract static class RequestBody {
|
||||
public abstract HttpEntity to() throws IOException;
|
||||
|
||||
public abstract Object getContent() throws IOException;
|
||||
|
||||
public abstract Charset getCharset() throws IOException;
|
||||
|
||||
public abstract String getContentType() throws IOException;
|
||||
|
||||
protected ContentType entityContentType() throws IOException {
|
||||
return this.getCharset() != null ? ContentType.create(this.getContentType(), this.getCharset()) : ContentType.create(this.getContentType());
|
||||
}
|
||||
|
||||
public static RequestBody from(HttpEntity entity) throws IOException {
|
||||
if (entity == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
Charset charset = Charset.forName(entity.getContentEncoding());
|
||||
Charset charset = entity.getContentEncoding() != null ? Charset.forName(entity.getContentEncoding()) : StandardCharsets.UTF_8;
|
||||
|
||||
if (entity.getContentType().equals(ContentType.APPLICATION_OCTET_STREAM.getMimeType())) {
|
||||
return ByteArrayRequestBody.builder()
|
||||
@@ -172,71 +182,80 @@ public class HttpRequest {
|
||||
.build();
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException("Unsupported Content-Type: " + entity.getContentType());
|
||||
return ByteArrayRequestBody.builder()
|
||||
.charset(charset)
|
||||
.contentType(entity.getContentType())
|
||||
.content(entity.getContent().readAllBytes())
|
||||
.build();
|
||||
}
|
||||
}
|
||||
|
||||
@Getter
|
||||
@AllArgsConstructor
|
||||
@SuperBuilder
|
||||
public static class InputStreamRequestBody extends RequestBody {
|
||||
@Builder.Default
|
||||
private String contentType = ContentType.APPLICATION_OCTET_STREAM.getMimeType();
|
||||
|
||||
@Builder.Default
|
||||
private Charset charset = StandardCharsets.UTF_8;
|
||||
private Charset charset;
|
||||
|
||||
private InputStream content;
|
||||
|
||||
public HttpEntity to() {
|
||||
return new InputStreamEntity(content, ContentType.create(contentType, charset));
|
||||
public HttpEntity to() throws IOException {
|
||||
return new InputStreamEntity(content, this.entityContentType());
|
||||
}
|
||||
}
|
||||
|
||||
@Getter
|
||||
@AllArgsConstructor
|
||||
@SuperBuilder
|
||||
public static class StringRequestBody extends RequestBody {
|
||||
@Builder.Default
|
||||
private String contentType = ContentType.TEXT_PLAIN.getMimeType();
|
||||
|
||||
@Builder.Default
|
||||
private Charset charset = StandardCharsets.UTF_8;
|
||||
private Charset charset;
|
||||
|
||||
private String content;
|
||||
|
||||
public HttpEntity to() {
|
||||
return new StringEntity(this.content, ContentType.create(contentType, charset));
|
||||
public HttpEntity to() throws IOException {
|
||||
return new StringEntity(this.content, this.entityContentType());
|
||||
}
|
||||
}
|
||||
|
||||
@Getter
|
||||
@AllArgsConstructor
|
||||
@SuperBuilder
|
||||
public static class ByteArrayRequestBody extends RequestBody {
|
||||
@Builder.Default
|
||||
private String contentType = ContentType.APPLICATION_OCTET_STREAM.getMimeType();
|
||||
|
||||
@Builder.Default
|
||||
private Charset charset = StandardCharsets.UTF_8;
|
||||
private Charset charset;
|
||||
|
||||
private byte[] content;
|
||||
|
||||
public HttpEntity to() {
|
||||
return new ByteArrayEntity(content, ContentType.create(contentType, charset));
|
||||
public HttpEntity to() throws IOException {
|
||||
return new ByteArrayEntity(content, this.entityContentType());
|
||||
}
|
||||
}
|
||||
|
||||
@Getter
|
||||
@AllArgsConstructor
|
||||
@SuperBuilder
|
||||
public static class JsonRequestBody extends RequestBody {
|
||||
@Builder.Default
|
||||
private Charset charset = StandardCharsets.UTF_8;
|
||||
private Charset charset;
|
||||
|
||||
private Object content;
|
||||
|
||||
@Override
|
||||
public String getContentType() throws IOException {
|
||||
return ContentType.APPLICATION_JSON.getMimeType();
|
||||
}
|
||||
|
||||
public HttpEntity to() throws IOException {
|
||||
try {
|
||||
return new StringEntity(
|
||||
JacksonMapper.ofJson().writeValueAsString(content),
|
||||
ContentType.APPLICATION_JSON.withCharset(this.charset)
|
||||
this.charset != null ? ContentType.APPLICATION_JSON.withCharset(this.charset) : ContentType.APPLICATION_JSON
|
||||
);
|
||||
} catch (JsonProcessingException e) {
|
||||
throw new IOException(e);
|
||||
@@ -244,37 +263,49 @@ public class HttpRequest {
|
||||
}
|
||||
}
|
||||
|
||||
@Getter
|
||||
@AllArgsConstructor
|
||||
@SuperBuilder
|
||||
public static class UrlEncodedRequestBody extends RequestBody {
|
||||
@Builder.Default
|
||||
private Charset charset = StandardCharsets.UTF_8;
|
||||
private Charset charset;
|
||||
|
||||
private Map<String, Object> content;
|
||||
|
||||
@Override
|
||||
public String getContentType() throws IOException {
|
||||
return ContentType.APPLICATION_FORM_URLENCODED.getMimeType();
|
||||
}
|
||||
|
||||
public HttpEntity to() throws IOException {
|
||||
return new UrlEncodedFormEntity(
|
||||
this.content .entrySet()
|
||||
.stream()
|
||||
.map(e -> new BasicNameValuePair(e.getKey(), e.getValue().toString()))
|
||||
.toList(),
|
||||
this.charset
|
||||
);
|
||||
List<BasicNameValuePair> list = this.content.entrySet()
|
||||
.stream()
|
||||
.map(e -> new BasicNameValuePair(e.getKey(), e.getValue().toString()))
|
||||
.toList();
|
||||
|
||||
return this.charset != null ? new UrlEncodedFormEntity(list, this.charset) : new UrlEncodedFormEntity(list);
|
||||
}
|
||||
}
|
||||
|
||||
@Getter
|
||||
@AllArgsConstructor
|
||||
@SuperBuilder
|
||||
public static class MultipartRequestBody extends RequestBody {
|
||||
@Builder.Default
|
||||
private Charset charset = StandardCharsets.UTF_8;
|
||||
private Charset charset;
|
||||
|
||||
private Map<String, Object> content;
|
||||
|
||||
@Override
|
||||
public String getContentType() throws IOException {
|
||||
return ContentType.MULTIPART_MIXED.getMimeType();
|
||||
}
|
||||
|
||||
public HttpEntity to() throws IOException {
|
||||
MultipartEntityBuilder builder = MultipartEntityBuilder
|
||||
.create()
|
||||
.setCharset(this.charset);
|
||||
.create();
|
||||
|
||||
if (this.charset != null) {
|
||||
builder.setCharset(this.charset);
|
||||
}
|
||||
|
||||
content.forEach((key, value) -> {
|
||||
switch (value) {
|
||||
|
||||
@@ -54,7 +54,7 @@ public class HttpResponse<T> {
|
||||
null
|
||||
)
|
||||
.headers(HttpService.toHttpHeaders(response.getHeaders()))
|
||||
.body(response instanceof ClassicHttpResponse classicHttpResponse ?
|
||||
.body(response instanceof ClassicHttpResponse classicHttpResponse && classicHttpResponse.getEntity() != null ?
|
||||
IOUtils.toByteArray(classicHttpResponse.getEntity().getContent()) :
|
||||
null
|
||||
)
|
||||
|
||||
@@ -29,6 +29,7 @@ import org.apache.hc.core5.http.io.HttpClientResponseHandler;
|
||||
import org.apache.hc.core5.http.io.entity.EntityUtils;
|
||||
import org.apache.hc.core5.ssl.SSLContexts;
|
||||
import org.apache.hc.core5.util.Timeout;
|
||||
import org.codehaus.plexus.util.StringUtils;
|
||||
|
||||
import java.io.Closeable;
|
||||
import java.io.IOException;
|
||||
@@ -87,47 +88,49 @@ public class HttpClient implements Closeable {
|
||||
|
||||
// Timeout
|
||||
if (this.configuration.getTimeout() != null) {
|
||||
var connectTiemout = runContext.render(this.configuration.getTimeout().getConnectTimeout()).as(Duration.class);
|
||||
connectTiemout.ifPresent(duration -> connectionConfig.setConnectTimeout(Timeout.of(duration)));
|
||||
var connectTimeout = runContext.render(this.configuration.getTimeout().getConnectTimeout()).as(Duration.class);
|
||||
connectTimeout.ifPresent(duration -> connectionConfig.setConnectTimeout(Timeout.of(duration)));
|
||||
|
||||
var readIdleTiemout = runContext.render(this.configuration.getTimeout().getReadIdleTimeout()).as(Duration.class);
|
||||
readIdleTiemout.ifPresent(duration -> connectionConfig.setSocketTimeout(Timeout.of(duration)));
|
||||
var readIdleTimeout = runContext.render(this.configuration.getTimeout().getReadIdleTimeout()).as(Duration.class);
|
||||
readIdleTimeout.ifPresent(duration -> connectionConfig.setSocketTimeout(Timeout.of(duration)));
|
||||
}
|
||||
|
||||
// proxy
|
||||
if (this.configuration.getProxy() != null) {
|
||||
SocketAddress proxyAddr = new InetSocketAddress(
|
||||
runContext.render(configuration.getProxy().getAddress()).as(String.class).orElse(null),
|
||||
runContext.render(configuration.getProxy().getPort()).as(Integer.class).orElse(null)
|
||||
);
|
||||
if (this.configuration.getProxy() != null && configuration.getProxy().getAddress() != null) {
|
||||
String proxyAddress = runContext.render(configuration.getProxy().getAddress()).as(String.class).orElse(null);
|
||||
|
||||
Proxy proxy = new Proxy(runContext.render(configuration.getProxy().getType()).as(Proxy.Type.class).orElse(null), proxyAddr);
|
||||
|
||||
builder.setProxySelector(new ProxySelector() {
|
||||
@Override
|
||||
public void connectFailed(URI uri, SocketAddress sa, IOException e) {
|
||||
/* ignore */
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Proxy> select(URI uri) {
|
||||
return List.of(proxy);
|
||||
}
|
||||
});
|
||||
|
||||
if (this.configuration.getProxy().getUsername() != null && this.configuration.getProxy().getPassword() != null) {
|
||||
builder.setProxyAuthenticationStrategy(new DefaultAuthenticationStrategy());
|
||||
|
||||
credentialsStore.setCredentials(
|
||||
new AuthScope(
|
||||
runContext.render(this.configuration.getProxy().getAddress()).as(String.class).orElse(null),
|
||||
runContext.render(this.configuration.getProxy().getPort()).as(Integer.class).orElse(null)
|
||||
),
|
||||
new UsernamePasswordCredentials(
|
||||
runContext.render(this.configuration.getProxy().getUsername()).as(String.class).orElseThrow(),
|
||||
runContext.render(this.configuration.getProxy().getPassword()).as(String.class).orElseThrow().toCharArray()
|
||||
)
|
||||
if (StringUtils.isNotEmpty(proxyAddress)) {
|
||||
int port = runContext.render(configuration.getProxy().getPort()).as(Integer.class).orElseThrow();
|
||||
SocketAddress proxyAddr = new InetSocketAddress(
|
||||
proxyAddress,
|
||||
port
|
||||
);
|
||||
|
||||
Proxy proxy = new Proxy(runContext.render(configuration.getProxy().getType()).as(Proxy.Type.class).orElse(null), proxyAddr);
|
||||
|
||||
builder.setProxySelector(new ProxySelector() {
|
||||
@Override
|
||||
public void connectFailed(URI uri, SocketAddress sa, IOException e) {
|
||||
/* ignore */
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Proxy> select(URI uri) {
|
||||
return List.of(proxy);
|
||||
}
|
||||
});
|
||||
|
||||
if (this.configuration.getProxy().getUsername() != null && this.configuration.getProxy().getPassword() != null) {
|
||||
builder.setProxyAuthenticationStrategy(new DefaultAuthenticationStrategy());
|
||||
|
||||
credentialsStore.setCredentials(
|
||||
new AuthScope(proxyAddress, port),
|
||||
new UsernamePasswordCredentials(
|
||||
runContext.render(this.configuration.getProxy().getUsername()).as(String.class).orElseThrow(),
|
||||
runContext.render(this.configuration.getProxy().getPassword()).as(String.class).orElseThrow().toCharArray()
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -3,8 +3,8 @@ package io.kestra.core.http.client.configurations;
|
||||
import com.fasterxml.jackson.annotation.JsonSubTypes;
|
||||
import com.fasterxml.jackson.annotation.JsonTypeInfo;
|
||||
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
|
||||
import io.kestra.core.models.property.Property;
|
||||
import io.kestra.core.runners.RunContext;
|
||||
import lombok.NoArgsConstructor;
|
||||
import lombok.experimental.SuperBuilder;
|
||||
import org.apache.hc.client5.http.impl.classic.HttpClientBuilder;
|
||||
|
||||
@@ -14,8 +14,9 @@ import org.apache.hc.client5.http.impl.classic.HttpClientBuilder;
|
||||
@JsonSubTypes.Type(value = BearerAuthConfiguration.class, name = "BEARER")
|
||||
})
|
||||
@SuperBuilder(toBuilder = true)
|
||||
@NoArgsConstructor
|
||||
public abstract class AbstractAuthConfiguration {
|
||||
public abstract Property<AuthType> getType();
|
||||
public abstract AuthType getType();
|
||||
|
||||
public abstract void configure(HttpClientBuilder builder, RunContext runContext) throws IllegalVariableEvaluationException;
|
||||
|
||||
|
||||
@@ -6,8 +6,7 @@ import io.kestra.core.models.property.Property;
|
||||
import io.kestra.core.runners.RunContext;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import jakarta.validation.constraints.NotNull;
|
||||
import lombok.Builder;
|
||||
import lombok.Getter;
|
||||
import lombok.*;
|
||||
import lombok.experimental.SuperBuilder;
|
||||
import org.apache.hc.client5.http.impl.classic.HttpClientBuilder;
|
||||
import org.apache.hc.core5.http.HttpHeaders;
|
||||
@@ -16,19 +15,20 @@ import org.apache.hc.core5.http.message.BasicHeader;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.Base64;
|
||||
|
||||
@Getter
|
||||
@SuperBuilder(toBuilder = true)
|
||||
@Getter
|
||||
@NoArgsConstructor
|
||||
public class BasicAuthConfiguration extends AbstractAuthConfiguration {
|
||||
@NotNull
|
||||
@JsonInclude
|
||||
@Builder.Default
|
||||
protected Property<AuthType> type = Property.of(AuthType.BASIC);
|
||||
protected AuthType type = AuthType.BASIC;
|
||||
|
||||
@Schema(title = "The username for HTTP basic authentication.")
|
||||
private final Property<String> username;
|
||||
private Property<String> username;
|
||||
|
||||
@Schema(title = "The password for HTTP basic authentication.")
|
||||
private final Property<String> password;
|
||||
private Property<String> password;
|
||||
|
||||
@Override
|
||||
public void configure(HttpClientBuilder builder, RunContext runContext) throws IllegalVariableEvaluationException {
|
||||
|
||||
@@ -8,21 +8,23 @@ import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import jakarta.validation.constraints.NotNull;
|
||||
import lombok.Builder;
|
||||
import lombok.Getter;
|
||||
import lombok.NoArgsConstructor;
|
||||
import lombok.experimental.SuperBuilder;
|
||||
import org.apache.hc.client5.http.impl.classic.HttpClientBuilder;
|
||||
import org.apache.hc.core5.http.HttpHeaders;
|
||||
import org.apache.hc.core5.http.message.BasicHeader;
|
||||
|
||||
@Getter
|
||||
@SuperBuilder(toBuilder = true)
|
||||
@Getter
|
||||
@NoArgsConstructor
|
||||
public class BearerAuthConfiguration extends AbstractAuthConfiguration {
|
||||
@NotNull
|
||||
@JsonInclude
|
||||
@Builder.Default
|
||||
protected Property<AuthType> type = Property.of(AuthType.BEARER);
|
||||
protected AuthType type = AuthType.BEARER;
|
||||
|
||||
@Schema(title = "The token for bearer token authentication.")
|
||||
private final Property<String> token;
|
||||
private Property<String> token;
|
||||
|
||||
@Override
|
||||
public void configure(HttpClientBuilder builder, RunContext runContext) throws IllegalVariableEvaluationException {
|
||||
|
||||
@@ -2,20 +2,21 @@ package io.kestra.core.http.client.configurations;
|
||||
|
||||
import io.kestra.core.models.annotations.PluginProperty;
|
||||
import io.kestra.core.models.property.Property;
|
||||
import io.micronaut.http.client.HttpClientConfiguration;
|
||||
import io.micronaut.logging.LogLevel;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import lombok.Builder;
|
||||
import lombok.Getter;
|
||||
import lombok.Setter;
|
||||
import lombok.extern.jackson.Jacksonized;
|
||||
|
||||
import java.net.Proxy;
|
||||
import java.nio.charset.Charset;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.time.Duration;
import java.time.temporal.ChronoUnit;

@Builder(toBuilder = true)
@Getter
@Jacksonized
public class HttpConfiguration {
@Schema(title = "The timeout configuration.")
@PluginProperty
@@ -28,6 +29,7 @@ public class HttpConfiguration {
@Schema(title = "The authentication to use.")
private AbstractAuthConfiguration auth;

@Setter
@Schema(title = "The SSL request options")
private SslOptions ssl;

@@ -35,6 +37,7 @@ public class HttpConfiguration {
@Builder.Default
private Property<Boolean> followRedirects = Property.of(true);

@Setter
@Schema(title = "If true, allow a failed response code (response code >= 400)")
@Builder.Default
private Property<Boolean> allowFailed = Property.of(false);
@@ -55,261 +58,212 @@ public class HttpConfiguration {
}

// Deprecated properties

/**
* @deprecated
*/
@Schema(title = "The time allowed to establish a connection to the server before failing.")
@Deprecated
private final Property<Duration> connectTimeout;
private final Duration connectTimeout;

/**
* @deprecated
*/
@Deprecated
public void setConnectTimeout(Property<Duration> connectTimeout) {
if (this.timeout == null) {
this.timeout = TimeoutConfiguration.builder()
.build();
}

this.timeout = this.timeout.toBuilder()
.connectTimeout(connectTimeout)
.build();
}

/**
* @deprecated
*/
@Schema(title = "The maximum time allowed for reading data from the server before failing.")
@Builder.Default
@Deprecated
private final Property<Duration> readTimeout = Property.of(Duration.ofSeconds(HttpClientConfiguration.DEFAULT_READ_TIMEOUT_SECONDS));
private final Duration readTimeout;

/**
* @deprecated
*/
@Deprecated
public void setReadTimeout(Property<Duration> readTimeout) {
if (this.timeout == null) {
this.timeout = TimeoutConfiguration.builder()
.build();
}

this.timeout = this.timeout.toBuilder()
.readIdleTimeout(readTimeout)
.build();
}

/**
* @deprecated
*/
@Schema(title = "The type of proxy to use.")
@Builder.Default
@Deprecated
private final Property<Proxy.Type> proxyType = Property.of(Proxy.Type.DIRECT);
private final Proxy.Type proxyType;

/**
* @deprecated
*/
@Deprecated
public void setProxyType(Property<Proxy.Type> proxyType) {
if (this.proxy == null) {
this.proxy = ProxyConfiguration.builder()
.build();
}

this.proxy = this.proxy.toBuilder()
.type(proxyType)
.build();
}

/**
* @deprecated
*/
@Schema(title = "The address of the proxy server.")
@Deprecated
private final Property<String> proxyAddress;
private final String proxyAddress;

/**
* @deprecated
*/
@Deprecated
public void setProxyAddress(Property<String> proxyAddress) {
if (this.proxy == null) {
this.proxy = ProxyConfiguration.builder()
.build();
}

this.proxy = this.proxy.toBuilder()
.address(proxyAddress)
.build();
}

/**
* @deprecated
*/
@Schema(title = "The port of the proxy server.")
@Deprecated
private final Property<Integer> proxyPort;
private final Integer proxyPort;

/**
* @deprecated
*/
@Deprecated
public void setProxyPort(Property<Integer> proxyPort) {
if (this.proxy == null) {
this.proxy = ProxyConfiguration.builder()
.build();
}

this.proxy = this.proxy.toBuilder()
.port(proxyPort)
.build();
}

/**
* @deprecated
*/
@Schema(title = "The username for proxy authentication.")
@Deprecated
private final Property<String> proxyUsername;
private final String proxyUsername;

/**
* @deprecated
*/
@Deprecated
public void setProxyUsername(Property<String> proxyUsername) {
if (this.proxy == null) {
this.proxy = ProxyConfiguration.builder()
.build();
}

this.proxy = this.proxy.toBuilder()
.username(proxyUsername)
.build();
}

/**
* @deprecated
*/
@Schema(title = "The password for proxy authentication.")
@Deprecated
private final Property<String> proxyPassword;
private final String proxyPassword;

/**
* @deprecated
*/
@Deprecated
public void setProxyPassword(Property<String> proxyPassword) {
if (this.proxy == null) {
this.proxy = ProxyConfiguration.builder()
.build();
}

this.proxy = this.proxy.toBuilder()
.password(proxyPassword)
.build();
}

/**
* @deprecated
*/
@Schema(title = "The username for HTTP basic authentication.")
@Deprecated
private final Property<String> basicAuthUser;
private final String basicAuthUser;

/**
* @deprecated
*/
@Deprecated
public void setBasicAuthUser(Property<String> basicAuthUser) {
if (this.auth == null || !(this.auth instanceof BasicAuthConfiguration)) {
this.auth = BasicAuthConfiguration.builder()
.build();
}

this.auth = ((BasicAuthConfiguration) this.auth).toBuilder()
.username(basicAuthUser)
.build();
}

/**
* @deprecated
*/
@Schema(title = "The password for HTTP basic authentication.")
@Deprecated
private final Property<String> basicAuthPassword;
private final String basicAuthPassword;

/**
* @deprecated
*/
@Deprecated
private void setBasicAuthPassword(Property<String> basicAuthPassword) {
if (this.auth == null || !(this.auth instanceof BasicAuthConfiguration)) {
this.auth = BasicAuthConfiguration.builder()
.build();
}

this.auth = ((BasicAuthConfiguration) this.auth).toBuilder()
.password(basicAuthPassword)
.build();
}

/**
* @deprecated
*/
@Schema(title = "The log level for the HTTP client.")
@PluginProperty
@Deprecated
private final LogLevel logLevel;

/**
* @deprecated
*/
// Deprecated properties with no equivalent value to be kept, silently ignore
@Schema(title = "The time allowed for a read connection to remain idle before closing it.")
@Deprecated
private void setLogLevel(LogLevel logLevel) {
if (logLevel == LogLevel.TRACE) {
this.logs = new LoggingType[]{
LoggingType.REQUEST_HEADERS,
LoggingType.REQUEST_BODY,
LoggingType.RESPONSE_HEADERS,
LoggingType.RESPONSE_BODY
};
} else if (logLevel == LogLevel.DEBUG) {
this.logs = new LoggingType[]{
LoggingType.REQUEST_HEADERS,
LoggingType.RESPONSE_HEADERS,
};
} else if (logLevel == LogLevel.INFO) {
this.logs = new LoggingType[]{
LoggingType.RESPONSE_HEADERS,
};
private final Duration readIdleTimeout;

@Schema(title = "The time an idle connection can remain in the client's connection pool before being closed.")
@Deprecated
private final Duration connectionPoolIdleTimeout;

@Schema(title = "The maximum content length of the response.")
@Deprecated
private final Integer maxContentLength;

public static class HttpConfigurationBuilder {
@Deprecated
public HttpConfigurationBuilder connectTimeout(Duration connectTimeout) {
if (this.timeout == null) {
this.timeout = TimeoutConfiguration.builder()
.build();
}

this.timeout = this.timeout.toBuilder()
.connectTimeout(Property.of(connectTimeout))
.build();

return this;
}

@Deprecated
public HttpConfigurationBuilder readTimeout(Duration readTimeout) {
if (this.timeout == null) {
this.timeout = TimeoutConfiguration.builder()
.build();
}

this.timeout = this.timeout.toBuilder()
.readIdleTimeout(Property.of(readTimeout))
.build();

return this;
}

@Deprecated
public HttpConfigurationBuilder proxyType(Proxy.Type proxyType) {
if (this.proxy == null) {
this.proxy = ProxyConfiguration.builder()
.build();
}

this.proxy = this.proxy.toBuilder()
.type(Property.of(proxyType))
.build();

return this;
}

@Deprecated
public HttpConfigurationBuilder proxyAddress(String proxyAddress) {
if (this.proxy == null) {
this.proxy = ProxyConfiguration.builder()
.build();
}

this.proxy = this.proxy.toBuilder()
.address(Property.of(proxyAddress))
.build();

return this;
}

@Deprecated
public HttpConfigurationBuilder proxyPort(Integer proxyPort) {
if (this.proxy == null) {
this.proxy = ProxyConfiguration.builder()
.build();
}

this.proxy = this.proxy.toBuilder()
.port(Property.of(proxyPort))
.build();

return this;
}

@Deprecated
public HttpConfigurationBuilder proxyUsername(String proxyUsername) {
if (this.proxy == null) {
this.proxy = ProxyConfiguration.builder()
.build();
}

this.proxy = this.proxy.toBuilder()
.username(Property.of(proxyUsername))
.build();

return this;
}

@Deprecated
public HttpConfigurationBuilder proxyPassword(String proxyPassword) {
if (this.proxy == null) {
this.proxy = ProxyConfiguration.builder()
.build();
}

this.proxy = this.proxy.toBuilder()
.password(Property.of(proxyPassword))
.build();

return this;
}

@SuppressWarnings("DeprecatedIsStillUsed")
@Deprecated
public HttpConfigurationBuilder basicAuthUser(String basicAuthUser) {
if (this.auth == null || !(this.auth instanceof BasicAuthConfiguration)) {
this.auth = BasicAuthConfiguration.builder()
.build();
}

this.auth = ((BasicAuthConfiguration) this.auth).toBuilder()
.username(Property.of(basicAuthUser))
.build();

return this;
}

@SuppressWarnings("DeprecatedIsStillUsed")
@Deprecated
public HttpConfigurationBuilder basicAuthPassword(String basicAuthPassword) {
if (this.auth == null || !(this.auth instanceof BasicAuthConfiguration)) {
this.auth = BasicAuthConfiguration.builder()
.build();
}

this.auth = ((BasicAuthConfiguration) this.auth).toBuilder()
.password(Property.of(basicAuthPassword))
.build();

return this;
}

@Deprecated
public HttpConfigurationBuilder logLevel(LogLevel logLevel) {
if (logLevel == LogLevel.TRACE) {
this.logs = new LoggingType[]{
LoggingType.REQUEST_HEADERS,
LoggingType.REQUEST_BODY,
LoggingType.RESPONSE_HEADERS,
LoggingType.RESPONSE_BODY
};
} else if (logLevel == LogLevel.DEBUG) {
this.logs = new LoggingType[]{
LoggingType.REQUEST_HEADERS,
LoggingType.RESPONSE_HEADERS,
};
} else if (logLevel == LogLevel.INFO) {
this.logs = new LoggingType[]{
LoggingType.RESPONSE_HEADERS,
};
}

return this;
}
}

// Deprecated properties with no real value to be kept, silently ignore
/**
* @deprecated
*/
@Schema(title = "The time allowed for a read connection to remain idle before closing it.")
@Builder.Default
@Deprecated
private final Property<Duration> readIdleTimeout = Property.of(Duration.of(HttpClientConfiguration.DEFAULT_READ_IDLE_TIMEOUT_MINUTES, ChronoUnit.MINUTES));

/**
* @deprecated
*/
@Schema(title = "The time an idle connection can remain in the client's connection pool before being closed.")
@Builder.Default
@Deprecated
private final Property<Duration> connectionPoolIdleTimeout = Property.of(Duration.ofSeconds(HttpClientConfiguration.DEFAULT_CONNECTION_POOL_IDLE_TIMEOUT_SECONDS));

/**
* @deprecated
*/
@Schema(title = "The maximum content length of the response.")
@Builder.Default
@Deprecated
private final Property<Integer> maxContentLength = Property.of(HttpClientConfiguration.DEFAULT_MAX_CONTENT_LENGTH);
}
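Editor's note: the deprecated flat properties above are kept only as shims that route into the nested timeout, proxy and auth blocks. A minimal usage sketch of that mapping, assuming the Lombok-generated builder accessors for the nested fields (the concrete values are illustrative only):

// Hypothetical sketch: both configurations should end up equivalent, via the deprecated bridge methods shown above.
HttpConfiguration legacy = HttpConfiguration.builder()
    .connectTimeout(Duration.ofSeconds(10))   // deprecated: routed into TimeoutConfiguration
    .proxyAddress("proxy.internal")           // deprecated: routed into ProxyConfiguration
    .proxyPort(3128)
    .basicAuthUser("svc-account")             // deprecated: routed into BasicAuthConfiguration
    .build();

HttpConfiguration current = HttpConfiguration.builder()
    .timeout(TimeoutConfiguration.builder()
        .connectTimeout(Property.of(Duration.ofSeconds(10)))
        .build())
    .proxy(ProxyConfiguration.builder()
        .address(Property.of("proxy.internal"))
        .port(Property.of(3128))
        .build())
    .auth(BasicAuthConfiguration.builder()
        .username(Property.of("svc-account"))
        .build())
    .build();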

@@ -4,11 +4,13 @@ import io.kestra.core.models.property.Property;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Builder;
import lombok.Getter;
import lombok.extern.jackson.Jacksonized;

import java.net.Proxy;

@Getter
@Builder(toBuilder = true)
@Jacksonized
public class ProxyConfiguration {
@Schema(title = "The type of proxy to use.")
@Builder.Default

17 core/src/main/java/io/kestra/core/log/KestraLogFilter.java Normal file
@@ -0,0 +1,17 @@
package io.kestra.core.log;

import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.core.boolex.EvaluationException;
import ch.qos.logback.core.boolex.EventEvaluatorBase;

public class KestraLogFilter extends EventEvaluatorBase<ILoggingEvent> {
@Override
public boolean evaluate(ILoggingEvent event) throws NullPointerException, EvaluationException {
var message = event.getMessage();
// as this filter is called very often, for perf,
// we use startsWith and do all checks successively instead of using a more elegant construct like Stream...
return message.startsWith("outOfOrder mode is active. Migration of schema") ||
message.startsWith("Version mismatch : Database version is older than what dialect POSTGRES supports") ||
message.startsWith("Failed to bind as java.util.concurrent.Executors$AutoShutdownDelegatedExecutorService is unsupported.");
}
}

@@ -3,6 +3,7 @@ package io.kestra.core.models;
import io.kestra.core.utils.MapUtils;
import jakarta.validation.constraints.NotNull;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
@@ -46,4 +47,19 @@ public record Label(@NotNull String key, @NotNull String value) {
.map(entry -> new Label(entry.getKey(), entry.getValue()))
.toList();
}

/**
* Static helper method for converting a label string to a map.
*
* @param label The label string.
* @return The map of key/value labels.
*/
public static Map<String, String> from(String label) {
Map<String, String> map = new HashMap<>();
String[] keyValueArray = label.split(":");
if (keyValueArray.length == 2) {
map.put(keyValueArray[0], keyValueArray[1]);
}
return map;
}
}
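Editor's note: the new Label.from(String) helper above only accepts strings containing exactly one ":" separator; anything else yields an empty map. A small usage sketch (the label values are invented for illustration):

Map<String, String> parsed = Label.from("team:data");       // {team=data}
Map<String, String> skipped = Label.from("env:prod:eu");    // {} — split(":") yields 3 parts, so nothing is added
Map<String, String> invalid = Label.from("malformed-label"); // {} — no key:value pair at all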
|
||||
|
||||
@@ -0,0 +1,16 @@
|
||||
package io.kestra.core.models;
|
||||
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import jakarta.validation.constraints.Pattern;
|
||||
|
||||
/**
|
||||
* Interface that can be implemented by classes supporting plugin versioning.
|
||||
*
|
||||
* @see Plugin
|
||||
*/
|
||||
public interface PluginVersioning {
|
||||
|
||||
@Pattern(regexp="\\d+\\.\\d+\\.\\d+(-[a-zA-Z0-9]+)?|([a-zA-Z0-9]+)")
|
||||
@Schema(title = "The version of the plugin to use.")
|
||||
String getVersion();
|
||||
}
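Editor's note: a plugin-aware class would expose the property simply by implementing the interface; a minimal, hypothetical sketch (the class name and the stored value are invented for illustration):

public class VersionedTaskExample implements PluginVersioning {
    // Must match the regex above: either MAJOR.MINOR.PATCH (optionally with a suffix) or an alphanumeric alias.
    private final String version = "0.22.0";

    @Override
    public String getVersion() {
        return version;
    }
}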

292 core/src/main/java/io/kestra/core/models/QueryFilter.java Normal file
@@ -0,0 +1,292 @@
package io.kestra.core.models;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonValue;
import io.kestra.core.models.dashboards.filters.*;
import io.kestra.core.utils.Enums;
import lombok.Builder;

import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;

@Builder
public record QueryFilter(
Field field,
Op operation,
Object value
) {

@JsonCreator
public QueryFilter(
@JsonProperty("field") Field field,
@JsonProperty("operation") Op operation,
@JsonProperty("value") Object value
) {
this.field = field;
this.operation = operation;
this.value = value;
}

public enum Op {
EQUALS,
NOT_EQUALS,
GREATER_THAN,
LESS_THAN,
GREATER_THAN_OR_EQUAL_TO,
LESS_THAN_OR_EQUAL_TO,
IN,
NOT_IN,
STARTS_WITH,
ENDS_WITH,
CONTAINS,
REGEX;
}

@SuppressWarnings("unchecked")
public <T extends Enum<T>> AbstractFilter<T> toDashboardFilterBuilder(T field, Object value) {
switch (this.operation) {
case EQUALS:
return EqualTo.<T>builder().field(field).value(value).build();
case NOT_EQUALS:
return NotEqualTo.<T>builder().field(field).value(value).build();
case GREATER_THAN:
return GreaterThan.<T>builder().field(field).value(value).build();
case LESS_THAN:
return LessThan.<T>builder().field(field).value(value).build();
case GREATER_THAN_OR_EQUAL_TO:
return GreaterThanOrEqualTo.<T>builder().field(field).value(value).build();
case LESS_THAN_OR_EQUAL_TO:
return LessThanOrEqualTo.<T>builder().field(field).value(value).build();
case IN:
return In.<T>builder().field(field).values((List<Object>) value).build();
case NOT_IN:
return NotIn.<T>builder().field(field).values((List<Object>) value).build();
case STARTS_WITH:
return StartsWith.<T>builder().field(field).value(value.toString()).build();
case ENDS_WITH:
return EndsWith.<T>builder().field(field).value(value.toString()).build();
case CONTAINS:
return Contains.<T>builder().field(field).value(value.toString()).build();
case REGEX:
return Regex.<T>builder().field(field).value(value.toString()).build();
default:
throw new IllegalArgumentException("Unsupported operation: " + this.operation);
}
}

public enum Field {
QUERY("q") {
@Override
public List<Op> supportedOp() {
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.REGEX);
}
},
SCOPE("scope") {
@Override
public List<Op> supportedOp() {
return List.of(Op.EQUALS, Op.NOT_EQUALS);
}
},
NAMESPACE("namespace") {
@Override
public List<Op> supportedOp() {
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.CONTAINS, Op.STARTS_WITH, Op.ENDS_WITH, Op.REGEX);
}
},
LABELS("labels") {
@Override
public List<Op> supportedOp() {
return List.of(Op.EQUALS, Op.NOT_EQUALS);
}
},
FLOW_ID("flowId") {
@Override
public List<Op> supportedOp() {
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.CONTAINS, Op.IN, Op.NOT_IN);
}
},
START_DATE("startDate") {
@Override
public List<Op> supportedOp() {
return List.of(Op.GREATER_THAN, Op.LESS_THAN, Op.EQUALS, Op.NOT_EQUALS);
}
},
END_DATE("endDate") {
@Override
public List<Op> supportedOp() {
return List.of(Op.GREATER_THAN, Op.LESS_THAN, Op.EQUALS, Op.NOT_EQUALS);
}
},
STATE("state") {
@Override
public List<Op> supportedOp() {
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.IN, Op.NOT_IN);
}
},
TIME_RANGE("timeRange") {
@Override
public List<Op> supportedOp() {
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.CONTAINS, Op.STARTS_WITH,
Op.ENDS_WITH, Op.IN, Op.NOT_IN, Op.REGEX);
}
},
TRIGGER_EXECUTION_ID("triggerExecutionId") {
@Override
public List<Op> supportedOp() {
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.CONTAINS, Op.STARTS_WITH, Op.ENDS_WITH, Op.IN, Op.NOT_IN);
}
},
TRIGGER_ID("triggerId") {
@Override
public List<Op> supportedOp() {
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.CONTAINS, Op.STARTS_WITH, Op.ENDS_WITH, Op.IN, Op.NOT_IN);
}
},
CHILD_FILTER("childFilter") {
@Override
public List<Op> supportedOp() {
return List.of(Op.EQUALS, Op.NOT_EQUALS);
}
},
WORKER_ID("workerId") {
@Override
public List<Op> supportedOp() {
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.CONTAINS, Op.STARTS_WITH, Op.ENDS_WITH, Op.IN, Op.NOT_IN);
}
},
EXISTING_ONLY("existingOnly") {
@Override
public List<Op> supportedOp() {
return List.of(Op.EQUALS, Op.NOT_EQUALS);
}
},
MIN_LEVEL("level") {
@Override
public List<Op> supportedOp() {
return List.of(Op.EQUALS, Op.NOT_EQUALS);
}
};

private static final Map<String, Field> BY_VALUE = Arrays.stream(values())
.collect(Collectors.toMap(Field::value, Function.identity()));

public abstract List<Op> supportedOp();

private final String value;

Field(String value) {
this.value = value;
}

@JsonCreator
public static Field fromString(String value) {
return Enums.fromString(value, BY_VALUE, "field");
}

@JsonValue
public String value() {
return value;
}
}

public enum Resource {
FLOW {
@Override
public List<Field> supportedField() {
return List.of(Field.LABELS, Field.NAMESPACE, Field.QUERY, Field.SCOPE);
}
},
NAMESPACE {
@Override
public List<Field> supportedField() {
return List.of(Field.EXISTING_ONLY);
}
},
EXECUTION {
@Override
public List<Field> supportedField() {
return List.of(
Field.QUERY, Field.SCOPE, Field.FLOW_ID, Field.START_DATE, Field.END_DATE, Field.TIME_RANGE,
Field.STATE, Field.LABELS, Field.TRIGGER_EXECUTION_ID, Field.CHILD_FILTER,
Field.NAMESPACE
);
}
},
LOG {
@Override
public List<Field> supportedField() {
return List.of(Field.NAMESPACE, Field.START_DATE, Field.END_DATE,
Field.FLOW_ID, Field.TRIGGER_ID, Field.MIN_LEVEL
);
}
},
TASK {
@Override
public List<Field> supportedField() {
return List.of(Field.NAMESPACE, Field.QUERY, Field.END_DATE, Field.FLOW_ID, Field.START_DATE,
Field.STATE, Field.LABELS, Field.TRIGGER_EXECUTION_ID, Field.CHILD_FILTER
);
}
},
TEMPLATE {
@Override
public List<Field> supportedField() {
return List.of(Field.NAMESPACE, Field.QUERY);
}
},
TRIGGER {
@Override
public List<Field> supportedField() {
return List.of(Field.QUERY, Field.NAMESPACE, Field.WORKER_ID, Field.FLOW_ID
);
}
};

public abstract List<Field> supportedField();

/**
* Converts {@code Resource} enums to a list of {@code ResourceField},
* including fields and their supported operations.
*
* @return List of {@code ResourceField} with resource names, fields, and operations.
*/
public static List<ResourceField> asResourceList() {
return Arrays.stream(values())
.map(Resource::toResourceField)
.toList();
}

private static ResourceField toResourceField(Resource resource) {
List<FieldOp> fieldOps = resource.supportedField().stream()
.map(Resource::toFieldInfo)
.toList();
return new ResourceField(resource.name().toLowerCase(), fieldOps);
}

private static FieldOp toFieldInfo(Field field) {
List<Operation> operations = field.supportedOp().stream()
.map(Resource::toOperation)
.toList();
return new FieldOp(field.name().toLowerCase(), field.value(), operations);
}

private static Operation toOperation(Op op) {
return new Operation(op.name(), op.name());
}
}

public record ResourceField(String name, List<FieldOp> fields) {
}

public record FieldOp(String name, String value, List<Operation> operations) {
}

public record Operation(String name, String value) {
}

}
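Editor's note: a short, hypothetical construction sketch of the new QueryFilter record; the dashboard field enum passed to toDashboardFilterBuilder is assumed to be whatever enum the target repository exposes (Executions.Fields.NAMESPACE below is an invented constant for illustration):

// Build a filter equivalent to: namespace STARTS_WITH "company.team".
QueryFilter filter = QueryFilter.builder()
    .field(QueryFilter.Field.NAMESPACE)
    .operation(QueryFilter.Op.STARTS_WITH)
    .value("company.team")
    .build();

// Guard against unsupported field/operation combinations before converting.
if (filter.field().supportedOp().contains(filter.operation())) {
    AbstractFilter<Executions.Fields> dashboardFilter =
        filter.toDashboardFilterBuilder(Executions.Fields.NAMESPACE, filter.value());
}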

@@ -4,6 +4,7 @@ import com.google.common.annotations.VisibleForTesting;
import io.kestra.core.repositories.ServiceInstanceRepositoryInterface;
import io.kestra.core.server.Service;
import io.kestra.core.server.ServiceInstance;
import io.kestra.core.server.ServiceType;

import java.math.BigDecimal;
import java.math.RoundingMode;
@@ -60,7 +61,7 @@ public record ServiceUsage(
final Duration interval) {

List<DailyServiceStatistics> statistics = Arrays
.stream(Service.ServiceType.values())
.stream(ServiceType.values())
.map(type -> of(from, to, repository, type, interval))
.toList();
return new ServiceUsage(statistics);
@@ -69,13 +70,13 @@ public record ServiceUsage(
private static DailyServiceStatistics of(final Instant from,
final Instant to,
final ServiceInstanceRepositoryInterface repository,
final Service.ServiceType serviceType,
final ServiceType serviceType,
final Duration interval) {
return of(serviceType, interval, repository.findAllInstancesBetween(serviceType, from, to));
}

@VisibleForTesting
static DailyServiceStatistics of(final Service.ServiceType serviceType,
static DailyServiceStatistics of(final ServiceType serviceType,
final Duration interval,
final List<ServiceInstance> instances) {
// Compute the number of running services per time interval.

@@ -1,13 +1,9 @@
package io.kestra.core.models.dashboards;

import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonTypeName;
import io.kestra.core.models.QueryFilter;
import io.kestra.core.models.annotations.Plugin;
import io.kestra.core.models.dashboards.filters.AbstractFilter;
import io.kestra.core.repositories.QueryBuilderInterface;
import io.kestra.plugin.core.dashboard.data.Executions;
import io.kestra.plugin.core.dashboard.data.Logs;
import jakarta.validation.constraints.NotBlank;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Pattern;
@@ -17,6 +13,7 @@ import lombok.NoArgsConstructor;
import lombok.Setter;
import lombok.experimental.SuperBuilder;

import java.time.ZonedDateTime;
import java.util.Collections;
import java.util.List;
import java.util.Map;
@@ -47,6 +44,6 @@ public abstract class DataFilter<F extends Enum<F>, C extends ColumnDescriptor<F

public abstract Class<? extends QueryBuilderInterface<F>> repositoryClass();

public abstract void setGlobalFilter(GlobalFilter globalFilter);
public abstract void setGlobalFilter(List<QueryFilter> queryFilterList, ZonedDateTime startDate, ZonedDateTime endDate);

}

@@ -122,6 +122,10 @@ public class Execution implements DeletedInterface, TenantInterface {
return newExecution(flow, null, labels, Optional.empty());
}

public List<Label> getLabels() {
return Optional.ofNullable(this.labels).orElse(new ArrayList<>());
}

/**
* Factory method for constructing a new {@link Execution} object for the given {@link Flow} and
* inputs.
@@ -816,11 +820,20 @@ public class Execution implements DeletedInterface, TenantInterface {
));

Map<String, Object> result = new HashMap<>();
for (TaskRun current : this.taskRunList) {
if (current.getOutputs() != null) {
result = MapUtils.merge(result, outputs(current, byIds));
}
}
this.taskRunList.stream()
.filter(taskRun -> taskRun.getOutputs() != null)
.collect(Collectors.groupingBy(taskRun -> taskRun.getTaskId()))
.forEach((taskId, taskRuns) -> {
Map<String, Object> taskOutputs = new HashMap<>();
for (TaskRun current : taskRuns) {
if (current.getIteration() != null) {
taskOutputs = MapUtils.merge(taskOutputs, outputs(current, byIds));
} else {
taskOutputs.putAll(outputs(current, byIds));
}
}
result.put(taskId, taskOutputs);
});

return result;
}
@@ -833,18 +846,17 @@ public class Execution implements DeletedInterface, TenantInterface {

if (parents.isEmpty()) {
if (taskRun.getValue() == null) {
return Map.of(taskRun.getTaskId(), taskRun.getOutputs());
return taskRun.getOutputs();
} else {
return Map.of(taskRun.getTaskId(),
Map.of(taskRun.getValue(), taskRun.getOutputs()));
return Map.of(taskRun.getValue(), taskRun.getOutputs());
}
}

Map<String, Object> result = MapUtils.newHashMap(1);
Map<String, Object> result = HashMap.newHashMap(1);
Map<String, Object> current = result;

for (TaskRun t : parents) {
HashMap<String, Object> item = MapUtils.newHashMap(1);
HashMap<String, Object> item = HashMap.newHashMap(1);
current.put(t.getValue(), item);
current = item;
}
@@ -857,7 +869,7 @@ public class Execution implements DeletedInterface, TenantInterface {
}
}

return Map.of(taskRun.getTaskId(), result);
return result;
}


@@ -21,12 +21,14 @@ import io.kestra.core.models.tasks.FlowableTask;
import io.kestra.core.models.tasks.Task;
import io.kestra.core.models.tasks.retrys.AbstractRetry;
import io.kestra.core.models.triggers.AbstractTrigger;
import io.kestra.core.models.triggers.Trigger;
import io.kestra.core.models.validations.ManualConstraintViolation;
import io.kestra.core.serializers.JacksonMapper;
import io.kestra.core.serializers.ListOrMapOfLabelDeserializer;
import io.kestra.core.serializers.ListOrMapOfLabelSerializer;
import io.kestra.core.services.FlowService;
import io.kestra.core.utils.IdUtils;
import io.kestra.core.utils.ListUtils;
import io.kestra.core.validations.FlowValidation;
import io.micronaut.core.annotation.Introspected;
import io.swagger.v3.oas.annotations.media.Schema;
@@ -36,8 +38,6 @@ import jakarta.validation.Valid;
import jakarta.validation.constraints.NotEmpty;
import lombok.*;
import lombok.experimental.SuperBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.*;
import java.util.stream.Collectors;
@@ -94,6 +94,9 @@ public class Flow extends AbstractFlow implements HasUID {
@Deprecated
List<Listener> listeners;

@Valid
List<Task> afterExecution;

@Valid
List<AbstractTrigger> triggers;

@@ -133,11 +136,6 @@ public class Flow extends AbstractFlow implements HasUID {
List<SLA> sla;

public Logger logger() {
return LoggerFactory.getLogger("flow." + this.id);
}

/** {@inheritDoc **/
@Override
@JsonIgnore
@@ -176,6 +174,14 @@ public class Flow extends AbstractFlow implements HasUID {
);
}

public static String uid(Trigger trigger) {
return IdUtils.fromParts(
trigger.getTenantId(),
trigger.getNamespace(),
trigger.getFlowId()
);
}

public static String uidWithoutRevision(Execution execution) {
return IdUtils.fromParts(
execution.getTenantId(),
@@ -195,10 +201,10 @@ public class Flow extends AbstractFlow implements HasUID {

public Stream<Task> allTasks() {
return Stream.of(
this.tasks != null ? this.tasks : new ArrayList<Task>(),
this.errors != null ? this.errors : new ArrayList<Task>(),
this._finally != null ? this._finally : new ArrayList<Task>(),
this.listenersTasks()
this.tasks != null ? this.tasks : Collections.<Task>emptyList(),
this.errors != null ? this.errors : Collections.<Task>emptyList(),
this._finally != null ? this._finally : Collections.<Task>emptyList(),
this.afterExecutionTasks()
)
.flatMap(Collection::stream);
}
@@ -278,6 +284,14 @@ public class Flow extends AbstractFlow implements HasUID {
.orElse(null);
}

public AbstractTrigger findTriggerByTriggerId(String triggerId) {
return this.triggers
.stream()
.filter(trigger -> trigger.getId().equals(triggerId))
.findFirst()
.orElse(null);
}

/**
* @deprecated should not be used
*/
@@ -320,15 +334,11 @@ public class Flow extends AbstractFlow implements HasUID {
}
}

private List<Task> listenersTasks() {
if (this.getListeners() == null) {
return new ArrayList<>();
}

return this.getListeners()
.stream()
.flatMap(listener -> listener.getTasks().stream())
.toList();
private List<Task> afterExecutionTasks() {
return ListUtils.concat(
ListUtils.emptyOnNull(this.getListeners()).stream().flatMap(listener -> listener.getTasks().stream()).toList(),
this.getAfterExecution()
);
}

public boolean equalsWithoutRevision(Flow o) {

@@ -1,6 +1,7 @@
package io.kestra.core.models.flows;

import com.fasterxml.jackson.annotation.JsonProperty;
import io.kestra.core.models.tasks.Task;
import io.kestra.core.models.tasks.TaskForExecution;
import io.kestra.core.models.triggers.AbstractTriggerForExecution;
import io.kestra.core.utils.ListUtils;
@@ -29,6 +30,9 @@ public class FlowForExecution extends AbstractFlow {
@JsonProperty("finally")
List<TaskForExecution> _finally;

@Valid
List<TaskForExecution> afterExecution;

@Valid
List<AbstractTriggerForExecution> triggers;

@@ -42,6 +46,7 @@ public class FlowForExecution extends AbstractFlow {
.tasks(flow.getTasks().stream().map(TaskForExecution::of).toList())
.errors(ListUtils.emptyOnNull(flow.getErrors()).stream().map(TaskForExecution::of).toList())
._finally(ListUtils.emptyOnNull(flow.getFinally()).stream().map(TaskForExecution::of).toList())
.afterExecution(ListUtils.emptyOnNull(flow.getAfterExecution()).stream().map(TaskForExecution::of).toList())
.triggers(ListUtils.emptyOnNull(flow.getTriggers()).stream().map(AbstractTriggerForExecution::of).toList())
.disabled(flow.isDisabled())
.deleted(flow.isDeleted())

@@ -31,6 +31,7 @@ public class FlowWithSource extends Flow implements HasSource {
.errors(this.errors)
._finally(this._finally)
.listeners(this.listeners)
.afterExecution(this.afterExecution)
.triggers(this.triggers)
.pluginDefaults(this.pluginDefaults)
.disabled(this.disabled)
@@ -72,6 +73,7 @@ public class FlowWithSource extends Flow implements HasSource {
.tasks(flow.tasks)
.errors(flow.errors)
._finally(flow._finally)
.afterExecution(flow.afterExecution)
.listeners(flow.listeners)
.triggers(flow.triggers)
.pluginDefaults(flow.pluginDefaults)

@@ -55,6 +55,7 @@ public abstract class AbstractGraph {

public enum BranchType {
ERROR,
FINALLY
FINALLY,
AFTER_EXECUTION
}
}

@@ -33,6 +33,9 @@ public class GraphCluster extends AbstractGraph {
return _finally;
}

@JsonIgnore
private final GraphClusterAfterExecution afterExecution;

@JsonIgnore
private final GraphClusterEnd end;

@@ -50,14 +53,17 @@ public class GraphCluster extends AbstractGraph {
this.relationType = null;
this.root = new GraphClusterRoot();
this._finally = new GraphClusterFinally();
this.afterExecution = new GraphClusterAfterExecution();
this.end = new GraphClusterEnd();
this.taskNode = null;

this.addNode(this.root);
this.addNode(this._finally);
this.addNode(this.afterExecution);
this.addNode(this.end);

this.addEdge(this.getFinally(), this.getEnd(), new Relation());
this.addEdge(this.getFinally(), this.getAfterExecution(), new Relation());
this.addEdge(this.getAfterExecution(), this.getEnd(), new Relation());
}

public GraphCluster(Task task, TaskRun taskRun, List<String> values, RelationType relationType) {
@@ -74,14 +80,17 @@ public class GraphCluster extends AbstractGraph {
this.relationType = relationType;
this.root = new GraphClusterRoot();
this._finally = new GraphClusterFinally();
this.afterExecution = new GraphClusterAfterExecution();
this.end = new GraphClusterEnd();
this.taskNode = taskNode;

this.addNode(this.root);
this.addNode(this._finally);
this.addNode(this.afterExecution);
this.addNode(this.end);

this.addEdge(this.getFinally(), this.getEnd(), new Relation());
this.addEdge(this.getFinally(), this.getAfterExecution(), new Relation());
this.addEdge(this.getAfterExecution(), this.getEnd(), new Relation());
}

public void addNode(AbstractGraph node) {

@@ -0,0 +1,11 @@
package io.kestra.core.models.hierarchies;

import io.kestra.core.utils.IdUtils;
import lombok.Getter;

@Getter
public class GraphClusterAfterExecution extends AbstractGraph {
public GraphClusterAfterExecution() {
super("after-execution-" + IdUtils.create());
}
}
@@ -5,6 +5,7 @@ public enum RelationType {
CHOICE,
ERROR,
FINALLY,
AFTER_EXECUTION,
PARALLEL,
DYNAMIC
}

@@ -90,5 +90,10 @@ public class SubflowGraphTask extends AbstractGraphTask {
public String getType() {
return ((TaskInterface) subflowTask).getType();
}

@Override
public String getVersion() {
return ((TaskInterface) subflowTask).getVersion();
}
}
}
}

@@ -22,6 +22,8 @@ import java.util.List;
import java.util.Map;
import java.util.Objects;

import static io.kestra.core.utils.Rethrow.throwFunction;

/**
* Defines a plugin property that will be rendered and converted to a target type at use time.
*
@@ -136,12 +138,31 @@ public class Property<T> {
*
* @see io.kestra.core.runners.RunContextProperty#asList(Class, Map)
*/
@SuppressWarnings("unchecked")
public static <T, I> T asList(Property<T> property, RunContext runContext, Class<I> itemClazz, Map<String, Object> variables) throws IllegalVariableEvaluationException {
if (property.value == null) {
String rendered = runContext.render(property.expression, variables);
JavaType type = MAPPER.getTypeFactory().constructCollectionLikeType(List.class, itemClazz);
try {
property.value = MAPPER.readValue(rendered, type);
String trimmedExpression = property.expression.trim();
// We need to detect whether the expression is already a list or a Pebble expression (e.g. referencing a variable containing a list).
// If it is an expression, we first render it and then read it as a list.
if (trimmedExpression.startsWith("{{") && trimmedExpression.endsWith("}}")) {
property.value = MAPPER.readValue(runContext.render(property.expression, variables), type);
}
// Otherwise, if it's already a list, we read it as a list first and then render it from the run context, which handles list rendering by rendering each item of the list.
else {
List<?> asRawList = MAPPER.readValue(property.expression, List.class);
property.value = (T) asRawList.stream()
.map(throwFunction(item -> {
if (item instanceof String str) {
return MAPPER.convertValue(runContext.render(str, variables), itemClazz);
} else if (item instanceof Map map) {
return MAPPER.convertValue(runContext.render(map, variables), itemClazz);
}
return item;
}))
.toList();
}
} catch (JsonProcessingException e) {
throw new IllegalVariableEvaluationException(e);
}
@@ -169,6 +190,7 @@ public class Property<T> {
*
* @see io.kestra.core.runners.RunContextProperty#asMap(Class, Class, Map)
*/
@SuppressWarnings({"rawtypes", "unchecked"})
public static <T, K,V> T asMap(Property<T> property, RunContext runContext, Class<K> keyClass, Class<V> valueClass, Map<String, Object> variables) throws IllegalVariableEvaluationException {
if (property.value == null) {
JavaType targetMapType = MAPPER.getTypeFactory().constructMapType(Map.class, keyClass, valueClass);
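Editor's note: the updated Property.asList logic above distinguishes a full Pebble expression from an inline list. A rough illustration of the two accepted shapes, using invented values; only the trimmed-expression check from the diff is exercised here:

// Shape 1: the whole expression is Pebble — it is rendered first, then parsed as a list.
String fromVariable = "{{ inputs.servers }}";

// Shape 2: the expression is already a literal list — it is parsed first, then each item is rendered.
String inline = "[\"{{ inputs.primary }}\", \"backup-host\"]";

// The branch is chosen by the check shown in the diff:
boolean isPebble = fromVariable.trim().startsWith("{{") && fromVariable.trim().endsWith("}}"); // true
boolean isInline = inline.trim().startsWith("{{") && inline.trim().endsWith("}}");             // false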

@@ -0,0 +1,5 @@
package io.kestra.core.models.tasks;

public enum FileExistComportment {
OVERWRITE, FAIL, WARN, IGNORE
}
@@ -1,6 +1,5 @@
package io.kestra.core.models.tasks;

import io.kestra.core.models.annotations.PluginProperty;
import io.kestra.core.models.property.Property;
import io.micronaut.core.annotation.Introspected;
import io.swagger.v3.oas.annotations.media.Schema;
@@ -11,12 +10,14 @@ import lombok.NoArgsConstructor;

import java.util.List;
import jakarta.validation.Valid;
import lombok.extern.jackson.Jacksonized;

@Builder
@Getter
@NoArgsConstructor
@AllArgsConstructor
@Introspected
@Jacksonized
public class NamespaceFiles {
@Schema(
title = "Whether to enable namespace files to be loaded into the working directory. If explicitly set to `true` in a task, it will load all [Namespace Files](https://kestra.io/docs/developer-guide/namespace-files) into the task's working directory. Note that this property is by default set to `true` so that you can specify only the `include` and `exclude` properties to filter the files to load without having to explicitly set `enabled` to `true`."
@@ -35,4 +36,16 @@ public class NamespaceFiles {
)
@Valid
private Property<List<String>> exclude;

@Schema(
title = "A list of namespaces in which to search for files. The files are loaded in namespace order, and only the latest version of a file is kept: if a file is present in both the first and the second namespace, only the file from the second namespace is loaded."
)
@Builder.Default
private Property<List<String>> namespaces = Property.of(List.of("{{flow.namespace}}"));

@Schema(
title = "Behavior of the task if a file already exists in the working directory."
)
@Builder.Default
private Property<FileExistComportment> ifExists = Property.of(FileExistComportment.OVERWRITE);
}
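Editor's note: a brief, hypothetical sketch of how the new NamespaceFiles properties could be set through the Lombok builder shown above; the namespace names are invented, and the include property (mentioned in the enabled description but not shown in this hunk) is assumed to be a Property<List<String>> like exclude:

NamespaceFiles namespaceFiles = NamespaceFiles.builder()
    .namespaces(Property.of(List.of("company", "company.team", "{{flow.namespace}}"))) // later namespaces win on conflicts
    .include(Property.of(List.of("scripts/**")))                                       // assumed include property
    .ifExists(Property.of(FileExistComportment.IGNORE))                                // IGNORE presumably keeps the file already present (assumption)
    .build();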
Some files were not shown because too many files have changed in this diff.